query            stringlengths 9 – 3.4k
document         stringlengths 9 – 87.4k
metadata         dict
negatives        listlengths 4 – 101
negative_scores  listlengths 4 – 101
document_score   stringlengths 3 – 10
document_rank    stringclasses, 102 values
r"""Transposition (i,j) in the vector v."""
def swap2(d, n, v, i, j):
    # v stores n matrices of size d x d, flattened as v[a*d*d + d*row + col].
    # Conjugate every block by the transposition (i, j): swap rows i and j
    # together with columns i and j.
    for a in range(n):
        for k in range(d):
            if k == i or k == j:
                continue
            # swap the entries (i, k) and (j, k)
            x = a*d*d + d*i + k
            y = a*d*d + d*j + k
            v[x], v[y] = v[y], v[x]

            # swap the entries (k, i) and (k, j)
            x = a*d*d + d*k + i
            y = a*d*d + d*k + j
            v[x], v[y] = v[y], v[x]

        # swap the diagonal entries (i, i) and (j, j)
        x = a*d*d + d*i + i
        y = a*d*d + d*j + j
        v[x], v[y] = v[y], v[x]

        # swap the off-diagonal pair (j, i) and (i, j)
        x = a*d*d + d*j + i
        y = a*d*d + d*i + j
        v[x], v[y] = v[y], v[x]
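A quick sanity check (my own sketch, not part of the dataset record): on each flattened d x d block, swap2 should act like conjugation by the permutation matrix of the transposition (i, j), i.e. swapping rows i and j together with columns i and j. The NumPy comparison below assumes the v[a*d*d + d*row + col] layout read off the index arithmetic above.

import numpy as np

d, n, i, j = 4, 3, 1, 3
rng = np.random.default_rng(0)
blocks = rng.integers(0, 10, size=(n, d, d))

v = list(blocks.reshape(-1))            # flatten: v[a*d*d + d*row + col]
swap2(d, n, v, i, j)

perm = np.arange(d)
perm[[i, j]] = perm[[j, i]]             # the transposition (i, j)
expected = blocks[:, perm][:, :, perm]  # swap rows i,j and columns i,j of every block

assert np.array_equal(np.array(v).reshape(n, d, d), expected)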
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transpose():", "def transpose(m):\n\n pass", "def transp(lon, z, v):\n\tdx = np.diff(lon, axis=1) * 111 * 1000 # valid only for low latitudes!!!\n\taux = dx[:,0]; aux.shape = (np.size(aux), 1)\n\tdx = np.concatenate( (dx, aux), axis=1) \n\n\tdz = np.diff(z, axis=0)\n\taux = dz[0,:]; aux.shape = (1, np.size(aux))\n\tdz = np.concatenate( (dz, aux), axis=0)\n\n\ttransp = np.abs(dx) * np.abs(dz) * v; transp = transp.sum()\n\ttransp = transp / 1e6\n\n\treturn transp", "def _t(a):\n return a.transpose((0, 2, 1))", "def rotate(q, v):\n if v.ndim == 1:\n qv = np.append(v,0)\n else:\n qv = np.hstack([v,np.zeros((len(v),1))])\n out = mult(q,qv)\n out = mult(out, inv(q))\n return out[:,:3]", "def transpose(self):\n order = list(self.order)\n order[-2], order[-1] = order[-1], order[-2]\n self.order = order", "def transpose(self):\n pass", "def transpose(self):\n return Matrix([[row[index]\n for row in self.values]\n for index in range(len(self.values[0]))])", "def svecRotate(v, T):\n \n return svec(Rotate(smat(v), T))", "def orthogonal(v):\n return np.array([-v[1], v[0]])", "def transpose(self):\n return self.conjugate()", "def transpose(tensor):\n raise NotImplementedError", "def T(self):\n return Op('transpose', self)", "def transpose(x):\n return x[:, np.newaxis]", "def vector_as_matrix(v):\r\n return [[v_i] for v_i in v]", "def _transpose_vectorized(M):\n ndim = M.ndim\n assert ndim == 3\n return np.transpose(M, [0, ndim-1, ndim-2])", "def transpose(self) -> None:\n ...", "def Transpose(self):\n return _hypre.HypreParMatrix_Transpose(self)", "def transpose(self, order):\n return _coordsys.coordsys_transpose(self, order)", "def vector_trans(self, v, T, V0):\n v = np.array(v)\n newv = np.add(v[0:2].dot(T), V0)\n self.log.debug(\"Transformation of vector {}, with transformation matrix {} nad V0 {}, to: {}\".format(v, T, V0, newv))\n return newv", "def transp(self, x1, x2, d):\n raise NotImplementedError", "def toMatrix(self,v):\n return Matrix([[v.x],[v.y],[v.z]])", "def transpose(X):\n if len(X.shape) == 1:\n return X\n else:\n Xt = zeros((X.shape[1], X.shape[0]))\n for i in range(X.shape[0]):\n for j in range(X.shape[1]):\n Xt[j][i] = X[i][j]\n\n\n return Xt", "def transpose(self,mat):\n result = [[mat[j][i] for j in range(len(mat))] for i in range(len(mat[0]))]\n self.out = result\n return self.out", "def t2v(T):\n x = T[0, 2]\n y = T[1, 2]\n theta = np.arctan2(T[1, 0], T[0, 0])\n v = np.array([x, y, theta])\n return v", "def reverse(self,v):\n return np.tensordot(self._inverseTransform,\n v-self._translation,axes=([1],[0]))", "def transpose(self):\n return Matrix([[self.data[r][c] for r in range(len(self.data))]\n for c in range(len(self.data[1]))])", "def permute2en(v, ndim_st=1):\n nd = v.ndim\n return v.transpose([*range(ndim_st, nd)] + [*range(ndim_st)])", "def multiply_fisher_factor_transpose(\n self,\n vector: jnp.ndarray\n ) -> jnp.ndarray:\n return utils.scalar_mul(\n self.multiply_fisher_factor_transpose_unweighted(vector),\n jnp.sqrt(self.weight))", "def transpose(self):\n trans = Matrix(self.ncols,self.nrows)\n for i in range(self.nrows):\n for j in range(self.ncols):\n trans.matrix[j][i] = self.matrix[i][j]\n return trans", "def transpose(self):\n return self.from_rep(self.rep.transpose())", "def rotvec2tr(theta, v):\n return r2t(rotvec2r(theta, v))", "def _eval_transpose(self):\n coeff, matrices = self.as_coeff_matrices()\n return MatMul(\n coeff, *[transpose(arg) for arg in matrices[::-1]]).doit()", "def transpose(traj):\r\n return Trajectory(np.transpose(traj.modes, axes = 
[0, *range(1, traj.modes.ndim)[::-1][0:]]))", "def transpose(self):\n returnvalue = Matrix()\n for i in range(self._width):\n row = list()\n for j in range(self._height):\n row.append(self._value[j][i])\n returnvalue.addRow(*row)\n return returnvalue", "def transpose(self):\r\n m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist\r\n return mat4(m11,m21,m31,m41,\r\n m12,m22,m32,m42,\r\n m13,m23,m33,m43,\r\n m14,m24,m34,m44)", "def transpose(ts: Tensor) -> Tensor:\n assert len(ts.shape) == 2\n return permute(ts, (1, 0))", "def qrot(q, v):\n assert q.shape[-1] == 4\n assert v.shape[-1] == 3\n assert q.shape[:-1] == v.shape[:-1]\n\n original_shape = v.shape\n q = q.view(-1, 4)\n v = v.view(-1, 3)\n\n qvec = q[:, 1:]\n uv = torch.cross(qvec, v, dim=1)\n uuv = torch.cross(qvec, uv, dim=1)\n return (v + 2 * (q[:, :1] * uv + uuv)).view(original_shape)", "def compute_rot(v):\n if v[0] >= 0:\n M = nd.eye(len(v))\n else:\n M = - nd.eye(len(v))\n for i in range(1, len(v)):\n if v[i] == 0:\n continue\n rot_minus_theta = nd.eye(len(v))\n temp = nd.dot(M, v)\n\n theta = nd.arctan(temp[i]/temp[0])\n c = nd.cos(theta)\n s = nd.sin(theta)\n\n rot_minus_theta[0,0] = c\n rot_minus_theta[i,i] = c\n rot_minus_theta[0,i] = s\n rot_minus_theta[i,0] = -s\n\n M = nd.dot(rot_minus_theta, M)\n return M", "def cross_matrix(v):\n\treturn np.array([\n\t\t[ 0, -v[2], v[1]],\n\t\t[ v[2], 0, -v[0]],\n\t\t[-v[1], v[0], 0]])", "def inverse_transform(v):\n v, k = divmod(v - 1, N)\n v, j = divmod(v, N)\n v, i = divmod(v, N)\n return i, j, k", "def transpositions(self):\n a = self.cyclic_form\n res = []\n for x in a:\n nx = len(x)\n if nx == 2:\n res.append(tuple(x))\n elif nx > 2:\n first = x[0]\n for y in x[nx-1:0:-1]:\n res.append((first,y))\n return res", "def transpose(matrix):\n return list(zip(*matrix))", "def tensorinv(a, ind=2):\n return TensorInv(ind)(a)", "def transpose(self):\n for i in range(self.columns): # This for loop makes the rows we need in our transposed matrix\n matrix = [] # List that will contain the rows of the transpose matrix\n for j in range(self.rows): # This for loop makes the columns we need in our transposed matrix\n matrix.append(self.Matrix[j][i]) \n self.Transpose.append(matrix) # Appending the rows into the bigger list we created\n print(\"This is your transposed matrix: \", self.Transpose)", "def reflection_matrix(v):\n n = len(v)\n v = np.array(v)[np.newaxis]\n return np.eye(n) - 2 * np.dot(v.T, v)", "def multiply_ggn_factor_transpose_unweighted(\n self,\n vector: jnp.ndarray\n ) -> jnp.ndarray:\n pass", "def extend_vector_to_homogeneous_transf(vector):\n T = np.eye(4)\n if vector.__class__.__name__ == 'dict':\n T[0, 3] = vector['x']\n T[1, 3] = vector['y']\n T[2, 3] = vector['z']\n elif type(vector) == np.array:\n T[0:3, 3] = vector[0:3, 0]\n else:\n T[0:3, 3] = vector[0:3]\n return T", "def transpose_inplace(x, **kwargs):\r\n dims = range(x.ndim-1, -1, -1)\r\n return elemwise.DimShuffle(x.broadcastable, dims, inplace=True)(x)", "def T(self):\n return F.Transpose.apply(self)", "def transpose_3d():\n tmp = np.random.random((10, 10, 10))\n\n a = tmp.T\n b = np.empty(tmp.shape)\n for j in range(tmp.shape[1]):\n b[:, j, :] = tmp[:, j, :].T\n\n print(np.all(a == b))", "def transplant(self, u, v):\n if u.parent is None:\n self.root=v\n elif u==u.parent.left:\n u.parent.left=v\n else:\n u.parent.right=v\n if v is not None:\n v.parent=u.parent", "def transpose(self, transposition):\r\n top_node = self.top_node\r\n self.top_node = self.transpose_helper(top_node, 
transposition)\r\n int(self.top_node.split('|')[0][1::].strip('()').split(', ')[0])\r\n self.run_clean_up()", "def transpose(self):\n data = [list(col) for col in zip(*self.data)]\n return self.__class__(self.n, self.m, data)", "def _swap_permutation(i, j, permutation_vector):\n permutation_vector[i], permutation_vector[j] = permutation_vector[j], permutation_vector[i]", "def transpose(self):\n return self._transpose", "def transl(x, y, z):\n displace_vector = [[x],\n [y],\n [z]]\n return np.matrix(displace_vector)", "async def infer_shape_transpose(track, v, permutation):\n perm = await permutation['value']\n if perm == ANYTHING:\n perm_t = await permutation['type']\n return (ANYTHING,) * len(perm_t.elements)\n\n v_shp = await v['shape']\n if list(sorted(perm)) != list(range(len(v_shp))):\n raise MyiaShapeError(\n 'The second argument of transpose must be a permutation of'\n ' all of the array\\'s axes.',\n refs=[permutation]\n )\n\n shp = tuple(v_shp[i] for i in perm)\n return shp", "def transp_matrix(matrix):\n matrix_t = []\n n_rows = matrix.__len__()\n n_cols = matrix[0].__len__()\n for i in range(n_rows):\n row_t = []\n for j in range(n_cols):\n try:\n row_t.append(matrix[j][i])\n except:\n row_t.append(0)\n matrix_t.append(row_t)\n \n return matrix_t", "def transpose(self):\n return self._new(self.rep.transpose(), (self.cols, self.rows), self.domain)", "def transpose_matrix(matrix):\n n = len(matrix[0])\n m = len(matrix)\n matrix_t = create_matrix(n,m)\n for j in range(n):\n for i in range(m):\n matrix_t[j][i] = matrix[i][j]\n \n return matrix_t", "def np_transpose(matrix):\n\n return matrix.transpose()", "def transpose(self):\n transposed_data = []\n for i in range(1, self.columns + 1):\n transposed_data.extend(self.column(i))\n\n return Matrix(rows = self.columns, columns = self.rows, data = transposed_data)", "def qrot(q, v):\n assert q.shape[-1] == 4\n assert v.shape[-1] == 3\n assert q.shape[:-1] == v.shape[:-1]\n\n qvec = q[..., 1:]\n uv = torch.cross(qvec.double(), v.double(), dim=len(q.shape) - 1)\n uuv = torch.cross(qvec.double(), uv.double(), dim=len(q.shape) - 1)\n return v + 2 * (q[..., :1] * uv + uuv)", "def transpose(self):\n data = np.transpose(self._data)\n return self.create(self.cols, self.rows, data)", "def cross(u,v):\n u1, u2, u3 = u\n v1, v2, v3 = v\n return np.array([u2*v3 - u3*v2,\n u3*v1 - u1*v3,\n u1*v2 - u2*v1], dtype=u.dtype)", "def T(self):\n # TODO - your code here\n transpose = []\n for col in range(self.w):\n new_row = []\n for row in range(self.h):\n new_row.append(self.g[row][col])\n transpose.append(new_row)\n return Matrix(transpose)\n # TODO - your code here", "def T(self):\n # TODO - your code here\n matrix_transpose = [];\n \n for j in range(self.w):\n matrix_transpose.append(self.get_column(j));\n \n return Matrix(matrix_transpose);", "def _transpose_by_1_vnchwconv():\n\n pass", "def _np_transpose(image):\n return np.transpose(image, (2, 0, 1))", "def relay_transpose(c, a, ax):\n na = c.ref(a)\n assert ax.is_constant(tuple)\n return relay.op.transpose(na, axes=ax.value)", "def transform(self, v):\n #matrix vector multiply, convert from matrix to array type at the end\n return np.array( v * self.M )", "def vector(self,\n i: int,\n j: int) -> np.ndarray:\n return self[j].coord - self[i].coord", "def vector_perp(v):\n assert len(v) == 2\n x, y = v\n return Vector(-y, x)", "def v2t(pose):\n c = np.cos(pose[2])\n s = np.sin(pose[2])\n T = np.array([[c, -s, pose[0]], [s, c, pose[1]], [0, 0, 1]])\n return T", "def rotate(v: vect2d, angle: float) -> 
vect2d:\n vector = ((v.x * math.cos(angle) - v.y * math.sin(angle)),\n (v.x * math.sin(angle) + v.x * math.cos(angle)))\n return vector", "def row(v):\n return v.reshape((1, v.size))", "def _rowvec(x):\n return _colvec(x).transpose()", "def cross_vectors(u, v):\n return [u[1] * v[2] - u[2] * v[1],\n u[2] * v[0] - u[0] * v[2],\n u[0] * v[1] - u[1] * v[0]]", "def rotate_along(axis: Tensor) -> Tensor:\n W = torch.einsum('ijk,j->ik', levi_civita.to(axis), axis)\n return expm(W)", "def screw( v ):\n v = asarray(v)\n z = zeros_like(v[0,...])\n return array([\n [ z, -v[...,5], v[...,4], v[...,0] ],\n [ v[...,5], z,-v[...,3], v[...,1] ],\n [-v[...,4], v[...,3], z, v[...,2] ],\n [ z, z, z, z] ])", "def test_transpose():\n x = np.array([\n [1, 2],\n [3, 4]\n ])\n y = np.array([\n [5, 6],\n [7, 8]\n ])\n z = np.dstack([x, y])\n return z, z.transpose(1, 0, 2)", "def multiply_ggn_factor_transpose(self, vector: jnp.ndarray) -> jnp.ndarray:\n return utils.scalar_mul(\n self.multiply_ggn_factor_transpose_unweighted(vector),\n jnp.sqrt(self.weight))", "def transpose(x: torch.Tensor, dims):\n _dims = list(dims)\n for i in range(len(_dims)):\n if _dims[i] != i:\n x = x.transpose(i, _dims[i])\n j = _dims.index(i)\n _dims[i], _dims[j] = i, _dims[i]\n return x", "def transpose(matrix):\n\n res = [[0] * len(matrix) for i in range(len(matrix[0]))]\n\n for i in range(len(matrix[0])):\n for j in range(len(matrix)):\n res[i][j] = matrix[j][i]\n\n return res", "def TransformVector(self, *args):\n return _itkTranslationTransformPython.itkTranslationTransformD3_TransformVector(self, *args)", "def _flip_vectors(jn_matrix, m_matrix):\r\n m_matrix_trans = m_matrix.transpose()\r\n jn_matrix_trans = jn_matrix.transpose()\r\n new_matrix = zeros(jn_matrix_trans.shape, float)\r\n for i, m_vector in enumerate(m_matrix_trans):\r\n jn_vector = jn_matrix_trans[i]\r\n disT = list(m_vector - jn_vector)\r\n disT = sum(map(abs, disT))\r\n jn_flip = jn_vector * [-1]\r\n disF = list(m_vector - jn_flip)\r\n disF = sum(map(abs, disF))\r\n if disT > disF:\r\n new_matrix[i] = jn_flip\r\n else:\r\n new_matrix[i] = jn_vector\r\n return new_matrix.transpose()", "def vec_to_mat(vec,i=0):\n if i==0: return vec.reshape((-1,1))\n else: return vec.reshape((1,-1))", "def swap(t, i, j):\n t[i], t[j] = t[j], t[i]", "def transpose(lsts):\r\n\r\n new_lsts = [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]\r\n for i in range(4):\r\n for j in range(4):\r\n new_lsts[i][j] = lsts[j][i]\r\n lsts = new_lsts\r\n\r\n return lsts", "def matrix_transpose(matrix):\n transpose = [[] for i in range(len(matrix[0]))]\n\n for i in range(len(matrix)):\n for j in range(len(matrix[i])):\n transpose[j].append(matrix[i][j])\n\n return transpose", "def transpose(self):\n temp_matrix = [[0] * self.TILES_PER_ROW for _ in range(self.TILES_PER_ROW)]\n for i in range(len(self.main_grid_values)):\n for j in range(len(self.main_grid_values)):\n temp_matrix[j][i] = self.main_grid_values[i][j]\n\n self.main_grid_values = temp_matrix", "def _cswap(i, j, S):\n N = _rswap(i, j, S.transpose()).transpose()\n return N", "def transpose(self) -> 'Matrix' :\n # -------------------------------------------------------\n # TODO: You write this one.\n # Hint: create a matrix of a given size, using one of the methods above, and then update it.\n N, M = self.shape()\n T = Matrix.zeros((M, N))\n for i in range(N):\n for j in range(M):\n T.mat[j][i] = self.mat[i][j]\n return T\n\n return Matrix([[\"Not yet written\"]]) # remove this when you add your code.\n # 
-------------------------------------------------------", "def Jv(t,y,v):\n return A@v", "def matrix_transpose(matrix):\n res = [[matrix[j][i] for j in range(len(matrix))] for i in\n range(len(matrix[0]))]\n return (res)", "def permute2st(v, ndim_en=1):\n nd = v.ndim\n return v.transpose([*range(-ndim_en, 0)] + [*range(nd - ndim_en)])", "def image_transpose(img):\n channels = img.shape[2]\n v = [0] * channels\n for i in range(channels):\n v[i] = img[:,:,i].T # Transposing image for each channel\n return np.dstack((v[0],v[1],v[2])) #Returing transposed image", "def Rotation(v, theta):\n\n v = np.array(v)\n if v.shape != (3,) or abs(v.dot(v) - 1.0) > 1e-8 or not np.all(np.isreal(v)):\n raise ValueError('Rotation vector v should be a 3D real unit vector.')\n\n return np.cos(theta/2) * Identity() - 1j * np.sin(theta/2) * (\n v[0] * PauliX() + v[1] * PauliY() + v[2] * PauliZ())", "def householder(vector):\n identity_mat = np.eye(len(vector))\n v = vector[np.newaxis]\n denominator = np.matmul(v, v.T)\n numerator = np.matmul(v.T, v)\n rot_mat = identity_mat - (2 * numerator / denominator)\n return rot_mat", "def v(i, j, d):\n return 81 * (i - 1) + 9 * (j - 1) + d" ]
[ "0.66814506", "0.6551252", "0.65357876", "0.6345111", "0.63199097", "0.6183269", "0.6159493", "0.6156704", "0.61289173", "0.6096206", "0.6076558", "0.6074613", "0.6038157", "0.6010176", "0.60027725", "0.5979507", "0.5971244", "0.5955453", "0.59221447", "0.5877151", "0.58756465", "0.5858023", "0.58222085", "0.58154875", "0.58102345", "0.58067024", "0.5802244", "0.57999057", "0.5794418", "0.57922286", "0.57587403", "0.5748859", "0.5710855", "0.57043874", "0.5701074", "0.5679273", "0.5666426", "0.5658155", "0.5654056", "0.5602022", "0.5579826", "0.5571014", "0.55523205", "0.55458444", "0.554537", "0.554351", "0.55375326", "0.5533935", "0.5520123", "0.55124116", "0.55065674", "0.5503949", "0.54923826", "0.54695904", "0.5468578", "0.54658353", "0.54647744", "0.54569554", "0.54443103", "0.54401433", "0.5439484", "0.54317665", "0.54304427", "0.54241055", "0.5416193", "0.5410578", "0.5409529", "0.54094553", "0.5408752", "0.5404887", "0.5398955", "0.5393289", "0.5390062", "0.53820795", "0.5364251", "0.53572077", "0.53397155", "0.53337973", "0.53336436", "0.53326976", "0.5325403", "0.53230315", "0.5321866", "0.53126764", "0.53113395", "0.52907157", "0.52869034", "0.52861094", "0.5286053", "0.52798784", "0.5276005", "0.52731884", "0.526953", "0.5261395", "0.52593476", "0.5249578", "0.52488345", "0.5242915", "0.52420384", "0.52317137", "0.522983" ]
0.0
-1
r"""Permutation (i,j,k) in the vector v."""
def swap3(d, n, v, i, j, k):
    # v stores n matrices of size d x d, flattened as v[a*d*d + d*row + col].
    # Apply the 3-cycle (i, j, k) to the rows and columns of every block;
    # each assignment below cycles three entries: new x, y, z = old z, x, y.
    for a in range(n):
        for m in range(d):
            if m == i or m == j or m == k:
                continue
            # cycle the entries (i, m), (j, m), (k, m)
            x = a*d*d + d*i + m
            y = a*d*d + d*j + m
            z = a*d*d + d*k + m
            v[x], v[y], v[z] = v[z], v[x], v[y]

            # cycle the entries (m, i), (m, j), (m, k)
            x = a*d*d + d*m + i
            y = a*d*d + d*m + j
            z = a*d*d + d*m + k
            v[x], v[y], v[z] = v[z], v[x], v[y]

        # cycle the diagonal entries (i, i), (j, j), (k, k)
        x = a*d*d + d*i + i
        y = a*d*d + d*j + j
        z = a*d*d + d*k + k
        v[x], v[y], v[z] = v[z], v[x], v[y]

        # cycle the entries (i, j), (j, k), (k, i)
        x = a*d*d + d*i + j
        y = a*d*d + d*j + k
        z = a*d*d + d*k + i
        v[x], v[y], v[z] = v[z], v[x], v[y]

        # cycle the entries (i, k), (j, i), (k, j)
        x = a*d*d + d*i + k
        y = a*d*d + d*j + i
        z = a*d*d + d*k + j
        v[x], v[y], v[z] = v[z], v[x], v[y]
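The analogous check for the 3-cycle (again my own sketch, not dataset content). Reading off the index arithmetic, each entry (r, c) of a block picks up the old entry (p[r], p[c]), where p[i] = k, p[j] = i, p[k] = j, and p fixes every other index:

import numpy as np

d, n, i, j, k = 5, 2, 0, 2, 4
rng = np.random.default_rng(1)
blocks = rng.integers(0, 10, size=(n, d, d))

v = list(blocks.reshape(-1))
swap3(d, n, v, i, j, k)

p = np.arange(d)
p[i], p[j], p[k] = k, i, j        # the induced 3-cycle on row/column indices
expected = blocks[:, p][:, :, p]  # conjugate every block by that cycle

assert np.array_equal(np.array(v).reshape(n, d, d), expected)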
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tuple_permutation(v,P):\r\n u = []\r\n w = list(v)[:]\r\n test = True\r\n for i in range(len(v)):\r\n if ((isinstance(v[i], int) == True) or (isinstance(v[i], str) == True)):\r\n if (v[i] in P):\r\n w[i] = P(v[i])\r\n else:\r\n u.append(tuple_permutation(tuple(v[i]),P))\r\n test = False\r\n if (test == True):\r\n return tuple(w)\r\n else:\r\n return tuple(u)", "def _swap_permutation(i, j, permutation_vector):\n permutation_vector[i], permutation_vector[j] = permutation_vector[j], permutation_vector[i]", "def vector_perp(v):\n assert len(v) == 2\n x, y = v\n return Vector(-y, x)", "def permutation_in_simplex_test(vec, P):\r\n s = P_chains([],[])\r\n if (vec.dic != {}):\r\n v = list(vec.dic.keys())\r\n p = len(v[0]) - 1\r\n faces = []\r\n values = []\r\n for a in v:\r\n if (isinstance(a, int) == True): \r\n return vec\r\n else:\r\n w = tuple_permutation(a,P)\r\n w1 = tuple_sorted(w)\r\n if (orientation_function(w1,w,p) == True):\r\n faces.append(tuple(w1))\r\n values.append(vec.dic[a])\r\n else:\r\n faces.append(tuple(w1))\r\n values.append((-1)*vec.dic[a])\r\n s = P_chains(faces,values)\r\n return s\r\n else:\r\n return s", "def check_permutation(u, v):\n for permutation in itertools.permutations(u):\n if v == permutation:\n return True\n return False", "def apply_permutation(hyper, pol, perm):\n pass", "def new_permutation(V,m,adj):\r\n\r\n global tent\r\n\r\n perm = V.copy()\r\n \r\n \"\"\" try to select two vertices to swipe wisely. \"\"\"\r\n \r\n #we select 1 vertex among the m first vertices\r\n p1 = randint(0,m-1)\r\n \r\n #we select 1 vertex among the vertices left\r\n p2 = randint(m,len(V)-1)\r\n\r\n def comp(p1,p2,adj,perm):\r\n \"\"\"\r\n retrieve the degree of the 2 vertices and\r\n compare the degree\r\n Args:\r\n p1 (int): index of the vertex\r\n p2 (int): index of the vertex\r\n adj (set): set of the edges\r\n perm (int): current permutation of the vertices\r\n\r\n Returns:\r\n bool: true if degree of p2 is higher than the one of p1 ,false otherwise\r\n \"\"\"\r\n #degree of p1\r\n f1 = 0\r\n #degree of p2\r\n f2 = 0\r\n \r\n #compute the degrees\r\n for i in range(m):\r\n if (V[p1],V[i]) in adj or (V[i],V[p1]) in adj:\r\n f1 += 1\r\n\r\n for i in range(m):\r\n if (V[p2],V[i]) in adj or (V[i],V[p2]) in adj:\r\n f2 += 1\r\n \r\n if f2 > f1:\r\n return True\r\n else:\r\n return False\r\n\r\n def check_prior(p1,p2,adj,perm,tent):\r\n \"\"\"\r\n recursive function which try to swipe the 2 vertices \r\n by comparing the degre of the vertexe.\r\n\r\n Args:\r\n p1 (int): index of the vertex\r\n p2 (int): index of the vertex\r\n d (set): set of the edges\r\n perm (set): new permutation\r\n tent (int): we fix the swipe process to tent try.\r\n\r\n Returns:\r\n int: the new neighbor aka permutation\r\n \"\"\"\r\n \r\n #if the degree of the node p2 is higher or if the try is over we swipe \r\n if comp(p1,p2,adj,perm) or tent == 0:\r\n temp = perm[p1]\r\n perm[p1] = perm[p2]\r\n perm[p2] = temp\r\n return perm\r\n \r\n tent -= 1\r\n \r\n #select a new vertex to swipe\r\n p2 = randint(m,len(V)-1)\r\n\r\n return check_prior(p1,p2,adj,perm,tent)\r\n \r\n return check_prior(p1,p2,adj,perm,tent)", "def permutations(k: int) -> int:\n return factorial(k)", "def TAoCPpermutation(n,k):\n perms = []\n for subset in itertools.combinations(range(n), k):\n A = []; B = []; C = []; min = 0; j = 0; up = 0\n for i in xrange(n):\n if(j>=k or i != subset[j]):\n B.append(i)\n up +=1\n else:\n up -=1\n j += 1\n if(up < min):\n min = up\n B.append(i)\n else:\n A.append(i)\n C.append(B.pop())\n 
perms.append(A+B+C)\n return perms", "def invert_permutation(p):\n s = np.empty_like(p)\n s[p] = np.arange(p.size)\n return s", "def permute(seq, permutation):\n return [seq[i] for i in permutation]", "def __permutation(orgset, k):\n if k == 1:\n for i in orgset:\n yield (i,)\n elif k > 1:\n for i, x in enumerate(orgset):\n # iterates though to near the end\n for s in __permutation(orgset[:i] + orgset[i + 1 :], k - 1):\n yield (x,) + s", "def perm(n, k):\n return factorial(n)/factorial(n-k)", "def cr_v(self, v, k):\n\n return self.cr(v[:, 0], v[:, 1], v[:, 2], k).T", "def partition(v,m,I,V,sym):\n T = s.Symbol(\"T\")\n return qvib(v) + qtrans(m,V) + qrot(I,sym)", "def permute(self, arr):\n\n return arr[self.permutation_idxs]", "def lift_perm(p: Dict[int, int]) -> np.ndarray:\n n = len(p)\n pm = np.zeros((1 << n, 1 << n), dtype=complex)\n for i in range(1 << n):\n j = 0\n mask = 1 << n\n for q in range(n):\n mask >>= 1\n if (i & mask) != 0:\n j |= 1 << (n - 1 - p[q])\n pm[j][i] = 1\n return pm", "def dexpinv(self, u, v, _=None):\n A, a = np.split(u, 2)\n B, b = np.split(v, 2)\n alpha = np.linalg.norm(A)\n rho = np.inner(A, a)\n if np.isclose(alpha, 0):\n return v\n c1 = (\n B\n - 0.5 * np.cross(A, B)\n + self._dexpinv_helper_1(alpha) * np.cross(A, np.cross(A, B))\n )\n c2 = (\n b\n - 0.5 * (np.cross(a, B) + np.cross(A, b))\n + self._dexpinv_helper_2(alpha, rho) * np.cross(A, np.cross(A, B))\n + self._dexpinv_helper_1(alpha)\n * (\n np.cross(a, np.cross(A, B))\n + np.cross(A, np.cross(a, B))\n + np.cross(A, np.cross(A, b))\n )\n )\n return np.hstack((c1, c2))", "def rotate2(self, nums, k) -> None:\n foo = nums.copy()\n arrayLength= len(nums)\n for i in range(arrayLength):\n index = (i+k) % arrayLength\n foo[index] = nums[i]\n for j in range(arrayLength):\n nums[j] = foo[j]", "def inverse_transform(v):\n v, k = divmod(v - 1, N)\n v, j = divmod(v, N)\n v, i = divmod(v, N)\n return i, j, k", "def compute_rot(v):\n if v[0] >= 0:\n M = nd.eye(len(v))\n else:\n M = - nd.eye(len(v))\n for i in range(1, len(v)):\n if v[i] == 0:\n continue\n rot_minus_theta = nd.eye(len(v))\n temp = nd.dot(M, v)\n\n theta = nd.arctan(temp[i]/temp[0])\n c = nd.cos(theta)\n s = nd.sin(theta)\n\n rot_minus_theta[0,0] = c\n rot_minus_theta[i,i] = c\n rot_minus_theta[0,i] = s\n rot_minus_theta[i,0] = -s\n\n M = nd.dot(rot_minus_theta, M)\n return M", "def permute(p,l,length):\n assert length >= 0\n if length == 0:\n\tprint p\n\treturn\n\n for i in range(0,length):\n\tn = p + (l[i],) \n\tpermute(n,l[0:i]+l[i+1:],length-1)", "def permute_via_gather(val, permutation, inverse_permutation, axis=0):\n # It is *not* safe to use jax.custom_vjp here. The most likely cause is that\n # it can't close over values: https://github.com/google/jax/issues/2676\n # The error only occurs in some configurations (e.g. use_python_loop = True,\n # num_parallel_heads = 1) but not others.\n permutation = jax.lax.stop_gradient(permutation)\n inverse_permutation = jax.lax.stop_gradient(inverse_permutation)\n def permute_impl(val):\n return jnp.take(val, permutation, axis=axis)\n def permute_vjp(val):\n permuted = permute_impl(jax.lax.stop_gradient(val))\n def vjpfun(permuted_grad):\n # JAX autodiff would synthesize a scatter operation because it doesn't\n # know that the indices are a permutatation. 
However on TPU, gathers are\n # faster than scatters (at least in the regime the LSH attention uses).\n return (jnp.take(permuted_grad, inverse_permutation, axis=axis),)\n return permuted, vjpfun\n permute = jax.custom_transforms(permute_impl)\n jax.defvjp_all(permute, permute_vjp)\n return permute(val)", "def permutation(self, x):\r\n x = array(x)\r\n x = roll(x, self.num_calls)\r\n self.num_calls += 1\r\n return x", "def perm_invert(p):\n q = [None] * len(p)\n for i, j in enumerate(p):\n q[j] = i\n return q", "def apply_permutation(l, p):\n\n for i in xrange(len(l)):\n nxt = i\n print 'change - ', i\n while p[nxt] >= 0:\n print 'before-', i, p[nxt], l, p\n\n l[i], l[p[nxt]] = l[p[nxt]], l[i]\n temp = p[nxt]\n p[nxt] -= len(p)\n nxt = temp\n print 'after -', i, p[nxt], l, p\n\n\n print l", "def permutation(a):\n rs = _generator.get_random_state()\n return rs.permutation(a)", "def all_permutations(support):\n support = np.array(support)\n\n def gen(p):\n for perm in permutations(support):\n perm = np.array(perm)\n p[perm] = p[support]\n yield p\n p[support] = p[perm]\n return gen", "def _qij_vec_dagger(i: int, j: int):\n return [hermitian_conjugated(i) for i in _qij_vec(i, j)]", "def transform(i, j, k):\n return i * N * N + j * N + k + 1", "def make_permutation(partition):\r\n P = Permutation()\r\n c = 0\r\n for j in range(len(partition)):\r\n a = []\r\n for h in range(partition[j]):\r\n a.append(c)\r\n c = c + 1 \r\n if (c == 1):\r\n P1 = Permutation()\r\n c = 0\r\n else:\r\n P1 = Permutation([a])\r\n P = P*P1\r\n return P", "def from_inversion_vector(self, inversion):\n size = len(inversion) + 1\n N = [i for i in xrange(size)]\n perm = []\n try:\n for k in xrange(size - 1):\n val = N[inversion[k]]\n perm.append(val)\n N.remove(val)\n except IndexError:\n raise ValueError(\"The inversion vector is not valid.\")\n perm.extend(N)\n return _new_from_array_form(perm)", "def rotate(q, v):\n if v.ndim == 1:\n qv = np.append(v,0)\n else:\n qv = np.hstack([v,np.zeros((len(v),1))])\n out = mult(q,qv)\n out = mult(out, inv(q))\n return out[:,:3]", "def get_all_pandigitals(i, k):\n l = range(i, k + 1)\n return itertools.permutations(l)", "def permute_rtype_vector(x):\n\n return np.array([x[0],x[5],x[6],x[7],x[8],x[1],x[2],\n x[3],x[4],x[9],x[11],x[10],x[12]],dtype=np.float)\n\n #return np.array([x[0],x[5],x[6],x[7],x[8],x[1],x[2],\n # x[3],x[4],x[9],x[13],x[14],x[15],x[10],\n # x[11],x[12],x[16],x[17],x[22],x[23],x[24],\n # x[25],x[18],x[19],x[20],x[21],x[27],x[26],x[28]],\n # dtype=np.float)", "def permute(self, idx):\n\n # Check that input is a true permutation\n if set(idx) != set(range(self.rank)):\n raise ValueError('Invalid permutation specified.')\n\n # Update factors\n self.factors = [f[:, idx] for f in self.factors]\n return self.factors", "def decoder(permutation):\n depermutation = []\n for x in range (0, len (permutation)):\n depermutation.append (permutation.index(x))\n return depermutation", "def check_permutation2(u, v):\n u_chars = {}\n for c in u:\n try:\n u_chars[c] += 1\n except KeyError:\n u_chars[c] = 1\n\n v_chars = {}\n for d in v:\n try:\n v_chars[d] += 1\n except KeyError:\n v_chars[d] = 1\n\n if sum(u_chars.values()) != sum(v_chars.values()):\n #u and v are not of the same length.\n return False\n\n for c in u:\n c_count_in_u = u_chars[c]\n c_count_in_v = v_chars.get(c, 0)\n if c_count_in_u != c_count_in_v:\n return False\n\n return True", "def creer_matrice(n, p, v):\n return [[v] * p for i in range(n)]", "def test_1_2(self):\r\n input = vector()\r\n p = imatrix()\r\n out 
= permute_row_elements(input, p)\r\n permute = function([input, p], out)\r\n\r\n rng = numpy.random.RandomState(utt.fetch_seed())\r\n input_val = rng.uniform(size=(5,)).astype(config.floatX)\r\n p_val = numpy.asarray([rng.permutation(5) for i in range(3)\r\n ], dtype='int32')\r\n out_val = permute(input_val, p_val)\r\n\r\n # Each row of p contains a permutation to apply to the input vector\r\n out_bis = numpy.asarray([input_val[p_row] for p_row in p_val])\r\n assert numpy.all(out_val == out_bis)\r\n\r\n # Verify gradient\r\n def permute_fixed(s_input):\r\n \"\"\"Auxiliary op defined to get rid of gradient wrt p_val\"\"\"\r\n return permute_row_elements(s_input, p_val)\r\n utt.verify_grad(permute_fixed, [input_val])", "def test_permutation(self):\r\n # Check over two calls to see if the random state is correctly updated.\r\n m = Module()\r\n m.random = RandomStreams(utt.fetch_seed())\r\n m.fn = Method([], m.random.permutation((20,), 10))\r\n\r\n made = m.make()\r\n made.random.initialize()\r\n fn_val0 = made.fn()\r\n fn_val1 = made.fn()\r\n\r\n rng_seed = numpy.random.RandomState(utt.fetch_seed()).randint(2**30)\r\n rng = numpy.random.RandomState(int(rng_seed)) #int() is for 32bit\r\n\r\n # rng.permutation outputs one vector at a time, so we iterate.\r\n numpy_val0 = numpy.asarray([rng.permutation(10) for i in range(20)])\r\n numpy_val1 = numpy.asarray([rng.permutation(10) for i in range(20)])\r\n\r\n assert numpy.all(fn_val0 == numpy_val0)\r\n assert numpy.all(fn_val1 == numpy_val1)", "def allpermutations(orgset, k):\n return itertools.chain(*[permutation(orgset, i) for i in range(1, k + 1)])", "def rotate(self, nums: List[int], k: int) -> None:\n # 暴力\n for _ in range(k):\n for j in range(1, len(nums)):\n nums[0], nums[j] = nums[j], nums[0]\n\n return nums", "def permutation_matrix(order):\n matrix = np.zeros([order,order])\n matrix[-1,0] = 1\n matrix[0:-1,1::] = np.identity(order-1)\n return matrix", "def find_partitions(V,k):\n k_subs = k_subset(V,k)\n k_subs = uniq_subsets(k_subs)\n\n return k_subs", "def vorticity(self, u, v):\n return self.to_grid(self.vorticity_spectral(u, v))", "def partial_permutations(n, k):\n return int((factorial(n) / factorial(n - k)) % 1000000)", "def v(i, j, d):\n return 81 * (i - 1) + 9 * (j - 1) + d", "def pov_vector(v):\n return \"<{}>\".format(\", \".join(str(x) for x in v))", "def multiply_ggn_factor_transpose_unweighted(\n self,\n vector: jnp.ndarray\n ) -> jnp.ndarray:\n pass", "def perm2(a,ix):\n if ix==len(a):\n print(a)\n else:\n for j in range(ix+1):\n swap(a,j,ix)\n perm2(a,ix+1)\n swap(a,j,ix)", "def rotate(self, nums: List[int], k: int) -> None:\n n = len(nums)\n i = 0\n while i < n-(k % n):\n v = nums.pop(0)\n nums.append(v)\n i += 1\n return nums", "def test_permutation(self):\r\n # Check over two calls to see if the random state is correctly updated.\r\n random = RandomStreams(utt.fetch_seed())\r\n fn = function([], random.permutation((20,), 10), updates=random.updates())\r\n\r\n fn_val0 = fn()\r\n fn_val1 = fn()\r\n\r\n rng_seed = numpy.random.RandomState(utt.fetch_seed()).randint(2**30)\r\n rng = numpy.random.RandomState(int(rng_seed)) #int() is for 32bit\r\n\r\n # rng.permutation outputs one vector at a time, so we iterate.\r\n numpy_val0 = numpy.asarray([rng.permutation(10) for i in range(20)])\r\n numpy_val1 = numpy.asarray([rng.permutation(10) for i in range(20)])\r\n\r\n assert numpy.all(fn_val0 == numpy_val0)\r\n assert numpy.all(fn_val1 == numpy_val1)", "def permute(n: int, k: int) -> int:\n\n # no possible permutations if k > n\n 
if n < k:\n return 0\n\n # if faster, compute n! and (n - k)! and return their quotient\n fact_count = len(_factorial_sequence)\n if n - fact_count <= k:\n return factorial(n) // factorial(n - k)\n\n # compute the product (n - k + 1) * (n - k + 2) * ... * n\n return seqs.arithmetic_product(n - k + 1, k)", "def rotate(self, nums, k):\n length=len(nums)\n nums[:]=nums[length-k:length]+nums[0:length-k]\n return nums", "def generate_permutation(self, idxs):\n ret = _CAPI_DGLNDArrayPartitionGeneratePermutation(\n self._partition, F.zerocopy_to_dgl_ndarray(idxs)\n )\n return F.zerocopy_from_dgl_ndarray(ret(0)), F.zerocopy_from_dgl_ndarray(\n ret(1)\n )", "def permutations(self, key):\n yield key", "def rotate(self, nums: List[int], k: int) -> None:\n if not k:\n return\n lcm = self.lcm(len(nums), k)\n r = lcm // k\n for i in range(len(nums) // r):\n prev = nums[i]\n for _ in range(r):\n index = (i + k) % len(nums)\n cur = nums[index]\n nums[index] = prev\n prev = cur\n i = index", "def rotate(self, nums:[int], k: int) -> None:\n l = len(nums)\n k = k%l\n \n if k == l:\n return \n \n \n index = 0\n i = 0\n \n while index < l:\n a = b = i\n b = (i+k)%l\n tmp2 = nums[a]\n\n while b != i:\n tmp1 = tmp2\n tmp2 = nums[b]\n nums[b] = tmp1\n a = b\n b = (b+k)%l\n index += 1\n nums[i] = tmp2\n i += 1\n index += 1", "def nextPermutation(self, nums) -> None:\n inv = None\n for i in reversed(range(len(nums) - 1)):\n if nums[i] < nums[i + 1]:\n inv = i\n break\n if inv is None:\n nums.sort()\n return\n k = None\n for i in range(inv+1, len(nums)):\n if nums[i] > nums[inv] and (k is None or nums[i] < nums[k]):\n k = i\n if k is not None:\n temp = nums[k]\n nums[k] = nums[inv]\n nums[inv] = temp\n nums[inv+1:] = sorted(nums[inv+1:])\n return", "def rotate1(self, nums, k) -> None:\n foo = None\n previous = None\n for i in range(k):\n previous = nums[len(nums)-1]\n for j in range(len(nums)):\n foo = nums[j]\n nums[j] = previous\n previous = foo", "def rotate(self, nums: List[int], k: int) -> None:\n res = []\n for i in range(len(nums)):\n res.append(nums[(i - k) % len(nums)])\n for i in range(len(nums)):\n nums[i] = res[i]", "def order_v(self):\n return self._degree_v + 1", "def rotate(self, nums: List[int], k: int):\n tmp = nums.copy()\n n = len(nums)\n # 映射:i -> (i+k)%n\n for i in range(n):\n nums[(i+k) % n] = tmp[i]", "def test_2_1(self):\r\n input = matrix()\r\n p = ivector()\r\n out = permute_row_elements(input, p)\r\n permute = function([input, p], out)\r\n\r\n rng = numpy.random.RandomState(utt.fetch_seed())\r\n input_val = rng.uniform(size=(3, 5)).astype(config.floatX)\r\n p_val = rng.permutation(5).astype('int32')\r\n out_val = permute(input_val, p_val)\r\n\r\n # The same permutation should be applied to every row of the input matrix.\r\n out_bis = numpy.asarray([r[p_val] for r in input_val])\r\n assert numpy.all(out_val == out_bis)\r\n\r\n # Verify gradient\r\n def permute_fixed(s_input):\r\n \"\"\"Auxiliary op defined to get rid of gradient wrt p_val\"\"\"\r\n return permute_row_elements(s_input, p_val)\r\n utt.verify_grad(permute_fixed, [input_val])", "def rotate(self, nums: List[int], k: int) -> None:\n new_nums = nums.copy()\n count = len(nums)\n if count == 0:\n return\n new_nums.extend(new_nums)\n i = count - k % count\n j = 0\n while j < count:\n nums[j] = new_nums[i+j]\n j += 1", "def rotate(self, nums, k):\n lenth = len(nums)\n nums[:] = nums[lenth-k:]+nums[:lenth-k]\n return nums", "def to_permutation(self):\n sp = SetPartitions(self.parent()._n)(self)\n perm = 
sp.to_permutation().to_cycles()\n return perm", "def nextPermutation(self, nums: List[int]) -> None:\n pass", "def mat24_int_to_perm(k):\n oct, k = divmod(k, 322560)\n if oct >= 759: return None\n oct -= 759 - STD_OCTAD\n oct += (oct >> 12) & 759 # give number 0 to standard octad\n #print(\"i2p\", oct)\n oct = gc.octad_to_vect(oct);\n #print(\"i2p oct\", hex(oct))\n p = [None]*24\n oct, j = 8 * oct, 0x8 \n for i in range(24):\n o = oct & 8\n p[(j >> o) & 0x1f] = i\n j += 1 << o\n oct >>= 1\n p[8] = p[8 + (k & 15)]\n #print(\"i2pfinal\", k & 15)\n k >>= 4\n k *= (1 << 28) // 2520 + 1\n for i in range(6):\n k1 = i + (k >> 28)\n #print(\"i2p%d\" % i, k >> 28)\n p[i], p[k1] = p[k1], p[i] \n k = (k & 0xfffffff) * (7-i)\n mat24_complete_heptad(p)\n return p", "def knotvector_v(self):\n return self._knot_vector_v", "def rotate(self, nums: list[int], k: int) -> None:\n for i in range(k):\n prev = nums[-1]\n for j in range(len(nums)):\n nums[j], prev = prev, nums[j]", "def _pfunc(i,j,perm):\n if perm[i-1] == j:\n return 1\n else:\n return 0", "def rotate3(self, nums, k) -> None:\n k = k % len(nums)\n count = 0\n for i in range(len(nums)):\n if count >= len(nums):\n break\n current = i\n previous = nums[i]\n while True:\n next = (current + k) % len(nums)\n temp = nums[next]\n nums[next] = previous\n previous = temp\n current = next\n count += 1\n if(i == current):\n break", "def perm(t,i,n):\n if i==n:\n print(t)\n else:\n for j in range(i,n+1):\n swap(t,i,j)\n perm(t,i+1,n)\n swap(t,i,j)", "def mult(v):\n\treturn sum([v[a][1] for a in range(len(v))])", "def swap2(d, n, v, i, j):\n for a in range(n):\n for k in range(d):\n if k == i or k == j:\n continue\n x = a*d*d + d*i + k\n y = a*d*d + d*j + k\n v[x], v[y] = v[y], v[x]\n\n x = a*d*d + d*k + i\n y = a*d*d + d*k + j\n v[x], v[y] = v[y], v[x]\n\n x = a*d*d + d*i + i\n y = a*d*d + d*j + j\n v[x], v[y] = v[y], v[x]\n\n x = a*d*d + d*j + i\n y = a*d*d + d*i + j\n v[x], v[y] = v[y], v[x]", "def permute(p, dims, perm):\n if issparse(p):\n return _permute_sparse(p, dims, perm)\n return _permute_dense(p, dims, perm)", "def permute_2d(m, p):\r\n return m[p][:, p]\r\n # unused below\r\n m_t = transpose(m)\r\n r_t = take(m_t, p, axis=0)\r\n return take(transpose(r_t), p, axis=0)", "def permutation(nums):\n list = []\n temp = []\n backtrack(list, temp, nums)\n return list", "def apply(self, v):\n u = np.zeros(self.Dimension, dtype=complex)\n for me in self.Elements:\n for index in range(v.Elements.size):\n if index == me.j:\n u[me.i] += me.val * v.Elements[index]\n u = Vector(u) \n return u", "def rotate(self, nums, k):\r\n # l = len(nums)\r\n # nums[:] = nums[l-k:] + nums[:l-k]\r\n \r\n # nums1 = nums[:]\r\n # l = len(nums)\r\n # for i in range(l):\r\n # nums[(i+k)%l] = nums1[i]\r\n \r\n l = len(nums)\r\n nums[l-k:] = nums[l-k:][::-1]\r\n nums[:l-k] = nums[:l-k][::-1]\r\n nums[:] = nums[::-1]", "def hash_vector(self,v):\r\n # you will need to use self.functions for this method\r\n x = np.array([f(v[0]) for f in self.functions])\r\n #print (x)\r\n return x\r\n raise NotImplementedError", "def test_permutation(self):\r\n rng_R = random_state_type()\r\n post_r, out = permutation(rng_R, size=(9,), n=6)\r\n print 'OUT NDIM', out.ndim\r\n f = compile.function(\r\n [compile.In(rng_R,\r\n value=numpy.random.RandomState(utt.fetch_seed()),\r\n update=post_r, mutable=True)],\r\n [out], accept_inplace=True)\r\n\r\n numpy_rng = numpy.random.RandomState(utt.fetch_seed())\r\n # Check over two calls to see if the random state is correctly updated.\r\n # 
numpy_rng.permutation outputs one vector at a time,\r\n # so we call it iteratively to generate all the samples.\r\n val0 = f()\r\n val1 = f()\r\n numpy_val0 = numpy.asarray([numpy_rng.permutation(6)\r\n for i in range(9)])\r\n numpy_val1 = numpy.asarray([numpy_rng.permutation(6)\r\n for i in range(9)])\r\n print val0\r\n print numpy_val0\r\n print val1\r\n print numpy_val1\r\n self.assertTrue(numpy.all(val0 == numpy_val0))\r\n self.assertTrue(numpy.all(val1 == numpy_val1))", "def pvector_pp(i, q):\n\tc0 = coords_cut[i]\n\tra, dec = c0.ra.value, c0.dec.value\n\tr = hp.rotator.Rotator([ra, dec, 0])\n\tsT = np.matmul(r.mat, np.matmul(s_tensor_cut[:,:,i], r.mat.transpose()))\n\tevals, evecs = np.linalg.eigh(sT[1:,1:])\n\tevecA, evecB = evecs[:,0], evecs[:,1]\n\tif evecB[0] < 0:\n\t\tevecB = -evecB\n\ttheta = np.arctan2(evecB[1], evecB[0])\n\tres = 180*theta.item()/np.pi, i\n\tq.put(res)\n\treturn res", "def permutations(iterable):\n pass", "def permute(self):\n raise NotImplementedError()", "def jaccard(self, u, v):\n\n dist = np.dot(u, v) / np.double(np.bitwise_or(u, v).sum())\n return 1 - dist", "def permutations(cube):\r\n yield from rotations24(cube)\r\n yield from rotations24(np.flip(cube, 0))\r\n yield from rotations24(np.flip(cube, 1))\r\n yield from rotations24(np.flip(cube, 2))", "def rotate4(self, nums, k) -> None:\n k = k % len(nums)\n self.reverse(nums, 0, len(nums)-1)\n self.reverse(nums, 0, k-1)\n self.reverse(nums, k, len(nums)-1)", "def permute4(values: List) -> List:\n o = []\n ld4 = len(values) // 4\n for i in range(ld4):\n o.extend(\n [values[i], values[i + ld4], values[i + ld4 * 2], values[i + ld4 * 3]])\n return o", "def multiplyByVector(matrix:[[int]], vector: [int]):\n # assuming vector and result are transposed\n _validate(matrix, vector)\n if len(matrix[0]) != len(vector):\n raise InvalidArgumentError(f\"cannot multiply vector which length is {len(vector)} by matrix that has a {len(matrix[0])} columns\")\n result = [0 for _ in range(len(matrix))] # initialize empty array\n for matrix_row_idx, _ in enumerate(matrix):\n for matrix_column_idx, v_value in enumerate(vector):\n result[matrix_row_idx] ^= (v_value * matrix[matrix_row_idx][matrix_column_idx])\n return result", "def rotate(self, nums, k) -> None:\r\n l = len(nums)\r\n d = l - ( k % l)\r\n b = nums[d:] + nums[:d]\r\n nums.clear()\r\n nums += b", "def permute2en(v, ndim_st=1):\n nd = v.ndim\n return v.transpose([*range(ndim_st, nd)] + [*range(ndim_st)])", "def rotate(self, nums: list, k: int) -> None:\n a = [0] * len(nums)\n for i in range(len(nums)):\n a[(i+k)%len(nums)] = nums[i] #recycle\n\n for i in range(len(nums)):\n nums[i] = a[i]", "def reveal_sort(k, D, reverse=False):\n assert len(k) == len(D)\n library.break_point()\n shuffle = types.sint.get_secure_shuffle(len(k))\n k_prime = k.get_vector().secure_permute(shuffle).reveal()\n idx = types.Array.create_from(k_prime)\n if reverse:\n D.assign_vector(D.get_slice_vector(idx))\n library.break_point()\n D.secure_permute(shuffle, reverse=True)\n else:\n D.secure_permute(shuffle)\n library.break_point()\n v = D.get_vector()\n D.assign_slice_vector(idx, v)\n library.break_point()\n instructions.delshuffle(shuffle)", "def generador_n(vector_v, constante):\n\n n = []\n\n for x in range(len(vector_v)):\n nn = vector_v[x] * constante\n n.append(nn)\n\n # print(\"valores v: \", vector_v)\n # print(\"valores n: \", n)\n\n return n", "def vcycle(v, b):\n if (len(v) - 1) & (len(v) - 2) != 0:\n raise ValueError(\"Lenth of v must be 2**n + 1.\")\n\n for i in range(3):\n 
jacobi23(v, b)\n\n if len(v) <= 3:\n return\n\n r = b - Amul(v)\n r2 = 4. * restrict(r)\n e2 = np.zeros_like(r2)\n vcycle(e2, r2)\n v += prolong(e2)\n\n for i in range(3):\n jacobi23(v, b)", "def nextPermutation(self, nums: List[int]) -> None:\n \n k = len(nums) - 1\n while (k>0) and (nums[k]<=nums[k-1]):\n k = k - 1\n if (k == 0):\n for i in range(int(len(nums)/2)):\n temp = nums[i]\n nums[i] = nums[len(nums)-1-i]\n nums[len(nums)-1-i] = temp\n else:\n k = k - 1\n for i in range(len(nums)-1, k-1, -1):\n if nums[i]>nums[k]:\n break\n temp = nums[i]\n nums[i] = nums[k]\n nums[k] = temp\n k = k + 1\n l = len(nums) - k \n for i in range(int(l/2)):\n temp = nums[k+i]\n nums[k+i] = nums[len(nums)-1-i]\n nums[len(nums)-1-i] = temp\n return nums", "def rotate(self, nums, k):\n n = len(nums)\n k = k % n\n nums.reverse()\n self.reverse(nums, 0, k-1)\n self.reverse(nums, k, n-1)\n return nums" ]
[ "0.6331758", "0.62894833", "0.6197115", "0.6137187", "0.6044073", "0.6019328", "0.5991224", "0.5982989", "0.5912814", "0.5890045", "0.58892596", "0.5880598", "0.5838851", "0.5801919", "0.5771089", "0.57405615", "0.56747866", "0.56370187", "0.56347495", "0.5619772", "0.56053793", "0.5600076", "0.55870146", "0.5585318", "0.55783653", "0.55752426", "0.55669105", "0.5563655", "0.5547959", "0.5539659", "0.55386144", "0.55366445", "0.55276304", "0.5506465", "0.5502725", "0.54954416", "0.548177", "0.5480084", "0.5478226", "0.5470066", "0.5467128", "0.54635686", "0.5461794", "0.54563355", "0.5449651", "0.54416305", "0.54403424", "0.5421942", "0.5421897", "0.5387164", "0.5384498", "0.538422", "0.53836066", "0.5377528", "0.5365553", "0.53650385", "0.5364781", "0.53647214", "0.53581", "0.5332999", "0.53305435", "0.53261465", "0.53251165", "0.53212476", "0.53182703", "0.53175646", "0.528714", "0.5287095", "0.52804446", "0.5271984", "0.5264395", "0.5264374", "0.5259264", "0.52565664", "0.52483094", "0.524336", "0.52413607", "0.52379286", "0.52366894", "0.52309644", "0.523002", "0.5227574", "0.52224797", "0.5221978", "0.5214049", "0.5211417", "0.5209881", "0.5208997", "0.52086794", "0.5203711", "0.51981175", "0.51980335", "0.5188661", "0.51876074", "0.5184156", "0.5183715", "0.5179514", "0.51790804", "0.51783943", "0.51777947" ]
0.5475577
39
r"""Perform a cyclic swap on the vertices. This is used in the multiplication of symbolic upper matrices. It is currently suboptimal, but on the other hand it costs much less than any convex hull computation."""
def vertex_cyclic_swap(nvars, l, i):
    # Cyclically rotate the coordinates of every vertex in l by i positions.
    # nvars is not used in the body.
    if i == 0 or not l:
        return l
    ll = []
    F = l[0].parent()
    for v in l:
        # the last i coordinates must all be zero (a zero vector is falsy)
        assert not v[-i:]
        ll.append(F(tuple(v[-i:]) + tuple(v[:-i])))
    for v in ll:
        v.set_immutable()
    return tuple(ll)
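A hypothetical usage sketch (a Sage session of my own, not part of the dataset record; the module and values are made up for illustration). With i = 2 the last two coordinates of each vertex must be zero, and they are rotated to the front:

sage: V = FreeModule(ZZ, 4)
sage: l = [V((1, 2, 0, 0)), V((3, 4, 0, 0))]
sage: vertex_cyclic_swap(4, l, 2)
((0, 0, 1, 2), (0, 0, 3, 4))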
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def vertex_swap(d, n, l, i1, i2, j1, j2):\n if i1 == i2 and j1 == j2:\n return l\n if i1 == j1:\n # (i1,i1) -> (i2,i2)\n assert i2 == j2\n def swap(v):\n swap2(d, n, v, i1, i2)\n elif i1 == i2:\n # (i,j1) -> (i,j2)\n def swap(v):\n swap2(d, n, v, j1, j2)\n elif j1 == j2:\n # (i1,j) -> (i2,j)\n def swap(v):\n swap2(d, n, v, i1, i2)\n elif i1 == j2 and i2 == j1:\n # (i1,j1) -> (j1,i1)\n def swap(v):\n swap2(d, n, v, i1, j1)\n elif i1 == j2:\n # (i1,j1) -> (i2,i1)\n def swap(v):\n swap3(d, n, v, j1, i1, i2)\n elif i2 == j1:\n # (i1,j1) -> (j1,j2)\n def swap(v):\n swap3(d, n, v, i1, j1, j2)\n else:\n # (i1,j1) -> (i2,j2)\n def swap(v):\n swap2(d, n, v, i1, i2)\n swap2(d, n, v, j1, j2)\n ll = []\n for v in l:\n v = v.__copy__()\n swap(v)\n v.set_immutable()\n ll.append(v)\n ll.sort()\n return tuple(ll)", "def swap_vertices(self, i, j):\r\n store_vertex_i = self.vertices[i]\r\n store_vertex_j = self.vertices[j]\r\n self.vertices[j] = store_vertex_i\r\n self.vertices[i] = store_vertex_j\r\n for k in range(len(self.vertices)):\r\n for swap_list in [self.vertices[k].children, self.vertices[k].parents]:\r\n if i in swap_list:\r\n swap_list[swap_list.index(i)] = -1\r\n if j in swap_list:\r\n swap_list[swap_list.index(j)] = i\r\n if -1 in swap_list:\r\n swap_list[swap_list.index(-1)] = j", "def apply_cycle(points, cycle):\n j = cycle[0]\n for i in cycle:\n points[i], points[j] = points[j], points[i] # swap points i and j", "def SwapSides(self):\n for c in self.reactants:\n c.coeff = -c.coeff", "def swap(ix, jx, ax, ay):\n tempx, tempy = ax[ix], ay[ix]\n ax[ix] = ax[jx]\n ay[ix] = ay[jx]\n ax[jx] = tempx\n ay[jx] = tempy", "def test_swap(self, dim):\r\n graph = nx.complete_graph(dim)\r\n graph.remove_edge(0, dim - 1)\r\n s = list(range(dim - 1))\r\n assert set(clique.swap(s, graph)) == set(range(1, dim))", "def swap(self, *args):\n return _osgAnimation.VertexList_swap(self, *args)", "def _swap1(self, cids, iids):\n # The coupling indexes of the two legs to swap\n c1, c2 = cids\n # The index of the two legs to swap within the given coupling\n i1, i2 = iids\n assert c1 != c2\n\n # Get the connecting leg between the two couplings\n legs = self.get_legs()\n intersect = set(legs[c1]).intersection(set(legs[c2]))\n\n assert len(intersect) == 1 # Only one internal leg between couplings\n ileg = intersect.pop()\n # index of the internal leg in c1 and c2\n ii = [legs[cid].index(ileg) for cid in cids]\n\n assert ii[0] != i1 and ii[1] != i2\n # Check that the flow is consistent along the internal bond\n assert self.coupling[c1][ii[0]][1] is not self.coupling[c2][ii[1]][1]\n\n # Order such that first bond is in the one with out\n if self.coupling[c1][ii[0]][1]:\n c1, c2, i1, i2, ii = c2, c1, i2, i1, (ii[1], ii[0])\n assert not self.coupling[c1][ii[0]][1] and self.coupling[c2][ii[1]][1]\n\n def permute_key(key):\n copy = list(list(k) for k in key)\n copy[c1][i1], copy[c2][i2] = copy[c2][i2], copy[c1][i1]\n return copy\n self._coupling = tuple(tuple(c) for c in permute_key(self.coupling))\n f1, f2 = ([x[1] for x in self.coupling[c]] for c in (c1, c2))\n\n def mappingf(okey):\n nk = permute_key(okey)\n # All good interal symmetry sectors in for the swapped 1st coupling\n for k in sls.allowed_couplings(nk[c1], f1, ii[0], self.symmetries):\n # Assign the key of the internal leg\n nk[c1][ii[0]], nk[c2][ii[1]] = k, k\n if sls.is_allowed_coupling(nk[c2], f2, self.symmetries):\n yield tuple(tuple(e) for e in nk)\n\n prefdict = sls._prefswap1((i1, i2), ii)\n\n def prefactorf(okey, nkey):\n return np.prod([prefdict.get(ss, 
lambda x, y: 1.)(\n [el[i] for j in (c1, c2) for el in okey[j]],\n [el[i] for j in (c1, c2) for el in nkey[j]]\n ) for i, ss in enumerate(self.symmetries)])\n\n self._manipulate_coupling(mappingf, prefactorf)\n return self", "def swap(self, adjacent_transposition):\n\n result = Tensor()\n for key_self in self.keys():\n # ensure that the swap can be made with the available slots\n if max(adjacent_transposition) < len(key_self):\n prefix = Tensor({Tensor._merge_keys(*key_self[0 : min(adjacent_transposition)]): self[key_self]})\n root = type(self)._clifford_swap(\n *key_self[min(adjacent_transposition) : max(adjacent_transposition) + 1]\n )\n postfix = Tensor({Tensor._merge_keys(*key_self[max(adjacent_transposition) + 1 :]): 1})\n result = result + prefix * root * postfix\n else:\n result = result + Tensor({key_self: self[key_self]})\n self.clear()\n self.update(result)\n return self", "def test_flip_loop():\n conv = ToPointsAndSegments()\n ring = [ (0,0), (3,0), (3.8,2), (4,0), (6,0), (6.3, 2), (7,0), (10,0), (13,4), (10,5), (7,5), (6.5, 3), (6,5), (4,5), (3.5,3), (3,5), (0,5), (-2,2), (0,0)]\n conv.add_polygon([ring])\n skel = calc_skel(conv, pause=True, output=True)", "def test_flip_loop2():\n conv = ToPointsAndSegments()\n ring = [ (0,0), (3,0), (3.8,2), (5,0), (6.3, 2), (7,0), (10,0), (13,4), (10,5), (7,5), (6.5, 3), (5,5), (3.5,3), (3,5), (0,5), (-2,2), (0,0)]\n conv.add_polygon([ring])\n skel = calc_skel(conv, pause=True, output=True)", "def _swap2(self, cids, iids):\n # The coupling indexes of the two legs to swap\n c1, c2 = cids\n # The index of the two legs to swap within the given coupling\n i1, i2 = iids\n assert c1 != c2\n\n # Get the connecting coupling between the two couplings\n cnx = self.get_couplingnetwork().to_undirected(as_view=True)\n ci = set(nx.common_neighbors(cnx, *[self.coupling[ii] for ii in cids]))\n if len(ci) != 1:\n raise ValueError(f'cids: {cids} have {len(ci)} common neighbors')\n ci = self.coupling.index(ci.pop())\n\n # internal legs\n l1 = cnx.edges[self.coupling[c1], self.coupling[ci], 0]['leg']\n l2 = cnx.edges[self.coupling[c2], self.coupling[ci], 0]['leg']\n\n # index of the internal leg in c1 and c2\n legs = self.get_legs()\n il1, il2 = [[legs[x].index(ll) for x in (y, ci)]\n for y, ll in zip(cids, (l1, l2))]\n\n assert il1[0] != i1 and il2[0] != i2\n assert il1[1] != il2[1]\n # Check that the flow is consistent along the internal bond\n assert self.coupling[c1][il1[0]][1] is not self.coupling[ci][il1[1]][1]\n assert self.coupling[c2][il2[0]][1] is not self.coupling[ci][il2[1]][1]\n\n def permute_key(key):\n copy = list(list(k) for k in key)\n copy[c1][i1], copy[c2][i2] = copy[c2][i2], copy[c1][i1]\n return copy\n f1, f2, fi = ([x[1] for x in self.coupling[c]] for c in (c1, c2, ci))\n self._coupling = tuple(tuple(c) for c in permute_key(self.coupling))\n\n # All good interal symmetry sectors in for the swapped 1st coupling\n nkeys = set(tuple(tuple(e) for e in permute_key(k)) for k in self)\n c1set = {}\n r11, r12 = set(range(3)).difference([il1[0]])\n for k in set(key[c1] for key in nkeys):\n kn = (k[r11], k[r12])\n if kn not in c1set:\n c1set[kn] = set(\n sls.allowed_couplings(k, f1, il1[0], self.symmetries))\n c2set = {}\n r21, r22 = set(range(3)).difference([il2[0]])\n for k in set(key[c2] for key in nkeys):\n kn = (k[r21], k[r22])\n if kn not in c2set:\n c2set[kn] = set(\n sls.allowed_couplings(k, f2, il2[0], self.symmetries))\n\n vac = sls.vacuumIrrep(self.symmetries)\n Z1 = set().union(*c1set.values())\n Z2 = set().union(*c2set.values())\n rf = 
set(range(3)).difference([il1[1], il2[1]]).pop()\n fit = [fi[rf], fi[il1[1]], fi[il2[1]]]\n oks = {(k1, k2): set(sls.allowed_couplings((vac, k1, k2),\n fit, 0, self.symmetries))\n for k1, k2 in itertools.product(Z1, Z2)}\n\n def mappingf(okey):\n nk = permute_key(okey)\n set1 = c1set[(nk[c1][r11], nk[c1][r12])]\n set2 = c2set[(nk[c2][r21], nk[c2][r22])]\n for kk1 in set1:\n for kk2 in set2:\n if nk[ci][rf] not in oks[(kk1, kk2)]:\n continue\n\n # Assign the key of the internal leg\n nk[c1][il1[0]], nk[ci][il1[1]] = kk1, kk1\n nk[c2][il2[0]], nk[ci][il2[1]] = kk2, kk2\n yield tuple(tuple(e) for e in nk)\n\n prefdict = sls._prefswap2(iids, il1, il2, f1, f2, fi)\n\n def prefactorf(okey, nkey):\n flokey = [list(x) for x in\n zip(*[el for j in (c1, c2, ci) for el in okey[j]])]\n flnkey = [list(x) for x in\n zip(*[el for j in (c1, c2, ci) for el in nkey[j]])]\n return np.prod([prefdict.get(ss, lambda x, y: 1.)(o, n) for\n o, n, ss in zip(flokey, flnkey, self.symmetries)])\n\n self._manipulate_coupling(mappingf, prefactorf)\n return self", "def contiguousFlip(currPath, i, j):\n if i != j and (i+1)%len(currPath)!=j and (j+1)%len(currPath)!=i:\n iP = i\n jP = j\n if (i < j):\n maxx=(j-i+1)//2\n else:\n maxx=(j+1+len(currPath)-i)//2\n for _ in range(maxx):\n temp = currPath[iP]\n currPath[iP] = currPath[jP]\n currPath[jP] = temp\n iP = (iP + 1)%len(currPath)\n jP = (jP - 1)%len(currPath)", "def swap(self, *args):\n return _osgAnimation.mapVertexInfluence_swap(self, *args)", "def test_swap_degree(self, dim):\r\n graph = nx.lollipop_graph(dim, 1)\r\n graph.remove_edge(0, dim - 1)\r\n graph.remove_edge(0, dim - 2)\r\n s = list(range(dim - 2))\r\n result = set(clique.swap(s, graph, node_select=\"degree\"))\r\n expected = set(range(1, dim - 2)) | {dim - 1}\r\n assert result == expected", "def swap2(d, n, v, i, j):\n for a in range(n):\n for k in range(d):\n if k == i or k == j:\n continue\n x = a*d*d + d*i + k\n y = a*d*d + d*j + k\n v[x], v[y] = v[y], v[x]\n\n x = a*d*d + d*k + i\n y = a*d*d + d*k + j\n v[x], v[y] = v[y], v[x]\n\n x = a*d*d + d*i + i\n y = a*d*d + d*j + j\n v[x], v[y] = v[y], v[x]\n\n x = a*d*d + d*j + i\n y = a*d*d + d*i + j\n v[x], v[y] = v[y], v[x]", "def _cswap(i, j, S):\n N = _rswap(i, j, S.transpose()).transpose()\n return N", "def swap(C):\n \n return [c.swap() for c in C]", "def invert(self):\n self.vertices.reverse()", "def swap(self):\n return _coconut_tail_call(Eq, self.b, self.a)", "def reduce_sequential(edges, start, end):\n dd = get_degrees_dictionary(edges) # O(len(edges))\n tvs = get_transition_vertexes(dd, start, end) # O(len(dd))\n logger.debug(\"dd: {}\".format(dd))\n logger.debug(\"tvs: {}\".format(tvs))\n\n for v in tvs: # for each vertex in transitional vertexes\n # edges\n ei1 = tvs[v][0]\n ei2 = tvs[v][1]\n\n e1 = edges[ei1] # e1 is going to save resulted edge\n e2 = edges[ei2] # e2 is going to become cycled and then removed\n\n # vertexes\n # v - vertex to be removed\n # v1 - vertex, connected to v by e1 edge (unchanged)\n # v2 - vertex, connected to v by e2 edge\n # will be moved to e1 substituting v there\n # edges list in transitional vertex dictionary will be updated\n\n logger.debug(\"Substituted {}: {}:{}, {}:{} -> \".format(\n v, ei1, e1, ei2, e2))\n\n # v is going to be substituted in e1 by value of \"not v\" vertex in e2\n substitute_index_in_ei2 = 1 - e2.index(v) # if vi=0 s=1; v=1 s=0\n\n # replace v in ei1 by substitute from ei2\n v2 = e2[substitute_index_in_ei2]\n\n e1[e1.index(v)] = v2\n e2[substitute_index_in_ei2] = v\n\n # here we will have 2 
edges\n # edges[ei1] -> ['v1', 'v2', ?] #\n # edges[ei2] -> ['v', 'v', 5] # delay not changed\n\n # updated edges for substituted vertex in tvs dict to point to\n # ei1 edge instead of ei2\n # e.g. 'v2' was connected by ei2, now is connected by ei1\n\n if v2 != start and v2 != end:\n # v2 is not present in tvi and shouldn't be updated\n v2ei = tvs[v2] # list of edges indexes for v2\n vei = tvs[v] # list of edges indexes for v\n v2ei[v2ei.index(ei2)] = ei1\n\n logger.debug(\"tvs[{}][2] = t[1] : {} = {}\".format(\n v2,\n tvs[v2][2],\n t[1]))\n\n # update weight\n new_weight = e1[2] + e2[2]\n e1[2] = new_weight\n\n # normalize result edge\n redirect_edge_alpabetically(e1)\n\n # here we will have 2 edges\n # edges[ei1] -> ['v1', 'v2', 8] #\n # edges[ei2] -> ['v', 'v', 5] # delay not changed\n\n # only thing left is to remove the ei2 edge, this will be done later\n # not to break iteration over edges\n\n logger.debug(\"{}:{}, {}:{}\".format(ei1, e1, ei2, e2))\n\n # get indexes of edges to be removed\n indexes = [i for i in reversed(sorted([tvs[v][1] for v in tvs]))]\n logger.debug(\"Edges index removed after sequential update: {}\".format(\n indexes))\n\n for i in indexes:\n edges.pop(i)\n\n return len(tvs) # amount of edges removed", "def _swap(self, i, j, k):\n\t\tif self.verbose:\n\t\t\tprint(i, k)\n\t\t\tprint(i, j)\n\t\t\tprint(j, k)\n\t\tself.arrangement[i],self.arrangement[k] = self.arrangement[k],self.arrangement[i]\n\t\tself.arrangement[i],self.arrangement[j] = self.arrangement[j],self.arrangement[i]\n\t\tself.arrangement[j],self.arrangement[k] = self.arrangement[k],self.arrangement[j]", "def swap(self):\n if self.cnt_swap == 0:\n i = self.swaplist[self.cnt_swap][0]\n j = self.swaplist[self.cnt_swap][1]\n self.b[i], self.b[j] = self.b[j], self.b[i]\n self.f[i], self.f[j] = self.f[j], self.f[i]\n elif self.cnt_swap < self.nb_swaps:\n i = self.swaplist[self.cnt_swap - 1][0]\n j = self.swaplist[self.cnt_swap - 1][1]\n self.b[i], self.b[j] = self.b[j], self.b[i]\n self.f[i], self.f[j] = self.f[j], self.f[i]\n i = self.swaplist[self.cnt_swap][0]\n j = self.swaplist[self.cnt_swap][1]\n self.b[i], self.b[j] = self.b[j], self.b[i]\n self.f[i], self.f[j] = self.f[j], self.f[i]\n else:\n return 0\n self.cnt_swap += 1\n return 1", "def _clifford_swap(cls, slot_i, slot_j) -> Tensor:\n\n return Tensor(\n {\n Tensor._merge_keys((slot_j,), (slot_i,)): -1,\n Tensor._merge_keys(): 2 * cls.symmetric_bilinear_form(slot_i, slot_j),\n }\n )", "def cw_rotate(self):\n self.grid = [list(x) for x in zip(*self.grid[::-1])]\n self.find_edges()", "def switch_vertex(vertex, cutVectors):\n\n cutVectors[vertex] = (-1) * cutVectors[vertex]", "def flip_cycles_mesh(mesh):\n mesh.halfedge = dict((key, {}) for key in mesh.vertices_iter())\n for fkey, face in mesh.face.iteritems():\n mesh.face[fkey] = dict((nbr, key) for key, nbr in face.items())\n for u, v in face.iteritems():\n mesh.halfedge[v][u] = fkey\n if v not in mesh.halfedge[u]:\n mesh.halfedge[u][v] = None", "def cyclic_sort_vertices_2d(Vlist):\n if len(Vlist)==0: return Vlist\n\n adjacency_matrix = Vlist[0].polyhedron().vertex_adjacency_matrix()\n result = [ Vlist.pop() ]\n while len(Vlist)>0:\n for i in range(len(Vlist)):\n if adjacency_matrix[Vlist[i].index(), result[-1].index()] == 1:\n result.append( Vlist.pop(i) )\n break;\n else:\n raise ValueError\n return result", "def test_swap_weight(self, dim):\r\n graph = nx.complete_graph(dim)\r\n graph.remove_edge(dim - 1, dim - 3)\r\n graph.remove_edge(dim - 2, dim - 4)\r\n s = list(range(dim - 2))\r\n weights = 
list(range(dim))\r\n result = set(clique.swap(s, graph, node_select=weights))\r\n expected = set(range(dim - 3)) | {dim - 1}\r\n assert result == expected", "def _rswap(i, j, S):\n N = copy.deepcopy(S)\n row = copy.deepcopy(N[i])\n N[i] = copy.deepcopy(N[j])\n N[j] = row\n return N", "def td_flip(self):\n self.cw_rotate()\n self.cw_rotate()\n self.lr_flip()\n self.find_edges()", "def lr_flip(self):\n for g in self.grid:\n g.reverse()", "def _swap(self, i, j):\n self._data[i], self._data[j] = self._data[j], self._data[i]", "def _swap(self, i, j):\n self._data[i], self._data[j] = self._data[j], self._data[i]", "def flip(graph, node):\n old_neighs = graph.neighbors(node)\n new_node = np.asarray(old_neighs[0]) + np.asarray(old_neighs[1]) + np.asarray(old_neighs[2]) - 2*np.asarray(node)\n new_node = tuple(new_node)\n old_vecs = -np.asarray(node) + old_neighs\n new_neighs = [(new_node, tuple(new_node - v)) for v in old_vecs if tuple(new_node - v) in graph.nodes()]\n \n # remove the node\n graph.remove_node(node)\n # add the new node and connect it to its neighbors\n graph.add_edges_from(new_neighs)\n \n # recompute the projections and change them\n graph.node[new_node]['para'] = np.dot(P, new_node)\n graph.node[new_node]['perp'] = np.dot(Pi, new_node)", "def test_swap_degree_tie(self, dim, monkeypatch):\r\n graph = nx.complete_graph(dim)\r\n graph.remove_edge(0, dim - 1)\r\n graph.remove_edge(0, dim - 2)\r\n s = set(range(dim - 2))\r\n\r\n patch_random_choice_1 = functools.partial(patch_random_choice, element=0)\r\n patch_random_choice_2 = functools.partial(patch_random_choice, element=1)\r\n\r\n with monkeypatch.context() as m:\r\n m.setattr(np.random, \"choice\", patch_random_choice_1)\r\n c1 = clique.swap(s, graph, node_select=\"degree\")\r\n\r\n with monkeypatch.context() as m:\r\n m.setattr(np.random, \"choice\", patch_random_choice_2)\r\n c2 = clique.swap(s, graph, node_select=\"degree\")\r\n\r\n assert c1 != c2", "def swapped_relation(*args):\n return _ida_hexrays.swapped_relation(*args)", "def randomize_by_edge_swaps(G, num_iterations):\n G_copy = G.copy()\n edge_list = list(G_copy.edges())\n num_edges = len(edge_list)\n total_iterations = num_edges * num_iterations\n\n for _ in range(total_iterations):\n i, j = np.random.choice(num_edges, 2, replace=False)\n u, v = edge_list[i]\n x, y = edge_list[j]\n\n if len(set([u, v, x, y])) < 4:\n continue\n\n # Save edge data\n i_data = G_copy[u][v]\n j_data = G_copy[x][y]\n\n if G_copy.has_edge(u, x) or G_copy.has_edge(v, y):\n # Interchange edge data\n G_copy.remove_edges_from(((u, v), (x, y)))\n G_copy.add_edges_from(((u, v, j_data), (x, y, i_data)))\n else:\n # Regular swap\n G_copy.remove_edges_from(((u, v), (x, y)))\n G_copy.add_edges_from(((u, x, i_data), (v, y, j_data)))\n\n edge_list[i] = (u, x)\n edge_list[j] = (v, y)\n\n assert len(G_copy.edges()) == num_edges\n return G_copy", "def _move_swap_el(self, j):\n\t\ti = self.swap_idx\n\t\twhile i < j:\n\t\t\tif self.verbose:\n\t\t\t\tprint(i, i+1)\n\t\t\tself.arrangement[i],self.arrangement[i+1]=self.arrangement[i+1],self.arrangement[i]\n\t\t\ti += 1\n\t\twhile i > j:\n\t\t\tif self.verbose:\n\t\t\t\tprint(i, i-1)\n\t\t\tself.arrangement[i],self.arrangement[i-1]=self.arrangement[i-1],self.arrangement[i]\n\t\t\ti -= 1\n\t\tself.swap_idx = j", "def _swap0(self, cid, permute):\n from collections import Counter\n permute = tuple(permute)\n if Counter(permute) != Counter(range(3)):\n raise ValueError(\"Permutation array should be 0, 1, 2 shuffled\")\n\n def permute_key(key):\n return 
tuple(tuple(key[cid][p] for p in permute) if i == cid else c\n for i, c in enumerate(key))\n\n self._coupling = permute_key(self.coupling)\n\n def mappingf(okey):\n yield permute_key(okey)\n\n prefdict = sls._prefswap0(permute)\n\n def prefactorf(okey, nkey):\n kk = [x for x in zip(*okey[cid])]\n return np.prod([prefdict[ss](*k) if prefdict.get(ss, None)\n else 1. for k, ss in zip(kk, self.symmetries)])\n\n self._manipulate_coupling(mappingf, prefactorf)\n return self", "def swap(t, i, j):\n t[i], t[j] = t[j], t[i]", "def consecutiveFlip(currPath, i):\n j = (i + 1)%len(currPath)\n temp = currPath[i]\n currPath[i] = currPath[j]\n currPath[j] = temp", "def swap((u, v)):\n return (v, u)", "def _swap(self, node1, node2):\n arr = self._array\n arr[node1._index], arr[node2._index] = arr[node2._index], \\\n arr[node1._index]\n # Swap indices stored in nodes as well\n node1._index, node2._index = node2._index, node1._index", "def _swap_permutation(i, j, permutation_vector):\n permutation_vector[i], permutation_vector[j] = permutation_vector[j], permutation_vector[i]", "def move_vertices(self, vertices, code):\n for vertex in vertices:\n self.move_vertice(vertex, code)", "def flip(self):\n self._start, self._end = self._end, self._start", "def _swap(self, i, j):\r\n self._data[i], self._data[j] = self._data[j], self._data[i]", "def test_weyl_specialize_flip_swap(self):\n a, b, c = np.pi / 4, np.pi / 4, -np.pi / 4\n for da, db, dc in DELTAS:\n for k1l, k1r, k2l, k2r in K1K2SB:\n k1 = np.kron(k1l.data, k1r.data)\n k2 = np.kron(k2l.data, k2r.data)\n self.check_two_qubit_weyl_specialization(\n k1 @ Ud(a + da, b + db, c + dc) @ k2,\n 0.999,\n TwoQubitWeylSWAPEquiv,\n {\"rz\": 4, \"ry\": 2, \"swap\": 1},\n )", "def order_vertices(self):\r\n \r\n ordered = False\r\n while ordered == False:\r\n for i in range(len(self.vertices)):\r\n ordered = True\r\n for parent in self.vertices[i].parents:\r\n if parent>i:\r\n ordered = False\r\n self.swap_vertices(i, parent)", "def _neighbor_swap(self, solution, i, j):\n\n solution_tmp = deepcopy(solution)\n solution_tmp[i], solution_tmp[j] = solution_tmp[j], solution_tmp[i]\n return solution_tmp", "def _swap(self, i, j):\n self._data[i], self._data[j] = self._data[j], self._data[i]", "def swap3(d, n, v, i, j, k):\n for a in range(n):\n for m in range(d):\n if m == i or m == j or m == k:\n continue\n x = a*d*d + d*i + m\n y = a*d*d + d*j + m\n z = a*d*d + d*k + m\n v[x],v[y],v[z] = v[z],v[x],v[y]\n\n x = a*d*d + d*m + i\n y = a*d*d + d*m + j\n z = a*d*d + d*m + k\n v[x],v[y],v[z] = v[z],v[x],v[y]\n\n x = a*d*d + d*i + i\n y = a*d*d + d*j + j\n z = a*d*d + d*k + k\n v[x],v[y],v[z] = v[z],v[x],v[y]\n\n x = a*d*d + d*i + j\n y = a*d*d + d*j + k\n z = a*d*d + d*k + i\n v[x],v[y],v[z] = v[z],v[x],v[y]\n\n x = a*d*d + d*i + k\n y = a*d*d + d*j + i\n z = a*d*d + d*k + j\n v[x],v[y],v[z] = v[z],v[x],v[y]", "def edge_velocity(self):\n #reflext x values at x edges\n self.u[1,:,0] = -self.u[1,:,1]\n self.u[1,:,-1] = -self.u[1,:,-2]\n #mirror x values at y edges \n self.u[1,0,:] = self.u[1,1,:]\n self.u[1,-1,:] = self.u[1,-2,:]\n #mirror y values at x edges\n self.u[0,:,0] = self.u[0,:,1]\n self.u[0,:,-1] = self.u[0,:,-2]\n #mirror y values at y edges \n self.u[0,0,:] = -self.u[0,1,:]\n self.u[0,-1,:] = -self.u[0,-2,:]", "def __swap(self, x1, y1, x2, y2):\n temp = self.puzzle.copy()\n temp[x1, y1] = temp[x2, y2]\n temp[x2, y2] = self.puzzle[x1, y1]\n return temp", "def swap(clique: list, graph: nx.Graph, node_select: str = \"uniform\") -> list:\r\n\r\n if not 
utils.is_subgraph(clique, graph):\r\n raise ValueError(\"Input is not a valid subgraph\")\r\n\r\n if not utils.is_clique(graph.subgraph(clique)):\r\n raise ValueError(\"Input subgraph is not a clique\")\r\n\r\n clique = set(clique)\r\n c_1 = utils.c_1(clique, graph)\r\n\r\n if c_1:\r\n if node_select == \"uniform\":\r\n swap_index = np.random.choice(len(c_1))\r\n swap_nodes = c_1[swap_index]\r\n elif node_select == \"degree\":\r\n degrees = np.array([graph.degree(n[1]) for n in c_1])\r\n to_swap_index = np.random.choice(np.where(degrees == degrees.max())[0])\r\n swap_nodes = c_1[to_swap_index]\r\n else:\r\n raise ValueError(\"Node selection method not recognized\")\r\n\r\n clique.remove(swap_nodes[0])\r\n clique.add(swap_nodes[1])\r\n\r\n return sorted(clique)", "def swap(indiv, Optimizer):\n if 'MU' in Optimizer.debug:\n debug = True\n else:\n debug = False\n Optimizer.output.write('Swap Mutation performed on individual\\n')\n Optimizer.output.write('Index = '+repr(indiv.index)+'\\n')\n if len(indiv[0]) > 4:\n natomsswap=random.randint(1,len(indiv[0])/5)\n else:\n natomsswap=1\n Optimizer.output.write('Number of swaps = '+repr(natomsswap)+'\\n')\n syms=list(set(indiv[0].get_chemical_symbols()))\n if len(syms)<len(Optimizer.atomlist):\n syms=[sym for sym,c,m,u in Optimizer.atomlist]\n if len(syms)==1:\n Optimizer.output.write('WARNING: Swap Mutation attempted on single atom structure system\\n')\n else:\n for i in range(natomsswap):\n if len(indiv[0])>1:\n a1=indiv[0][random.randint(0,indiv[0].get_number_of_atoms()-1)]\n else:\n a1=indiv[0][0]\n osym=a1.symbol\n nsymlist=[sym for sym in syms if sym != osym]\n a1.symbol=random.choice(nsymlist)\n nsym=a1.symbol\n Optimizer.output.write('Swapped '+osym+' atom with '+nsym+'\\n')\n Optimizer.output.write(repr(indiv[0])+'\\n')\n muttype='S'+repr(natomsswap)\n if indiv.energy==0:\n indiv.history_index=indiv.history_index+'m'+muttype\n else:\n indiv.history_index=repr(indiv.index)+'m'+muttype\n return indiv", "def jmatswap(ind: int):\n return _jmswap[ind - 1]", "def _sort_vec(no_state: _NOState, swapper: GenQuadDrudge.Swapper, resolvers):\n\n orig_term = no_state.term\n vecs = orig_term.vecs\n n_vecs = len(vecs)\n pivot = no_state.pivot\n\n if pivot >= n_vecs:\n # End of the outer loop.\n return None\n\n # To be used by the end of the inner loop.\n move2front = [_NOState(\n pivot=no_state.front, front=no_state.front + 1, term=no_state.term\n )]\n\n if pivot == 0:\n return move2front\n\n prev = pivot - 1\n swap_res = swapper(vecs[prev], vecs[pivot])\n if swap_res is None:\n return move2front\n\n phase = sympify(swap_res[0])\n comm_terms = []\n for i in parse_terms(swap_res[1]):\n comm_terms.extend(i.expand())\n\n # Now we need to do a swap.\n head = vecs[:prev]\n tail = vecs[pivot + 1:]\n res_states = []\n\n swapped_vecs = head + (vecs[pivot], vecs[prev]) + tail\n swapped_amp = phase * orig_term.amp\n if swapped_amp != 0:\n swapped_term = Term(\n orig_term.sums, swapped_amp, swapped_vecs\n )\n res_states.append(_NOState(\n pivot=prev, front=no_state.front, term=swapped_term\n ))\n\n for comm_term in comm_terms:\n if comm_term.amp == 0:\n continue\n\n comm_vecs = comm_term.vecs\n res_term = Term(orig_term.sums, comm_term.amp * orig_term.amp, tuple(\n itertools.chain(head, comm_vecs, tail)\n )).simplify_deltas(resolvers)\n if res_term.amp == 0:\n continue\n\n if len(comm_vecs) == 0:\n # Old front, new index.\n res_pivot = no_state.front - 2\n else:\n # Index of the first newly inserted vector.\n res_pivot = prev\n\n res_states.append(_NOState(\n 
pivot=res_pivot, front=res_pivot + 1, term=res_term\n ))\n continue\n\n return res_states", "def mirrorcross(component):\n a,b,c = component()\n d,e,f = component()\n d.reverse()\n e.reverse()\n f.reverse()\n return b+f+c+e,d+a", "def pswap(target1: QubitInput, target2: QubitInput, angle: float) -> Instruction:\n return Instruction(PSwap(angle), target=[target1, target2])", "def _swap(heap, i, j):\n heap[i], heap[j] = heap[j], heap[i]", "def _swap(self, x1, y1, x2, y2):\n puzzle_copy = [list(row) for row in self.position] # copy the puzzle\n puzzle_copy[x1][y1], puzzle_copy[x2][y2] = puzzle_copy[x2][y2], puzzle_copy[x1][y1]\n\n return puzzle_copy", "def rotate(self, m):\n n = len(m)\n for i in range(n//2):\n for j in range(i,n-i-1):\n m[j][~i],m[~i][~j],m[~j][i],m[i][j] = \\\n m[i][j],m[j][~i],m[~i][~j],m[~j][i]", "def _swap(self, i, j):\n tmp = self._heap[i]\n self._heap[i] = self._heap[j]\n self._index[self._heap[j].key] = i\n self._heap[j] = tmp\n self._index[tmp.key] = j", "def reduce_to_planar_3_coloring(G):\n\n if G.is_planar():\n return G\n\n H = FalsePlanarGraph.from_graph(G)\n\n for e in H.false_edges:\n u, v = e\n\n # set a combinatorial embedding in H\n H.is_planar()\n\n # find the minimum crossing path from u to v\n path = min_crossing_path(H, u, v)\n\n # apply the planar gadget to every crossing\n uncross_edges(H, e, path)\n\n return H.planar_copy()", "def __swap(self, index_1, index_2):\n temp = self._lits[index_1]\n self._lits[index_1] = self._lits[index_2]\n self._lits[index_2] = temp", "def swap(self,i,j):\r\n self._data[i], self._data[j] = self._data[j], self._data[i]", "def modSwapSort(L):\n print(\"Original L: \", L)\n for i in range(len(L)):\n for j in range(len(L)):\n if L[j] < L[i]:\n # the next line is a short\n # form for swap L[i] and L[j]\n L[j], L[i] = L[i], L[j]\n print(L)\n print(\"Final L: \", L)", "def evert(self):\n for e in self.edges:\n self.invert()\n for f in self.faces:\n f.invert()", "def unify_cycles_mesh(mesh, root=None):\n def unify(node, nbr):\n for u, v in mesh.face[nbr].iteritems():\n if u in mesh.face[node]:\n if v == mesh.face[node][u]:\n # if the traversal of a neighbouring halfedge\n # is in the same direction\n # flip the neighbour\n mesh.face[nbr] = dict((v, u) for u, v in mesh.face[nbr].iteritems())\n return\n if root is None:\n root = mesh.face.iterkeys().next()\n # pass unify as callback function\n # what about the return value?\n network_bfs(mesh.face_adjacency(), root, unify)\n mesh.halfedge = dict((key, {}) for key in mesh.vertices_iter())\n for fkey, face in mesh.face.iteritems():\n for u, v in face.iteritems():\n mesh.halfedge[u][v] = fkey\n if u not in mesh.halfedge[v]:\n mesh.halfedge[v][u] = None", "def edges_flip_orientation(edges):\n edges_flipped = []\n for e in edges:\n edges_flipped.insert(0, (e[1], e[0]))\n return edges_flipped", "def swap(i: int, j: int, data: List[int]) -> None:\n temp = data[i]\n data[i] = data[j]\n data[j] = temp", "def pairwise_rebalance(self):\n path_pairs = self.get_overlapping_path_pairs()\n if len(path_pairs) > 0:\n pair = path_pairs.pop()\n else:\n pair = None\n while pair is not None:\n i = pair[0]\n j = pair[1]\n print(\"Testing {},{}\".format(i, j))\n print(\"weight i: {}, weight j: {}\".format(self.weights[i],\n self.weights[j]))\n p_i = self.paths[i]\n p_j = self.paths[j]\n p_i_alone = set(p_i).difference(set(p_j))\n p_j_alone = set(p_j).difference(set(p_i))\n overlap = set(p_i).intersection(set(p_j))\n print(\"Finding bounds for i\")\n L_wi, U_wi = self.compute_pair_bounds(p_i_alone, 
pair)\n print(\"Finding bounds for j\")\n L_wj, U_wj = self.compute_pair_bounds(p_j_alone, pair)\n print(\"Finding bounds for overlap\")\n L_overlap, U_overlap = self.compute_pair_bounds(overlap, pair)\n w_j = self.find_wj(L_wi, U_wi, L_wj, U_wj, L_overlap, U_overlap)\n if w_j is not None:\n # rebalance is possible\n # set weight of j to w_j\n print(\"Removing path {}\".format(i))\n print(\"W_j was {}\".format(w_j))\n print(\"Path i w={} is {}\".format(self.weights[i], p_i))\n print(\"path j w={} is {}\".format(self.weights[j], p_j))\n print((\"L_wi={}, U_wi={}, L_wj={}, U_wj={}, L_overlap={},\" +\\\n \"U_overlap={}\").format(L_wi, U_wi, L_wj, U_wj,\n L_overlap, U_overlap))\n self.weights[j] = w_j\n # remove path i and weight j\n del self.paths[i]\n del self.weights[i]\n # udpate the edge weights based on new paths\n self.update_edge_weights()\n # increment counter\n self.pairwise_rebalances += 1\n # recompute overlapping pairs.\n # TODO: do this more efficiently.\n path_pairs = self.get_overlapping_path_pairs()\n\n print(\"Checking bounds\")\n self.check_flow()\n # update current pair\n if path_pairs:\n pair = path_pairs.pop()\n else:\n pair = None", "def reverse_iterative(S):\n start, stop = 0, len(S) - 1\n while start < stop:\n S[start], S[stop] = S[stop], S[start]\n start, stop = start + 1, stop - 1", "def r_degenerate(self):\n self.tmp = self.left\n self.left = self.right", "def swapSort(L):\n print(\"Original L: \", L)\n for i in range(len(L)):\n for j in range(i + 1, len(L)):\n if L[j] < L[i]:\n # the next line is a short\n # form for swap L[i] and L[j]\n L[j], L[i] = L[i], L[j]\n print(L)\n print(\"Final L: \", L)", "def swap(a,b):\n temp = a\n a = b\n b = temp\n return(a,b)", "def swap(permutation, transposition, remaining=[]):\n i, j = transposition\n nb_positions = len(permutation)\n res = np.array(permutation)\n\n if j < nb_positions:\n res[i], res[j] = res[j], res[i]\n else:\n res[i] = remaining[j-nb_positions]\n\n return tuple(res)", "def _triangulate(self,x):\n\n t = tr.triangulate({\"vertices\": x},\"-n\")\n tri = t[\"triangles\"]\n neighbours = t[\"neighbors\"]\n\n b_cells = np.zeros(self.n_c)\n b_cells[self.n_C:] = 1\n\n three_b_cell_mask = b_cells[tri].sum(axis=1)==3\n tri = tri[~three_b_cell_mask]\n\n neigh_map = np.cumsum(~three_b_cell_mask)-1\n neigh_map[three_b_cell_mask] = -1\n neigh_map = np.concatenate((neigh_map,[-1]))\n\n neighbours = neighbours[~three_b_cell_mask]\n neighbours = neigh_map[neighbours]\n\n #6. Store outputs\n self.tris = tri\n self.n_v = tri.shape[0]\n self.Cents = x[self.tris]\n self.vs = self.get_vertex()\n\n\n #7. Manually calculate the neighbours. 
See doc_string for conventions.\n self.v_neighbours = neighbours\n self.neighbours = self.vs[neighbours]\n self.neighbours[neighbours == -1] = np.nan\n\n self.reset_k2s()", "def twoEdgesIntoSamePortResolvesCrossingWhenSwitched(self):\n graph = self.graph\n makeLayer = self.makeLayer\n eastWestEdgeFromTo = self.eastWestEdgeFromTo\n addNodeToLayer = self.addNodeToLayer\n addPortOnSide = self.addPortOnSide\n addEdgeBetweenPorts = self.addEdgeBetweenPorts\n\n leftLayer = makeLayer(graph)\n rightLayer = makeLayer(graph)\n\n topLeft = addNodeToLayer(leftLayer)\n bottomLeft = addNodeToLayer(leftLayer)\n topRight = addNodeToLayer(rightLayer)\n bottomRight = addNodeToLayer(rightLayer)\n\n topLeftPort = addPortOnSide(topLeft, PortSide.EAST)\n bottomLeftPort = addPortOnSide(bottomLeft, PortSide.EAST)\n bottomRightPort = addPortOnSide(bottomRight, PortSide.WEST)\n\n addEdgeBetweenPorts(topLeftPort, bottomRightPort)\n addEdgeBetweenPorts(bottomLeftPort, bottomRightPort)\n\n eastWestEdgeFromTo(bottomLeft, topRight)\n\n return graph", "def vcycle(v, b):\n if (len(v) - 1) & (len(v) - 2) != 0:\n raise ValueError(\"Lenth of v must be 2**n + 1.\")\n\n for i in range(3):\n jacobi23(v, b)\n\n if len(v) <= 3:\n return\n\n r = b - Amul(v)\n r2 = 4. * restrict(r)\n e2 = np.zeros_like(r2)\n vcycle(e2, r2)\n v += prolong(e2)\n\n for i in range(3):\n jacobi23(v, b)", "def SqrtSwap():\n\n return Operator(np.array([[[[ 1.0, 0.0],\n [ 0.0, 0.5 * (1 + 1j)]],\n [[ 0.0, 0.0],\n [ 0.5 * (1 - 1j), 0.0]]],\n [[[ 0.0, 0.5 * (1 - 1j)],\n [ 0.0, 0.0]],\n [[ 0.5 * (1 + 1j), 0.0],\n [ 0.0, 1.0]]]]))", "def swap(array, x, y):\n array[x], array[y] = array[y], array[x]", "def swap(theList, i, j):\n\n temp = theList[i]\n theList[i] = theList[j]\n theList[j] = temp", "def cross(subpaths, j, k):\r\n for q in range(j, k):\r\n subpaths[q].direct_close()\r\n subpaths[q].reverse()\r\n subpaths[j:k] = subpaths[j:k][::-1]", "def swap(newGraph):\n\n oldGraph = globals()[\"currentGraph\"]\n globals()[\"currentGraph\"] = newGraph\n return oldGraph", "def clockwise(self):\n temp = self._top\n self._top = self._left\n self._left = self._bottom\n self._bottom = self._right\n self._right = temp", "def rotateMatrixAttempt(matrix):\n for idxring in range(len(matrix) / 2):\n swap = []\n ringgen = ringCoords(len(matrix), idxring)\n for x, y in ringgen:\n swap.append(matrix[y][x])\n ringgen = ringCoords(len(matrix), idxring)\n start_idx = len(matrix) - 1 - idxring\n for swapidx in range(-1 * start_idx, len(swap) - start_idx):\n x, y = ringgen.next()\n matrix[y][x] = swap[swapidx]", "def new_permutation(V,m,adj):\r\n\r\n global tent\r\n\r\n perm = V.copy()\r\n \r\n \"\"\" try to select two vertices to swipe wisely. 
\"\"\"\r\n \r\n #we select 1 vertex among the m first vertices\r\n p1 = randint(0,m-1)\r\n \r\n #we select 1 vertex among the vertices left\r\n p2 = randint(m,len(V)-1)\r\n\r\n def comp(p1,p2,adj,perm):\r\n \"\"\"\r\n retrieve the degree of the 2 vertices and\r\n compare the degree\r\n Args:\r\n p1 (int): index of the vertex\r\n p2 (int): index of the vertex\r\n adj (set): set of the edges\r\n perm (int): current permutation of the vertices\r\n\r\n Returns:\r\n bool: true if degree of p2 is higher than the one of p1 ,false otherwise\r\n \"\"\"\r\n #degree of p1\r\n f1 = 0\r\n #degree of p2\r\n f2 = 0\r\n \r\n #compute the degrees\r\n for i in range(m):\r\n if (V[p1],V[i]) in adj or (V[i],V[p1]) in adj:\r\n f1 += 1\r\n\r\n for i in range(m):\r\n if (V[p2],V[i]) in adj or (V[i],V[p2]) in adj:\r\n f2 += 1\r\n \r\n if f2 > f1:\r\n return True\r\n else:\r\n return False\r\n\r\n def check_prior(p1,p2,adj,perm,tent):\r\n \"\"\"\r\n recursive function which try to swipe the 2 vertices \r\n by comparing the degre of the vertexe.\r\n\r\n Args:\r\n p1 (int): index of the vertex\r\n p2 (int): index of the vertex\r\n d (set): set of the edges\r\n perm (set): new permutation\r\n tent (int): we fix the swipe process to tent try.\r\n\r\n Returns:\r\n int: the new neighbor aka permutation\r\n \"\"\"\r\n \r\n #if the degree of the node p2 is higher or if the try is over we swipe \r\n if comp(p1,p2,adj,perm) or tent == 0:\r\n temp = perm[p1]\r\n perm[p1] = perm[p2]\r\n perm[p2] = temp\r\n return perm\r\n \r\n tent -= 1\r\n \r\n #select a new vertex to swipe\r\n p2 = randint(m,len(V)-1)\r\n\r\n return check_prior(p1,p2,adj,perm,tent)\r\n \r\n return check_prior(p1,p2,adj,perm,tent)", "def CopyReplaceVertices(self, *args):\n return _ShapeBuild.ShapeBuild_Edge_CopyReplaceVertices(self, *args)", "def l_degenerate(self):\n self.tmp = self.right\n self.right = self.left", "def run(self, dag):\n new_dag = DAGCircuit()\n\n if self.initial_layout is None:\n if self.property_set[\"layout\"]:\n self.initial_layout = self.property_set[\"layout\"]\n else:\n self.initial_layout = Layout.generate_trivial_layout(*dag.qregs.values())\n\n if len(dag.qubits()) != len(self.initial_layout):\n raise TranspilerError('The layout does not match the amount of qubits in the DAG')\n\n if len(self.coupling_map.physical_qubits) != len(self.initial_layout):\n raise TranspilerError(\n \"Mappers require to have the layout to be the same size as the coupling map\")\n\n current_layout = self.initial_layout.copy()\n\n for layer in dag.serial_layers():\n subdag = layer['graph']\n\n for gate in subdag.twoQ_gates():\n physical_q0 = current_layout[gate.qargs[0]]\n physical_q1 = current_layout[gate.qargs[1]]\n if self.coupling_map.distance(physical_q0, physical_q1) != 1:\n # Insert a new layer with the SWAP(s).\n swap_layer = DAGCircuit()\n\n path = self.coupling_map.shortest_undirected_path(physical_q0, physical_q1)\n for swap in range(len(path) - 2):\n connected_wire_1 = path[swap]\n connected_wire_2 = path[swap + 1]\n\n qubit_1 = current_layout[connected_wire_1]\n qubit_2 = current_layout[connected_wire_2]\n\n # create qregs\n for qreg in current_layout.get_registers():\n if qreg not in swap_layer.qregs.values():\n swap_layer.add_qreg(qreg)\n\n # create the swap operation\n swap_layer.apply_operation_back(SwapGate(),\n qargs=[qubit_1, qubit_2],\n cargs=[])\n\n # layer insertion\n edge_map = current_layout.combine_into_edge_map(self.initial_layout)\n new_dag.compose_back(swap_layer, edge_map)\n\n # update current_layout\n for swap in 
range(len(path) - 2):\n current_layout.swap(path[swap], path[swap + 1])\n\n edge_map = current_layout.combine_into_edge_map(self.initial_layout)\n new_dag.extend_back(subdag, edge_map)\n\n return new_dag", "def test_swap_k_order_backward(self):\n \n nums = [4, 3, 2, 1]\n k = 2\n\n a1.swap_k(nums, k)\n\n self.assertEqual(nums, [2, 1, 4, 3])", "def swap_main(self):\n self.swap(self.clients.current_client, self.clients[0])", "def swap(self, i, j):\r\n self._data[i], self._data[j] = self._data[j], self._data[i]", "def reweight_simplex_vertices(X,\n simplices,\n X_vertices=None,\n vertex_weights=None):\n if X_vertices is None:\n X_vertices = X\n\n # extracting the weights of each neighbor vector\n vertex_weights_simplices = vertex_weights[simplices[:,1:]]\n\n X_base = X[simplices[:,0]]\n X_neighbors = X_vertices[simplices[:,1:]]\n\n # calculating the direction vectors (neighbor vectors - base vector)\n diff_vectors = X_neighbors - X_base[:,None]\n\n # reweighting the direction vectors of neighbors\n weighted = (diff_vectors.T*vertex_weights_simplices.T).T\n\n # shifint the scaled neighbors back by the base vectors\n reverted = weighted + X_base[:,None]\n\n # concatenating the base vectors and the scaled vectors\n return np.concatenate([X_base[:,None], reverted], axis=1)", "def labeled_subsimplex(\n label_func, init, disc\n): # pylint: disable=too-many-locals,too-many-statements\n init = np.asarray(init, float)\n dim = init.size\n # Base vertex of the subsimplex currently being used\n dinit = _discretize_mixture(init, disc)\n base = np.append(dinit, 0)\n base[0] += 1\n # permutation array of [1,dim] where v0 = base,\n # v{i+1} = [..., vi_{perms[i] - 1} - 1, vi_{perms[i]} + 1, ...]\n perms = np.arange(1, dim + 1)\n # Array of labels for each vertex\n labels = np.arange(dim + 1)\n labels[dim] = label_func(dinit / disc)\n # Vertex used to label initial vertices (vertex[-1] == 0)\n label_vertex = base[:-1].copy()\n # Last index moved\n index = dim\n # Most recent created index, should be set to\n new_vertex = None\n\n while labels[index] < dim:\n # Find duplicate index. 
this is O(dim) but not a bottleneck\n (dup_labels,) = np.nonzero(labels == labels[index])\n (index,) = dup_labels[dup_labels != index]\n\n # Flip simplex over at index\n if index == 0:\n base[perms[0]] += 1\n base[perms[0] - 1] -= 1\n perms = np.roll(perms, -1)\n labels = np.roll(labels, -1)\n index = dim\n\n elif index == dim:\n base[perms[-1] - 1] += 1\n base[perms[-1]] -= 1\n perms = np.roll(perms, 1)\n labels = np.roll(labels, 1)\n index = 0\n\n else: # 0 < index < dim\n perms[index - 1], perms[index] = perms[index], perms[index - 1]\n\n # Compute actual value of flipped vertex\n new_vertex = base.copy()\n new_vertex[perms[:index]] += 1\n new_vertex[perms[:index] - 1] -= 1\n\n utils.check(\n np.all(new_vertex >= 0) and new_vertex.sum() == disc + 1,\n \"vertex rotation failed, check labeling function\",\n )\n\n # Update label of new vertex\n if new_vertex[-1] == 2:\n labels[index] = dim\n elif new_vertex[-1] == 0:\n labels[index] = np.argmax(new_vertex[:-1] - label_vertex)\n else: # == 1\n labels[index] = label_func(new_vertex[:-1] / disc)\n utils.check(\n 0 <= labels[index] < dim and new_vertex[labels[index]],\n \"labeling function was not proper (see help)\",\n )\n\n # Average out all vertices in simplex we care about\n current = base\n if index == 0: # pragma: no cover\n count = 0\n mean = np.zeros(dim)\n else: # pragma: no cover\n count = 1\n mean = current.astype(float)\n for i, j in enumerate(perms, 1):\n current[j] += 1\n current[j - 1] -= 1\n if i != index:\n count += 1\n mean += (current - mean) / count\n return mean[:-1] / disc", "def modSwapSort(L):\n print(\"Original L: \", L)\n count=0\n\n for i in range(len(L)):\n for j in range(len(L)):\n if L[j] < L[i]:\n # the next line is a short \n # form for swap L[i] and L[j]\n L[j], L[i] = L[i], L[j] \n print(L)\n count=count+1\n print(\"Final L: \", L)\n print(count)", "def swap(arr, left, right):\n arr[left], arr[right] = arr[right], arr[left]" ]
[ "0.6700525", "0.6548219", "0.62071115", "0.619347", "0.6155958", "0.61089027", "0.6073555", "0.6059136", "0.6011071", "0.59793395", "0.5920292", "0.591253", "0.59060276", "0.5894991", "0.58805555", "0.5820859", "0.5787564", "0.5771385", "0.57236433", "0.57217413", "0.5709025", "0.56853545", "0.5668851", "0.5668137", "0.56564784", "0.5648565", "0.5632435", "0.5571116", "0.55704314", "0.55592334", "0.5509323", "0.54803944", "0.54183394", "0.54183394", "0.5410956", "0.54058135", "0.5389054", "0.5369509", "0.5341835", "0.53261465", "0.53255844", "0.5309761", "0.530917", "0.5306572", "0.5299184", "0.52980995", "0.52868307", "0.52735543", "0.5271864", "0.5259869", "0.5237636", "0.5230144", "0.5227966", "0.52273947", "0.5221856", "0.521727", "0.5208372", "0.5192802", "0.51857734", "0.51837045", "0.51774937", "0.51713157", "0.5170847", "0.51699924", "0.5162504", "0.5155172", "0.51472366", "0.5142659", "0.51408523", "0.51400125", "0.51226044", "0.51154447", "0.5112613", "0.50906223", "0.5089197", "0.50853664", "0.50780344", "0.50734735", "0.5056666", "0.5052258", "0.50522304", "0.50515026", "0.5041662", "0.50367486", "0.50341463", "0.50339806", "0.50324845", "0.5027237", "0.5025527", "0.50249934", "0.5017255", "0.5003737", "0.49977046", "0.49952146", "0.4973932", "0.49738774", "0.49735573", "0.49701297", "0.4969083", "0.49631116" ]
0.6623225
1
Creates a dashboard of plots for time steps, potential, kinetic, and total energy
import matplotlib.pyplot as plt


def create_dashboard(h, t, k, p):
    plt.style.use('seaborn')

    # Initialize the 2x2 dashboard figure
    fig = plt.figure(figsize=(20, 8))
    ax1 = fig.add_subplot(2, 2, 1)
    ax2 = fig.add_subplot(2, 2, 2)
    ax3 = fig.add_subplot(2, 2, 3)
    ax4 = fig.add_subplot(2, 2, 4)

    # Create individual graphs; keep the Line2D handles so the curves
    # can be updated later (e.g. from an animation loop)
    dt_line, = ax1.plot(h, lw=3, c='k')
    total_line, = ax2.plot(t, lw=3, c='#d62728')
    k_line, = ax3.plot(k, lw=3, c='#1f77b4')
    p_line, = ax4.plot(p, lw=3, c='#2ca02c')

    ax1.set_title(r'Variation in $\Delta t$')
    ax1.set_ylabel(r'$\Delta t$')

    ax2.set_title(r'Total Energy over Time')
    ax2.set_ylabel('Total Energy')

    ax3.set_title('Kinetic Energy over Time')
    ax3.set_ylabel('Kinetic Energy')
    ax3.set_xlabel('Time Steps')

    ax4.set_title('Potential Energy over Time')
    ax4.set_ylabel('Potential Energy')
    ax4.set_xlabel('Time Steps')

    plt.show()

    # Leftover scaffolding from an animated version of this dashboard,
    # kept inert as a string literal:
    """im = ax[0, 0].imshow(model.lattice, cmap='Greys', vmin=-1, vmax=1)
    energy_line, = ax[0, 1].plot([], [], lw=3)
    mag_line, = ax[1, 0].plot([], [], lw=3)
    heat_line, = ax[1, 1].plot([], [], lw=3)
    susceptibility_line, = ax[2, 0].plot([], [], lw=3)
    acceptance_line, = ax[2, 1].plot([], [], lw=3)"""
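A minimal usage sketch for the function above, assuming synthetic per-step traces from a toy simulation; the arrays and the step count n are illustrative placeholders, not part of the dataset row:

import numpy as np

n = 500                           # number of recorded time steps (assumed)
steps = np.linspace(0, 8 * np.pi, n)
h = np.full(n, 1e-3)              # constant step sizes, so ax1 shows a flat line
k = 0.5 * np.cos(steps) ** 2      # toy kinetic energy trace
p = 0.5 * np.sin(steps) ** 2      # toy potential energy trace
t = k + p                         # total energy, constant here by construction

create_dashboard(h, t, k, p)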
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_vanHove_dt(comp,conn,start,step_size,steps):\n \n (fin,) = conn.execute(\"select fout from comps where comp_key = ?\",comp).fetchone()\n (max_step,) = conn.execute(\"select max_step from vanHove_prams where comp_key = ?\",comp).fetchone()\n Fin = h5py.File(fin,'r')\n g = Fin[fd('vanHove',comp[0])]\n\n temp = g.attrs['temperature']\n dtime = g.attrs['dtime']\n\n\n # istatus = plots.non_i_plot_start()\n \n fig = mplt.figure()\n fig.suptitle(r'van Hove dist temp: %.2f dtime: %d'% (temp,dtime))\n dims = figure_out_grid(steps)\n \n plt_count = 1\n outs = []\n tmps = []\n for j in range(start,start+step_size*steps, step_size):\n (edges,count,x_lim) = _extract_vanHove(g,j+1,1,5)\n if len(count) < 50:\n plt_count += 1\n continue\n #count = count/np.sum(count)\n \n sp_arg = dims +(plt_count,)\n ax = fig.add_subplot(*sp_arg)\n ax.grid(True)\n\n \n alpha = _alpha2(edges,count)\n \n ax.set_ylabel(r'$\\log{P(N)}$')\n ax.step(edges,np.log((count/np.sum(count))),lw=2)\n ax.set_title(r'$\\alpha_2 = %.2f$'%alpha + ' j:%d '%j )\n ax.set_xlim(x_lim)\n plt_count += 1\n\n mplt.draw()\n\n # plots.non_i_plot_start(istatus)\n\n del g\n Fin.close()\n del Fin", "def results_plot_fuel_reactor(self):\n \n import matplotlib.pyplot as plt \n\n # Total pressure profile\n P = []\n for z in self.MB_fuel.z:\n P.append(value(self.MB_fuel.P[z]))\n fig_P = plt.figure(1)\n plt.plot(self.MB_fuel.z, P)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Total Pressure [bar]\") \n\n # Temperature profile\n Tg = []\n Ts = []\n# Tw = []\n for z in self.MB_fuel.z:\n Tg.append(value(self.MB_fuel.Tg[z] - 273.15))\n Ts.append(value(self.MB_fuel.Ts[z] - 273.15))\n# Tw.append(value(self.MB_fuel.Tw[z]))\n fig_T = plt.figure(2)\n plt.plot(self.MB_fuel.z, Tg, label='Tg')\n plt.plot(self.MB_fuel.z, Ts, label='Ts')\n# plt.plot(self.MB_fuel.z, Tw, label='Tw')\n plt.legend(loc=0,ncol=2)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Temperature [C]\") \n \n # Superficial gas velocity and minimum fluidization velocity\n vg = []\n umf = []\n for z in self.MB_fuel.z:\n vg.append(value(self.MB_fuel.vg[z]))\n umf.append(value(self.MB_fuel.umf[z]))\n fig_vg = plt.figure(3)\n plt.plot(self.MB_fuel.z, vg, label='vg')\n plt.plot(self.MB_fuel.z, umf, label='umf')\n plt.legend(loc=0,ncol=2)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Superficial gas velocity [m/s]\")\n \n # Gas components molar flow rate\n for j in self.MB_fuel.GasList:\n F = []\n for z in self.MB_fuel.z:\n F.append(value(self.MB_fuel.F[z,j]))\n fig_F = plt.figure(4)\n plt.plot(self.MB_fuel.z, F, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.GasList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Gas component molar flow rate, F [mol/s]\") \n \n # Bulk gas phase total molar flow rate\n Ftotal = []\n for z in self.MB_fuel.z:\n Ftotal.append(value(self.MB_fuel.Ftotal[z]))\n fig_Ftotal = plt.figure(5)\n plt.plot(self.MB_fuel.z, Ftotal)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Total molar gas flow rate [mol/s]\") \n\n # Solid components mass flow rate\n for j in self.MB_fuel.SolidList:\n M = []\n for z in self.MB_fuel.z:\n M.append(value(self.MB_fuel.Solid_M[z,j]))\n fig_M = plt.figure(6)\n plt.plot(self.MB_fuel.z, M, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.SolidList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Solid components mass flow rate [kg/s]\")\n \n # Bulk solid phase total molar flow rate\n Mtotal = []\n for z in self.MB_fuel.z:\n 
Mtotal.append(value(self.MB_fuel.Solid_M_total[z]))\n fig_Mtotal = plt.figure(7)\n plt.plot(self.MB_fuel.z, Mtotal)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Solid total mass flow rate [kg/s]\") \n \n # Gas phase concentrations\n for j in self.MB_fuel.GasList:\n Cg = []\n for z in self.MB_fuel.z:\n Cg.append(value(self.MB_fuel.Cg[z,j]))\n fig_Cg = plt.figure(8)\n plt.plot(self.MB_fuel.z, Cg, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.GasList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Concentration [mol/m3]\") \n \n # Gas phase mole fractions\n for j in self.MB_fuel.GasList:\n y = []\n for z in self.MB_fuel.z:\n y.append(value(self.MB_fuel.y[z,j]))\n fig_y = plt.figure(9)\n plt.plot(self.MB_fuel.z, y, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.GasList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"y [-]\") \n \n # Solid phase mass fractions\n for j in self.MB_fuel.SolidList:\n x = []\n for z in self.MB_fuel.z:\n x.append(value(self.MB_fuel.x[z,j]))\n fig_x = plt.figure(10)\n plt.plot(self.MB_fuel.z, x, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.SolidList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"x [-]\") \n\n # Total mass fraction\n xtot = []\n for z in self.MB_fuel.z:\n xtot.append(value(self.MB_fuel.xtot[z]))\n fig_xtot = plt.figure(11)\n plt.plot(self.MB_fuel.z, xtot)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Total mass fraction [-]\") \n \n # # Gas mix density\n # rhog = []\n # for z in self.MB_fuel.z:\n # rhog.append(value(self.MB_fuel.rho_vap[z]))\n # fig_rhog = plt.figure(23)\n # plt.plot(self.MB_fuel.z, rhog)\n # plt.grid()\n # plt.xlabel(\"Bed height [-]\")\n # plt.ylabel(\"Gas mix density [kg/m3]\") \n \n # Fe conversion\n X_Fe = []\n for z in self.MB_fuel.z:\n X_Fe.append(value(self.MB_fuel.X[z])*100)\n fig_X_Fe = plt.figure(13)\n plt.plot(self.MB_fuel.z, X_Fe)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Fraction of metal oxide converted [%]\")", "def visualize(self):\n print('{0} is {1} time steps old'.format(self.name, self.timestep))\n\n self.amygdala.visualize(self.timestep, self.name, self.log_dir)\n self.cerebellum.visualize(self.name, self.log_dir)\n self.cingulate.visualize(self.name, self.log_dir)\n self.hippocampus.visualize(self.name, self.log_dir)\n #self.ganglia.visualize(self.name, self.log_dir)\n #self.cortex.visualize(self.name, self.log_dir)", "def main():\n # Load properties that will be needed\n store = [Storage.Storage(2), Storage.Storage(4)] \n pre_energy = [s.get(\"free_energy\") for s in store]\n post_energy = [s.get(\"post_energy\") for s in store]\n x_range = store[0].get(\"x_range\")\n xlocs = np.arange(x_range[0], x_range[1], x_range[2])\n y_range = store[0].get(\"y_range\")\n ylocs = np.arange(y_range[0], y_range[1], y_range[2])\n # Calculate step size\n xb2steps = stepsize(pre_energy[0], post_energy[0], xlocs) \n xb4steps = stepsize(pre_energy[1], post_energy[1], xlocs) \n # Set up the figure\n fig = plt.figure(1, figsize=(7.5,2.5)) \n axe = (fig.add_subplot(1, 2, 1), fig.add_subplot(1, 2, 2))\n # Plot the results\n axe[0].plot(ylocs, xb4steps, color='#FF466F', lw=4)\n axe[1].plot(ylocs, xb2steps, color='#76D753', lw=4)\n # Annotate the plots\n axe[0].set_title(\"4sXB step size\")\n axe[0].set_xlabel(\"Lattice spacing (nm)\") \n axe[0].set_ylabel(\"Step size (nm)\")\n axe[0].set_xlim((25.5, 39))\n axe[0].set_ylim((1, 8))\n axe[1].set_title(\"2sXB step size\")\n axe[1].set_xlabel(\"Lattice spacing (nm)\") \n 
axe[1].set_ylabel(\"Step size (nm)\")\n axe[1].set_xlim((25.5, 39))\n axe[1].set_ylim((1, 8))\n # Display the plots\n fig.subplots_adjust(wspace=0.25, hspace=0.48,\n left=0.08, right=0.98,\n top=0.85, bottom=0.21)\n plt.show()", "def main_time_chart(self) -> Component:\n logger.debug('Generating time graph.')\n df = self.activity_manager.metadata_weekly_time_series(activity_type='run')\n\n freq_dropdown = dcc.Dropdown('overview_main_time_chart_freq_dropdown', options=[\n {'label': 'Weekly', 'value': 'weekly'},\n {'label': 'Monthly', 'value': 'monthly'}\n ], value='monthly')\n\n y_dropdown = dcc.Dropdown('overview_main_time_chart_y_dropdown', options=[\n {'label': 'Average speed', 'value': 'mean_speed'},\n {'label': 'Total distance', 'value': 'total_distance'},\n {'label': 'Total duration', 'value': 'total_duration'},\n {'label': 'Average heart rate', 'value': 'mean_hr'},\n {'label': 'Number of activities', 'value': 'activity_count'}\n ], value='activity_count')\n\n graph = dcc.Graph(\n id='overview_main_time_chart',\n figure=self.main_time_fig('weekly', 'activity_count')\n )\n return html.Div([\n html.H2('Progress over time'),\n dbc.Row([\n dbc.Col(html.Div(['Frequency:', freq_dropdown])),\n dbc.Col(html.Div(['y axis:', y_dropdown]))\n ]),\n graph\n ])", "def visualizations():\r\n raise NotImplementedError\r\n # df = pandas.read_csv('accidents_by_hour.csv', index_col=0, header=0)\r\n # plt.plot(0, 0, data=df)\r\n # plt.show()\r", "def plot(self):\n\t\tplot_chain(self.database_path, self.temp_folder)\n\t\tplot_density(self.database_path, self.temp_folder, self.cal_params)", "def display(self):\n \n # initialize SQL kit to access database\n s = SQL_Kit(self.userID, self.password, self.database)\n \n \n \"\"\" Total Activity by hour \"\"\"\n \n # get activity data\n all_date_times = self.activity().index\n\n all_days = []\n all_hours = []\n for item in all_date_times:\n all_days.append((item.timetuple().tm_yday))\n all_hours.append(item.hour)\n\n x = all_days\n y = all_hours\n x_labels = pd.Series(all_days).unique()\n\n fig1, ax1 = plt.subplots()\n ax1.set_title('Hourly Activity')\n ax1.scatter(x,y,color='mediumspringgreen',linewidths=1)\n ax1.set_xlabel('day of year')\n ax1.set_ylabel('hour')\n ax1.xaxis.grid(True)\n\n if len(x_labels) > 5:\n ax1.xaxis.set_ticks([min(all_days), max(all_days)])\n else:\n ax1.xaxis.set_ticks(x_labels)\n\n ax1.yaxis.grid(False) \n plt.show()\n \n \n \"\"\" MOVING AVERAGE \"\"\"\n \n df = self.activity().reset_index()\n\n def day_of_year(datetime_entry):\n return datetime_entry.timetuple().tm_yday\n\n df['day_of_year'] = list(df.apply(lambda x: day_of_year(x['EventDateTime']),axis=1))\n daily_count = df['day_of_year'].value_counts().sort_index()\n\n averages = []\n i=1\n for value_count in daily_count:\n values = daily_count[:i]\n average = round(sum(values)/len(values),2)\n averages.append(average)\n i+=1\n\n day_list = list(df['day_of_year'].unique())\n\n avg_move_df = pd.DataFrame([day_list,averages]).T\n avg_move_df.rename(columns={0: 'day_id', 1: 'moving_avg'},inplace=True)\n avg_move_df.set_index('day_id',inplace=True)\n \n fig1, ax1 = plt.subplots()\n ax1.plot(avg_move_df.index.astype(int),avg_move_df['moving_avg'], color='mediumspringgreen')\n ax1.set_title('Moving AVG')\n ax1.set_xlabel('day_of_year')\n ax1.xaxis.set_ticks([min(all_days), max(all_days)])\n ax1.set_ylabel('Daily Activity')\n plt.show()\n \n \n \n \"\"\" Top 5 Samples \"\"\"\n \n data = s.select_table('sample')['SoundCategory'].value_counts()\n \n objects = list(data)[:5]\n y_pos = 
list(data.index)[:5]\n\n # get class info from class_absence_stats dataframe\n #fig2 = plt.figure(2) \n plt.bar(y_pos, objects, align='center', alpha=0.8, color='mediumspringgreen')\n plt.ylabel('Usage')\n plt.xlabel('Sound Category')\n plt.title('Top 5 Samples')\n plt.show()\n \n \n \"\"\" Top 3 Chords \"\"\"\n \n data = s.select_table('chord')['ChordLabel'].value_counts()\n\n objects = list(data)[:3]\n y_pos = list(data.index)[:3]\n\n # get class info from class_absence_stats dataframe\n #fig2 = plt.figure(2) \n plt.bar(y_pos, objects, align='center', alpha=0.8, color='mediumspringgreen')\n plt.ylabel('Usage')\n plt.xlabel('Chord Label')\n plt.title('Top 3 Chords')\n plt.show()\n \n \n \"\"\" Top 3 Wave Types \"\"\"\n \n # get SQL table data\n set_1 = s.select_table('createwave')\n set_2 = s.select_table('sequence')\n set_3 = s.select_table('arpeggio')\n set_4 = s.select_table('chord')\n\n # concat tables into single pandas series\n all_wave_types = pd.concat([set_1['WaveType'], set_2['WaveType'], set_3['WaveType'], set_4['WaveType']])\n\n # sort values, show top 3\n top_3 = all_wave_types.value_counts().head(3)\n\n\n # Pie chart, where the slices will be ordered and plotted counter-clockwise:\n labels = list(top_3.index)\n sizes = list(top_3.values)\n explode = (0, 0, 0) # only \"explode\" the 2nd slice (i.e. 'Hogs')\n\n fig1, ax1 = plt.subplots()\n ax1.pie(sizes, explode=explode, labels=labels, autopct='%1.1f%%',\n shadow=True, colors=['g','b','r'], startangle=90)\n ax1.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle.\n ax1.set_title('Top Wave Types')\n\n plt.show()", "def energies():\n # Hardcoded initial values\n numsteps = 10000\n time_max = 1\n # Running the calculation in the solver class using the velocity verlet method\n # for better accuracy.\n verlet = solver(input_matrix, 'verlet', time_max, numsteps)\n output_matrix, KE, PE, AM = verlet.main()\n # Creating a simple time axis for plotting\n x = np.linspace(0, 1, numsteps+1)\n\n # Plotting kinetic energy over time\n plt.figure(1, figsize=(10, 10))\n plt.plot(x, KE)\n plt.suptitle('Total kinetic energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['KE'])\n\n # Plotting potential energy over time\n plt.figure(2, figsize=(10, 10))\n plt.plot(x, PE)\n plt.suptitle('Total potential energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['PE'])\n\n # Plotting total energy against time\n plt.figure(3, figsize=(10, 10))\n plt.plot(x, PE+KE)\n plt.suptitle('Total energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['KE+PE'])\n\n # Plotting angular momentum against time. 
print the amplitude to terminal\n amplitude = max(AM)-min(AM)\n print('Amplitude of angular momentum during 1 year: %g[AU²/yr²]' %(amplitude))\n plt.figure(4, figsize=(10, 10))\n plt.plot(x, AM)\n plt.suptitle('Total angular momentum in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²/yr²]', fontsize=16)\n plt.legend(['AM'])\n\n # Plotting the kinetic, potential and total energy against time to see\n # how great the variations are\n plt.figure(5, figsize=(10, 10))\n plt.plot(x, PE, x, KE, x, KE+PE)\n plt.suptitle('Total energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['PE', 'KE', 'KE+PE'])\n plt.show()", "def main():\n save = False\n show = True\n\n #hd_parameter_plots = HDparameterPlots(save=save)\n #hd_parameter_plots.flow_parameter_distribution_for_non_lake_cells_for_current_HD_model()\n #hd_parameter_plots.flow_parameter_distribution_current_HD_model_for_current_HD_model_reprocessed_without_lakes_and_wetlands()\n #hd_parameter_plots.flow_parameter_distribution_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs()\n #hd_parameter_plots.flow_parameter_distribution_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_no_tuning()\n #ice5g_comparison_plots = Ice5GComparisonPlots(save=save)\n #ice5g_comparison_plots.plotLine()\n #ice5g_comparison_plots.plotFilled()\n #ice5g_comparison_plots.plotCombined()\n #ice5g_comparison_plots.plotCombinedIncludingOceanFloors()\n #flowmapplot = FlowMapPlots(save)\n #flowmapplot.FourFlowMapSectionsFromDeglaciation()\n #flowmapplot.Etopo1FlowMap()\n #flowmapplot.ICE5G_data_all_points_0k()\n #flowmapplot.ICE5G_data_all_points_0k_no_sink_filling()\n #flowmapplot.ICE5G_data_all_points_0k_alg4_two_color()\n #flowmapplot.ICE5G_data_all_points_21k_alg4_two_color()\n #flowmapplot.Etopo1FlowMap_two_color()\n #flowmapplot.Etopo1FlowMap_two_color_directly_upscaled_fields()\n #flowmapplot.Corrected_HD_Rdirs_FlowMap_two_color()\n #flowmapplot.ICE5G_data_ALG4_true_sinks_21k_And_ICE5G_data_ALG4_true_sinks_0k_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_Etopo1_ALG4_sinkless_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_Etopo1_ALG4_true_sinks_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_sinkless_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_true_sinks_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_corr_orog_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_no_true_sinks_corr_orog_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_HD_as_data_ALG4_true_sinks_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Upscaled_Rdirs_vs_Directly_Upscaled_fields_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplot.Ten_Minute_Data_from_Virna_data_ALG4_corr_orog_downscaled_lsmask_no_sinks_21k_vs_0k_FlowMap_comparison()\n #flowmapplot.Upscaled_Rdirs_vs_Corrected_HD_Rdirs_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n flowmapplotwithcatchment = 
FlowMapPlotsWithCatchments(save)\n #flowmapplotwithcatchment.Upscaled_Rdirs_vs_Corrected_HD_Rdirs_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.compare_present_day_and_lgm_river_directions_with_catchments_virna_data_plus_tarasov_style_orog_corrs_for_both()\n #flowmapplotwithcatchment.compare_present_day_river_directions_with_catchments_virna_data_with_vs_without_tarasov_style_orog_corrs()\n #flowmapplotwithcatchment.compare_lgm_river_directions_with_catchments_virna_data_with_vs_without_tarasov_style_orog_corrs()\n #flowmapplotwithcatchment.Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.upscaled_rdirs_with_and_without_tarasov_upscaled_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.\\\n #upscaled_rdirs_with_and_without_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.\\\n #Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.\\\n #Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_glcc_olson_lsmask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.compare_present_day_and_lgm_river_directions_with_catchments_ICE5G_plus_tarasov_style_orog_corrs_for_both()\n #flowmapplotwithcatchment.compare_present_day_and_lgm_river_directions_with_catchments_ICE6G_plus_tarasov_style_orog_corrs_for_both()\n #flowmapplotwithcatchment.compare_ICE5G_and_ICE6G_with_catchments_tarasov_style_orog_corrs_for_both()\n #flowmapplotwithcatchment.compare_river_directions_with_dynriver_corrs_and_MERIThydro_derived_corrs()\n #flowmapplotwithcatchment.compare_river_directions_with_dynriver_corrs_and_MERIThydro_derived_corrs_original_ts()\n flowmapplotwithcatchment.compare_river_directions_with_dynriver_corrs_and_MERIThydro_derived_corrs_new_ts_10min()\n outflowplots = OutflowPlots(save)\n #outflowplots.Compare_Upscaled_Rdirs_vs_Directly_Upscaled_fields_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_as_HD_data_ALG4_true_sinks_all_points_0k()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_sinkless_all_points_0k_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_true_sinks_all_points_0k_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_corr_orog_all_points_0k_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_corr_orog_downscaled_ls_mask_all_points_0k_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_Etopo1_ALG4_sinkless_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_Etopo1_ALG4_true_sinks_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_plus_tarasov_upscaled_srtm30_ALG4_corr_orog_0k_directly_upscaled_fields()\n #outflowplots.Compare_Original_Corrections_vs_Upscaled_MERIT_DEM_0k()\n outflowplots.Compare_Original_Corrections_vs_Upscaled_MERIT_DEM_0k_new_truesinks()\n #outflowplots.Compare_Original_Corrections_vs_Upscaled_MERIT_DEM_0k_new_truesinks_individual_rivers()\n 
#outflowplots.Compare_ICE5G_with_and_without_tarasov_upscaled_srtm30_ALG4_corr_orog_0k_directly_upscaled_fields()\n #hd_output_plots = HDOutputPlots()\n #hd_output_plots.check_water_balance_of_1978_for_constant_forcing_of_0_01()\n #hd_output_plots.plot_comparison_using_1990_rainfall_data()\n #hd_output_plots.plot_comparison_using_1990_rainfall_data_adding_back_to_discharge()\n #coupledrunoutputplots = CoupledRunOutputPlots(save=save)\n #coupledrunoutputplots.ice6g_rdirs_lgm_run_discharge_plot()\n #coupledrunoutputplots.extended_present_day_rdirs_lgm_run_discharge_plot()\n #coupledrunoutputplots.ocean_grid_extended_present_day_rdirs_vs_ice6g_rdirs_lgm_run_discharge_plot()\n #coupledrunoutputplots.extended_present_day_rdirs_vs_ice6g_rdirs_lgm_echam()\n #coupledrunoutputplots.extended_present_day_rdirs_vs_ice6g_rdirs_lgm_mpiom_pem()\n #lake_plots = LakePlots()\n #lake_plots.plotLakeDepths()\n #lake_plots.LakeAndRiverMap()\n #lake_plots.LakeAndRiverMaps()\n if show:\n plt.show()", "def show_dashboard():\n script, div = plots.make_plot()\n script_tab, div_tab = plots.make_tabs()\n script_trend, div_trend = plots.make_trend()\n\n return render_template('layout.html',\n script=script,\n div=div,\n script_trend=script_trend,\n div_trend=div_trend,\n script_tab=script_tab,\n div_tab=div_tab)", "def send_to_dashboard():\n\t# purchased and conventional\n\tpurchase_activity_conv = db.session.query(PurchaseActivity).filter_by(purchased=True, conventional=True).all()\n\t# purchased and organic\n\tpurchase_activity_organic = db.session.query(PurchaseActivity).filter_by(purchased=True, organic=True).all()\n\t# search activity \n\tsearch_activity = db.session.query(SearchActivity).all()\n\n\t# plot search_activity over time and purchase_activity over time\n\n\tlist_of_dict = []\n\n\n\tdata = {}\n\tdatasets_dict = {}\n\tdatasets_dict['label'] = \"Search Activity, Items Purchased over Time\"\n\tdatasets_dict['fillColor'] = \"rgba(220,220,220,0.5)\"\n\tdatasets_dict['strokeColor'] = \"rgba(220,220,220,0.8)\"\n\tdatasets_dict['highlightFill'] = \"rgba(220,220,220,0.75)\"\n\tdatasets_dict['highlightStroke'] = \"rgba(220,220,220,1)\"\n\tdatasets_dict['data'] =search_activity, purchase_activity_organic, purchase_activity_conv\n\tdata['labels'] = time\n\tdata['datasets'] = [datasets_dict]\n \n\n\tlist_of_dict.append(data)\n\tprint list_of_dict \t\n\n\treturn render_template(\"/dashboard.html\")", "def showPlot1():\n\n interested_in = list(range(5,30,5))\n proc_sim_data = []\n for item in interested_in:\n len_sim_data = []\n raw_sim_data = runSimulation(1, 1.0, item, item, 0.75, 100, Robot, False)\n for mes in raw_sim_data:\n len_sim_data.append(len(mes))\n proc_sim_data.append(sum(len_sim_data)/len(len_sim_data))\n plot(interested_in, proc_sim_data)\n title('Dependence of cleaning time on room size')\n xlabel('area of the room (tiles)')\n ylabel('mean time (clocks)')\n show()", "def showPlot2():\n interested_in = list(range(1,10))\n proc_sim_data = []\n for item in interested_in:\n len_sim_data = []\n raw_sim_data = runSimulation(item, 1.0, 25, 25, 0.75, 100, Robot, False)\n for mes in raw_sim_data:\n len_sim_data.append(len(mes))\n proc_sim_data.append(sum(len_sim_data)/len(len_sim_data))\n plot(interested_in, proc_sim_data)\n title('Dependence of cleaning time on number of robots')\n xlabel('number of robots (tiles)')\n ylabel('mean time (clocks)')\n show()", "def experimental_report(environment, species, time_series,path=None,events=None):\n\n\n M = len(environment)+1\n L = int(np.ceil(1 + 
len(time_series)/2))\n fig = plt.figure(figsize=(5*M,5*L))\n \n colormaps = [\"Greens\",\"bwr\",\"Blues\",\"Oranges\",\"RdPu\",\"Reds\"]\n for i,(k,v) in enumerate(environment):\n plt.subplot(L,M,i+1)\n plt.imshow(v,\n interpolation='None',\n cmap=colormaps[i%len(colormaps)],\n vmin=0,vmax=1,\n aspect=\"equal\")\n plt.xticks([])\n plt.yticks([])\n plt.title(k)\n plt.colorbar(orientation=\"horizontal\", fraction=0.045)\n plt.subplot(L,M,M)\n niches(species,path=path)\n\n colors = [\"blue\",\"green\",\"brown\",\"purple\",\"red\"]\n host = [host_subplot(L*100+10+2+j, axes_class=AA.Axes) for j in range(L-1)]\n\n\n for i,(k,v) in enumerate(time_series):\n #if False and i%2 != 0:\n # ax = host[int(i/2)].twinx()\n #else:\n ax = host[int(i/2)]\n ax.set_ylabel(k)\n if len(v) == 2:\n T = len(v[0])\n ax.plot(v[0],\n label=k,\n color=colors[i%len(colors)],\n linewidth=2)\n ax.fill_between(range(len(v[0])),\n v[0]-v[1], v[0]+v[1],\n alpha=0.3,\n color=colors[i%len(colors)])\n else:\n T = len(v)\n ax.plot(range(len(v)),v, color=colors[i%len(colors)], label=k)\n \n \n for h in host:\n h.set_xlim((0,T-1))\n h.legend()\n h.set_xlabel(\"Time\")\n \n h.set_ymargin(0.05)\n h.autoscale(enable=True, axis=u'both', tight=False)\n\n if events is not None:\n h.vlines(events,*h.get_ylim(),alpha=0.1)", "def plot_energies(self):\n plt.plot(self.energies[0], self.energies[1])\n plt.xlabel('Time (s)')\n plt.ylabel('Energy (J)')\n plt.show()", "def plot_steps(out_dict, units):\n from bokeh.models import BoxAnnotation\n from bokeh.plotting import figure, show, output_notebook\n import bokeh.models as bmd\n\n tooltips = [\n (\"Step (total)\", \"@index\"),\n (\"Step (stage)\", \"@step\"),\n (\"Energy\", \"@energy eV/atom\"),\n (\"Energy (dispersion)\", \"@dispersion_energy_au Ha\"),\n (\"SCF converged\", \"@scf_converged\"),\n (\"Cell A\", \"@cell_a_angs Angs\"),\n (\"Cell Vol\", \"@cell_vol_angs3 Angs^3\"),\n (\"MAX Step\", \"@max_step_au Bohr\"),\n (\"Pressure\", \"@pressure_bar bar\")\n ]\n hover = bmd.HoverTool(tooltips=tooltips)\n TOOLS = [\"pan\", \"wheel_zoom\", \"box_zoom\", \"reset\", \"save\", hover]\n\n natoms = out_dict['natoms']\n values = [ x/natoms*ha2u[units] for x in out_dict['step_info']['energy_au'] ]\n values = [ x-min(values) for x in values ]\n\n data = bmd.ColumnDataSource(data=dict( index=range(len(values)),\n step=out_dict['step_info']['step'],\n energy=values,\n dispersion_energy_au=out_dict['step_info']['dispersion_energy_au'],\n scf_converged=out_dict['step_info']['scf_converged'],\n cell_a_angs=out_dict['step_info']['cell_a_angs'],\n cell_vol_angs3=out_dict['step_info']['cell_vol_angs3'],\n max_step_au=out_dict['step_info']['max_step_au'],\n pressure_bar=out_dict['step_info']['pressure_bar'],\n ))\n\n p = figure(tools=TOOLS, title='Energy profile of the DFT minimization',\n height=350, width=550)\n\n p.xgrid.grid_line_color=None\n p.xaxis.axis_label = 'Steps'\n p.yaxis.axis_label = 'Energy ({}/atom)'.format(units)\n\n # Colored background\n colors = ['red','orange','green','yellow','cyan','pink','palegreen']\n start = 0\n for i,steps in enumerate(out_dict['stage_info']['nsteps']):\n end = start+steps\n p.add_layout(BoxAnnotation(left=start, right=end, fill_alpha=0.2, fill_color=colors[i]))\n start = end\n\n # Trace line and markers\n p.line('index', 'energy', source=data, line_color='blue')\n p.circle('index', 'energy', source=data, line_color='blue', size=3)\n return p", "def page_dashboard(state):\n\n st.title(\":chart_with_upwards_trend: Prediction Results Dashboard\")\n\n st.markdown(\"# 
Select Stocks to View Results:\")\n if state.finalized_data:\n for stock_data in state.finalized_data:\n st.write(\"---\")\n st.markdown(\"## \" + stock_data[\"stock\"])\n if st.checkbox(\"View Results for \" + stock_data[\"stock\"]):\n\n ############################################\n\n st.markdown(\"### Historical Predictions:\")\n\n df2 = pd.DataFrame.from_dict(stock_data[\"prev_predictions\"])\n\n select_lbl = (\n \"Enter the names of models for \" + stock_data[\"stock\"] + \":\"\n )\n models_selections = st.multiselect(\n label=select_lbl,\n options=df2.columns,\n ) # allow users to display specific model results on dataframe graph\n\n if not models_selections: # if nothing is selected show all models!\n st.line_chart(df2)\n else:\n st.line_chart(df2[models_selections])\n\n st.markdown(\n \"*Note:* 'Prices' are the actual prices for those days. The rest are model predictions for those days.\\nPrices (in USD) are on the y-axis, the day number in the data is on the x-axis.\"\n )\n\n ############################################\n\n st.markdown(\"### Future (Next-Day) Predictions:\")\n\n df = pd.DataFrame()\n df = df.append(\n pd.DataFrame(\n [stock_data[\"prediction_results\"][\"swing_predictions\"]]\n )\n )\n df = df.append(\n pd.DataFrame(\n [stock_data[\"prediction_results\"][\"next_day_predictions\"]]\n )\n )\n df = df.append(\n pd.DataFrame([stock_data[\"prediction_results\"][\"model_scores\"]])\n )\n\n df.index = [\n \"Swing Prediction\",\n \"Price Prediction ($)\",\n \"Model Fit Score\",\n ]\n df = df.transpose()\n df # display chart\n\n st.markdown(\n \"- The current price of the stock is *$\"\n + str(\n round(stock_data[\"prediction_results\"][\"current_prev_close\"], 2)\n )\n + \"*.\"\n )\n\n if state.period == \"1mo\":\n st.markdown(\"- *Recommended Model (for 1mo):* SVR-RBF\")\n st.markdown(\n \"- *View the home screen for more model & dataset size combination recommendations.*\"\n )\n elif state.period == \"6mo\":\n st.markdown(\n \"- *Recommended Model (for 6mo):* SVR-Poly (most recommended), LR, EN, or Lasso.\"\n )\n st.markdown(\n \"- *View the home screen for more model & dataset size combination recommendations.*\"\n )\n elif state.period == \"1y\":\n st.markdown(\"- *Recommended Model (for 1yr):* SVR-Poly\")\n st.markdown(\n \"- *View the home screen for more model & dataset size combination recommendations.*\"\n )\n else:\n st.markdown(\n \"- *Note:* View the home screen for information about the best models and training data size combinations.\"\n )\n\n ############################################\n st.markdown(\"### View Other Information:\")\n\n if st.checkbox(\n \"View \" + stock_data[\"stock\"] + \"'s Model Efficiency Timings\"\n ):\n st.markdown(\"#### Model Efficiencies:\")\n st.markdown(\n \"Shows the time in seconds it took models to complete specific tasks:\"\n )\n df3 = pd.DataFrame()\n df3 = df3.append(\n pd.DataFrame(\n [stock_data[\"prediction_results\"][\"training_times\"]]\n )\n )\n df3 = df3.append(\n pd.DataFrame(\n [stock_data[\"prediction_results\"][\"testing_times\"]]\n )\n )\n df3 = df3.append(\n pd.DataFrame(\n [stock_data[\"prediction_results\"][\"new_predictions_times\"]]\n )\n )\n df3 = df3.append(\n pd.DataFrame(\n [stock_data[\"prediction_results\"][\"prev_predictions_times\"]]\n )\n )\n df3.index = [\n \"Training\",\n \"Testing/Scoring\",\n \"Future Predictions\",\n \"Historical Predictions\",\n ]\n df3 = df3.transpose()\n df3\n\n ############################################\n\n if st.checkbox(\"View \" + stock_data[\"stock\"] + \"'s 
Information\"):\n st.markdown(\"#### Company Information:\")\n for key in stock_data[\"stock_info\"].keys():\n st.write(\"*\", key + \":\", stock_data[\"stock_info\"][key])\n else:\n st.markdown(\n \"## Generate data to populate and initialize this page by going to the 'Settings' page and running the tool!\"\n )", "def historial():\r\n global EnergiaK, EnergiaP, EnergiaT\r\n \r\n t = dt*np.arange(npasos_temporales+1)\r\n plt.figure('Energias del sistema')\r\n plt.title('Energies')\r\n plt.plot(t, EnergiaP, 'b', label='Potential')\r\n plt.plot(t, EnergiaK, 'r', label='Kinetic')\r\n plt.plot(t, EnergiaT, 'black', label='Total')\r\n plt.xlabel('t', fontsize = 18)\r\n plt.xticks(np.linspace(0,14,6), fontsize = 18)\r\n plt.yticks(np.linspace(0,35e-7,6), fontsize = 18)\r\n plt.ylim(0,40e-7)\r\n plt.xlim(0,14)\r\n plt.legend(loc=1)\r\n plt.ticklabel_format(style = 'sci', axis = 'y', scilimits = (0,0))\r\n plt.figure('Potential Energy')\r\n plt.plot(t, EnergiaP, 'b')\r\n plt.xlabel('t', fontsize = 18)\r\n plt.ylabel('Ex Energy', fontsize = 18)\r\n plt.xticks(np.linspace(0,100,11), fontsize = 18)\r\n plt.yticks(np.linspace(0,16,8), fontsize = 18)\r\n plt.xlim(0,100)\r\n plt.ylim(0,25)\r\n if os.path.exists(\"Energias\") and\\\r\n os.path.isfile(\"Energias/Energias.png\")==\\\r\n True:\r\n os.remove(\"Energias/Energias.png\") \r\n plt.savefig('Energias.png',dpi=720)\r\n shutil.move('Energias.png',\"Energias\")\r\n os.remove(\"Energias/energies.out\")\r\n # Escribe y guarda el archivo con los valores de la energia en el tiempo:\r\n sp.savetxt('energies.out', sp.column_stack((t,EnergiaP,EnergiaK,EnergiaT)),fmt=('%1.4e','%1.4e','%1.4e','%1.4e')) \r\n shutil.move('energies.out',\"Energias\") \r\n \r\n else:\r\n os.mkdir(\"Energias\")\r\n plt.savefig('Energias.png',dpi=720)\r\n shutil.move('Energias.png',\"Energias\") \r\n # Escribe y guarda el archivo con los valores de la energia en el tiempo:\r\n sp.savetxt('energies.out', sp.column_stack((t,EnergiaP,EnergiaK,EnergiaT)),fmt=('%1.4e','%1.4e','%1.4e','%1.4e')) \r\n shutil.move('energies.out',\"Energias\")", "def main(args=None):\n if args is None:\n args = sys.argv[1:]\n parsed_args = parse_arguments(arguments=args)\n\n conn = connect_to_database(db_path=parsed_args.database)\n c = conn.cursor()\n\n scenario_id, scenario = get_scenario_id_and_name(\n scenario_id_arg=parsed_args.scenario_id,\n scenario_name_arg=parsed_args.scenario,\n c=c,\n script=\"capacity_total_plot\",\n )\n\n tech_colors = get_tech_colors(c)\n tech_plotting_order = get_tech_plotting_order(c)\n power_unit = get_unit(c, \"power\")\n\n plot_title = \"{}Total Capacity by Period - {} - Subproblem {} - Stage {}\".format(\n \"{} - \".format(scenario) if parsed_args.scenario_name_in_title else \"\",\n parsed_args.load_zone,\n parsed_args.subproblem,\n parsed_args.stage,\n )\n\n # TODO: is this used?\n plot_name = \"TotalCapacityPlot-{}-{}-{}\".format(\n parsed_args.load_zone, parsed_args.subproblem, parsed_args.stage\n )\n\n df = get_plotting_data(\n conn=conn,\n scenario_id=scenario_id,\n load_zone=parsed_args.load_zone,\n subproblem=parsed_args.subproblem,\n stage=parsed_args.stage,\n )\n\n source, x_col_reordered = process_stacked_plot_data(\n df=df,\n y_col=\"capacity_mw\",\n x_col=[\"period\", \"scenario\"],\n category_col=\"technology\",\n )\n\n # Multi-level index in CDS will be joined into one column with \"_\" separator\n x_col_cds = \"_\".join(x_col_reordered)\n x_col_label = \", \".join([x.capitalize() for x in x_col_reordered])\n plot = create_stacked_bar_plot(\n 
"def main(args=None):\n if args is None:\n args = sys.argv[1:]\n parsed_args = parse_arguments(arguments=args)\n\n conn = connect_to_database(db_path=parsed_args.database)\n c = conn.cursor()\n\n scenario_id, scenario = get_scenario_id_and_name(\n scenario_id_arg=parsed_args.scenario_id,\n scenario_name_arg=parsed_args.scenario,\n c=c,\n script=\"capacity_total_plot\",\n )\n\n tech_colors = get_tech_colors(c)\n tech_plotting_order = get_tech_plotting_order(c)\n power_unit = get_unit(c, \"power\")\n\n plot_title = \"{}Total Capacity by Period - {} - Subproblem {} - Stage {}\".format(\n \"{} - \".format(scenario) if parsed_args.scenario_name_in_title else \"\",\n parsed_args.load_zone,\n parsed_args.subproblem,\n parsed_args.stage,\n )\n\n # TODO: is this used?\n plot_name = \"TotalCapacityPlot-{}-{}-{}\".format(\n parsed_args.load_zone, parsed_args.subproblem, parsed_args.stage\n )\n\n df = get_plotting_data(\n conn=conn,\n scenario_id=scenario_id,\n load_zone=parsed_args.load_zone,\n subproblem=parsed_args.subproblem,\n stage=parsed_args.stage,\n )\n\n source, x_col_reordered = process_stacked_plot_data(\n df=df,\n y_col=\"capacity_mw\",\n x_col=[\"period\", \"scenario\"],\n category_col=\"technology\",\n )\n\n # Multi-level index in CDS will be joined into one column with \"_\" separator\n x_col_cds = \"_\".join(x_col_reordered)\n x_col_label = \", \".join([x.capitalize() for x in x_col_reordered])\n plot = create_stacked_bar_plot(\n source=source,\n x_col=x_col_cds,\n x_label=x_col_label,\n y_label=\"Capacity ({})\".format(power_unit),\n category_label=\"Technology\",\n category_colors=tech_colors,\n category_order=tech_plotting_order,\n title=plot_title,\n ylimit=parsed_args.ylimit,\n )\n\n # Show plot in HTML browser file if requested\n if parsed_args.show:\n show_plot(\n plot=plot,\n plot_name=plot_name,\n plot_write_directory=parsed_args.plot_write_directory,\n scenario=scenario,\n )\n\n # Return plot in json format if requested\n if parsed_args.return_json:\n return json_item(plot, \"plotHTMLTarget\")", "def _run():\n\n temperatures_kelvins = _create_temperature_grid()\n first_derivs_kelvins_pt01 = numpy.gradient(temperatures_kelvins)\n second_derivs_kelvins_pt01 = numpy.gradient(\n numpy.absolute(first_derivs_kelvins_pt01)\n )\n\n this_ratio = (\n numpy.max(temperatures_kelvins) /\n numpy.max(first_derivs_kelvins_pt01)\n )\n\n first_derivs_unitless = first_derivs_kelvins_pt01 * this_ratio\n\n this_ratio = (\n numpy.max(temperatures_kelvins) /\n numpy.max(second_derivs_kelvins_pt01)\n )\n\n second_derivs_unitless = second_derivs_kelvins_pt01 * this_ratio\n\n _, axes_object = pyplot.subplots(\n 1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES)\n )\n\n temperature_handle = axes_object.plot(\n temperatures_kelvins, color=TEMPERATURE_COLOUR, linestyle='solid',\n linewidth=SOLID_LINE_WIDTH\n )[0]\n\n second_deriv_handle = axes_object.plot(\n second_derivs_unitless, color=SECOND_DERIV_COLOUR, linestyle='solid',\n linewidth=SOLID_LINE_WIDTH\n )[0]\n\n first_deriv_handle = axes_object.plot(\n first_derivs_unitless, color=FIRST_DERIV_COLOUR, linestyle='dashed',\n linewidth=DASHED_LINE_WIDTH\n )[0]\n\n this_min_index = numpy.argmin(second_derivs_unitless)\n second_derivs_unitless[\n (this_min_index - 10):(this_min_index + 10)\n ] = second_derivs_unitless[this_min_index]\n\n tfp_handle = axes_object.plot(\n -1 * second_derivs_unitless, color=TFP_COLOUR, linestyle='dashed',\n linewidth=DASHED_LINE_WIDTH\n )[0]\n\n axes_object.set_yticks([0])\n axes_object.set_xticks([], [])\n\n x_label_string = r'$x$-coordinate (increasing to the right)'\n axes_object.set_xlabel(x_label_string)\n\n legend_handles = [\n temperature_handle, first_deriv_handle, second_deriv_handle,\n tfp_handle\n ]\n\n legend_strings = [\n TEMPERATURE_LEGEND_STRING, FIRST_DERIV_LEGEND_STRING,\n SECOND_DERIV_LEGEND_STRING, TFP_LEGEND_STRING\n ]\n\n axes_object.legend(legend_handles, legend_strings, loc='lower right')\n\n print 'Saving figure to file: \"{0:s}\"...'.format(OUTPUT_FILE_NAME)\n pyplot.savefig(OUTPUT_FILE_NAME, dpi=FIGURE_RESOLUTION_DPI)\n pyplot.close()", "def plot(self):\n # get data without totals\n data = self.woe_report[self.woe_report.index != 'total']\n # setup panel\n fig, axs = plt.subplots(1, 3, figsize=(12, 3))\n plt.subplots_adjust(wspace=0.3)\n # first chart\n data['P(Hi|A)'].plot(ax=axs[0], linewidth=3, alpha=0.7)\n data['P(Hi|Ā)'].plot(ax=axs[0], linewidth=3, alpha=0.7)\n axs[0].set_title('Probability distribution')\n axs[0].set_xlabel(data.index.name)\n axs[0].set_ylabel('probability')\n axs[0].legend(['P(Hi|A)', 'P(Hi|Ā)'])\n # second chart\n data['weight-of-evidence'].plot(ax=axs[1], linewidth=3, alpha=0.7)\n axs[1].set_title('WoE')\n axs[1].set_xlabel(data.index.name)\n axs[1].set_ylabel('WoE')\n # third chart\n data['information-value'].plot(ax=axs[2], linewidth=3, alpha=0.7)\n axs[2].set_title('Information value')\n axs[2].set_ylabel('IV')", "def plotter(self, Result, outcome):\n # Plot results time histories\n fig, axs = 
plt.subplots(2, 3, figsize=(20, 10))\n axs = axs.reshape(-1)\n axs[0].plot(Result.time, Result.velocity)\n axs[1].plot(Result.time, Result.mass)\n axs[2].plot(Result.time, Result.angle)\n axs[3].plot(Result.time, Result.altitude)\n axs[4].plot(Result.time, Result.distance)\n axs[5].plot(Result.time, Result.radius)\n axs[0].set_title('velocity (m/s) vs time (s)', fontsize=16)\n axs[1].set_title('mass (kg) vs time (s)', fontsize=16)\n axs[2].set_title('angle (rad) vs time (s)', fontsize=16)\n axs[3].set_title('altitude (m) vs time (s)', fontsize=16)\n axs[4].set_title('distance (m) vs time (s)', fontsize=16)\n axs[5].set_title('radius (m) vs time (s)', fontsize=16)\n plt.tight_layout()\n\n # Plot energy deposition curve\n fig, ax = plt.subplots(1, 1, figsize=(8, 8))\n ax.plot(Result.dedz, Result.altitude / 1e3)\n ax.set_xlabel('Energy per unit height [Kt/km]', fontsize=14)\n ax.set_ylabel('Altitude [km]', fontsize=14)\n plt.show()", "def render(self, agents, episode):\n fig = plt.figure(figsize=(12, 12))\n fig.suptitle(f\"Episode {episode}\")\n gs = gridspec.GridSpec(2, 2)\n\n a_thousand = 1000\n a_million = 1000000\n\n # Price Process\n ax = plt.subplot(gs[0, 0])\n ax.set(title=\"Price Process\")\n plt.plot(self.step_array * self.tau, self.S_tilde, label=\"Price Including Temporary Price Impact\")\n plt.plot(self.step_array * self.tau, self.S, label=\"Price\")\n plt.legend()\n ax.set(ylabel=\"Price\")\n ax.grid(True)\n\n # Revenue Process\n ax = plt.subplot(gs[0, 1])\n ax.set(title=\"Revenue Process\")\n for a in range(len(agents)):\n plt.plot(self.step_array * self.tau, agents[a].R / a_thousand, label=f\"Agent {a+1}\")\n plt.legend()\n ax.grid(True)\n ax.set(ylabel=\"Revenue ($k)\")\n ax.set(xlabel=\"Time Step\")\n\n # Inventory Process\n ax = plt.subplot(gs[1, :])\n ax.set(title=\"Inventory Process\")\n for a in range(len(agents)):\n plt.plot(self.step_array * self.tau, agents[a].x / a_million, label=f\"Agent {a+1}\")\n plt.legend()\n ax.grid(True)\n ax.set(ylabel=\"Inventory (M)\")\n ax.set(xlabel=\"Time Step\")\n\n filename = PPODirectories.tmp + f\"episode-{episode}-simulation.png\"\n\n plt.savefig(filename)\n plt.close()\n\n return filename", "def generate_statistics_plots(graph_name, graph_steps):\n df_final_situation = pd.DataFrame(columns=[\"type\", \"value\"])\n df_step = pd.DataFrame(columns=[\"type\", \"step\", \"value\"])\n df_exposed = pd.DataFrame(columns=[\"step\", \"type\", \"value\"])\n\n st.markdown(\"\")\n\n for i in range(graph_steps):\n # read graph and print stats\n graph_result_path = \"./data/output/\"\n G = nx.read_gexf(f\"{graph_result_path}G_{graph_name}_step{i}.gexf\")\n print_stats(G, i, graph_name)\n\n # LINE CHART (append informations into dataframe)\n df_step = df_step.append(\n {\"type\": \"not_exposed\", \"step\": i, \"value\": cn.count_not_exposed(G)},\n ignore_index=True,\n )\n df_step = df_step.append(\n {\"type\": \"exposed\", \"step\": i, \"value\": cn.count_exposed(G)},\n ignore_index=True,\n )\n df_step = df_step.append(\n {\"type\": \"infected\", \"step\": i, \"value\": cn.count_infected(G)},\n ignore_index=True,\n )\n\n line_chart = px.line(\n df_step,\n x=\"step\",\n y=\"value\",\n color=\"type\",\n title=f\"Infection overall: {graph_name} step: {i}\",\n )\n\n # BAR CHART (append informations into dataframe)\n df_exposed = df_exposed.append(\n {\n \"step\": i,\n \"type\": \"opinion_leader\",\n \"value\": cn.count_exposed_opinion_leader(G),\n },\n ignore_index=True,\n )\n df_exposed = df_exposed.append(\n {\"step\": i, \"type\": \"bot\", 
\"value\": cn.count_exposed_bot(G)},\n ignore_index=True,\n )\n df_exposed = df_exposed.append(\n {\"step\": i, \"type\": \"user\", \"value\": cn.count_exposed_user(G)},\n ignore_index=True,\n )\n bar_chart = px.bar(\n df_exposed,\n x=\"step\",\n y=\"value\",\n color=\"type\",\n title=f\"Type of agents exposed: {graph_name} step: {i}\",\n )\n\n # PIE CHART (append informations into dataframe)\n if i == 4:\n df_final_situation = df_final_situation.append(\n {\"type\": \"not_exposed\", \"value\": cn.count_not_exposed(G)},\n ignore_index=True,\n )\n df_final_situation = df_final_situation.append(\n {\"type\": \"exposed\", \"value\": cn.count_exposed(G)},\n ignore_index=True,\n )\n df_final_situation = df_final_situation.append(\n {\"type\": \"infected\", \"value\": cn.count_infected(G)},\n ignore_index=True,\n )\n\n #### CREATE THE PLOTS\n ##Uncomment plot(..) to save the plots to disk in html format\n\n plot_folder = \"./data/plots/\"\n\n # Plotly Line Plot\n # plot(line_chart, filename=f\"{plot_folder}steps_{graph_name}.html\")\n st.plotly_chart(line_chart, use_container_width=True)\n\n # Plotly bar plot\n # plot(bar_chart, filename=f\"{plot_folder}exposed_type_{graph_name}.html\")\n st.plotly_chart(bar_chart, use_container_width=True)\n\n # Plotly final pie chart\n final_pie_chart = px.pie(\n df_final_situation, values=\"value\", names=\"type\", title=f\"Final situation plot of: {graph_name}\"\n )\n # plot(final_pie_chart, filename=f\"{plot_folder}final_situation.html\")\n st.plotly_chart(final_pie_chart, use_container_width=True)\n\n print(\"\\nStatistics calculated succesfully\")\n\n return True", "def show_results(self):\n\n N = split_list(self.N)\n # create subplot\n fig = make_subplots(rows=1,cols=2,\n subplot_titles=('Fish population', 'Harvested fish'),\n specs=[[{'type': 'xy'}, {'type': 'pie'}]])\n #Add population line graph\n fig.add_trace(go.Scatter(y=N['odds'], x=np.linspace(1, 11, 6), name='odd year population',\n hovertemplate =\n 'Year: %{x}'+ '<br>Pop: %{y}'),\n row=1, col=1)\n fig.add_trace(go.Scatter(y=N['evens'], x=np.linspace(2, 12, 6), name='even year population',\n hovertemplate =\n 'Year: %{x}'+ '<br>Pop: %{y}'),\n row=1, col=1)\n fig.update_xaxes(title_text=\"year\", row=1, col=1)\n fig.update_yaxes(title_text=\"population\", row=1, col=1)\n\n # cannot use 'paper' as yref due to bug in sublplot.\n fig.add_shape(type='line',\n xref='x', yref='y',\n x0=2.5, y0=-10, x1=2.5, y1=1000,\n line=dict(color='Black', width=3),\n row=1, col=1)\n\n # create pie chart\n colors = ['#636EFA', '#EF553B'] \n labels = ['total odd year harvest', 'total even year harvest']\n M = split_list(self.harvest_record)\n values = [sum(M['odds']), sum(M['evens'])]\n fig.add_trace(go.Pie(labels=labels, values=values, hoverinfo='label', textinfo='value', marker=dict(colors=colors)), \n row=1, col=2)\n\n # add title\n fig.update_layout(title_text='Results') \n fig.write_html(\"fish_trap_simulation.html\")\n\n \n return fig", "def visualise_food_consumption(data: LogData, directory: Path):\n\n figure, axes = plot.subplots()\n\n food_history = get_food_history(data)\n\n axes.plot(food_history.keys(), food_history.values(), label=\"Food\", color=\"blue\", **{\"ls\": \"--\"})\n\n axes.legend(loc=\"upper left\")\n axes.set_xlim(0, data.duration_secs())\n axes.set_xlabel(\"Time (seconds)\")\n axes.set_ylabel(\"Amount\")\n axes.set_title(\"Food availability\")\n\n plot.savefig(directory / Path(\"food_consumption.png\"))\n plot.close()", "def charts():\n\n global show_gaps\n global timespan\n\n form = 
ChartForm(\n request.form,\n graph_type=timespans.index(timespan),\n graph_gaps=show_gaps\n )\n\n if request.method == 'POST':\n if form.submit_button.data:\n timespan = timespans[int(form.graph_type.data)]\n show_gaps = form.graph_gaps.data\n else:\n flash('Unknown Event', 'error')\n\n chart = Chart(app)\n data_values1, data_values2, data_values3, data_labels = \\\n chart.get_data(timespan, show_gaps)\n\n if len(data_values3) > 0:\n cb = np.array(data_values3)\n peaks = peakutils.indexes(cb, thres=0.02 / max(cb), min_dist=5)\n\n starts_total = len(peaks)\n starts_per_h = int(round(float(starts_total) / \\\n float(hourtable[timespan]), 0))\n else:\n starts_total = 0\n starts_per_h = 0\n\n return render_template(\n 'charts.html',\n form=form,\n user=current_user,\n values1=data_values1,\n values2=data_values2,\n values3=data_values3,\n labels=data_labels,\n burner_total=starts_total,\n burner_ph=starts_per_h,\n )", "def get_ecg_graph():\n titles = ['ecg1', 'ecg2', 'ecg3']\n colors = ['rgb(240,0,0)', 'rgb(0,240,0)', 'rgb(0,0,240)']\n update()\n signames_ecg = queries['signames_ecg']\n signals = queries['signals']\n latesthr = queries['latesthr']\n return html.Div(className='ecg', children=[\n html.Div(style={'display': 'flex', 'height': '40vh'},\n children=[dcc.Graph(\n id=titles[i] + signame,\n style={'width': '100%'},\n figure={\n 'data': [\n {'x': signals[signame]['time'],\n 'y': signals[signame][titles[i]],\n 'mode': 'line', 'name': signame, 'line': {'color':colors[i]}}\n ],\n 'layout': {\n 'font': {'color':'#fff'},\n 'title': '{}-{}'.format(signame, titles[i]),\n 'xaxis': {'title': 'time', 'color': '#fff', 'showgrid': 'False'},\n 'yaxis': {'title': 'voltage (mv)', 'color': '#fff', 'showgrid': 'False', 'range': np.linspace(-2.5, 2.5, 10)},\n 'paper_bgcolor':'#000', 'plot_bgcolor':'#000'\n }\n }\n ) for i in range(len(titles))]\n +\n [html.Div(\n style={'justify-content': 'center', 'display': 'flex',\n 'align-items': 'center', 'width': '10vh', 'font-size': '30pt', 'color': 'white'},\n children=['{}'.format(latesthr[signame][0])])\n ]\n ) for signame in signames_ecg])", "def charts(request):\n \n def histogram():\n x0 = np.random.randn(500)\n # Add 1 to shift the mean of the Gaussian distribution\n x1 = np.random.randn(500) + 1\n\n fig = go.Figure()\n fig.add_trace(go.Histogram(x=x0))\n fig.add_trace(go.Histogram(x=x1))\n\n # Overlay both histograms\n fig.update_layout(barmode='overlay')\n fig.update_layout(title='Histogram')\n # Reduce opacity to see both histograms\n fig.update_traces(opacity=0.75)\n plot_div = plot(fig, output_type='div', include_plotlyjs=False)\n return plot_div\n \n def box_plot():\n np.random.seed(1)\n y0 = np.random.randn(50) - 1\n y1 = np.random.randn(50) + 1\n\n fig = go.Figure()\n fig.add_trace(go.Box(y=y0))\n fig.add_trace(go.Box(y=y1))\n fig.update_layout(title='Box Plot')\n plot_div = plot(fig, output_type='div', include_plotlyjs=False)\n return plot_div\n \n def heat_map():\n \n np.random.seed(1)\n programmers = ['Alex','Nicole','Sara','Etienne','Chelsea','Jody','Marianne']\n base = datetime.datetime.today()\n dates = base - np.arange(180) * datetime.timedelta(days=1)\n z = np.random.poisson(size=(len(programmers), len(dates)))\n\n fig = go.Figure(data=go.Heatmap(\n z=z,\n x=dates,\n y=programmers,\n colorscale='Viridis'))\n\n fig.update_layout(\n title='Heat Map',\n xaxis_nticks=36)\n\n plot_div = plot(fig, output_type='div', include_plotlyjs=False)\n return plot_div\n \n def scatter():\n x1 = [1,2,3,4]\n y1 = [30, 35, 25, 45]\n text1 = ['A', 'B', 'C', 
'D']\n trace = go.Scatter(\n x=x1, y = y1, text= text1, mode='markers+text'\n )\n layout = dict(\n title='Scatter Plots',\n xaxis=dict(range=[min(x1), max(x1)]),\n yaxis=dict(range=[min(y1), max(y1)])\n )\n fig = go.Figure(data=[trace],layout=layout)\n plot_div = plot(fig, output_type='div', include_plotlyjs=False)\n return plot_div\n\n context = {\n 'plot1':heat_map(),\n 'plot2':scatter(),\n 'plot3':histogram(),\n 'plot4':box_plot()\n }\n return render(request, 'base/charts.html', context)", "def plots():\n out = interactive_output(generate_plots, {'gsize':gridSlider, 'ra':RABox, 'ra':RASlider, 'dec':DECBox, 'dec':DECSlider, 'ang':radBox, 'ang':radSlider, 'style':hexDrop})\n return display(widgrid, out)", "def plot(self):\n\t\t\n\t\ttf=tfData(self.shotno,tStart=None,tStop=None)\n\t\t\n\t\t_plt.figure()\n\t\tax1 = _plt.subplot2grid((3,2), (0,1), rowspan=3) #tf\n\t\tax2 = _plt.subplot2grid((3,2), (0,0)) #vf\n\t\tax3 = _plt.subplot2grid((3,2), (1,0),sharex=ax2) #oh\n\t\tax4 = _plt.subplot2grid((3,2), (2, 0),sharex=ax2) #sh\n\t\tfig=_plt.gcf()\n\t\tfig.set_size_inches(10,5)\n\t\t\t\t\n\t\ttStart=-2\n\t\ttStop=20\n\t\t\n\t\tax1.plot(tf.time*1e3,tf.tfBankField)\n\t\tax1.axvspan(tStart,tStop,color='r',alpha=0.3)\n\t\t_plot.finalizeSubplot(ax1,xlabel='Time (s)',xlim=[-150,450],ylabel='TF Field (T)')#,title=self.title\n\t\t\n\t\tax2.plot(self.vfTime*1e3,self.vfBankCurrent*1e-3)\n\t\t_plot.finalizeSubplot(ax2,ylabel='VF Current\\n(kA)')\n\t\t\n\t\tax3.plot(self.ohTime*1e3,self.ohBankCurrent*1e-3)\n\t\t_plot.finalizeSubplot(ax3,ylim=[-20,30],ylabel='OH Current\\n(kA)')\n\t\t\n\t\tax4.plot(self.shTime*1e3,self.shBankCurrent*1e-3)\n\t\t_plot.finalizeSubplot(ax4,ylim=[tStart,tStop],xlabel='Time (s)',ylabel='SH Current\\n(kA)')\n\t\t\n\t\t_plot.finalizeFigure(fig,title=self.title)\n#\t\tfig.set_tight_layout(True)\n\t\t\n\t\treturn fig", "def get_energy(edr, annealing_times, energy_type = 'Potential', out_fig = 'energy_distribution.svg'): # Could be Total-Energy\n fig, ax = plt.subplots(figsize = (16,9))\n data = pd.DataFrame()\n xvg_tmp_file = tempfile.NamedTemporaryFile(suffix='.xvg')\n energy = []\n iterator = range(0, len(annealing_times)-1, 2)\n\n for state, index in tqdm.tqdm(enumerate(iterator), total=len(iterator)):#enumerate(iterator):# # the calculation is per pair of times, beetween the first to time the temperature was keep constant, then the system was heated and repeated again.\n run = tools.run(f\"export GMX_MAXBACKUP=-1; echo {energy_type} | gmx energy -f {edr} -b {annealing_times[index]} -e {annealing_times[index + 1]} -o {xvg_tmp_file.name} | grep \\'{energy_type.replace('-',' ')}\\'\")\n energy.append(float(run.stdout.split()[-5]))\n \"\"\"\n Energy Average Err.Est. 
RMSD Tot-Drift\n -------------------------------------------------------------------------------\n Potential -1.30028e+06 -- 1682.1 -2422.24 (kJ/mol)\n Total Energy -952595 -- 2606.81 -3688.3 (kJ/mol)\n \"\"\"\n # Getting the histograms and checking for the same len in all intervals\n if state == 0:\n data[state] = xvg.XVG(xvg_tmp_file.name).data[:,1]\n else:\n xvg_data = xvg.XVG(xvg_tmp_file.name).data[:,1]\n if xvg_data.shape[0] > data.shape[0]:\n data[state] = xvg_data[:data.shape[0]]\n else:\n data = data.iloc[:xvg_data.shape[0]]\n data[state] = xvg_data\n\n\n print(data)\n sns.histplot(data = data, element='poly', stat = 'probability', axes = ax)\n ax.set(\n xlabel = f'{energy_type} [kJ/mol]',\n ylabel = 'Probability',\n title = f'Distribution of {energy_type}')\n # plt.show()\n fig.savefig(out_fig)\n return energy", "def plot_results(outputs_table_totals, elec_benefits, gas_benefits):\n summer_months = [6, 7, 8, 9]\n shoulder_months = [3, 4, 5, 10]\n winter_months = [11, 12, 1, 2]\n peak_hours = [16, 17, 18, 19, 20]\n pct_hours_in_summer = 2928 / 8760\n pct_hours_in_shoulder = 2952 / 8760\n pct_hours_in_winter = 2880 / 8760\n\n trc_costs_record = outputs_table_totals[\"TRC Costs ($)\"]\n pac_costs_record = outputs_table_totals[\"PAC Costs ($)\"]\n trc_record = outputs_table_totals[\"TRC\"]\n pac_record = outputs_table_totals[\"PAC\"]\n lifecycle_net_mwh = outputs_table_totals[\"Electricity Lifecycle Net Savings (MWh)\"]\n lifecycle_net_therms = outputs_table_totals[\"Gas Lifecycle Net Savings (Therms)\"]\n lifecycle_net_ghg = outputs_table_totals[\"Total Lifecycle GHG Savings (Tons)\"]\n\n # Getting variables for plots\n elec_benefits_cols = (\n [\"hourly_savings\"] + ACC_COMPONENTS_ELECTRICITY + [\"av_csts_levelized\"]\n )\n\n elec_benefits_hour_month_year = (\n elec_benefits.groupby([\"hour_of_day\", \"year\", \"month\"])\n .agg(\n {\n **{component: \"sum\" for component in ACC_COMPONENTS_ELECTRICITY},\n **{\n \"hourly_savings\": \"sum\",\n \"marginal_ghg\": \"sum\",\n \"av_csts_levelized\": \"mean\",\n },\n }\n )\n .reset_index()\n )\n\n total_benefits = list(\n elec_benefits_hour_month_year.groupby([\"hour_of_day\"])[\"total\"].sum()\n )\n\n summer_benefits = list(\n elec_benefits_hour_month_year[\n (elec_benefits_hour_month_year[\"month\"].isin(summer_months))\n ]\n .groupby([\"hour_of_day\"])[\"total\"]\n .sum()\n )\n summer_peak_benefits = elec_benefits_hour_month_year[\"total\"][\n (elec_benefits_hour_month_year[\"month\"].isin(summer_months))\n & (elec_benefits_hour_month_year[\"hour_of_day\"].isin(peak_hours))\n ].sum()\n shoulder_benefits = list(\n elec_benefits_hour_month_year[\n (elec_benefits_hour_month_year[\"month\"].isin(shoulder_months))\n ]\n .groupby([\"hour_of_day\"])[\"total\"]\n .sum()\n )\n winter_benefits = list(\n elec_benefits_hour_month_year[\n (elec_benefits_hour_month_year[\"month\"].isin(winter_months))\n ]\n .groupby([\"hour_of_day\"])[\"total\"]\n .sum()\n )\n total_savings = list(\n elec_benefits_hour_month_year.groupby([\"hour_of_day\"])[\"hourly_savings\"].sum()\n )\n summer_savings = list(\n elec_benefits_hour_month_year[\n (elec_benefits_hour_month_year[\"month\"].isin(summer_months))\n ]\n .groupby([\"hour_of_day\"])[\"hourly_savings\"]\n .sum()\n )\n shoulder_savings = list(\n elec_benefits_hour_month_year[\n ((elec_benefits_hour_month_year[\"month\"].isin(shoulder_months)))\n ]\n .groupby([\"hour_of_day\"])[\"hourly_savings\"]\n .sum()\n )\n summer_peak_savings = elec_benefits_hour_month_year[\"hourly_savings\"][\n 
(elec_benefits_hour_month_year[\"month\"].isin(summer_months))\n & (elec_benefits_hour_month_year[\"hour_of_day\"].isin(peak_hours))\n ].sum()\n winter_savings = list(\n elec_benefits_hour_month_year[\n (elec_benefits_hour_month_year[\"month\"].isin(winter_months))\n ]\n .groupby([\"hour_of_day\"])[\"hourly_savings\"]\n .sum()\n )\n total_av_csts_avg = list(\n elec_benefits_hour_month_year.groupby([\"hour_of_day\"])[\n \"av_csts_levelized\"\n ].mean()\n )\n summer_av_csts_avg = list(\n pct_hours_in_summer\n * elec_benefits_hour_month_year[\n (elec_benefits_hour_month_year[\"month\"].isin(summer_months))\n ]\n .groupby([\"hour_of_day\"])[\"av_csts_levelized\"]\n .mean()\n )\n summer_peak_av_csts_avg = elec_benefits_hour_month_year[\"av_csts_levelized\"][\n (elec_benefits_hour_month_year[\"month\"].isin(summer_months))\n & (elec_benefits_hour_month_year[\"hour_of_day\"].isin(peak_hours))\n ].mean()\n shoulder_av_csts_avg = list(\n pct_hours_in_shoulder\n * elec_benefits_hour_month_year[\n ((elec_benefits_hour_month_year[\"month\"].isin(shoulder_months)))\n ]\n .groupby([\"hour_of_day\"])[\"av_csts_levelized\"]\n .mean()\n )\n winter_av_csts_avg = list(\n pct_hours_in_winter\n * elec_benefits_hour_month_year[\n (elec_benefits_hour_month_year[\"month\"].isin(winter_months))\n ]\n .groupby([\"hour_of_day\"])[\"av_csts_levelized\"]\n .mean()\n )\n\n elec_benefits_sum_by_hod = (\n elec_benefits[elec_benefits_cols].groupby(elec_benefits[\"hour_of_day\"]).sum()\n )\n elec_benefits_hoy = (\n elec_benefits[elec_benefits_cols]\n .groupby(elec_benefits[\"hour_of_year\"])\n .sum()\n .cumsum()\n .reset_index()\n )\n sav_avcsts_288 = (\n elec_benefits.groupby([\"hour_of_day\", \"month\"])\n .agg(\n {\n **{component: \"sum\" for component in ACC_COMPONENTS_ELECTRICITY},\n **{\n \"hourly_savings\": \"sum\",\n \"marginal_ghg\": \"sum\",\n \"av_csts_levelized\": \"mean\",\n },\n }\n )\n .reset_index()\n )\n sav_avcsts_288 = sav_avcsts_288[\n [\"hour_of_day\", \"month\", \"hourly_savings\", \"total\", \"marginal_ghg\"]\n ]\n ghgsav = sav_avcsts_288.pivot(\"hour_of_day\", \"month\", \"marginal_ghg\")\n sav = sav_avcsts_288.pivot(\"hour_of_day\", \"month\", \"hourly_savings\")\n avcsts = sav_avcsts_288.pivot(\"hour_of_day\", \"month\", \"total\")\n\n # savings load shape plot\n fig0, (ax1, ax2, ax3) = plt.subplots(\n 1, 3, figsize=(18, 5), sharex=True, sharey=True\n )\n plt.subplots_adjust(wspace=0, hspace=0)\n axs = [ax1, ax2, ax3]\n hod = elec_benefits_sum_by_hod.index\n legend_labels1 = [\"Summer\"]\n legend_labels2 = [\"Shoulder\"]\n legend_labels3 = [\"Winter\"]\n\n ax1.plot(\n hod,\n summer_savings,\n c=\"firebrick\",\n linewidth=5,\n marker=\"$\\u25EF$\",\n markersize=13,\n linestyle=\"-\",\n )\n ax2.plot(\n hod,\n shoulder_savings,\n c=\"royalblue\",\n linewidth=5,\n marker=\"$\\u2206$\",\n markersize=13,\n linestyle=\"-\",\n )\n ax3.plot(\n hod,\n winter_savings,\n c=\"green\",\n linewidth=5,\n marker=\"$\\u25A1$\",\n markersize=13,\n linestyle=\"-\",\n )\n ax1.axhline(y=0, color=\"gray\", linewidth=1, linestyle=\"--\")\n ax2.axhline(y=0, color=\"gray\", linewidth=1, linestyle=\"--\")\n ax3.axhline(y=0, color=\"gray\", linewidth=1, linestyle=\"--\")\n # Shade peak region\n ax1.axvspan(16, 21, alpha=0.2, color=\"grey\")\n\n leg1 = ax1.legend(legend_labels1, fontsize=14, loc=\"upper left\", frameon=False)\n for line, text in zip(leg1.get_lines(), leg1.get_texts()):\n text.set_color(line.get_color())\n leg2 = ax2.legend(legend_labels2, fontsize=14, loc=\"upper left\", frameon=False)\n for line, text in 
zip(leg2.get_lines(), leg2.get_texts()):\n text.set_color(line.get_color())\n leg3 = ax3.legend(legend_labels3, fontsize=14, loc=\"upper left\", frameon=False)\n for line, text in zip(leg3.get_lines(), leg3.get_texts()):\n text.set_color(line.get_color())\n\n ax1.set_ylabel(\"Savings (MWh/hr)\", size=16)\n ax2.set_xlabel(\"Hour of Day\", size=16)\n\n if max(summer_savings + shoulder_savings + winter_savings) < 0:\n ymax = 0\n else:\n ymax = max(summer_savings + shoulder_savings + winter_savings)\n if min(summer_savings + shoulder_savings + winter_savings) > 0:\n ymin = 0\n else:\n ymin = min(summer_savings + shoulder_savings + winter_savings)\n\n # Tick and lebel parameters\n ax1.set_ylim(ymin * 1.08, ymax * 1.08)\n ax1.set_yticks(\n np.arange(\n ymin * 1.08,\n ymax * 1.08,\n step=max(round(ymax - ymin, 3) / 5, int((round(ymax - ymin, 0)) / 4)),\n )\n )\n ax2.set_yticks(\n np.arange(\n ymin * 1.08,\n ymax * 1.08,\n step=max(round(ymax - ymin, 3) / 5, int((round(ymax - ymin, 0)) / 4)),\n )\n )\n ax3.set_yticks(\n np.arange(\n ymin * 1.08,\n ymax * 1.08,\n step=max(round(ymax - ymin, 3) / 5, int((round(ymax - ymin, 0)) / 4)),\n )\n )\n ax1.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax2.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax3.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax1.yaxis.set_minor_locator(AutoMinorLocator())\n ax1.set_xticks(np.arange(0, 24, step=4))\n ax1.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=7, width=2, labelsize=14\n )\n ax1.set_xlim(hod.min() - hod.max() * 0.04, hod.max() * 1.04)\n ax1.xaxis.set_minor_locator(AutoMinorLocator())\n ax2.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=7, width=2, labelsize=14\n )\n ax2.set_xlim(hod.min() - hod.max() * 0.04, hod.max() * 1.04)\n ax2.xaxis.set_minor_locator(AutoMinorLocator())\n ax3.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=7, width=2, labelsize=14\n )\n ax3.set_xlim(hod.min() - hod.max() * 0.04, hod.max() * 1.04)\n ax3.xaxis.set_minor_locator(AutoMinorLocator())\n\n # Set plot title, size, and position\n ax1.set_title(\"Seasonal Savings Load Shapes\", size=18, loc=\"left\").set_position(\n [0, 1.03]\n )\n\n # benefits_seasonal_shape_plot\n fig1, (ax1, ax2, ax3) = plt.subplots(\n 1, 3, figsize=(18, 5), sharex=True, sharey=True\n )\n plt.subplots_adjust(wspace=0, hspace=0)\n axs = [ax1, ax2, ax3]\n hod = elec_benefits_sum_by_hod.index\n legend_labels1 = [\"Summer\"]\n legend_labels2 = [\"Shoulder\"]\n legend_labels3 = [\"Winter\"]\n\n ax1.plot(\n hod,\n summer_benefits,\n c=\"firebrick\",\n linewidth=5,\n marker=\"$\\u2B24$\",\n markersize=13,\n linestyle=\":\",\n )\n ax2.plot(\n hod,\n shoulder_benefits,\n c=\"royalblue\",\n linewidth=5,\n marker=\"$\\u25B2$\",\n markersize=13,\n linestyle=\":\",\n )\n ax3.plot(\n hod,\n winter_benefits,\n c=\"green\",\n linewidth=5,\n marker=\"$\\u25A0$\",\n markersize=13,\n linestyle=\":\",\n )\n ax1.axhline(y=0, color=\"gray\", linewidth=1, linestyle=\"--\")\n ax2.axhline(y=0, color=\"gray\", linewidth=1, linestyle=\"--\")\n ax3.axhline(y=0, color=\"gray\", linewidth=1, linestyle=\"--\")\n # Shade peak region\n ax1.axvspan(16, 21, alpha=0.2, color=\"grey\")\n\n leg1 = ax1.legend(legend_labels1, fontsize=15, loc=\"upper left\", frameon=False)\n for line, text in zip(leg1.get_lines(), leg1.get_texts()):\n text.set_color(line.get_color())\n leg2 
= ax2.legend(legend_labels2, fontsize=15, loc=\"upper left\", frameon=False)\n for line, text in zip(leg2.get_lines(), leg2.get_texts()):\n text.set_color(line.get_color())\n leg3 = ax3.legend(legend_labels3, fontsize=15, loc=\"upper left\", frameon=False)\n for line, text in zip(leg3.get_lines(), leg3.get_texts()):\n text.set_color(line.get_color())\n\n ax1.set_ylabel(\"TRC Benefits ($/hr)\", size=16)\n ax2.set_xlabel(\"Hour of Day\", size=16)\n\n if max(summer_benefits + shoulder_benefits + winter_benefits) < 0:\n ymax = 0\n else:\n ymax = max(summer_benefits + shoulder_benefits + winter_benefits)\n if min(summer_benefits + shoulder_benefits + winter_benefits) > 0:\n ymin = 0\n else:\n ymin = min(summer_benefits + shoulder_benefits + winter_benefits)\n\n # Tick and label parameters\n ax1.set_ylim(ymin * 1.08, ymax * 1.08)\n ax1.set_yticks(\n np.arange(\n ymin * 1.08,\n ymax * 1.08,\n step=max(round(ymax - ymin, 3) / 5, int((round(ymax - ymin, 0)) / 4)),\n )\n )\n ax2.set_yticks(\n np.arange(\n ymin * 1.08,\n ymax * 1.08,\n step=max(round(ymax - ymin, 3) / 5, int((round(ymax - ymin, 0)) / 4)),\n )\n )\n ax3.set_yticks(\n np.arange(\n ymin * 1.08,\n ymax * 1.08,\n step=max(round(ymax - ymin, 3) / 5, int((round(ymax - ymin, 0)) / 4)),\n )\n )\n ax1.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax2.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax3.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax1.yaxis.set_minor_locator(AutoMinorLocator())\n ax1.set_xticks(np.arange(0, 24, step=4))\n ax1.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=7, width=2, labelsize=14\n )\n ax1.set_xlim(hod.min() - hod.max() * 0.04, hod.max() * 1.04)\n ax1.xaxis.set_minor_locator(AutoMinorLocator())\n ax2.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=7, width=2, labelsize=14\n )\n ax2.set_xlim(hod.min() - hod.max() * 0.04, hod.max() * 1.04)\n ax2.xaxis.set_minor_locator(AutoMinorLocator())\n ax3.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=7, width=2, labelsize=14\n )\n ax3.set_xlim(hod.min() - hod.max() * 0.04, hod.max() * 1.04)\n ax3.xaxis.set_minor_locator(AutoMinorLocator())\n\n # Set plot title, size, and position\n ax1.set_title(\n \"Seasonal TRC Benefits by Hour ($)\", size=18, loc=\"left\"\n ).set_position([0, 1.03])\n\n # sum_hourly_plot\n fig2 = plt.figure(figsize=(12, 7), dpi=250)\n ax = fig2.gca()\n colors = [\n \"royalblue\",\n \"black\",\n \"pink\",\n \"firebrick\",\n \"gray\",\n \"darkviolet\",\n \"darkorange\",\n \"green\",\n \"saddlebrown\",\n ]\n legend_labels = []\n x = 1\n while x <= len(ACC_COMPONENTS_ELECTRICITY[1:]):\n if x == 1:\n ax.bar(\n hod,\n elec_benefits_sum_by_hod[ACC_COMPONENTS_ELECTRICITY[x]],\n color=colors[x - 1],\n )\n legend_labels.append(\n re.findall(\n \".*Name: (.*),\",\n str(elec_benefits_sum_by_hod[ACC_COMPONENTS_ELECTRICITY[x]]),\n )[0]\n )\n x += 1\n else:\n ax.bar(\n hod,\n elec_benefits_sum_by_hod[ACC_COMPONENTS_ELECTRICITY[x]],\n bottom=elec_benefits_sum_by_hod.iloc[:, 2 : x + 1].sum(axis=1),\n color=colors[x - 1],\n )\n legend_labels.append(\n re.findall(\n \".*Name: (.*),\",\n str(elec_benefits_sum_by_hod[ACC_COMPONENTS_ELECTRICITY[x]]),\n )[0]\n )\n x += 1\n\n # Set x and y limits based on min and max values\n ymax = elec_benefits_sum_by_hod.iloc[:, 2:x].sum(axis=1).max()\n if elec_benefits_sum_by_hod.iloc[:, 2:x].sum(axis=1).min() > 
0:\n ymin = 0\n else:\n ymin = elec_benefits_sum_by_hod.iloc[:, 2:x].sum(axis=1).min()\n\n ax.set_xlim(hod.min() - hod.max() * 0.04, hod.max() * 1.04)\n ax.set_ylim(ymin * 1.1, ymax * 1.08)\n\n # Set x and y axis labels\n ax.set_xlabel(\"Hour of Day\", size=17, labelpad=5)\n ax.set_ylabel(\"$ Avoided Costs\", size=17)\n\n # Set plot title, size, and position\n ax.set_title(\n \"Sum of Electric Avoided Costs by Component and Hour of Day\",\n size=17,\n loc=\"left\",\n )\n\n # Tick and lebel parameters\n ax.tick_params(bottom=True, top=False, left=True, right=False)\n ax.set_xticks(np.arange(0, 24, step=4))\n ax.set_yticks(\n np.arange(\n int(round(ymin * 1.1, 0)),\n ymax * 1.08,\n step=max(round(ymax - ymin, 2) / 5, int((round(ymax - ymin, 0)) / 4)),\n )\n )\n ax.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n\n # Minor ticks\n ax.xaxis.set_minor_locator(AutoMinorLocator())\n ax.yaxis.set_minor_locator(AutoMinorLocator())\n\n # Legend\n plt.legend(\n legend_labels,\n bbox_to_anchor=(1, 1),\n fontsize=12,\n loc=\"upper left\",\n frameon=False,\n )\n\n # avoided_cost_summary_plot\n fig3, (ax1, ax2, ax3) = plt.subplots(\n 3, 1, figsize=(6, 10), sharex=True, sharey=False\n )\n axs = [ax1, ax2, ax3]\n hod = elec_benefits_sum_by_hod.index\n legend_labels = [\"Total\", \"Summer\", \"Shoulder\", \"Winter\"]\n\n ax1.plot(\n hod,\n total_benefits,\n c=\"royalblue\",\n marker=\"$\\u25EF$\",\n markersize=10,\n linewidth=3,\n linestyle=\"-\",\n )\n ax1.plot(hod, summer_benefits, c=\"darkorchid\", linewidth=1, linestyle=\"--\")\n ax1.plot(hod, shoulder_benefits, c=\"olivedrab\", linewidth=1, linestyle=\":\")\n ax1.plot(hod, winter_benefits, c=\"teal\", linewidth=1, linestyle=\"-\")\n ax2.plot(\n hod,\n total_savings,\n c=\"firebrick\",\n marker=\"$\\u2206$\",\n markersize=10,\n linewidth=3,\n linestyle=\"-\",\n )\n ax2.plot(hod, summer_savings, c=\"darkorchid\", linewidth=1, linestyle=\"--\")\n ax2.plot(hod, shoulder_savings, c=\"olivedrab\", linewidth=1, linestyle=\":\")\n ax2.plot(hod, winter_savings, c=\"teal\", linewidth=1, linestyle=\"-\")\n ax3.plot(\n hod,\n total_av_csts_avg,\n c=\"green\",\n marker=\"$\\u25A0$\",\n markersize=10,\n linewidth=3,\n linestyle=\"-\",\n )\n ax3.plot(hod, summer_av_csts_avg, c=\"darkorchid\", linewidth=1, linestyle=\"--\")\n ax3.plot(hod, shoulder_av_csts_avg, c=\"olivedrab\", linewidth=1, linestyle=\":\")\n ax3.plot(hod, winter_av_csts_avg, c=\"teal\", linewidth=1, linestyle=\"-\")\n\n leg1 = ax1.legend(legend_labels, fontsize=11, loc=\"upper left\", frameon=False)\n for line, text in zip(leg1.get_lines(), leg1.get_texts()):\n text.set_color(line.get_color())\n leg2 = ax2.legend(legend_labels, fontsize=11, loc=\"upper left\", frameon=False)\n for line, text in zip(leg2.get_lines(), leg2.get_texts()):\n text.set_color(line.get_color())\n leg3 = ax3.legend(legend_labels, fontsize=11, loc=\"upper left\", frameon=False)\n for line, text in zip(leg3.get_lines(), leg3.get_texts()):\n text.set_color(line.get_color())\n\n ax3.set_xticks(np.arange(0, 24, step=4))\n ax3.set_xlabel(\"Hour of Day\", size=14, labelpad=5)\n ax3.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=6, width=2, labelsize=12\n )\n ax3.set_xlim(hod.min() - hod.max() * 0.04, hod.max() * 1.04)\n ax3.xaxis.set_minor_locator(AutoMinorLocator())\n\n ax1.set_ylabel(\"TRC Benefits ($)\", size=14)\n ax2.set_ylabel(\"Savings (MWh)\", 
size=14)\n ax3.set_ylabel(\"Av. Cost ($/MWh)\", size=14)\n\n if max(total_benefits + summer_benefits + shoulder_benefits + winter_benefits) < 0:\n ymax1 = 0\n else:\n ymax1 = max(\n total_benefits + summer_benefits + shoulder_benefits + winter_benefits\n )\n if min(total_benefits + summer_benefits + shoulder_benefits + winter_benefits) > 0:\n ymin1 = 0\n else:\n ymin1 = min(\n total_benefits + summer_benefits + shoulder_benefits + winter_benefits\n )\n if max(total_savings + summer_savings + shoulder_savings + winter_savings) < 0:\n ymax2 = 0\n else:\n ymax2 = max(total_savings + summer_savings + shoulder_savings + winter_savings)\n if min(total_savings + summer_savings + shoulder_savings + winter_savings) > 0:\n ymin2 = 0\n else:\n ymin2 = min(total_savings + summer_savings + shoulder_savings + winter_savings)\n if (\n max(\n total_av_csts_avg\n + summer_av_csts_avg\n + shoulder_av_csts_avg\n + winter_av_csts_avg\n )\n < 0\n ):\n ymax3 = 0\n else:\n ymax3 = max(\n total_av_csts_avg\n + summer_av_csts_avg\n + shoulder_av_csts_avg\n + winter_av_csts_avg\n )\n if (\n min(\n total_av_csts_avg\n + summer_av_csts_avg\n + shoulder_av_csts_avg\n + winter_av_csts_avg\n )\n > 0\n ):\n ymin3 = 0\n else:\n ymin3 = min(\n total_av_csts_avg\n + summer_av_csts_avg\n + shoulder_av_csts_avg\n + winter_av_csts_avg\n )\n\n # Tick and lebel parameters\n ax1.set_ylim(ymin1 * 1.08, ymax1 * 1.08)\n ax2.set_ylim(ymin2 * 1.08, ymax2 * 1.08)\n ax3.set_ylim(ymin3 * 1.08, ymax3 * 1.08)\n\n ax1.set_yticks(\n np.arange(\n ymin1 * 1.08,\n ymax1 * 1.08,\n step=max(round(ymax1 - ymin1, 3) / 5, int((round(ymax1 - ymin1, 0)) / 4)),\n )\n )\n ax2.set_yticks(\n np.arange(\n ymin2 * 1.08,\n ymax2 * 1.08,\n step=max(round(ymax2 - ymin2, 3) / 5, int((round(ymax2 - ymin2, 0)) / 4)),\n )\n )\n ax3.set_yticks(\n np.arange(\n ymin3 * 1.08,\n ymax3 * 1.08,\n step=max(round(ymax3 - ymin3, 3) / 5, int((round(ymax3 - ymin3, 0)) / 4)),\n )\n )\n\n ax1.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=12\n )\n ax2.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=12\n )\n ax3.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=12\n )\n\n # Shade peak region\n ax1.axvspan(16, 21, alpha=0.2, color=\"grey\")\n ax2.axvspan(16, 21, alpha=0.2, color=\"grey\")\n ax3.axvspan(16, 21, alpha=0.2, color=\"grey\")\n\n # Print key information\n plt.annotate(\n \"Electric Benefits = $\" + str(round(elec_benefits[\"total\"].sum(), 2)),\n xy=(350, 530),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"Gas Benefits = $\" + str(round(gas_benefits, 2)),\n xy=(350, 505),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"Total Benefits = $\"\n + str(round(elec_benefits[\"total\"].sum() + gas_benefits, 2)),\n xy=(350, 480),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"TRC Costs = $\" + str(trc_costs_record),\n xy=(350, 455),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"PAC Costs = $\" + str(pac_costs_record),\n xy=(350, 430),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"TRC = \" + str(trc_record),\n xy=(350, 405),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"PAC = \" + str(pac_record),\n xy=(350, 380),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"Net Lifecycle Electric Savings = \" + str(lifecycle_net_mwh) + \" MWh\",\n xy=(350, 335),\n xycoords=\"axes points\",\n fontsize=18,\n )\n 
plt.annotate(\n \"Net Lifecycle Gas Savings = \" + str(lifecycle_net_therms) + \" Therms\",\n xy=(350, 310),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"Net Lifecycle GHG Savings = \" + str(lifecycle_net_ghg) + \" Tons\",\n xy=(350, 285),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n str(round(100 * ((summer_peak_savings) / sum(total_savings)), 1))\n + \"% MWh savings during summer peak period\",\n xy=(350, 260),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n str(round(100 * ((summer_peak_benefits) / sum(total_benefits)), 1))\n + \"% Electric TRC benefits from summer peak period\",\n xy=(350, 235),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"Electric Benefits per MWh = $\"\n + str(round(elec_benefits[\"total\"].sum() / lifecycle_net_mwh, 2)),\n xy=(350, 210),\n xycoords=\"axes points\",\n fontsize=18,\n )\n plt.annotate(\n \"Typical Avoided Cost per MWh = $\"\n + str(round(elec_benefits[\"av_csts_levelized\"].mean(), 2)),\n xy=(350, 145),\n xycoords=\"axes points\",\n fontsize=18,\n )\n\n # Set plot title, size, and position\n ax1.set_title(\n \"Savings and Avoided Cost Profiles\", size=16, loc=\"left\"\n ).set_position([0, 1.03])\n\n # marginal_ghg_savings_plot\n cmp = sns.diverging_palette(16, 260, l=35, n=25, as_cmap=True)\n\n fig4 = plt.figure(figsize=(8, 6), dpi=100)\n ax1 = fig4.gca()\n y_ticks = [\n 0,\n \"\",\n 2,\n \"\",\n 4,\n \"\",\n 6,\n \"\",\n 8,\n \"\",\n 10,\n \"\",\n 12,\n \"\",\n 14,\n \"\",\n 16,\n \"\",\n 18,\n \"\",\n 20,\n \"\",\n 22,\n ]\n hmp = sns.heatmap(ghgsav, cmap=cmp, ax=ax1, yticklabels=y_ticks, center=0.00)\n ax1.set_xlabel(\"Month\", size=15)\n ax1.set_ylabel(\"Hour of Day\", size=15)\n ax1.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=6, width=2, labelsize=13\n )\n ax1.tick_params(\n which=\"major\",\n axis=\"y\",\n direction=\"out\",\n length=6,\n width=2,\n labelsize=13,\n rotation=0,\n )\n ax1.set_title(\"Electric GHG Savings by Month and Hour\", size=15, loc=\"left\", pad=8)\n cbar1 = hmp.collections[0].colorbar\n cbar1.ax.tick_params(labelsize=14)\n plt.annotate(\"Sum GHG\", xy=(370, 352), xycoords=\"axes points\", fontsize=12)\n plt.annotate(\"Savings (Tons)\", xy=(370, 336), xycoords=\"axes points\", fontsize=12)\n\n # month_hour_savings_benefits_plot\n fig5, (ax1, ax2) = plt.subplots(1, 2, figsize=(21, 10), dpi=200)\n y_ticks = [\n 0,\n \"\",\n 2,\n \"\",\n 4,\n \"\",\n 6,\n \"\",\n 8,\n \"\",\n 10,\n \"\",\n 12,\n \"\",\n 14,\n \"\",\n 16,\n \"\",\n 18,\n \"\",\n 20,\n \"\",\n 22,\n ]\n fleft = sns.heatmap(sav, cmap=cmp, ax=ax1, yticklabels=y_ticks, center=0.00)\n fright = sns.heatmap(avcsts, cmap=cmp, ax=ax2, yticklabels=y_ticks, center=0.00)\n ax1.set_xlabel(\"Month\", size=22)\n ax1.set_ylabel(\"Hour of Day\", size=22)\n ax2.set_xlabel(\"Month\", size=22)\n ax2.set_ylabel(\"Hour of Day\", size=22)\n ax1.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=6, width=2, labelsize=18\n )\n ax1.tick_params(\n which=\"major\",\n axis=\"y\",\n direction=\"out\",\n length=6,\n width=2,\n labelsize=18,\n rotation=0,\n )\n ax2.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=6, width=2, labelsize=18\n )\n ax2.tick_params(\n which=\"major\",\n axis=\"y\",\n direction=\"out\",\n length=6,\n width=2,\n labelsize=18,\n rotation=0,\n )\n ax1.set_title(\n \"MWh Savings by Month and Hour\", size=24, loc=\"left\", pad=15\n ).set_position([0, 1.1])\n ax2.set_title(\"$ Benefits by Month and Hour\", size=24, loc=\"left\", 
pad=15)\n fig4.tight_layout(pad=2.0)\n cbar1 = fleft.collections[0].colorbar\n cbar1.ax.tick_params(labelsize=18)\n cbar2 = fright.collections[0].colorbar\n cbar2.ax.tick_params(labelsize=18)\n plt.annotate(\"Sum MWh\", xy=(-200, 585), xycoords=\"axes points\", fontsize=20)\n plt.annotate(\"Savings\", xy=(-193, 560), xycoords=\"axes points\", fontsize=20)\n plt.annotate(\"Sum TRC\", xy=(435, 585), xycoords=\"axes points\", fontsize=20)\n plt.annotate(\"Benefits\", xy=(442, 560), xycoords=\"axes points\", fontsize=20)\n\n # savings_benefits_cumulative_sum_plot\n fig6 = plt.figure(figsize=(12, 7), dpi=250)\n ax1 = fig6.gca()\n ax1.plot(\n elec_benefits_hoy[\"hour_of_year\"],\n elec_benefits_hoy[\"hourly_savings\"],\n color=\"royalblue\",\n linewidth=3,\n )\n ax2 = ax1.twinx()\n ax2.plot(\n elec_benefits_hoy[\"hour_of_year\"],\n elec_benefits_hoy[\"total\"],\n color=\"firebrick\",\n linewidth=3,\n linestyle=\"--\",\n )\n ax2.axhline(y=0, color=\"gray\", linewidth=0.7, linestyle=\"--\")\n\n # Set x and y limits based on min and max values\n\n if (\n elec_benefits_hoy[\"hourly_savings\"].max() >= 0\n and elec_benefits_hoy[\"total\"].max() >= 0\n ):\n ymax1 = elec_benefits_hoy[\"hourly_savings\"].max()\n ymax2 = elec_benefits_hoy[\"total\"].max()\n elif (\n elec_benefits_hoy[\"hourly_savings\"].max() < 0\n and elec_benefits_hoy[\"total\"].max() < 0\n ):\n ymax1 = 0\n ymax2 = 0\n elif (\n elec_benefits_hoy[\"hourly_savings\"].max() < 0\n and elec_benefits_hoy[\"total\"].max() > 0\n ):\n ymax1 = (\n -1\n * elec_benefits_hoy[\"hourly_savings\"].min()\n * (\n elec_benefits_hoy[\"total\"].max()\n / (elec_benefits_hoy[\"total\"].max() - elec_benefits_hoy[\"total\"].min())\n )\n / (\n 1\n - elec_benefits_hoy[\"total\"].max()\n / (elec_benefits_hoy[\"total\"].max() - elec_benefits_hoy[\"total\"].min())\n )\n )\n ymax2 = elec_benefits_hoy[\"total\"].max()\n else:\n ymax1 = 0\n ymax2 = (\n -1\n * elec_benefits_hoy[\"total\"].min()\n * (\n elec_benefits_hoy[\"hourly_savings\"].max()\n / (\n elec_benefits_hoy[\"hourly_savings\"].max()\n - elec_benefits_hoy[\"hourly_savings\"].min()\n )\n )\n )\n\n if (\n elec_benefits_hoy[\"hourly_savings\"].min() <= 0\n and elec_benefits_hoy[\"total\"].min() <= 0\n ):\n ymin1 = elec_benefits_hoy[\"hourly_savings\"].min()\n ymin2 = elec_benefits_hoy[\"total\"].min()\n elif (\n elec_benefits_hoy[\"hourly_savings\"].min() > 0\n and elec_benefits_hoy[\"total\"].min() > 0\n ):\n ymin1 = 0\n ymin2 = 0\n elif (\n elec_benefits_hoy[\"hourly_savings\"].min() > 0\n and elec_benefits_hoy[\"total\"].min() < 0\n ):\n ymin1 = (\n -1\n * elec_benefits_hoy[\"hourly_savings\"].max()\n * (\n elec_benefits_hoy[\"total\"].min()\n / (elec_benefits_hoy[\"total\"].min() - elec_benefits_hoy[\"total\"].max())\n )\n / (\n 1\n - elec_benefits_hoy[\"total\"].min()\n / (elec_benefits_hoy[\"total\"].min() - elec_benefits_hoy[\"total\"].max())\n )\n )\n ymin2 = elec_benefits_hoy[\"total\"].min()\n else:\n ymin1 = 0\n ymin2 = (\n -1\n * elec_benefits_hoy[\"total\"].min()\n * (\n elec_benefits_hoy[\"hourly_savings\"].min()\n / (\n elec_benefits_hoy[\"hourly_savings\"].min()\n - elec_benefits_hoy[\"hourly_savings\"].min()\n )\n )\n )\n\n # Set x and y axis limits\n ax1.set_xlim(-340, 9000)\n ax1.set_ylim(ymin1 * 1.08, ymax1 * 1.08)\n ax2.set_ylim(ymin2 * 1.08, ymax2 * 1.08)\n\n # Set x and y axis labels\n ax1.set_xlabel(\"Hour of Year\", size=17, labelpad=5)\n ax1.set_ylabel(\"Net Lifecycle Savings (MWh)\", size=17)\n ax2.set_ylabel(\"$ TRC Benefits\", size=17, rotation=-90, labelpad=20)\n\n # Set plot 
title, size, and position\n ax1.set_title(\n \"Cumulative Savings and TRC Benefits by Hour of Year\",\n size=17,\n loc=\"left\",\n pad=8,\n )\n\n # Tick and label parameters\n ax1.set_xticks(np.arange(0, 8760, step=1000))\n ax1.set_yticks(\n np.arange(\n int(round(ymin1 * 1.1, 0)),\n ymax1 * 1.08,\n step=max(round(ymax1 - ymin1, 2) / 5, int((round(ymax1 - ymin1, 0)) / 4)),\n )\n )\n ax1.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax1.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n\n ax2.set_xticks(np.arange(0, 8760, step=1000))\n ax2.set_yticks(\n np.arange(\n int(round(ymin2 * 1.1, 0)),\n ymax2 * 1.08,\n step=max(round(ymax2 - ymin2, 2) / 5, int((round(ymax2 - ymin2, 0)) / 4)),\n )\n )\n ax2.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax2.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n\n # Minor ticks\n ax1.xaxis.set_minor_locator(AutoMinorLocator())\n ax1.yaxis.set_minor_locator(AutoMinorLocator())\n ax2.yaxis.set_minor_locator(AutoMinorLocator())\n\n # Legend\n ax1.legend(\n [\"Savings\"],\n fontsize=12,\n bbox_to_anchor=(0.02, 1),\n loc=\"upper left\",\n frameon=False,\n )\n ax2.legend(\n [\"TRC Benefits\"],\n fontsize=12,\n bbox_to_anchor=(0.02, 0.95),\n loc=\"upper left\",\n frameon=False,\n )\n\n fig7 = plt.figure(figsize=(12, 7), dpi=250)\n ax = fig7.gca()\n colors1 = [\n \"black\",\n \"royalblue\",\n \"black\",\n \"pink\",\n \"firebrick\",\n \"gray\",\n \"darkviolet\",\n \"darkorange\",\n \"green\",\n \"saddlebrown\",\n ]\n legend_labels2 = []\n\n ax.plot(\n elec_benefits_hoy[\"hour_of_year\"],\n elec_benefits_hoy[ACC_COMPONENTS_ELECTRICITY[0]],\n color=colors1[0],\n linewidth=3,\n )\n legend_labels2.append(ACC_COMPONENTS_ELECTRICITY[0])\n x = 1\n while x <= len(ACC_COMPONENTS_ELECTRICITY) - 2:\n ax.plot(\n elec_benefits_hoy[\"hour_of_year\"],\n elec_benefits_hoy[ACC_COMPONENTS_ELECTRICITY[x]],\n color=colors1[x],\n )\n legend_labels2.append(ACC_COMPONENTS_ELECTRICITY[x])\n x += 1\n\n # Set x and y limits based on min and max values\n if max(elec_benefits_hoy.iloc[:, 2:x].max()) < 0:\n ymax = 0\n else:\n ymax = max(elec_benefits_hoy.iloc[:, 2:x].max())\n if min(elec_benefits_hoy.iloc[:, 2:x].min()) > 0:\n ymin = 0\n else:\n ymin = min(elec_benefits_hoy.iloc[:, 2:x].min())\n\n ax.set_xlim(-340, 9000)\n ax.set_ylim(ymin * 1.1, ymax * 1.08)\n\n # Set x and y axis labels\n ax.set_xlabel(\"Hour of Year\", size=17, labelpad=5)\n ax.set_ylabel(\"$ TRC Benefits\", size=17)\n\n # Set plot title, size, and position\n ax.set_title(\n \"Sum of Avoided Costs by Component and Hour of Day\", size=17, loc=\"left\"\n )\n\n # Tick and label parameters\n ax.set_xticks(np.arange(0, 8760, step=1000))\n ax.set_yticks(\n np.arange(\n int(round(ymin * 1.1, 0)),\n ymax * 1.08,\n step=max(round(ymax - ymin, 3) / 5, int((round(ymax - ymin, 0)) / 4)),\n )\n )\n ax.tick_params(\n which=\"major\", axis=\"x\", direction=\"out\", length=6, width=2, labelsize=14\n )\n ax.tick_params(\n which=\"major\", axis=\"y\", direction=\"out\", length=6, width=2, labelsize=14\n )\n\n # Minor ticks\n ax.xaxis.set_minor_locator(AutoMinorLocator())\n ax.yaxis.set_minor_locator(AutoMinorLocator())\n\n # Legend\n plt.legend(\n legend_labels2,\n bbox_to_anchor=(1, 1),\n fontsize=12,\n loc=\"upper left\",\n frameon=False,\n )",
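The cumulative-sum figure in plot_results is built on matplotlib's twin-axis mechanism (ax.twinx()); stripped of styling, the core pattern reduces to the sketch below, with hypothetical random-walk series standing in for the real savings and benefits data:

    import numpy as np
    import matplotlib.pyplot as plt

    # Hypothetical hourly series; the original uses cumulative sums
    # taken from elec_benefits_hoy.
    hours = np.arange(8760)
    savings = np.cumsum(np.random.randn(8760))
    benefits = np.cumsum(np.random.randn(8760))

    fig, ax1 = plt.subplots()
    ax1.plot(hours, savings, color="royalblue", linewidth=3)
    ax1.set_ylabel("Net Lifecycle Savings (MWh)")
    ax2 = ax1.twinx()  # second y-axis sharing the same x-axis
    ax2.plot(hours, benefits, color="firebrick", linewidth=3, linestyle="--")
    ax2.set_ylabel("$ TRC Benefits", rotation=-90, labelpad=20)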
(df.loc[:, \"Class\"] == \"Post-Panamax\")\n nearshore = (df.loc[:, \"Location\"] == \"Nearshore\")\n offshore = (df.loc[:, \"Location\"] == \"Offshore\")\n inbound = (df.loc[:, \"Course Behavior\"] == \"Inbound\")\n outbound = (df.loc[:, \"Course Behavior\"] == \"Outbound\")\n dat = {\"Proportion<br>of Transits\":[\n str(round(sum(panamax) / len(df) * 100, 2)) + \"%\",\n str(round(sum(post_panamax) / len(df) * 100, 2)) + \"%\", \"100%\"\n ],\n \"Compliance<br>Rate\":[\n str(round(sum(panamax & (df.loc[:, \"VSPD kn\"] <= 10)) /\n sum(panamax) * 100, 2)) + \"%\",\n str(round(sum(post_panamax & (df.loc[:, \"VSPD kn\"] <= 10)) /\n sum(post_panamax) * 100, 2)) + \"%\",\n str(round(sum(df.loc[:, \"VSPD kn\"] <= 10) / len(df) * 100, 2)) + \"%\"\n ],\n \"Mean<br>VSPD\":[\n str(round(df[panamax].loc[:, \"VSPD kn\"].mean(), 2)) + \" kn\",\n str(round(df[post_panamax].loc[:, \"VSPD kn\"].mean(), 2)) + \" kn\",\n str(round(df.loc[:, \"VSPD kn\"].mean(), 2)) + \" kn\"\n ],\n \"Nearshore<br>Median VSPD\":[\n str(round(df[nearshore & panamax].loc[:, \"VSPD kn\"].median(), 2)) +\n \" kn\",\n str(round(df[nearshore & post_panamax].loc[:,\n (\"VSPD kn\")].median(), 2)) + \" kn\",\n str(round(df[nearshore].loc[:, \"VSPD kn\"].median(), 2)) + \" kn\"\n ],\n \"Offshore<br>Median VSPD\":[\n str(round(df[offshore & panamax].loc[:, \"VSPD kn\"].median(), 2)) +\n \" kn\",\n str(round(df[offshore & post_panamax].loc[:,\n (\"VSPD kn\")].median(), 2)) + \" kn\",\n str(round(df[offshore].loc[:, \"VSPD kn\"].median(), 2)) + \" kn\"\n ],\n \"Inbound<br>Median VSPD\":[\n str(round(df[inbound & panamax].loc[:, \"VSPD kn\"].median(), 2)) +\n \" kn\",\n str(round(df[inbound & post_panamax].loc[:,\n (\"VSPD kn\")].median(), 2)) + \" kn\",\n str(round(df[inbound].loc[:, \"VSPD kn\"].median(), 2)) + \" kn\"\n ],\n \"Outbound<br>Median VSPD\":[\n str(round(df[outbound & panamax].loc[:, \"VSPD kn\"].median(), 2)) +\n \" kn\",\n str(round(df[outbound & post_panamax].loc[:,\n (\"VSPD kn\")].median(), 2)) + \" kn\",\n str(round(df[outbound].loc[:, \"VSPD kn\"].median(), 2)) + \" kn\"\n ],\n \"VSPD-WSPD<br>Correlation\":[\n str(round(df[panamax].dropna().loc[:, (\"VSPD kn\", \"WSPD mph\")].corr()\n .iloc[0][1], 2)),\n str(round(df[post_panamax].dropna().loc[:,\n (\"VSPD kn\", \"WSPD mph\")].corr().iloc[0][1], 2)),\n str(round(df.dropna().loc[:,\n (\"VSPD kn\", \"WSPD mph\")].corr().iloc[0][1], 2))\n ]\n }\n index = [\"Panamax\", \"Post-Panamax\", \"Combined\"]\n return pd.DataFrame(dat, index)", "def generate_plots():\n\n hmp = homemonitor_plot()\n hmp.load_data()\n hmp.plot_day()\n hmp.plot_hist()", "def mainprint():\r\n global iteration\r\n global windowsize\r\n global path\r\n\r\n #The readout function is called...\r\n thetime, T_BL100, MF_BL100, Rho_BL100, T_Cori, MF_Cori, Rho_Cori, DP = readout()\r\n #... 
and the results are added to the dataframe...\r\n    df.loc[iteration]= [thetime, T_BL100, MF_BL100, Rho_BL100, T_Cori, MF_Cori, Rho_Cori, DP]\r\n    #...which is then printed to a csv file.\r\n    df.to_csv(path+\"output.csv\", index=False)\r\n\r\n    #Writing the plots in jpeg format.\r\n    #The script takes the last X rows from the database, corresponding to the window size of the plot.\r\n    gdf = df.tail(windowsize)\r\n\r\n    #Temperature plot\r\n    tempplot = gdf.plot(x='Time',y=['T_Cori', 'T_BL100'],grid=True, xticks = [0,5,10,15,20])\r\n    tempplot.set_ylabel('Temperature (degC)')\r\n    tempplot.set_xlabel('Time')\r\n    tempplot.legend(['Cori','BL100'],loc='upper left')\r\n    plt.savefig(path+\"Temp.jpeg\") #This is the file name, located in the specified path.\r\n\r\n    #To prevent potential errors, the plot is closed again.\r\n    plt.close('all')\r\n\r\n    #Mass flow plot\r\n    mfplot = gdf.plot(x='Time',y=['MF_Cori', 'MF_BL100'],grid=True, xticks = [0,5,10,15,20])\r\n    mfplot.set_ylabel('Mass Flow (g/h)')\r\n    mfplot.set_xlabel('Time')\r\n    mfplot.legend(['Cori','BL100'],loc='upper left')\r\n    plt.savefig(path+\"MF.jpeg\")\r\n\r\n    plt.close('all')\r\n\r\n    #Density plot\r\n    rhoplot = gdf.plot(x='Time',y=['Rho_Cori', 'Rho_BL100'],grid=True, xticks = [0,5,10,15,20])\r\n    rhoplot.set_ylabel('Density (kg/m^3)')\r\n    rhoplot.set_xlabel('Time')\r\n    rhoplot.legend(['Cori','BL100'],loc='upper left')\r\n    plt.savefig(path+\"Rho.jpeg\")\r\n\r\n    plt.close('all')\r\n\r\n    #DP sensor plot\r\n    dpplot = gdf.plot(x='Time',y='DP',grid=True, xticks = [0,5,10,15,20])\r\n    dpplot.set_ylabel('Pressure (mbar)')\r\n    dpplot.set_xlabel('Time')\r\n    plt.savefig(path+\"DP.jpeg\")\r\n\r\n    plt.close('all')\r\n\r\n    iteration = iteration + 1", "def plotCoulombEnergy(self, phys, forces, step): \r\n        self.plotQuantity(step, phys.app.energies.getTable(0), 'coulombenergy')", "def data_vis():\n    dataroot = 'solar_data.txt'\n    debug = False \n    diff = False\n    X, y = read_data(dataroot, debug, diff)\n\n    # First plot the original timeseries\n    fig = plt.figure(figsize=(40,40))\n\n    fig.add_subplot(3,3,1)\n    plt.plot(y)\n    plt.title('Avg Global PSP (vent/cor) [W/m^2]')\n    # plt.show()\n\n    fig.add_subplot(3,3,2)\n    plt.plot(X[:,0])\n    plt.title('Avg Zenith Angle [degrees]')\n    # plt.show()\n\n    fig.add_subplot(3,3,3)\n    plt.plot(X[:,1])\n    plt.title('Avg Azimuth Angle [degrees]')\n    # plt.show()\n\n    fig.add_subplot(3,3,4)\n    plt.plot(X[:,2])\n    plt.title('Avg Tower Dry Bulb Temp [deg C]')\n    # plt.show()\n\n    fig.add_subplot(3,3,5)\n    plt.plot(X[:,3])\n    plt.title('Avg Tower RH [%]')\n    # plt.show()\n\n    fig.add_subplot(3,3,6)\n    plt.plot(X[:,4])\n    plt.title('Avg Total Cloud Cover [%]')\n    # plt.show()\n\n    fig.add_subplot(3,3,7)\n    plt.plot(X[:,5])\n    plt.title('Avg Avg Wind Speed @ 6ft [m/s]')\n    # plt.show()\n\n    ##########################################################################################\n    # Plotting the Fourier Transform of the signals\n\n    freq = np.fft.fftfreq(len(y), 1*60*60)\n\n    fig = plt.figure(figsize=(40,40))\n\n    fig.add_subplot(3,3,1)\n    plt.plot(freq, np.abs(np.fft.fft(y)))\n    plt.title('Avg Global PSP (vent/cor) [W/m^2]')\n    # plt.show()\n\n    fig.add_subplot(3,3,2)\n    plt.plot(freq, np.abs(np.fft.fft(X[:,0])))\n    plt.title('Avg Zenith Angle [degrees]')\n    # plt.show()\n\n    fig.add_subplot(3,3,3)\n    plt.plot(freq, np.abs(np.fft.fft(X[:,1])))\n    plt.title('Avg Azimuth Angle [degrees]')\n    # plt.show()\n\n    fig.add_subplot(3,3,4)\n    plt.plot(freq, np.abs(np.fft.fft(X[:,2])))\n    plt.title('Avg Tower Dry Bulb Temp [deg C]')\n    # plt.show()\n\n    fig.add_subplot(3,3,5)\n    plt.plot(freq, 
np.abs(np.fft.fft(X[:,3])))\n    plt.title('Avg Tower RH [%]')\n    # plt.show()\n\n    fig.add_subplot(3,3,6)\n    plt.plot(freq, np.abs(np.fft.fft(X[:,4])))\n    plt.title('Avg Total Cloud Cover [%]')\n    # plt.show()\n\n    fig.add_subplot(3,3,7)\n    plt.plot(freq, np.abs(np.fft.fft(X[:,5])))\n    plt.title('Avg Avg Wind Speed @ 6ft [m/s]')\n    # plt.show()\n\n    ##################################################################################################\n    # Print correlation matrix\n\n    df = pd.DataFrame(np.c_[y, X])\n    df.columns = ['Avg Global PSP (vent/cor) [W/m^2]','Avg Zenith Angle [degrees]','Avg Azimuth Angle [degrees]','Avg Tower Dry Bulb Temp [deg C]','Avg Tower RH [%]','Avg Total Cloud Cover [%]','Avg Avg Wind Speed @ 6ft [m/s]']\n    f = plt.figure(figsize=(19, 15))\n    plt.matshow(df.corr(), fignum=f.number)\n    plt.xticks(range(df.shape[1]), df.columns, fontsize=14, rotation=20)\n    plt.yticks(range(df.shape[1]), df.columns, fontsize=14)\n    cb = plt.colorbar()\n    cb.ax.tick_params(labelsize=14)\n    plt.title('Correlation Matrix', fontsize=16);\n    plt.show()", "def render(self, epoch=0):\n\n        # Creates just a figure and only one subplot\n        fig, ax = plt.subplots()\n        ax.set_title(f'Environment {epoch}\\nreward: {self.reward}')\n\n        margin = 3\n        margin_ext = 6\n        xlim = 100\n        ylim = 80\n\n        # Set drawing limits\n        plt.xlim(0, xlim)\n        plt.ylim(-ylim, 0)\n\n        # Set height and width for the box\n        high = np.floor((ylim - 2 * margin_ext - margin * (self.numBins - 1)) / self.numBins)\n        wide = np.floor((xlim - 2 * margin_ext - margin * (self.numSlots - 1)) / self.numSlots)\n\n        # Plot slot labels\n        for slot in range(self.numSlots):\n            x = wide * slot + slot * margin + margin_ext\n            plt.text(x + 0.5 * wide, -3, \"slot{}\".format(slot), ha=\"center\", family='sans-serif', size=8)\n\n        # Plot bin labels & place empty boxes\n        for bin in range(self.numBins):\n            y = -high * (bin + 1) - (bin) * margin - margin_ext\n            plt.text(0, y + 0.5 * high, \"bin{}\".format(bin), ha=\"center\", family='sans-serif', size=8)\n\n            for slot in range(self.numSlots):\n                x = wide * slot + slot * margin + margin_ext\n                rectangle = mpatches.Rectangle((x, y), wide, high, linewidth=1, edgecolor='black', facecolor='none')\n                ax.add_patch(rectangle)\n\n        # Select serviceLength colors from a colormap\n        cmap = plt.cm.get_cmap('hot')\n        colormap = [cmap(np.float32(i + 1) / (self.serviceLength + 1)) for i in range(self.serviceLength)]\n        total_occupied_slots = 0\n\n        # -------------\n        # Workbook is created\n        wb = Workbook()\n\n        # add_sheet is used to create sheet.\n        sheet1 = wb.add_sheet('Sheet 1', cell_overwrite_ok=True)\n        # row, column\n        sheet1.write(0, 0, 'Bins')\n        sheet1.write(0, 1, 'Occupied Slots')\n        sheet1.write(0, 2, 'Occupancy Rate')\n        sheet1.write(0, 3, 'Devices')\n\n        occupancy = np.empty(self.numBins)\n        for bin in range(self.numBins):\n            occupied = 0\n            for slot in range(len(self.cells[bin])):\n                if not math.isnan(self.cells[bin][slot]):\n                    occupied += 1\n\n            occupancy[bin] = occupied / len(self.cells[bin])\n            occupancy_rate = occupied / self.numSlots\n            occupied_slots = \"occupied slots in bin {} : {} > occupancy rate = {}\".format(bin, occupied,\n                                                                                           occupancy_rate)\n            print(occupied_slots)\n\n            sheet1.write(bin + 1, 0, bin)\n            sheet1.write(bin + 1, 1, occupied)\n            sheet1.write(bin + 1, 2, occupancy_rate)\n\n        # --------------\n        bin_prev = None\n        pkt_prev = None\n\n        # Plot service boxes\n        for idx in range(self.serviceLength):\n            # print(idx)\n            pkt = self.service[idx]\n            bin = self.placement[idx]\n            Bin_Dev = \"Bin{}>Device{}\".format(bin, pkt)\n            print(Bin_Dev)  # respective bins of 
devices\n            # print(bin)\n            # print(pkt)\n\n            '''\n            if bin == bin_prev:\n                # print(\"----\",bin)\n                # print(\"++++\", col)\n                sheet1.write(bin + 1, 3, pkt)\n                print('...', bin_prev)\n            #else:\n               # sheet1.write(bin + 1, 3, (pkt_prev, pkt))\n            #bin_prev = bin\n            #pkt_prev = pkt\n            '''\n            first_slot = self.first_slots[idx]\n            subpkt = 0\n            pkt_bin = 0\n\n            for k in range(self.service_properties[pkt][\"size\"]):\n                # print(pkt)\n\n                slot = first_slot + k\n                # print(slot)\n                x = wide * slot + slot * margin + margin_ext\n                y = -high * (bin + 1) - bin * margin - margin_ext\n                rectangle = mpatches.Rectangle((x, y), wide, high, linewidth=0, facecolor=colormap[idx], alpha=.9)\n                ax.add_patch(rectangle)\n                plt.text(x + 0.5 * wide, y + 0.5 * high, \"dev{}-{}\".format(pkt, subpkt), ha=\"center\",\n                         family='sans-serif', size=8)\n\n                PKT = \"dev{}-{}\".format(pkt, subpkt)\n\n                subpkt = subpkt + 1\n                print(PKT)\n\n                '''\n                # For importing to excel file\n                PKT = \"pkt{}{}\".format(pkt,subpkt)\n                '''\n            # total_occupied_slots = subpkt\n            pkt_bin = pkt_bin + subpkt\n            # print(pkt_bin)\n\n            # BIN = \"Occupied_slots={}/{}\".format(pkt_bin, self.numSlots)\n\n            total_occupied_slots = total_occupied_slots + subpkt\n            # print(BIN)\n        wb.save('xlwt example.xls')\n        print('Total Occupied Slots =', total_occupied_slots)\n\n        plt.axis('off')\n        plt.show()\n        return total_occupied_slots", "def index():\n    \n    currentDateTime = current_datetime()\n    fromDateTime = calc_day(currentDateTime, -3)\n\n    # Adjust if any graphs should be shown in index page\n    # Temperatur=XML(render_graph(3, 5, fromDateTime, currentDateTime, show_dots=False))\n    # Procent_smoke=XML(render_graph(3, 6, fromDateTime, currentDateTime, show_dots=False))\n    # Kitchen_Stove=XML(render_graph(2, 3, fromDateTime, currentDateTime, show_dots=False))\n    # Humid=XML(render_graph(3, 4, fromDateTime, currentDateTime, show_dots=False))\n    # Brightness=XML(render_graph(3, 7, fromDateTime, currentDateTime, show_dots=False))\n    # Hall_motions=XML(render_graph(1, 1, fromDateTime, currentDateTime, show_dots=False, hits=True))\n    # Hall_door=XML(render_graph(1, 2, fromDateTime, currentDateTime, show_dots=False, on_off=['Open', 'Close']))\n\n    # return dict(test=locals())\n    # return dict(test=device_monitoring)\n    return dict()", "def plot_EngineStatus(data_engine, data_vel, dest_folder=None):\n    \n    # Variables assignment\n    t_eng = data_engine[:,0]\n    adv_ratio = data_engine[:,9]\n    thr_coeff = data_engine[:,11]\n    prop_rpm = data_engine[:,12]\n    thr_lbf = data_engine[:,16]\n    pow_hp = data_engine[:,18]\n    \n    t_vel = data_vel[:,0]\n    V_fts = data_vel[:,17]\n    V_kts = 0.592484*V_fts\n    \n    fig = plt.figure(figsize=(12.3,15))\n    \n    \n    # Advance ratio and thrust coefficient\n    ax1a = fig.add_subplot(3,1,1)\n    \n    ax1a.plot(t_eng, adv_ratio, ls='solid', color='m', label='$\\gamma$')\n    \n    ax1a.set_title(\"Advance ratio and thrust coefficient\") \n    ax1a.set_xlabel('$t$ (s)')\n    ax1a.set_ylabel('$\\gamma$')\n    \n    ax1b = ax1a.twinx()\n    ax1b.plot(t_eng, thr_coeff, ls='solid', color='0.65', label='$C_{T}$')\n    ax1b.set_ylabel('$C_{T}$')\n    \n    handlesa, labelsa = ax1a.get_legend_handles_labels()\n    handlesb, labelsb = ax1b.get_legend_handles_labels()\n    ax1b.legend(handlesa+handlesb, labelsa+labelsb, ncol=2, loc='best')\n    \n    \n    # Propeller rounds-per-minute\n    ax2a = fig.add_subplot(3,1,2)\n    ax2a.plot(t_eng, prop_rpm, color='b', label='$N$')\n    \n    ax2a.set_title(\"Propeller rounds-per-minute\") \n    ax2a.set_xlabel('$t$ (s)')\n    ax2a.set_ylabel('$N$ (rpm)')\n    \n    ax2b = ax2a.twinx()\n    ax2b.plot(t_vel, V_kts, color='k', label='$V$')\n    ax2b.set_ylabel('$V$ 
(kts)')\n    \n    handlesa, labelsa = ax2a.get_legend_handles_labels()\n    handlesb, labelsb = ax2b.get_legend_handles_labels()\n    ax2b.legend(handlesa+handlesb, labelsa+labelsb, ncol=2, loc='best')\n    \n    \n    # Thrust and power\n    ax3a = fig.add_subplot(3,1,3)\n    ax3a.plot(t_eng, thr_lbf, color='0.65', label='$T$')\n    \n    ax3a.set_title(\"Thrust and power\") \n    ax3a.set_xlabel('$t$ (s)')\n    ax3a.set_ylabel('T (lbf)')\n    \n    ax3b = ax3a.twinx() \n    ax3b.plot(t_eng, pow_hp, color=(1,0.85,0.28), label='$\\Pi$')\n    \n    ax3b.set_ylabel('$\\Pi$ (hp)')\n    \n    handlesa, labelsa = ax3a.get_legend_handles_labels()\n    handlesb, labelsb = ax3b.get_legend_handles_labels()\n    ax3b.legend(handlesa+handlesb, labelsa+labelsb, ncol=2, loc='best')\n\n    plt.tight_layout()\n    \n    # Export\n    if dest_folder != None:\n        plt.savefig(dest_folder+'plot_EngineStatus.pdf')", "def comp_time_plot(p1=database['K+'], p2=database['pi+'], pmax=80, plot=True):\r\n    dt = []\r\n    p_range = np.linspace(10, pmax, 1000)\r\n    m1 = p1.mass\r\n    m2 = p2.mass\r\n    for p in p_range:\r\n        t1_per_m = 76.273/(beta(p, m1)*gamma(p, m1)*c)\r\n        t2_per_m = 76.273/(beta(p, m2)*gamma(p, m2)*c)\r\n        dt.append(abs(t1_per_m - t2_per_m)*1e12)\r\n    dt_12_5 = dt[np.argmin(abs(p_range-12.5))]\r\n    dt_75 = dt[np.argmin(abs(p_range-75))]\r\n    ratio = dt_12_5/dt_75\r\n    if plot==True:\r\n        fig = plt.figure(figsize=[10, 5])\r\n        ax = fig.add_subplot(1, 1, 1)\r\n        ax.plot(p_range, dt, 'b', label=r'$\\Delta t$')\r\n        ax.axvline(12.5, color='r', label='p=12.5 GeV')\r\n        ax.axvline(75, color='g', label='p=75 GeV')\r\n        ax.set_xlim(10, pmax)\r\n        ax.set_ylim(0)\r\n        ax.set_xlabel('p / GeV', fontsize=20)\r\n#        ax.set_yscale('log')\r\n        ax.set_ylabel(r'$\\Delta t$ / ps', fontsize=20)\r\n        title = f'{p1.name} to {p2.name} '\r\n        title += r'$\\Delta t$ dependency on particle momenta'\r\n        ax.set_title(title, fontsize=20)\r\n        ax.legend(fontsize=20)\r\n        text = 'dt(12.5) = {0:.2f} ps, '.format(dt_12_5)\r\n        text += 'dt(75) = {0:.2f} ps, '.format(dt_75)\r\n        text += 'ratio = {0:.3f}'.format(ratio)\r\n        plt.show()\r\n        print(text)\r\n    return [dt_12_5, dt_75, ratio]", "def visualize(ui_dict, result_dict, result_df, display=True): \r\n    ### Generate Bokeh plots\r\n    # Summarize the consumer's apparent prices\r\n    nyseg_price_plot = out.describe_nyseg_prices(result_df)\r\n    # Summarize the consumer's hourly household energy usage\r\n    hourly_energy_usage_plot = out.describe_energy_usage(result_df)\r\n    # Summarize daily maximum and minimum battery charges\r\n    battery_charge_plot = out.describe_battery_charge(result_df, ui_dict)\r\n    \r\n    ### Display outputs\r\n    print(result_dict)\r\n    print(result_df.info())\r\n    print(result_df.describe())\r\n    output_file(filename=out.out_loc(\"nyseg_price_plot.html\", ui_dict[\"run_name\"]),\r\n                title=\"Illustration of Hourly Pricing Scheme\")\r\n    if display:\r\n        show(nyseg_price_plot)\r\n    else:\r\n        save(nyseg_price_plot)\r\n    \r\n    output_file(filename=out.out_loc(\"hourly_energy_usage_plot.html\", ui_dict[\"run_name\"]),\r\n                title=\"Illustration of Hourly Energy Usage\")\r\n    if display:\r\n        show(hourly_energy_usage_plot)\r\n    else:\r\n        save(hourly_energy_usage_plot)\r\n    \r\n    output_file(filename=out.out_loc(\"battery_charge_plot.html\", ui_dict[\"run_name\"]),\r\n                title=\"Illustration of Battery Charge Behavior\")\r\n    if display:\r\n        show(battery_charge_plot)\r\n    else:\r\n        save(battery_charge_plot)", "def plotEnergiesOpt(monthlyData, optIdx):\n    \n    \n    dummyRange = np.asarray(range(len(optIdx)))\n    \n    fig = plt.figure(figsize=(16, 8))\n    \n    plt.suptitle('Energy Comparison')\n    ax1 = plt.subplot(1,1,1)\n    
plt.plot(monthlyData['H'][optIdx, dummyRange], label = 'H', color='r')\n plt.plot(monthlyData['C'][optIdx, dummyRange], label = 'C', color='b')\n plt.plot(monthlyData['L'][optIdx, dummyRange], label = 'L', color='g')\n plt.plot(monthlyData['PV'][optIdx, dummyRange], label = 'PV', color='c')\n plt.plot(monthlyData['E_HCL'][optIdx, dummyRange], label = 'HCL', color='m')\n plt.plot(monthlyData['E_tot'][optIdx, dummyRange], label = 'E_tot', color='k')\n plt.ylabel('Energy [kWh]')\n plt.xlim(0,288)\n\n# plt.legend()\n \n majorLocator = MultipleLocator(24)\n majorFormatter = FormatStrFormatter('%d')\n minorLocator = MultipleLocator(4)\n minorFormatter = FormatStrFormatter('%d')\n\n ax1.xaxis.set_major_locator(majorLocator)\n ax1.xaxis.set_major_formatter(majorFormatter)\n ax1.xaxis.set_minor_locator(minorLocator)\n# ax1.xaxis.set_minor_formatter(minorFormatter)\n plt.grid(True, which=u'major')\n \n # Shrink current axis by 20%\n box = ax1.get_position()\n ax1.set_position([box.x0, box.y0, box.width * 0.8, box.height])\n \n # Put a legend to the right of the current axis\n ax1.legend(loc='upper left', bbox_to_anchor=(1, 1.05))\n# \n\n plt.xticks(range(0,288,24),('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'))\n# ax2 = plt.subplot(2,1,2, sharex=ax1)\n# plt.plot(multiplier*monthlyData[energyType][indices['H'], dummyRange]-multiplier*monthlyData[energyType][indices[energyType], dummyRange], label = 'optimized for H', color='r')\n# plt.plot(multiplier*monthlyData[energyType][indices['C'], dummyRange]-multiplier*monthlyData[energyType][indices[energyType], dummyRange], label = 'optimized for C', color='b')\n# plt.plot(multiplier*monthlyData[energyType][indices['L'], dummyRange]-multiplier*monthlyData[energyType][indices[energyType], dummyRange], label = 'optimized for L', color='g')\n# plt.plot(multiplier*monthlyData[energyType][indices['PV'], dummyRange]-multiplier*monthlyData[energyType][indices[energyType], dummyRange], label = 'optimized for PV', color='c')\n# plt.plot(multiplier*monthlyData[energyType][indices['E_HCL'], dummyRange]-multiplier*monthlyData[energyType][indices[energyType], dummyRange], label = 'optimized for HCL', color='m')\n# plt.plot(multiplier*monthlyData[energyType][indices['E_tot'], dummyRange]-multiplier*monthlyData[energyType][indices[energyType], dummyRange], label = 'optimized for E_tot', color='k')\n# plt.plot(multiplier*monthlyData[energyType][indices['45'],:]-multiplier*monthlyData[energyType][indices[energyType], dummyRange], label = 'fixed at 45 deg', color='y')\n# plt.ylabel('Energy Difference [kWh]')\n# plt.legend()\n#\n# ax2.xaxis.set_major_locator(majorLocator)\n# ax2.xaxis.set_major_formatter(majorFormatter)\n# ax2.xaxis.set_minor_locator(minorLocator)\n## ax2.xaxis.set_minor_formatter(minorFormatter)\n# plt.grid(True, which='both')\n# \n return fig", "def results():\n\n # # 1. tau_e graph\n # # ------------------------------------------------------------\n\n tau_es = np.load(datapath / \"tau_es.npy\", allow_pickle=True)\n\n # I want to plot tau_e against b for various Ns. Annoyingly this\n # means I have to do some index juggling.\n\n # This is all because of the way I set up datagen.DataSet... 
oh well.\n\n    for i, N in enumerate(Ns):\n\n        # values to plot against b for the specific N\n        vals = []\n\n        for j, b in enumerate(bs):\n\n            k = Nb_to_ks[i][j]\n            vals.append(tau_es[k])\n\n        plt.plot(bs, vals, \"-\")\n\n    plt.title(\"Auto-correlation e-folding time lag for \"\n              \"variable temperatures, grid sizes\")\n\n    plt.xlabel(\"$\\\\beta$\")\n    plt.ylabel(\"$\\\\tau_e$\")\n\n    plt.legend([f\"N={N}\" for N in Ns])\n\n    plt.savefig(resultspath / \"tau_es.pdf\")\n    # plt.show()\n    plt.close()\n\n    # 2. magnetisation graphs\n    # ------------------------------------------------------------\n\n    mags_list = [np.load(datapath / f\"mags-{k}.npy\") for k in range(kcount)]\n\n    for i, N in enumerate(Ns):\n\n        plt.title(f\"Square magnetisations N={N}\")\n        plt.xlabel(\"t\")\n        plt.ylabel(\"M\")\n\n        for j, b in enumerate(bs):\n\n            c = np.max([0, np.min([1, 10 * (b - 0.4)])])\n\n            k = Nb_to_ks[i][j]\n            vals = np.mean(mags_list[k]**2, axis=1)\n            plt.plot(vals, color=(1 - c, 0, c))\n\n        plt.savefig(resultspath / f\"mags-{N}.pdf\")\n        # plt.show()\n        plt.close()\n\n    # 3. autoc graphs\n    # ------------------------------------------------------------\n\n    autocs_list = [\n        np.load(datapath / f\"autocs-{k}.npy\") for k in range(kcount)]\n\n    for i, N in enumerate(Ns):\n\n        plt.figure(figsize=(8, 6))\n        plt.axes(position=[.05, .05, .8, .9])\n\n        plt.title(f\"Auto-correlation of $|M|$ with N={N}\")\n        plt.xlabel(\"$ \\\\tau $\")\n        plt.ylabel(\"$ A(\\\\tau) $\")\n\n        for j, b in enumerate(bs):\n\n            c = np.max([0, np.min([1, 10 * (b - 0.4)])])\n\n            k = Nb_to_ks[i][j]\n            autocs = np.load(datapath / f\"autocs-{k}.npy\")\n\n            iternum = autocs.shape[0]\n            sysnum = autocs.shape[1]\n            vals = np.mean(autocs, axis=1)\n            errs = np.std(autocs, axis=1, ddof=1) / np.sqrt(sysnum)\n\n            plt.errorbar(range(iternum), vals, errs,\n                         color=(1 - c, 0, c), ecolor=(1 - c, 0, c, 0.4),\n                         elinewidth=1.5)\n\n            # plt.plot(np.log(vals))\n\n        plt.legend(bs, loc='center left', bbox_to_anchor=(1, 0.5))\n\n        plt.savefig(resultspath / f\"autocs-{N}.pdf\")\n        # plt.show()\n        plt.close()", "def plot(self, **kwargs: Any) -> None:\n        # Call base implementation\n        self.simulator.plot(**kwargs)\n\n        # Extract log data\n        log_vars = self.simulator.log_data.get(\"variables\", {})\n        if not log_vars:\n            raise RuntimeError(\n                \"Nothing to plot. Please run a simulation before calling \"\n                \"`plot` method.\")\n\n        # Extract action.\n        # If telemetry action fieldnames is a dictionary, it cannot be nested.\n        # In such a case, keys correspond to subplots, and values are\n        # individual scalar data over time to be displayed to the same subplot.\n        t = log_vars[\"Global.Time\"]\n        tab_data: Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]] = {}\n        action_fieldnames = self.log_fieldnames.get(\"action\")\n        if action_fieldnames is None:\n            # It was impossible to register the action to the telemetry, likely\n            # because of incompatible dtype. 
Early return without adding tab.\n return\n if isinstance(action_fieldnames, dict):\n for group, fieldnames in action_fieldnames.items():\n if not isinstance(fieldnames, list):\n LOGGER.error(\n \"Action space not supported by this method.\")\n return\n tab_data[group] = {\n \".\".join(key.split(\".\")[1:]): value\n for key, value in extract_variables_from_log(\n log_vars, fieldnames, as_dict=True).items()}\n elif isinstance(action_fieldnames, list):\n tab_data.update({\n \".\".join(key.split(\".\")[1:]): value\n for key, value in extract_variables_from_log(\n log_vars, action_fieldnames, as_dict=True).items()})\n\n # Add action tab\n self.simulator.figure.add_tab(\"Action\", t, tab_data)", "def display(self):\r\n \r\n plt.rcParams['font.size'] = 14\r\n plt.rcParams['axes.linewidth'] = 1.2 # 1.2 for single plot, 0.5 for all 6\r\n plt.rcParams['lines.linewidth'] = 20.0 # Aah, this doesn't work because line width is changed later on\r\n\r\n cwd = os.getcwd() # Gets current working directory.\r\n cwd = cwd.replace('\\\\', '/')\r\n path = cwd + directory # This is the folder all the results are stored in.\r\n \r\n if type(array_element) == str:\r\n dataframes = [file + array_element] # This is to pass a single csv file\r\n else:\r\n dataframes = [file + i for i in array_element] # This is a list so you can pass multiple csv files to be overlayed on the same plot.\r\n\r\n colours = ['black', 'darkred', 'darkmagenta', 'darkturquoise', 'saddlebrown'] # Array of colours for the lines.\r\n\r\n dfE = pd.read_csv(cwd + \"/experimental_data.csv\") # Reads in the experimental data as a pandas dataframe.\r\n\r\n # Rescale the x-axis of the experimental data.\r\n ratio_of_capacities = 272.4 / 338.313338 # experimental maximum capacity / theoretical maximum capacity\r\n dfE[\"x_theo\"] = ratio_of_capacities * dfE[\"x\"]\r\n # 'x' is the experimental x and 'x_theo' is the theoretical x.\r\n\r\n # Second derivative of enthalpy for experimental data. One w/ respect to the experimental x and one w/ respect to theoretical x.\r\n secder_enthalpy_experimental_x = np.gradient(np.array(dfE['Enthalpy dH/dx']), np.array(dfE['x']))\r\n secder_enthalpy_experimental_x_theo = np.gradient(np.array(dfE['Enthalpy dH/dx']), np.array(dfE['x_theo']))\r\n dfE['secder enthalpy x'] = secder_enthalpy_experimental_x\r\n dfE['secder enthalpy x theo'] = secder_enthalpy_experimental_x_theo\r\n\r\n # vertical shift on p.m. entropy for vibrational effect\r\n vibrational_shift = 0.0108 # eV K this includes being multiplied by the ratio of capacities.\r\n dfE[\"Entropy dS/dx\"] = (dfE[\"Entropy dS/dx\"]) - vibrational_shift\r\n\r\n # Integrates the p.m. entropy\r\n entropy_list_experimental = integrate.cumtrapz(dfE['Entropy dS/dx'], dfE['x'],\r\n initial=0) # Contains the entropy values\r\n dfE['Entropy'] = entropy_list_experimental\r\n\r\n dfE['x_new'] = ((dfE['x_theo'] - dfE['x_theo'].iloc[0]) * dfE['x_theo'][73]) / (dfE['x_theo'][73] - dfE['x_theo'].iloc[0]) # Rescales the line so that the experimental data starts at 0.\r\n dfE['x'] = ((dfE['x'] - dfE['x'].iloc[0]) * dfE['x'][73]) / (dfE['x'][73] - dfE['x'].iloc[0]) # Same as above but for experimental x axis.\r\n\r\n # Calculates the analytical solution\r\n points = 1000\r\n x_pos = np.linspace(0, 1, points) # x for p.m. entropy\r\n y_pos = np.linspace(0, 1, points) # y for p.m. 
entropy\r\n        s_x = np.linspace(0, 1, points)  # x for entropy\r\n        s_y = np.linspace(0, 1, points)  # y for entropy\r\n        l = 0.329217689  # This must be the same as what was used in the main script\r\n        R = -0.0000862  # eV/K.Site\r\n        T = 288  # K\r\n        for index, x in enumerate(x_pos):\r\n            if x < l:\r\n                s_y[index] = (R * (x * np.log(x / l) - (x - l) * np.log((l - x) / l))) * T\r\n                y_pos[index] = T * R * (np.log(x / l) - np.log((l - x) / l))\r\n            else:\r\n                s_y[index] = (R * l * (\r\n                        (x / l - 1) * np.log(x / l - 1) + (1 - x) / l * np.log((1 - x) / l) - (1 - l) / l * np.log(\r\n                    (1 - l) / l))) * T\r\n                y_pos[index] = T * R * (np.log(x / l - 1) - np.log(1 / l - x / l))\r\n\r\n        # Calculates the single solid state entropy\r\n        x_ent = np.linspace(0, 1, points)\r\n        y_ent = np.linspace(0, 1, points)\r\n        for index, x in enumerate(x_ent):\r\n            y_ent[index] = T * R * (x * np.log(x) + (1-x) * np.log(1-x))\r\n        \r\n        \"\"\"\r\n        #\r\n        #\r\n        # Create plot and formats\r\n        #\r\n        #\r\n        \"\"\"\r\n        \r\n        fig, axes = plt.subplots(nrows=num_row, ncols=num_col, constrained_layout=True, squeeze=False)\r\n        # squeeze=False is needed to prevent errors when plotting a single subplot\r\n        plt.rc('legend', fontsize=13, handlelength=1)\r\n        plt.rc('tick')\r\n        lw = 1.5  # Line width\r\n        \r\n        plt.tick_params(bottom=True, top=True, left=True, right=True)\r\n        plt.tick_params(labelbottom=True, labeltop=False, labelleft=True, labelright=False)\r\n        plt.tick_params(direction='in', width=1.2, length=4.5, pad=3)  # For single plot\r\n        # plt.tick_params(direction='in', width=1, length=4.5, pad=3)  # For multiple plots\r\n\r\n        marker_list = ['v', '^', 'p', 'o']\r\n        mark_size = 3  #0.7 for 6 plots\r\n        \r\n        colours = ['#176ba0', '#af4bce', 'orangered', '#48a11b', '#3caea3']  #'#af4bce'\r\n        common_legend = ['400 Averaging Steps', '800 Averaging Steps', '2000 Averaging Steps']\r\n        \r\n        if num_col==2 and num_row==3:  # This will work when using the original axes dimensions (3 rows, 2 columns)\r\n            placement = dict([\r\n                ('voltage', axes[0, 0]),\r\n                ('dS/dx', axes[0, 1]),\r\n                ('dQ/dV', axes[1, 0]),\r\n                ('dH/dx', axes[1, 1]),\r\n                ('S', axes[2, 0]),\r\n                ('d/dx(dH/dx)', axes[2, 1])\r\n            ])\r\n        else:  # If axes dimensions are different, I'm probably trying to plot one graph\r\n            \"\"\"\r\n            If plotting more than one graph, the position on the plot in the subplot can be adjusted\r\n            by appropriately altering the axes[] parameter. 
For the graphs that are not being plotted, \r\n leave their position as axes[0, 0].\r\n \"\"\"\r\n placement = dict([\r\n ('voltage', axes[0, 0]),\r\n ('dS/dx', axes[0, 0]),\r\n ('dQ/dV', axes[0, 0]),\r\n ('dH/dx', axes[0, 0]),\r\n ('S', axes[0, 0]),\r\n ('d/dx(dH/dx)', axes[0, 0])\r\n ])\r\n \r\n # Plots all of the experimental data\r\n if experimental_plot == True:\r\n if pick_plot['voltage'] == True:\r\n dfE.plot(linestyle='-', color='darkgreen', lw=lw, ax=placement['voltage'], x='x_new', y='OCV')\r\n dfE.plot(linestyle='-', color='darkblue', lw=lw, ax=placement['voltage'], x='x', y='OCV')\r\n \r\n if pick_plot['dS/dx'] == True:\r\n ax2 = dfE.plot(linestyle='-', color='darkgreen', lw=lw, ax=placement['dS/dx'], x='x_new', y='Entropy dS/dx')\r\n dfE.plot(linestyle='-', color='darkblue', lw=lw, ax=placement['dS/dx'], x='x', y='Entropy dS/dx')\r\n \r\n if pick_plot['dQ/dV'] == True:\r\n dfE.plot(linestyle='-', color='darkgreen', lw=lw, ax=placement['dQ/dV'], x='OCV', y='dQdV') \r\n \r\n if pick_plot['dH/dx'] == True:\r\n dfE.plot(linestyle='-', color='darkgreen', lw=lw, ax=placement['dH/dx'], x='x_new', y='Enthalpy dH/dx')\r\n dfE.plot(linestyle='-', color='darkblue', lw=lw, ax=placement['dH/dx'], x='x', y='Enthalpy dH/dx')\r\n \r\n if pick_plot['S'] == True:\r\n ax5 = dfE.plot(linestyle='-', color='darkgreen', lw=lw, ax=placement['S'], x='x_new', y='Entropy')\r\n \r\n if pick_plot['d/dx(dH/dx)'] == True:\r\n dfE.plot(linestyle='-', color='darkgreen', lw=lw, ax=placement['d/dx(dH/dx)'], x='x_new', y='secder enthalpy x theo')\r\n dfE.plot(linestyle='-', color='darkblue', lw=lw, ax=placement['d/dx(dH/dx)'], x='x', y='secder enthalpy x')\r\n\r\n # Iterate through all the data to be plotted\r\n if simulation_plot == True:\r\n for count, df in enumerate(dataframes):\r\n df1 = pd.read_csv(path + df) # reads file into a dataframe.\r\n \r\n df1 = df1.replace(0, np.nan).dropna(axis=0, how='all') # For the rows with all '0' entries they are replaced with 'nan' and then these rows are dropped.\r\n df1 = df1.replace(np.nan, 0) # As some legitimate 0 entries such as 0 volts we flip back the remaining from 'nan' to 0.\r\n \r\n # Integrates the p.m. entropy\r\n entropy_list = integrate.cumtrapz(df1['Partial molar entropy'], df1['Total mole fraction'],\r\n initial=0) # Contains the entropy values\r\n df1['Entropy'] = entropy_list\r\n \r\n # Rescale voltage profile and p.m. enthalpy by the chain rule.\r\n df1[\"adjusted voltage\"] = df1[\"Chemical potential\"] * ratio_of_capacities\r\n df1[\"adjusted enthalpy\"] = df1[\"Partial molar enthalpy\"] * ratio_of_capacities\r\n df1[\"adjusted entropy\"] = df1[\"Partial molar entropy\"] * ratio_of_capacities\r\n df1[\"adjusted dq/de\"] = df1[\"dq/de\"] * (1/ratio_of_capacities)**2\r\n \r\n # Differentiate the p.m. 
enthalpy to get the second derivative.\r\n pm_enthalpy = np.array(df1['adjusted enthalpy'])\r\n mole_fraction = np.array(df1['Total mole fraction'])\r\n secder_enthalpy = np.gradient(pm_enthalpy, mole_fraction)\r\n df1['secder enthalpy'] = secder_enthalpy\r\n \r\n if pick_plot['voltage'] == True:\r\n ax1 = df1.plot(linestyle='-', color=colours[count], lw=lw, marker=marker_list[count], markeredgecolor=colours[count],\r\n markersize=mark_size, ax=placement['voltage'], x='Total mole fraction', y='adjusted voltage')\r\n ax1.set_xlim([0, 1])\r\n ax1.set_xlabel('Na content $[x]$')\r\n ax1.set_ylabel('Voltage $[V]$')\r\n ax1.legend(common_legend) \r\n # ax1.legend(['Experimental data (Adjusted x)', 'Raw experimental data', 'Monte Carlo data'])\r\n \r\n if pick_plot['dS/dx'] == True:\r\n ax2 = df1.plot(linestyle='-', color=colours[count], lw=lw, marker=marker_list[count], markeredgecolor=colours[count],\r\n markersize=mark_size, ax=placement['dS/dx'], x='Total mole fraction', y='adjusted entropy')\r\n # ax2.plot(x_pos, y_pos, linewidth=lw, color='red') # Plots the ideal p.m. entropy\r\n ax2.set_xlim([0, 1])\r\n ax2.set_xlabel('Na content $[x]$')\r\n ax2.set_ylabel('$\\\\frac{dS}{dx}$ $[eV K/site]$')\r\n ax2.legend(common_legend) \r\n # ax2.legend(['Experimental data (Adjusted x)', 'Raw experimental data', 'Monte Carlo data', 'Analytical solution'])\r\n \r\n if pick_plot['dQ/dV'] == True:\r\n ax3 = df1.plot(linestyle='-', color=colours[count], lw=lw, marker=marker_list[count], markeredgecolor=colours[count],\r\n markersize=mark_size, ax=placement['dQ/dV'], x='Chemical potential', y='adjusted dq/de') \r\n ax3.set_xlim([-0.1, 1])\r\n ax3.set_xlabel('Voltage $[V]$')\r\n ax3.set_ylabel('$\\\\frac{dQ}{dV}$ [$\\mathregular{eV^{-1}}$]')\r\n ax3.legend(common_legend)\r\n # ax3.legend(['Experimental data', 'Monte Carlo Data'])\r\n \r\n if pick_plot['dH/dx'] == True:\r\n ax4 = df1.plot(linestyle='-', color=colours[count], lw=lw, marker=marker_list[count], markeredgecolor=colours[count],\r\n markersize=mark_size, ax=placement['dH/dx'], x='Total mole fraction', y='adjusted enthalpy')\r\n ax4.set_xlim([0, 1])\r\n ax4.set_xlabel('Na content $[x]$')\r\n ax4.set_ylabel('$\\\\frac{dH}{dx}$ $[eV/site]$')\r\n ax4.legend(common_legend) \r\n # ax4.legend(['Experimental data (Adjusted x)', 'Raw experimental data', 'Monte Carlo data'])\r\n \r\n if pick_plot['d/dx(dH/dx)'] == True:\r\n ax5 = df1.plot(linestyle='-', color=colours[count], lw=lw, marker=marker_list[count], markeredgecolor=colours[count],\r\n markersize=mark_size, ax=placement['d/dx(dH/dx)'], x='Total mole fraction', y='secder enthalpy')\r\n ax5.set_xlim([0, 1])\r\n ax5.set_ylim([0, 6])\r\n ax5.set_xlabel('Na content $[x]$')\r\n ax5.set_ylabel('$\\\\frac{d^2H}{dx^2}$ $[eV/site]$')\r\n ax5.legend(common_legend)\r\n \r\n # ax5.legend(['Experimental data (Adjusted x)', 'Raw experimental data', 'Monte Carlo data'])\r\n \r\n if pick_plot['S'] == True:\r\n ax6 = df1.plot(linestyle='-', color=colours[count], lw=lw, marker=marker_list[count], markeredgecolor=colours[count],\r\n markersize=mark_size, ax=placement['S'], x='Total mole fraction', y='Entropy')\r\n \r\n # ax6.plot(s_x, s_y, linewidth=lw, color='red') # Plots the entropy for l=0.32...\r\n # ax6.plot(x_ent, y_ent, linewidth=lw, color='grey') # Plots the entropy for solid state solution.\r\n ax6.set_xlim([0, 1])\r\n ax6.set_xlabel('Na content $[x]$')\r\n ax6.set_ylabel('S $[eV K/site]$')\r\n ax6.legend(common_legend)\r\n # ax6.legend(['Experimental data', 'Monte Carlo data', 'Analytical solution', 'Solid 
state solution'], loc='upper right', bbox_to_anchor=(0.75, 0.5))\r\n \r\n \r\n\r\n # parameter_file = open(path + \"/Input_arguments_\" + uid + \".txt\", \"w\")\r\n # parameter_file.write(str(self.args))\r\n # parameter_file.close()\r\n\r\n # manager = plt.get_current_fig_manager()\r\n # # manager.resize(*manager.window.maxsize())\r\n # # fig_path = cwd + \"/Na_plot_results.png\"\r\n # # plt.savefig(path + \"/Na_monte_carlo_plot_\" + uid + \".png\")\r\n # plt.show()\r\n \r\n plt.savefig(\"Varying sps Overlaid Plots - dQ_dV\", dpi = 300)\r\n\r\n plt.show()", "def _plot(self, step, rewards, losses):\n plt.figure(figsize=(20, 5))\n plt.subplot(131)\n plt.title('Total Episode Reward')\n plt.plot(rewards)\n plt.subplot(132)\n plt.title('MSE Loss')\n plt.plot(losses)\n plt.show()", "def main():\n\n if not os.path.exists( os.path.join(os.getcwd(), 'Plots') ):\n os.mkdir('Plots')\n\n # Initialise the canvas and set aesthetics\n canv = TCanvas(\"canv\", \"canv\", 800, 600)\n canv.SetLogy()\n gStyle.SetOptStat(0)\n gStyle.SetOptTitle(0)\n\n # Initialise legend and set colours\n leg_height = len(models) * 0.06 # make y-length of legend dependent on n_models\n myLeg = TLegend(0.6, 0.9 - leg_height, 0.9, 0.9)\n myLeg.SetTextSize(0.02)\n\n # Initialise histogram arrays\n nJetHist = [None] * len(models)\n jetPtHist = [None] * len(models)\n leadJetPtHist = [None] * len(models)\n metPtHist = [None] * len(models)\n dPhiJJHist = [None] * len(models)\n\n # x-axis labels for plots\n nJetLabel = \"#it{n}_{jet}\"\n jetPtLabel = \"#it{p}_{T}^{jet}\"\n leadJetPtLabel = \"#it{p}_{T}^{j_{1}}\"\n metPtLabel = \"#it{E}_{T}^{miss}\"\n dPhiJJLabel = \"#Delta#it{#phi}_{j_{1} j_{2}}\"\n\n # Initialise histograms here so I can use them later\n for i, model in enumerate(models):\n nJetHist[i] = TH1F(\"nJet\"+model, \"nJet dist \"+model, 30, 0, 29)\n jetPtHist[i] = TH1F(\"jetPt\"+model, \"Jet pT dist \"+model, 30, 0, 3000)\n leadJetPtHist[i] = TH1F(\"leadJetPt\"+model, \"Lead jet pT dist \"+model, 30, 0, 3000)\n metPtHist[i] = TH1F(\"met\"+model, \"MET dist \"+model, 30, 0, 3000)\n dPhiJJHist[i] = TH1F(\"dPhijj\"+model, \"DPhi dist \"+model, 20, -1*(pi+0.1), pi+0.1)\n \n\n # Open root files, then draw individual histograms\n for i, model in enumerate(models):\n print Fore.MAGENTA + \"Running over model {0}/{1}.\".format(i+1, len(models))\n openFile = TFile(files[i])\n tree = openFile.Get(\"Events\")\n nEntries = tree.GetEntries()\n\n # Initialise progress bar\n widgets = [Percentage(), Bar('>'), ETA()]\n pbar = ProgressBar(widgets = widgets, maxval = nEntries).start() \n\n for entry in xrange(nEntries):\n treeEntry = tree.GetEntry(entry)\n nJetHist[i].Fill(tree.nJet)\n \n for jet in xrange( len(tree.Jet_pt) ):\n jetPtHist[i].Fill(tree.Jet_pt[jet])\n\n if len(tree.Jet_pt) > 0: leadJetPtHist[i].Fill(tree.Jet_pt[0])\n metPtHist[i].Fill(tree.MET_pt)\n\n if len(tree.Jet_phi) >= 2:\n deltaPhi = tree.Jet_phi[0] - tree.Jet_phi[1]\n dPhiJJHist[i].Fill(deltaPhi) \n\n pbar.update(entry+1)\n \n pbar.finish()\n\n # Normalise histograms\n nJetHist[i].Scale(1./nEntries)\n jetPtHist[i].Scale(1./nEntries)\n leadJetPtHist[i].Scale(1./nEntries)\n metPtHist[i].Scale(1./nEntries)\n dPhiJJHist[i].Scale(1./nEntries)\n\n # Draw individual histograms and save\n drawIndivHistos(model, nJetHist[i], canv, myLeg, nJetLabel, \"nJet\", index=i)\n drawIndivHistos(model, jetPtHist[i], canv, myLeg, jetPtLabel, \"jetPT\", index=i)\n drawIndivHistos(model, leadJetPtHist[i], canv, myLeg, leadJetPtLabel, \"leadJetPT\", index=i)\n drawIndivHistos(model, 
metPtHist[i], canv, myLeg, metPtLabel, \"MET\", index=i)\n        drawIndivHistos(model, dPhiJJHist[i], canv, myLeg, dPhiJJLabel, \"dPhi\", index=i)\n        \n\n    # Draw histograms for different models overlaid\n    drawMultipleHistos(nJetHist, canv, myLeg, nJetLabel, \"nJet\")\n    drawMultipleHistos(jetPtHist, canv, myLeg, jetPtLabel, \"jetPT\")\n    drawMultipleHistos(leadJetPtHist, canv, myLeg, leadJetPtLabel, \"leadJetPT\")\n    drawMultipleHistos(metPtHist, canv, myLeg, metPtLabel, \"MET\")\n    drawMultipleHistos(dPhiJJHist, canv, myLeg, dPhiJJLabel, \"dPhi\")", "def main():\n    style.use(\"ggplot\")\n    start = datetime.datetime(2020, 1, 1)\n    end = datetime.datetime(2020, 4, 17)\n\n    create_csv(start, end)\n    data_frame = read_csv()\n    plot_data(data_frame)", "def makePlot(timeStamp):\n\n    #-------------------------------------------------------------------------\n    # Create figure and axes\n    #-------------------------------------------------------------------------\n\n    width = 12  # inches\n    height = 8  # inches\n    fig = plt.figure(figsize=(width, height))\n\n    # We'll use gridspec to create axes in a rectangular 6-by-5 lattice\n    import matplotlib.gridspec as gridspec\n    nrows = 6\n    ncols = 5\n    Grid = gridspec.GridSpec(nrows, ncols)\n\n    # axis for elevation time series\n    axElev = fig.add_subplot(Grid[:2, :2])  # first 2 rows, first 2 columns\n    # axis for slab\n    axSlab = fig.add_subplot(Grid[:2, 2:])  # first 2 rows, columns > 2\n    # and the transects\n    axTran1 = fig.add_subplot(Grid[2:4, :])  # rows 2 and 3, all columns\n    # rows 4 and 5, all columns, share x/y axis with previous (sets same ticks\n    # etc)\n    axTran2 = fig.add_subplot(Grid[4:6, :], sharex=axTran1, sharey=axTran1)\n\n    # gridspec allows tuning the spacing between plots (unit is fraction of\n    # font size)\n    boundary_pad = 3.5\n    horizontal_pad = 0.2\n    vertical_pad = 1.0\n    # figure area left,bottom,right,top in normalized coordinates [0,1]\n    bounds = [0, 0, 1, 1]\n    Grid.tight_layout(\n        fig,\n        pad=boundary_pad,\n        w_pad=horizontal_pad,\n        h_pad=vertical_pad,\n        rect=bounds)\n\n    #-------------------------------------------------------------------------\n    # Create plots\n    #-------------------------------------------------------------------------\n\n    # for all available colormaps see ( '_r' reverses the colormap )\n    # http://matplotlib.org/examples/color/colormaps_reference.html\n    colormap = plt.get_cmap('Spectral_r')\n    colormap_kine = plt.get_cmap('gist_heat')\n\n    # slab\n    salt_clim = [0, 32]\n    ncontours = 16\n    # bounding box for slab [xmin,xmax,ymin,ymax] in model x,y coordinates\n    estuarybbox = [330000, 360000, 284500, 297500]\n    dia = slabSnapshotDC(\n        clabel='Salinity',\n        unit='psu',\n        clim=salt_clim,\n        cmap=colormap)\n    dia.setAxes(axSlab)\n    dia.addSample(slabDC, timeStamp=timeStamp, plotType='contourf',\n                  bbox=estuarybbox, N=ncontours)\n    # overrides default format for colorbar floats\n    dia.showColorBar(format='%.2g')\n    #dia.addTitle('in case you want a custom title')\n    # get transect (x,y) coordinates from the transectDC\n    transectXYCoords = generateTransectFromDataContainer(transectDC_salt, 0)[4]\n    # plot transect on the map (thin black on thick white)\n    dia.addTransectMarker(transectXYCoords[:, 0], transectXYCoords[:, 1],\n                          color='w', linewidth=2.0)\n    dia.addTransectMarker(transectXYCoords[:, 0], transectXYCoords[:, 1],\n                          color='k', linewidth=1.0)\n    # plot station markers\n    for station in stationsToPlot:\n        staX = staFileObj.getX(station)\n        staY = staFileObj.getY(station)\n        dia.addStationMarker(\n            staX,\n            staY,\n            label=station,\n            printLabel=True,\n            marker='*')\n    # add text to 
plot. x,y are in normalized axis coordinates [0,1]\n dia.ax.text(0.05, 0.98, 'custom text', fontsize=fontsize,\n verticalalignment='top', horizontalalignment='left',\n transform=dia.ax.transAxes)\n\n # elevation time series\n # define the time range to plot\n elevStartTime = datetime.datetime(2012, 5, 4, 0, 0)\n elevEndTime = datetime.datetime(2012, 5, 5, 0, 15)\n elevMeanTime = elevStartTime + (elevEndTime - elevStartTime) / 2\n elevLim = [-1.5, 2.5]\n dia = timeSeriesPlotDC2(\n xlabel=elevMeanTime.strftime('%Y %b %d'),\n ylim=elevLim)\n dia.setAxes(axElev)\n #dia.addShadedRange( timeStamp, timeStamp+datetime.timedelta(seconds=30), facecolor='IndianRed')\n dia.addShadedRange(\n timeStamp,\n timeStamp,\n edgecolor='IndianRed',\n facecolor='none',\n linewidth=2)\n tag = elevDC.getMetaData('tag')\n dia.addSample(\n elevDC.timeWindow(\n elevStartTime,\n elevEndTime),\n label=tag,\n color='k')\n dia.addTitle('Elevation ({0:s}) [m]'.format(\n elevDC.getMetaData('location').upper()))\n # adjust the number of ticks in x/y axis\n dia.updateXAxis(maxticks=5)\n dia.updateYAxis(maxticks=3, prune='lower')\n\n # transects\n dia = transectSnapshotDC(\n clabel='Salinity',\n unit='psu',\n cmap=colormap,\n clim=salt_clim)\n dia.setAxes(axTran1)\n #transectDC_salt.data *= 1e-3\n dia.addSample(transectDC_salt, timeStamp, N=ncontours)\n dia.addTitle('')\n dia.showColorBar()\n # plot station markers\n for station in stationsToPlot:\n staX = staFileObj.getX(station)\n staY = staFileObj.getY(station)\n dia.addStationMarker(staX, staY, label=station, color='k',\n linewidth=1.5, linestyle='dashed')\n # do not show x axis ticks and label for this plot\n dia.hideXTicks()\n\n dia = transectSnapshotDC(clabel='TKE', unit='m2s-1', logScale=True,\n clim=[-7, -2], climIsLog=True, cmap=colormap_kine)\n dia.setAxes(axTran2)\n dia.addSample(transectDC_kine, timeStamp, N=ncontours)\n # plot station markers\n for station in stationsToPlot:\n staX = staFileObj.getX(station)\n staY = staFileObj.getY(station)\n dia.addStationMarker(staX, staY, label=station, color='k',\n linewidth=1.5, linestyle='dashed')\n dia.addTitle('')\n dia.showColorBar()\n dia.updateXAxis(maxticks=15)\n dia.updateYAxis(maxticks=6)\n\n #-------------------------------------------------------------------------\n # Save to disk\n #-------------------------------------------------------------------------\n dateStr = timeStamp.strftime('%Y-%m-%d_%H-%M')\n filename = '_'.join([imgPrefix, dateStr])\n saveFigure(\n imgDir,\n filename,\n imgFiletype,\n verbose=True,\n dpi=200,\n bbox_tight=True)\n plt.close()", "def plot_energy_temperature_history(building, temp_df, prognosis_df, out):\n heat_data = building[list(month_range(building.heating_start, building.heating_stop))]\n df = pd.merge(heat_data, temp_df[\"avg_temp\"], left_index=True, right_index=True)\n # Now to fill the dots in between if data has holes in the end\n for d in month_range(building.heating_stop, prognosis_df.index[-1]):\n if not d in df.index:\n df.loc[d] = np.nan\n # Add prognosis\n df = df.merge(right = prognosis_df, left_index = True, right_index = True, how = 'left')\n # The query results in some random column name for heat values, fix it:\n df.rename(columns={heat_data.name: 'value'}, inplace = True)\n\n # Change to datetime\n df.rename(index = lambda s: datetime.datetime.fromisoformat(s), inplace = True)\n fig = Figure()\n canvas = FigureCanvas(fig)\n ax = fig.add_subplot(111)\n # Heating actualized\n ax.plot(df[\"value\"], 'r-')\n ax.set_ylabel(\"Heat energy usage by month, KWh\")\n 
ax.legend(\"Energy\", loc=\"upper left\")\n # Prognosis\n ax.plot(df.heating, 'r:')\n # Set year formatting\n ax.xaxis.set_major_locator(mdates.YearLocator())\n ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y'))\n ax.xaxis.set_minor_locator(mdates.MonthLocator())\n\n # Temperatures\n ax2 = ax.twinx()\n fig.gca().invert_yaxis()\n ax2.plot(df[\"avg_temp\"], 'g-', alpha=0.7)\n ax2.set_ylabel(\"Average temperature, °C\")\n ax2.legend(\"Temp (inv)\")\n\n fig.tight_layout()\n fig.autofmt_xdate()\n fig.savefig(out)", "def _output_performance(self):\n self.portfolio.create_equity_curve_dataframe()\n\n print(\"Creating summary statistics...\")\n stats = self.portfolio.output_summary_stats()\n\n print(\"Creating equity curve...\")\n print(self.portfolio.equity_curve.tail(10))\n pprint.pprint(stats)\n\n print(\"Signals: %s\" % self.signals)\n print(\"Orders: %s\" % self.orders)\n print(\"Fills: %s\" % self.fills)", "def plot(dsname, wdir = './', width = 1000.0, dt = 5.0*yt.units.Myr, fields = all_fields,\n thickness = 20.0, outdir = './enrichment_plots_kpc'):\n\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n\n gal = Galaxy(dsname, wdir = wdir)\n data = gal.df\n\n @derived_field(name=\"logNO\", units=\"\")\n def _logNO(field, data):\n return np.log10(data['N_Abundance'] / data['O_Abundance'])\n gal.ds.add_field((\"gas\", \"logNO\"), function=_logNO, units=\"\")\n\n make_filtered_field(gal.ds, 'logNO', ['O_Fraction','N_Fraction'])\n make_filtered_field(gal.ds, 'O_over_H', ['O_Fraction'])\n make_filtered_field(gal.ds, 'N_over_O', ['O_Fraction','N_Fraction'])\n# def _logNO_filtered(field,data):\n# x = data[('gas','logNO')]\n#\n# f1 = data[('gas','O_Fraction')]\n# f2 = data[('gas','N_Fraction')]\n#\n# x[ (f1 < tol) + (f2 < tol)] = np.nan\n#\n# return x\n# gal.ds.add_field(('gas','logNO_filtered'), function = _logNO_filtered, units = \"\")\n\n M = data['birth_mass']\n t_o = data['creation_time'].convert_to_units('Myr')\n MS_lifetime = data[('io','particle_model_lifetime')].to('Myr')\n MS_death = t_o + MS_lifetime\n px = (data['particle_position_x'] - gal.ds.domain_center[0]).to('pc')\n py = (data['particle_position_y'] - gal.ds.domain_center[1]).to('pc')\n pz = (data['particle_position_z'] - gal.ds.domain_center[2]).to('pc')\n\n recent_death = (MS_death > gal.ds.current_time - dt) * (MS_death <= gal.ds.current_time + 0.001*yt.units.Myr)\n alive = MS_death > gal.ds.current_time + 0.001*yt.units.Myr\n\n AGB = M < 8.0\n massive_star = (M > 8.0) * (M < 25.0)\n\n boxdim = np.array([width*1.25,width*1.25,thickness])*yt.units.pc\n region = gal.ds.box(gal.ds.domain_center - boxdim*0.5, gal.ds.domain_center + boxdim*0.5)\n\n proj = yt.ProjectionPlot(gal.ds, 'z', fields,\n weight_field = 'number_density', data_source = region, width = (width,'pc'))\n\n if 'number_density' in fields:\n proj.set_unit('number_density','cm**(-3)')\n proj.set_cmap('number_density','viridis')\n proj.set_zlim('number_density',1.0E-4,200.0)\n\n if 'O_over_H_filtered' in fields:\n proj.set_cmap('O_over_H_filtered','cubehelix')\n proj.set_log('O_over_H_filtered', False)\n proj.set_zlim('O_over_H_filtered', -5, 1)\n proj.set_colorbar_label('O_over_H_filtered', r'[O/H]')\n\n if 'N_over_O_filtered' in fields:\n proj.set_cmap('N_over_O_filtered','PRGn')\n proj.set_log('N_over_O_filtered',False)\n proj.set_zlim('N_over_O_filtered',-2,2)\n proj.set_colorbar_label('N_over_O_filtered', r'[N/O]')\n\n if 'logNO' in fields:\n proj.set_cmap('logNO','PRGn')\n proj.set_log('logNO',False)\n proj.set_zlim('logNO',-2,0.5)\n 
proj.set_colorbar_label('logNO', r'log( N / O )')\n\n    if 'logNO_filtered' in fields:\n        proj.set_cmap('logNO_filtered','PRGn')\n        proj.set_log('logNO_filtered',False)\n        proj.set_zlim('logNO_filtered',-2,0.5)\n        proj.set_colorbar_label('logNO_filtered', r'log( N / O )')\n\n    if 'Temperature' in fields:\n        proj.set_cmap('Temperature', 'RdYlBu_r')\n        proj.set_log('Temperature',True)\n        proj.set_zlim('Temperature',10.0, 1.0E7)\n        proj.set_colorbar_label('Temperature', r'Temperature (K)')\n\n    if 'G_o' in fields:\n        proj.set_cmap('G_o', 'cubehelix')\n        proj.set_log('G_o', True)\n        proj.set_zlim('G_o',0.05, 100.0)\n        proj.set_colorbar_label('G_o', r'ISRF (G$_{\\rm o}$)')\n\n    if 'Q0_flux' in fields:\n        proj.set_cmap('Q0_flux', 'magma')\n        proj.set_log('Q0_flux',True)\n        proj.set_zlim('Q0_flux',1.0E-6, 1.0E-1)\n        proj.set_colorbar_label('Q0_flux', r'HI Ionizing Radiation (s$^{-1}$)')\n\n    Mstar = np.sum(gal.df['particle_mass'][ gal.df['particle_type'] == 11]).to('Msun')\n    time = gal.ds.current_time.to('Myr')\n#    proj.annotate_title(r\"Time = %1.1f Myr M$_{*}$ = %2.2E M$_{\\odot}$\"%(time.value,Mstar.value))\n    proj.set_font( {'size' : 32} )\n    proj.save(outdir + '/') # necessary\n\n\n    dt = 5.0 * yt.units.Myr\n    # buffer around image. otherwise points plotted near edge of image may run a little outside\n    # viewing area, causing weird shifts in plotting. Not sure how to control this otherwise\n    buffer = 15.0 # in pc\n    in_image = (np.abs(pz) <= boxdim[2]*0.5) *\\\n               (np.abs(px) <= (width*0.5 - buffer)) *\\\n               (np.abs(py) <= (width*0.5 - buffer))\n\n    pp = {}\n    pp['massive_star_winds'] = in_image * alive * massive_star\n    pp['AGB_winds'] = in_image * recent_death * AGB\n    pp['SN'] = in_image * recent_death * massive_star\n    #pp['other_stars'] = in_image * alive * (np.logical_not(pp['massive_star_winds']))\n\n    for k in list(proj.plots.keys()):\n        image = proj.plots[k]\n\n        #\n        # Now select and annotate the points we want\n        #\n        for s in list(pp.keys()):\n            if np.size(px[pp[s]].value) > 0:\n                print(np.size(px[pp[s]]), 'Particles in ', s, px[pp[s]], py[pp[s]])\n                image.axes.scatter(px[pp[s]].value,py[pp[s]].value, s = ps[s], marker = markers[s], color = colors[s])\n            else:\n                print('No particles in ', s)\n\n#    proj.refresh()\n#    proj.hide_axes()\n    proj.save(outdir + '/') # necessary\n\n    if 'N_over_O' in fields:\n        vmin,vmax = -2,2\n        x = proj.plots['N_over_O']\n        x.image.set_norm( MidpointNormalize(midpoint= 0.5*(vmin+vmax), vmin=vmin,vmax=vmax))\n        x.cb.set_norm(MidpointNormalize(midpoint=0.5*(vmin+vmax),vmin=vmin,vmax=vmax))\n        x.cb.update_normal(x.image)\n        x.save(outdir + '/' + str(gal.ds) + '_Projection_z_N_over_O_number_density.png')\n\n    if 'logNO' in fields:\n        vmin, vmax = -2, 0.25\n        x = proj.plots['logNO']\n        x.image.set_norm( MidpointNormalize(midpoint= 0.0, vmin=vmin,vmax=vmax))\n        x.cb.set_norm(MidpointNormalize(midpoint=0.0, vmin=vmin,vmax=vmax))\n        x.cb.update_normal(x.image)\n        x.save(outdir + '/' + str(gal.ds) + '_Projection_z_logNO_number_density.png')\n\n    del(proj)\n    del(gal)\n\n    return", "def _output_performance(self):\n        self.portfolio.create_equity_curve_dataframe()\n        \n        print(\"Creating summary statistics...\")\n        stats = self.portfolio.output_summary_stats()\n        \n        print(\"Creating equity curve...\")\n        print(self.portfolio.equity_curve.tail(10))\n        pprint.pprint(stats)\n        \n        print(\"Signals: %s\" % self.signals)\n        print(\"Orders: %s\" % self.orders)\n        print(\"Fills: %s\" % self.fills)", "def SA_data_display(opt_df, all_df):\n    fig, axs = plt.subplots(2, 3)\n\n    axs[0,0].set_title(\"Optimal rewire attempts for circularity\")\n    
axs[0,0].set_ylabel(\"Percent waste %\")\n axs[0,0].set_xlabel(\"Time (s)\")\n axs[0,0].plot(opt_df[\"Time (s)\"], opt_df[\"Percent waste (%)\"])\n\n axs[0,1].set_title(\"Optimal rewire attempts acceptance probability\")\n axs[0,1].set_ylabel(\"Acceptance Probability\")\n axs[0,1].set_xlabel(\"Time (s)\") # time??\n axs[0,1].scatter(opt_df[\"Time (s)\"], opt_df[\"Probability\"])\n\n axs[0,2].set_title(\"Optimal rewire attempts temperature decrease\")\n axs[0,2].set_ylabel(\"Temperature\")\n axs[0,2].set_xlabel(\"Time (s)\") # time??\n axs[0,2].plot(opt_df[\"Time (s)\"], opt_df[\"Temperature\"])\n\n axs[1,0].set_title(\"All rewire attempts for circularity\")\n axs[1,0].set_ylabel(\"Percent waste %\")\n axs[1,0].set_xlabel(\"Time (s)\")\n axs[1,0].plot(all_df[\"Time (s)\"], all_df[\"Percent waste (%)\"])\n\n axs[1,1].set_title(\"All rewire attempts acceptance probability\")\n axs[1,1].set_ylabel(\"Acceptance Probability\")\n axs[1,1].set_xlabel(\"Time (s)\") # time??\n axs[1,1].scatter(all_df[\"Time (s)\"], all_df[\"Probability\"])\n\n axs[1,2].set_title(\"All rewire attempts temperature decrease\")\n axs[1,2].set_ylabel(\"Temperature\")\n axs[1,2].set_xlabel(\"Time (s)\") # time??\n axs[1,2].plot(all_df[\"Time (s)\"], all_df[\"Temperature\"])\n\n return plt.show()", "def plot(self):\n\t\tself.plotOfLoopVoltage()", "def ecdf_plot(ecdf_q1, ecdf_q2, ecdf_q3, ecdf_q4, performance_measure, ecdf_parameter):\n from plotly.offline import iplot\n import plotly.graph_objs as go\n\n performance_measure = performance_measure.replace('_', ' ').capitalize()\n ecdf_parameter = ecdf_parameter.replace('_', ' ').capitalize()\n\n ecdf_1 = go.Scatter(x=ecdf_q1.x,\n y=ecdf_q1.y,\n name='0 to 25',\n mode='lines+markers',\n marker=dict(size='7', color='#0C3383'))\n ecdf_2 = go.Scatter(x=ecdf_q2.x,\n y=ecdf_q2.y,\n name='25 to 50',\n mode='lines+markers',\n marker=dict(size='7', color='#57A18F'))\n ecdf_3 = go.Scatter(x=ecdf_q3.x,\n y=ecdf_q3.y,\n name='50 to 75',\n mode='lines+markers',\n marker=dict(size='7', color='#F2A638'))\n ecdf_4 = go.Scatter(x=ecdf_q4.x,\n y=ecdf_q4.y,\n name='75 to 100 (best wells)',\n mode='lines+markers',\n marker=dict(size='7', color='#D91E1E'))\n\n data = [ecdf_1, ecdf_2, ecdf_3, ecdf_4]\n\n layout = go.Layout(height=650,\n width=650,\n title='ECDF ' + ecdf_parameter,\n titlefont=dict(size=18),\n\n xaxis=dict(title=ecdf_parameter,\n titlefont=dict(size=16),\n type=None,\n zeroline=False,\n showgrid=True,\n showline=False,\n autorange=True),\n\n yaxis=dict(title='Cumulative Probability',\n titlefont=dict(size=16),\n showgrid=True,\n showline=False,\n zeroline=False,\n tickvals=[0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1],\n range=[-0.03, 1.03]),\n\n legend=dict(x=0.65, y=0.1, font=dict(size=14)),\n margin={'l': 50, 'r': 10, 'b': 50, 't': 85})\n\n layout.update(dict(annotations=[go.Annotation(text='Quantiles: ' + performance_measure,\n x=np.max(ecdf_q4.x),\n y=0.3,\n showarrow=False,\n bgcolor='#FFFFFF',\n font=dict(size=16))]))\n\n plot = go.Figure(data=data, layout=layout)\n\n iplot(plot, show_link=False)", "def data_visualization(df):\r\n\r\n # Visualizing the target variable\r\n plt.figure(figsize=(14, 10))\r\n plt.title(\"Count of bike sharing according to dates\")\r\n plt.plot(df['dteday'], df['cnt'])\r\n #plt.show()\r\n plt.savefig(\"Raw data visualization.png\")\r\n\r\n # box plot for visualizing outliers\r\n fig=px.box(df, y=\"cnt\", notched=True,title='Box plot of the count variable')\r\n #fig.show()\r\n plt.savefig(\"Box Plot.png\")\r\n\r\n # point plot for hourly 
utilization\r\n for column in ['season', 'yr', 'mnth', 'holiday', 'weekday', 'workingday', 'weathersit']:\r\n hist = px.histogram(df, x=column, y='cnt')\r\n hist.show()\r\n plt.savefig(\"Histogram plots for each column.png\")\r\n sns.pointplot(x=df['hr'], y='cnt', data=df);\r\n plt.title(\"Hourly Utilization\")\r\n plt.ylabel(\"Bike Shares\", fontsize=12)\r\n plt.xlabel(\"Hour\", fontsize=12)\r\n plt.savefig(\"Hourly Utilization point plot.png\", dpi=300, bbox_inches='tight')\r\n\r\n # line plot for hourly utilization\r\n for c in ['holiday','season','workingday']:\r\n sns.lineplot(data=df,x='hr',y='cnt',hue=c)\r\n plt.title('Hourly plot vs count')\r\n plt.savefig(\"Hour vs count plot_main features.png\",dpi=300, bbox_inches='tight')\r\n\r\n # point plots for humidity vs count\r\n sns.pointplot(x='hum', y='cnt', data=df)\r\n plt.title(\"Amount of bike shares vs humidity\", fontsize=25)\r\n plt.xlabel(\"Humidity (%)\", fontsize=20)\r\n plt.ylabel('count of bike shares', fontsize=20)\r\n plt.locator_params(axis='x', nbins=10)\r\n plt.savefig(\"Pointplot of humidity vs count.png\",dpi=300, bbox_inches='tight')\r\n\r\n # box plots of whole df\r\n bx=px.box(df, y=\"cnt\")\r\n bx.show()\r\n\r\n # feature correlation plot\r\n corrs = abs(df.corr())\r\n sns.heatmap(corrs, annot=True)\r\n plt.title(\"Feature Correlation\")\r\n plt.savefig(\"Feature_correlation.png\", dpi=300, bbox_inches='tight')\r\n return plt", "def main():\n data_visualisation()\n write_hyper_params()\n write_result_tables()\n write_box_plots()", "def temperature_distribution(mesh_df, verbose=True, save=False, show=True):\n console.event(\"Constructing temperature distribution plot...\", verbose=verbose)\n\n t = time.time()\n font = {'size': 10}\n mpl.rc('font', **font)\n\n objects = mesh_df['object'].tolist()\n coordinates = mesh_df['coords'].tolist()\n temperatures = mesh_df['temperature'].tolist()\n conductivities = mesh_df['conductivity'].tolist()\n dT_dts = mesh_df['dT_dt'].tolist()\n\n fig1 = plt.figure(figsize=(8.0, 5.0)) # depth vs heat flux, depth vs temperature\n\n ax1 = fig1.add_subplot(111)\n ax1.plot(coordinates, dT_dts, color='r', linewidth=1.4, linestyle='--')\n ax1.set_xlabel(\"Depth (m)\")\n ax1.set_ylabel(\"Heat Flux (degK/s)\")\n ax1.tick_params('y', colors='r')\n\n ax2 = ax1.twinx()\n ax2.plot(coordinates, temperatures, color='b', linewidth=2, linestyle='-')\n ax2.set_ylabel(\"Temperature (degK)\")\n ax2.tick_params('y', colors='b')\n\n fig2 = plt.figure(figsize=(8.0, 5.0)) # depth vs heat flux, depth vs thermal conductivity\n\n ax3 = fig2.add_subplot(111)\n ax3.plot(coordinates, dT_dts, color='r', linewidth=1.4, linestyle='--')\n ax3.set_xlabel('Depth (m)')\n ax3.set_ylabel('Heat Flux (degK/s)')\n ax3.tick_params('y', colors='r')\n\n ax4 = ax3.twinx()\n ax4.plot(coordinates, conductivities, color='m', linewidth=2, linestyle='-')\n ax4.set_ylabel('Thermal Conductivity')\n ax4.tick_params('y', colors='m')\n\n object_dict = {}\n for index, object in enumerate(objects):\n if object.lower() != 'boundary':\n if object not in object_dict.keys():\n object_dict.update({object: [coordinates[index]]})\n else:\n object_dict[object].append(coordinates[index])\n for object in object_dict.keys():\n min_coord = min(object_dict[object])\n max_coord = max(object_dict[object])\n color = np.random.rand(3, )\n ax1.axvspan(xmin=min_coord, xmax=max_coord, color=color, alpha=0.2, label=str(object))\n ax3.axvspan(xmin=min_coord, xmax=max_coord, color=color, alpha=0.2, label=str(object))\n\n ax1.set_title(\"Temperature Distribution 
Over Depth\")\n ax3.set_title(\"Thermal Conductivity Over Depth\")\n ax1.grid()\n ax1.legend(loc='lower left')\n ax3.grid()\n ax3.legend(loc='lower left')\n\n console.event(\"Finished constructing temperature distribution plot! (task took {}s)\".format(\n time.time() - t), verbose=verbose)\n\n if show is True:\n plt.show()\n if save is True:\n fig1.tight_layout()\n fig2.tight_layout()\n fig1_name = \"temp_distrib_fig1.png\"\n fig2_name = \"temp_distrib_fig2.png\"\n if fig1_name in os.listdir(os.getcwd()):\n os.remove(fig1_name)\n if fig2_name in os.listdir(os.getcwd()):\n os.remove(fig2_name)\n fig1.savefig(fig1_name, format='png')\n fig2.savefig(fig2_name, format='png')", "def make_timeplot(df_measure, df_prediction):\n # mode = 'confirmed'\n mode = 'active'\n df_measure_confirmed = df_measure[mode]\n colors = px.colors.qualitative.Dark24\n n_colors = len(colors)\n fig = go.Figure()\n for i, country in enumerate(df_measure_confirmed.columns):\n fig.add_trace(go.Scatter(x=df_measure_confirmed.index, \n y=df_measure_confirmed[country],\n name=country[1], mode='markers+lines',\n marker_color=colors[i%n_colors],\n line_color=colors[i%n_colors],\n visible=False))\n for i, country in enumerate(df_prediction.columns):\n fig.add_trace(go.Scatter(x=df_prediction.index, \n y=df_prediction[country],\n name='+' + country[1], mode='lines',\n line_dash='dash',\n line_color=colors[i%n_colors],\n showlegend=False,\n visible=False))\n\n last_day = df_measure_confirmed.index.max()\n day = pd.DateOffset(days=1)\n fig.update_layout(title='',\n xaxis=dict(rangeslider_visible=True,\n range=(last_day - 10 * day,\n last_day + 4 * day)))\n fig.update_layout(\n updatemenus=[\n dict(\n type = \"buttons\",\n direction = \"left\",\n buttons=list([\n dict(\n args=[{\"visible\": [False,]*len(df_measure_confirmed.columns)}],\n label=\"Reset\",\n method=\"update\",\n ),\n dict(\n args=[\"yaxis\", {'type':'log'}],\n label=\"log\",\n method=\"relayout\",\n ),\n dict(\n args=[\"yaxis\", {'type':'linear'}],\n label=\"lin\",\n method=\"relayout\",\n ),\n\n ]),\n pad={\"r\": 10, \"t\": 10, \"b\":5},\n showactive=True,\n x=0.05,\n xanchor=\"left\",\n y=1.35,\n yanchor=\"top\",\n font_color='black',\n ),\n ],\n height=.9*FIRST_LINE_HEIGHT,\n)\n\n return fig", "def main():\n import argparse\n import os\n parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n parser.add_argument('--dirs', help='List of log directories', nargs='*', default=['./log'])\n parser.add_argument('--num_timesteps', type=int, default=int(10e6))\n parser.add_argument('--xaxis', help='Varible on X-axis', default=X_TIMESTEPS)\n parser.add_argument('--task_name', help='Title of plot', default='Breakout')\n args = parser.parse_args()\n args.dirs = [os.path.abspath(folder) for folder in args.dirs]\n plot_results(args.dirs, args.num_timesteps, args.xaxis, args.task_name)\n plt.show()", "def run_and_plot(self):\n self.raw_processing()\n self.data_averaging_and_cleaning()\n\n print(self.organized_names)\n print(self.organized_film)\n print(self.organized_plank)\n\n height = self.organized_film\n bars = tuple(self.organized_names.copy())\n y_pos = np.arange(len(bars))\n\n plt.bar(y_pos, height)\n plt.xticks(y_pos, bars)\n plt.xlabel('TH% in 100ul water/TH mixture')\n plt.ylabel('CFU/mL count')\n plt.title('Experiment 2.5 (Sucrose Concentration) 7 Aug 2018')\n\n plt.show()\n\n height2 = self.organized_plank\n\n plt.bar(y_pos, height2)\n plt.xticks(y_pos, bars)\n plt.xlabel('TH% in 100ul water/TH mixture')\n plt.ylabel('Proportion of 
Biofilm CFUs to Planktonic CFUs')\n plt.title('Experiment 2.5 (Sucrose Concentration) 7 Aug 2018')\n\n plt.show()", "def write_plot(self):\n with open(self._graph_data_path, \"w+\") as f:\n run_time = self.start_time\n f.write(\"Time, Temperature\\n\")\n temperature = 0\n for step in self.profile[\"steps\"]:\n keys = list(step)\n if len(keys) > 0:\n if keys[0] == \"start\":\n temperature = step[\"start\"]\n if keys[0] == \"rest\":\n run_time += timedelta(minutes = step[\"rest\"])\n if keys[0] == \"ramp\":\n run_time += timedelta(minutes = step[\"ramp\"])\n temperature = step[\"to\"]\n if keys[0] == \"mashout\":\n temperature = step[\"mashout\"]\n time = run_time.strftime(\"%H:%M:%S, \")\n f.write(time + str(temperature) + \"\\n\")\n run_time += timedelta(minutes = 10)\n if keys[0] == \"jump\":\n temperature = step[\"jump\"]\n\n time = run_time.strftime(\"%H:%M:%S, \")\n f.write(time + str(temperature) + \"\\n\")\n else:\n logger.error(\"Can't make sense of \" + str(step))", "def energy_kde_paperplot(fields,df):\n plt.figure()\n i = 0\n colorList = ['dodgerblue','tomato']\n lw = 2\n\n meanE_2 = []\n meanE_3 = []\n mup = np.min(df['energy [eV]']) - pp.mu\n chi_0 = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '2_' + \"E_{:.1e}.npy\".format(fields[0]))\n g_en_axis, _, _, _, _, _, _, _, _, _, _, _, _, _ = \\\n occupation_plotter.occupation_v_energy_sep(chi_0, df['energy [eV]'].values, df)\n plt.plot(g_en_axis - np.min(df['energy [eV]']), np.zeros(len(g_en_axis)), '-', color='black', lineWidth=lw,label='Equilibrium')\n\n for ee in fields:\n chi_2_i = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '2_' + \"E_{:.1e}.npy\".format(ee))\n # meanE_2 = utilities.mean_energy(chi_2_i,df)\n g_en_axis, g_ftot, g_chiax, g_f0ax, _, _, _, _, _, _, _, _,_,_ = \\\n occupation_plotter.occupation_v_energy_sep(chi_2_i, df['energy [eV]'].values, df)\n plt.plot(g_en_axis - np.min(df['energy [eV]']), g_chiax,'--',color = colorList[i],lineWidth=lw,label=r'Low Field {:.0f} '.format(ee/100)+r'$V \\, cm^{-1}$')\n print(integrate.trapz(g_chiax,g_en_axis))\n\n # plt.plot(meanE_2-np.min(df['energy [eV]']),0,'.')\n chi_3_i = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '3_' + \"E_{:.1e}.npy\".format(ee))\n g_en_axis, g_ftot, g_chiax, g_f0ax, _, _, _, _, _, _, _, _,_,_ = \\\n occupation_plotter.occupation_v_energy_sep(chi_3_i, df['energy [eV]'].values, df)\n plt.plot(g_en_axis - np.min(df['energy [eV]']), g_chiax,color = colorList[i],lineWidth=lw,label=r'Full Drift {:.0f} '.format(ee/100)+r'$V \\, cm^{-1}$')\n print(integrate.trapz(g_chiax,g_en_axis))\n\n i = i + 1\n # plt.plot(g_en_axis - np.min(df['energy [eV]']), g_f0ax, '--', color='black', lineWidth=lw,label=r'$f_0$')\n\n plt.legend()\n # plt.ylim([-0.02, 0.015])\n plt.xlabel(r'Energy above CBM ($eV$)')\n plt.ylabel(r'Deviational occupation $\\delta f_{\\mathbf{k}}$ (norm.)')\n # plt.ylabel(r'$\\delta f_{\\mathbf{k}}/f_{\\mathbf{k}}^0$')\n plt.savefig(pp.figureLoc+'energy_KDE.png', bbox_inches='tight',dpi=600)\n\n plt.figure()\n plt.plot(g_en_axis,g_chiax)\n\n plt.figure()\n Z, xedges, yedges = np.histogram2d(df['kx [1/A]']*chi_3_i,df['ky [1/A]']*chi_3_i)\n plt.pcolormesh(xedges, yedges, Z.T)\n\n from scipy.stats.kde import gaussian_kde\n g_inds,_,_ = utilities.gaas_split_valleys(df,False)\n g_df = df.loc[g_inds]\n\n x = g_df['kx [1/A]']*(chi_3_i[g_inds]+g_df['k_FD'])\n y = g_df['ky [1/A]']*(chi_3_i[g_inds]+g_df['k_FD'])\n\n # y = g_df['energy [eV]']*(chi_3_i[g_inds]+g_df['k_FD'])\n k = gaussian_kde(np.vstack([x, y]))\n xi, yi = np.mgrid[x.min():x.max():x.size ** 0.5 * 1j, 
y.min():y.max():y.size ** 0.5 * 1j]\n zi = k(np.vstack([xi.flatten(), yi.flatten()]))\n\n fig = plt.figure(figsize=(7, 8))\n ax1 = fig.add_subplot(211)\n ax2 = fig.add_subplot(212)\n\n # alpha=0.5 will make the plots semitransparent\n ax1.pcolormesh(xi, yi, zi.reshape(xi.shape), alpha=0.5)\n ax2.contourf(xi, yi, zi.reshape(xi.shape), alpha=0.5)\n\n ax1.set_xlim(x.min(), x.max())\n ax1.set_ylim(y.min(), y.max())\n ax2.set_xlim(x.min(), x.max())\n ax2.set_ylim(y.min(), y.max())", "def exp_summary(habitat,temperature,species):\n plt.subplot(2,2,1)\n niches(species)\n plt.subplot(2,2,2)\n environment(habitat,temperature)\n plt.subplot(2,2,3)\n show_matrix(habitat,\"habitat\")\n plt.subplot(2,2,4)\n show_matrix(temperature,\"temperature\")", "def generate(self):\n\n # Load the required datapoints into memory.\n self._load_results()\n\n # Calculate datapoints statistics, like min. and max. values.\n self._calc_stats()\n\n # Generate the plots.\n self._generate_scatter_plots()\n self._generate_histograms()\n\n # Put together the final HTML report.\n self._generate_report()", "def simulate(self):\n self.fig = plt.figure(figsize=(15,15), dpi=80)\n self.cplot(self.fig, 0 , 1)\n \n #Save the configuration at the defined times\n self.metro_monte_carlo(save_conf=True)\n #cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])\n #fig.colorbar(self.ax, cax=cbar_ax)\n \n plt.savefig('config.pdf')", "def generatePlot (self, Xdata_exp, Xdata_model, Ydata_exp, Ydata_model, Component_name):\n \n #self.clear_results_directory(results_dir)\n \n XaxisLabel = 'TCD Conversion [%]'\n YaxisLabel = 'Product Yield [wt %]'\n \n self.drawplot(XaxisLabel, YaxisLabel, Xdata_exp, Xdata_model, Ydata_exp, Ydata_model, Component_name)", "def plotLJEnergy(self, phys, forces, step):\r\n self.plotQuantity(step, phys.app.energies.getTable(1), 'ljenergy')", "def plot(self):\n t = np.linspace(0, self.days, self.days + 1)\n fig, (ax1, ax2, ax3, ax4, ax5) = plt.subplots(nrows=5, sharex='all')\n ax1.plot(t, self.S, label=\"Susceptible\", color='r')\n ax1.set_ylabel(\"Number of Susceptible People\")\n ax1.set_title(\"Strong Infecitous Model SEIRV Simulation\")\n ax3.plot(t, self.I, label=\"Active Cases\", color='b')\n ax3.set_ylabel(\"Active Cases\")\n ax2.plot(t, self.E, label=\"Exposed\", color='c')\n ax2.set_ylabel(\"# of Exposed\")\n ax4.plot(t, self.R, label=\"Recovered\", color='m')\n ax5.set_xlabel(\"Days\")\n ax4.set_ylabel('Number of Recovered')\n ax5.plot(t, self.V, label=\"Vaccinated\")\n ax5.set_ylabel(\"# Vaccinated\")\n ax1.legend()\n ax2.legend()\n ax3.legend()\n ax4.legend()\n plt.show()\n return fig", "def exposure_plots(self, energy=1000.):\n cfg = configuration.Configuration(os.path.expandvars('.'), quiet=True);\n exp = cfg.irfs.exposure(0, energy) \n hf = hpm.HPskyfun('front-1000 exp', exp, 64);\n expf = hf.getcol()\n emeanf = expf.mean()\n euw=hpm.HParray('FRONT exposure @ {} MeV / {:.2e}'.format(energy, emeanf), expf/emeanf)\n fig,ax=plt.subplots(figsize=(12,6))\n euw.plot(axes=ax,vmin=0.80,vmax=1.20, title=euw.name, \n cmap=plt.get_cmap('coolwarm')).grid(color='grey');\n\n return fig", "def test():\n data1 = resources_vs_time(0.0, 50)\n data2 = resources_vs_time(1.0, 10)\n data3 = resources_vs_time(2.0, 10)\n data4 = resources_vs_time(0.5, 10)\n print data1\n simpleplot.plot_lines(\"Growth\", 600, 600, \"time\", \"total resources\", [data1])", "def plot_limit(bolo_name, list_mass, analysis_type, exposure, detector_mass = 0.6):\n\n d_graph = {}\n list_color = [kOrange-8, kGreen+2, kBlue-7, kRed, kBlack, kMagenta, 
kAzure+10, kGreen-3, kOrange-9]\n\n for index, heat_fraction in enumerate([\"0.3\",\"0.4\",\"0.5\",\"0.8\",\"1\"]):\n d_graph[heat_fraction] = get_simulated_event_limit(bolo_name, list_mass, analysis_type, \"_\" + heat_fraction, exposure, detector_mass = 0.6)\n d_graph[heat_fraction].SetName(heat_fraction)\n PyRPl.process_TGraph(d_graph[heat_fraction], color = list_color[index])\n\n gr_edw_poisson = get_limit_graph(\"./Text_files/edw3_ana_1.5_0_5_poisson.txt\", 2, kBlack)\n gr_edw_low = get_limit_graph(\"./Text_files/Published_limits/edw_lowmass_2012.txt\", 2, kRed)\n gr_edw_low.SetLineStyle(7)\n gr_cdms = get_limit_graph(\"./Text_files/Published_limits/cdms_limit.txt\", 2, kBlue)\n\n h = TH1F(\"h\", \"\", 100, 3,25)\n PyRPl.process_TH1(h, X_title = \"Mass (GeV)\", Y_title = \"#sigma (pb)\", X_title_size = .06, Y_title_size = .06, X_title_offset = .98, Y_title_offset = .95)\n\n\n gr_edw_low.SetName(\"gr_edw_low\")\n gr_edw_poisson.SetName(\"gr_edw_poisson\")\n gr_cdms.SetName(\"gr_cdms\")\n\n cc = TCanvas(\"cc\", \"cc\")\n gPad.SetLogy()\n gPad.SetLogx()\n h.SetMaximum(1E-1)\n h.SetMinimum(4E-8)\n h.Draw()\n\n gr_cdms.Draw(\"sameC\")\n gr_edw_poisson.Draw(\"sameC\")\n gr_edw_low.Draw(\"sameC\")\n\n for index, heat_fraction in enumerate([\"0.3\",\"0.4\",\"0.5\",\"0.8\",\"1\"]):\n d_graph[heat_fraction].Draw(\"sameC\")\n\n leg =TLegend(0.564,0.584,0.83,0.857)\n leg.AddEntry(\"gr_cdms\", \"SCDMS\" , \"l\")\n leg.AddEntry(\"gr_edw_low\", \"EDW II\" , \"l\")\n leg.AddEntry(\"gr_edw_poisson\", \"EDW III Poisson\" , \"l\")\n for index, heat_fraction in enumerate([\"0.3\",\"0.4\",\"0.5\",\"0.8\",\"1\"]):\n leg.AddEntry( d_graph[heat_fraction].GetName(), heat_fraction , \"l\")\n\n leg.SetFillColor(kWhite)\n leg.SetLineColor(kWhite)\n leg.Draw()\n raw_input()", "def showPlot4():\n overall_data = []\n per_to_clean = [round(x * 0.1,1) for x in range(0,10)]\n number_of_robots = list(range(1,6))\n for per in per_to_clean:\n proc_sim_data = []\n for item in number_of_robots:\n len_sim_data = []\n raw_sim_data = runSimulation(item, 1.0, 25, 25, per, 10, Robot, False)\n for mes in raw_sim_data:\n len_sim_data.append(len(mes))\n proc_sim_data.append(sum(len_sim_data)/len(len_sim_data))\n overall_data.append(proc_sim_data)\n plot(per_to_clean, overall_data)\n title('cleaning time vs. 
percentage cleaned')\n xlabel('percentage clean')\n ylabel('mean time (clocks)')\n show()", "def plot_global(type):\n click.echo(click.style(\n \"Generating Plot....\", fg='cyan', bold='true'))\n plot_time_series.TimeSeriesPloTs.plot_global(type)\n click.echo(click.style(\n \"Done....\", fg='green', bold='true'))", "def make_plot(solution, t, plot_Ts, plot_T1, plot_T2, xaxis, cc, delta_cc, albedo,delta_albedo\\\n , em1, delta_em1, em2, delta_em2):\n\n plt.close('all')\n fig = plt.figure()\n ax1 = fig.add_subplot(111)\n \n if xaxis == 'cloud cover':\n inc_cc = []\n for i in range(len(solution[0,:])):\n inc_cc.append(cc + (i*delta_cc)/calcs_per_timestep)\n\n if plot_Ts == 'On': ax1.plot(inc_cc,solution[0,:],label = 'Surface temperature')\n if plot_T1 == 'On': ax1.plot(inc_cc,solution[1,:], label = 'Lower atmospheric temperature')\n if plot_T2 == 'On': ax1.plot(inc_cc,solution[2,:], label = 'Upper atmospheric temperature')\n if plot_Ts == 'Off' and plot_T1 == 'Off' and plot_T2 == 'Off': raise ValueError('No y variable selected')\n\n elif xaxis == 'time':\n \n #for i in range(len(solution[0,:])):\n #t.append(i*(timestep/calcs_per_timestep))\n \n if plot_Ts == 'On': ax1.plot(t,solution[0,:],label = 'Surface temperature')\n if plot_T1 == 'On': ax1.plot(t,solution[1,:], label = 'Lower atmospheric temperature')\n if plot_T2 == 'On': ax1.plot(t,solution[2,:], label = 'Upper atmospheric temperature')\n if plot_Ts == 'Off' and plot_T1 == 'Off' and plot_T2 == 'Off': raise ValueError('No y variable selected')\n \n elif xaxis == 'albedo':\n inc_alb = []\n for i in range(len(solution[0,:])):\n inc_alb.append(albedo+(i*delta_albedo)/calcs_per_timestep)\n \n if plot_Ts == 'On': ax1.plot(inc_alb,solution[0,:],label = 'Surface temperature')\n if plot_T1 == 'On': ax1.plot(inc_alb,solution[1,:], label = 'Lower atmospheric temperature')\n if plot_T2 == 'On': ax1.plot(inc_alb,solution[2,:], label = 'Upper atmospheric temperature')\n if plot_Ts == 'Off' and plot_T1 == 'Off' and plot_T2 == 'Off': raise ValueError('No y variable selected')\n \n elif xaxis == 'epsilon1':\n inc_em = []\n for i in range(len(solution[0,:])):\n inc_em.append(em1+(i*delta_em1)/calcs_per_timestep)\n \n if plot_Ts == 'On': ax1.plot(inc_em,solution[0,:],label = 'Surface temperature')\n if plot_T1 == 'On': ax1.plot(inc_em,solution[1,:], label = 'Lower atmospheric temperature')\n if plot_T2 == 'On': ax1.plot(inc_em,solution[2,:], label = 'Upper atmospheric temperature')\n if plot_Ts == 'Off' and plot_T1 == 'Off' and plot_T2 == 'Off': raise ValueError('No y variable selected')\n \n elif xaxis == 'epsilon2':\n inc_em = []\n for i in range(len(solution[0,:])):\n inc_em.append(em2+(i*delta_em2)/calcs_per_timestep)\n \n if plot_Ts == 'On': ax1.plot(inc_em,solution[0,:],label = 'Surface temperature')\n if plot_T1 == 'On': ax1.plot(inc_em,solution[1,:], label = 'Lower atmospheric temperature')\n if plot_T2 == 'On': ax1.plot(inc_em,solution[2,:], label = 'Upper atmospheric temperature')\n if plot_Ts == 'Off' and plot_T1 == 'Off' and plot_T2 == 'Off': raise ValueError('No y variable selected')\n \n else: raise ValueError('No x axis selected')\n \n fig.suptitle('Global Average Temperature')\n ax1.set_title(f'Final Surface Temperature = {round(solution[0,-1],2)} K')\n ax1.legend()\n\n if xaxis == 'cloud cover': ax1.set_xlabel('Cloud Cover (%)')\n elif xaxis == 'time': ax1.set_xlabel('Time (years)')\n elif xaxis == 'albedo': ax1.set_xlabel('Albedo')\n elif xaxis == 'epsilon1': ax1.set_xlabel(u'\\u03B5\\u2081')\n elif xaxis == 'epsilon2': 
ax1.set_xlabel(u'\\u03B5\\u2082')\n plt.ylabel('Temerature (K)')\n return fig", "def main():\n # Data processing and parsing\n mp_df = pd.read_csv(\"melting_points.csv\")\n bp_df = pd.read_csv(\"boiling_points.csv\")\n data_processing(mp_df)\n data_processing(bp_df)\n # Plots scatterplots of our variables with the boiling and melting point\n plot_scatter('Mass', 'T_exp', mp_df, 'Molecular Weight', 'Melting Point')\n plot_scatter('Mass', 'T_exp', bp_df, 'Molecular Weight', 'Boiling Point')\n plot_scatter('Atom_counts', 'T_exp', mp_df, 'Number of Atoms',\n 'Melting Point')\n \n # Creates models and prints statistics using simply mass and atom counts\n simple_parameters = ['Mass', 'Atom_counts', 'T_exp']\n model = ml_df(mp_df, simple_parameters, 0.2)\n plot_T(model, simple_parameters, \"simple_melting.png\", 'Melting Point')\n slope_average(mp_df, simple_parameters, 0.2, 100)\n model2 = ml_df(bp_df, simple_parameters, 0.2)\n plot_T(model2, simple_parameters, 'simple_boiling.png', 'Boiling Point')\n slope_average(bp_df, simple_parameters, 0.2, 100)\n # Creates models and prints statistics using more variables\n complex_parameters = [\"Mass\", \"Atom_counts\", \"C\", \"H\", \"Acid\", \"Alcohol\",\n \"Unsaturation\", \"T_exp\"]\n model3 = ml_df(mp_df, complex_parameters, 0.2)\n plot_T(model3, complex_parameters, \"complex_melting.png\", 'Melting Point')\n slope_average(mp_df, complex_parameters, 0.2, 100)\n model4 = ml_df(bp_df, complex_parameters, 0.2)\n plot_T(model4, complex_parameters, \"complex_boiling.png\", 'Boiling Point')\n slope_average(bp_df, complex_parameters, 0.2, 100)\n # Creates models and prints statistics for our model using all available variables\n complex_parameters2 = [\"Mass\", \"Atom_counts\", \"C\", \"H\", 'O', 'N', 'F',\n 'Cl', 'Br', 'I', 'S', 'Si', 'Halide', \"Acid\", \"Alcohol\",\n \"Unsaturation\", \"T_exp\"]\n model = ml_df(mp_df, complex_parameters2, 0.2)\n plot_T(model, complex_parameters2, \"more_complex_melting.png\",\n 'Melting Point')\n slope_average(mp_df, complex_parameters2, 0.2, 100)\n # Tries a few different regressor models\n slope_average(mp_df, complex_parameters2, 0.2, 100, LinearRegression())\n slope_average(bp_df, complex_parameters2, 0.2, 100, RandomForestRegressor())", "def plot_1():\n p_files = []\n filename = \"energy_data_2D_80\"\n for file in sorted(os.listdir(folder)):\n if file.startswith(filename):\n p_files.append(os.path.join(folder,file))\n T_list = []\n fig, ax = plt.subplots()\n for p_file in p_files[3::3]:\n T = (os.path.splitext(os.path.basename(p_file))[0]).split('_',4)[4]\n #print(T)\n E = []\n t = []\n if (T not in T_list):\n T_list.append(T)\n with open(p_file) as csvfile:\n lines = csv.reader(csvfile, delimiter=' ')\n sweep = 0\n for row in lines:\n E.append(float(row[0]))\n t.append(sweep)\n sweep += 1\n ax.plot(t[0:200], E[0:200],label=\"T = \"+format(T[0:3]))\n ax.set_title(\"Energy per bond vs Time\")\n ax.set_ylabel(\"e / J\")\n ax.set_xlabel(\"t / sweeps\")\n ax.legend()\n\n fig.savefig(folder2+\"energy_vs_time.png\")\n fig.savefig(texfolder+\"energy_vs_time.pdf\")", "def dashboard():", "def mc_energyplot(energy_array):\n \n\n plt.plot(energy_array, \"r-\", label=\"energy\")\n\n plt.xlabel(\"No. 
of steps\")\n plt.ylabel(\"Total Energy (kJ/mol)\")\n \n plt.title(\"Total energy vs steps\")\n plt.legend(loc=1, fontsize= 'x-large')\n plt.show()", "def plot_visual_abstract():\n # Which generations to plot\n GENERATIONS = [100, 230, 350]\n\n # LunarLander CMA-ES\n experiment_path = glob(\"experiments/wann_LunarLander-v2_CMAES*\")\n assert len(experiment_path) == 1, \"There should be only one CMA-ES experiment with LunarLander-v2\"\n experiment_path = experiment_path[0]\n\n pivector_paths = glob(os.path.join(experiment_path, \"pivectors\", \"*\"))\n\n tsnes = []\n rewards = []\n for generation in GENERATIONS:\n # Find pivector files for specific generation, load them and store points\n generation_paths = [path for path in pivector_paths if \"gen_{}_\".format(generation) in path]\n\n population = [np.load(path) for path in generation_paths]\n population_tsnes = np.array([x[\"tsne\"] for x in population])\n population_rewards = np.array([x[\"average_episodic_reward\"] for x in population])\n tsnes.append(population_tsnes)\n rewards.append(population_rewards)\n\n figure, axs = pyplot.subplots(\n figsize=[2.5 * 3, 2.5],\n nrows=1,\n ncols=len(GENERATIONS),\n sharex=\"all\",\n sharey=\"all\"\n )\n\n min_reward = min(x.min() for x in rewards)\n max_reward = max(x.max() for x in rewards)\n scatter = None\n\n for idx in range(len(GENERATIONS)):\n population_tsne = tsnes[idx]\n population_rewards = rewards[idx]\n generation = GENERATIONS[idx]\n ax = axs[idx]\n\n scatter = ax.scatter(\n population_tsne[:, 0],\n population_tsne[:, 1],\n c=population_rewards,\n vmin=min_reward,\n vmax=max_reward,\n cmap=\"plasma\"\n )\n ax.set_title(\"Generation {}\".format(generation))\n ax.set_xticks([])\n ax.set_yticks([])\n ax.axis(\"off\")\n\n # Making room for colorbar\n # Stackoverflow #13784201\n figure.subplots_adjust(right=1.0)\n cbar = figure.colorbar(scatter)\n cbar.set_ticks([])\n cbar.ax.set_ylabel(\"Reward $\\\\rightarrow$\", rotation=90, fontsize=\"large\")\n\n figure.tight_layout()\n figure.savefig(\"figures/visual_abstract.pdf\", bbox_inches=\"tight\", pad_inches=0.05)", "def run_simulation(random_seed=None, workers_count=[1, 1, [1, 1]]):\n if random_seed:\n random.seed(random_seed)\n\n metrics = OrderedDict()\n plot_data = {}\n\n env = Canteen(workers_count)\n\n env.process(source(env))\n env.run(until=SIMULATION_DURATION)\n\n places = [env.places[PlaceName.HOT], env.places[PlaceName.COLD]]\n cash_desks = env.cash_desks\n\n height = 3\n width = max(2, len(cash_desks))\n plot_data['size'] = [height, width]\n\n plot_data['data'] = []\n for i, place in enumerate(places + cash_desks):\n\n if not place.data:\n continue\n\n x, y = np.array(place.data).transpose()\n plot_data['data'].append([\n i+1 if i < 2 else width+i-1,\n repr(place).strip('<>'),\n x, y\n ])\n\n for place in places[:2] + cash_desks:\n max_time, mean_time = max_and_mean_time(place.time_list)\n\n data = np.array(place.data)\n mean_clients = sum(data[:, 1])/len(data[:, 1])\n max_clients = max(data[:, 1])\n\n metrics[place] = [mean_time, max_time, mean_clients, max_clients]\n\n cumulative = get_cumulative_proportional_time(Client.client_list)\n metrics['Cumulative proportional time'] = cumulative\n\n x, y = np.array(env.client_count_list).transpose()\n plot_data['data'].append([\n width * 2 + 1,\n 'Total',\n x, y\n ])\n\n return plot_data, metrics", "def 
main():\n\t#print(scipy.__version__)\n\t#image()\n\t#heat_capacity2()\n\t#hist()\n\t#single_plot()\n\n\t#heat_capacity2()\n\t#single_plot()\n\t#plt.show()\n\t#u0_tc()\n\t#multi_heat_capacity(\"HL_DM_flux5\",True)\n\t#multi_heat_capacity2()\n\t#plot_spin()\n\t#plt.show()\n\theat_capacity2(1,2)\n\t#hist()\n\tplt.show()\n\t#potential()\n\t#plt.show()\n\t#heat_capacity(3,4)\n\t#heat_capacity(5,6)\n\t#heat_capacity(7,8)\n\t#final_spins()\n\t#plot_spin()\n\t#plot_from_csv()\n\t#difference_plot()", "def plot_pretty():\n\n ts, ys, lin_model, K, us, dt_control, biass, end_time = simulate()\n plt.style.use('seaborn-deep')\n\n black = '#2B2B2D'\n red = '#E90039'\n orange = '#FF1800'\n white = '#FFFFFF'\n yellow = '#FF9900'\n\n plt.figure(figsize=(12.8, 9.6))\n plt.rcParams.update({'font.size': 16, 'text.color': white, 'axes.labelcolor': white,\n 'axes.edgecolor': white, 'xtick.color': white, 'ytick.color': white})\n\n plt.gcf().set_facecolor(black)\n\n plt.subplot(2, 3, 1)\n plt.plot(ts, ys[:, 2], color=orange)\n plt.axhline(lin_model.yd2n(K.ysp)[1], color=white)\n plt.title(r'$C_{FA}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 2)\n plt.plot(ts, ys[:, 0], color=orange)\n plt.axhline(lin_model.yd2n(K.ysp)[0], color=white)\n plt.title(r'$C_{G}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 3)\n plt.plot(ts, ys[:, 3], color=orange)\n plt.title(r'$C_{E}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 4)\n plt.plot(ts, us[:, lin_model.inputs[1]], color=red)\n plt.title(r'$F_{m, in}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 5)\n plt.plot(ts, us[:, lin_model.inputs[0]], color=red)\n plt.title(r'$F_{G, in}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 6)\n plt.plot(\n numpy.arange(dt_control, end_time, dt_control),\n biass[:, 1],\n color=red\n )\n plt.plot(\n numpy.arange(dt_control, end_time, dt_control),\n biass[:, 0],\n color=yellow\n )\n plt.legend([r'$C_{FA}$', r'$C_G$'], facecolor=black)\n plt.title('bias')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n # plt.suptitle('Closedloop bioreactor without noise')\n plt.tight_layout(rect=[0, 0.03, 1, 0.95])\n plt.savefig('no_noise_pretty.png', transparent=True)\n plt.show()", "def visualize(self):\n NUM_AFFINITY = 4\n NUM_WILL = 7\n\n # Colors for the tasks and categories\n COLORS = d3['Category20c'][20] + d3['Category20b'][20]\n COLORS_CAT = d3['Category20'][20]\n COLORS_AFFINITY = brewer['Greens'][NUM_AFFINITY]\n COLORS_WILL = brewer['RdBu'][NUM_WILL]\n\n # Date range for the figure title\n start_str = c.START.strftime(\"%A %m/%d/%y\")\n end_str = c.END.strftime(\"%A %m/%d/%y\")\n\n # Day of week range for the x axis\n start_weekday_str = c.START.strftime(\"%a\")\n end_weekday_str = c.END.strftime(\"%a\")\n\n times, tasks = self.array.nonzero()\n day_start = tutil.DAY_START\n hours = (times % tutil.SLOTS_PER_DAY) / tutil.SLOTS_PER_HOUR\n bottom = day_start + hours\n top = bottom + (0.95 / tutil.SLOTS_PER_HOUR)\n left = np.floor(times / tutil.SLOTS_PER_DAY)\n right = left + 0.75\n chunk_min = [self.task_chunk_min[j] for j in tasks]\n chunk_max = [self.task_chunk_max[j] for j in tasks]\n affinity_cog_task = [self.task_cognitive_load[j] for j in tasks]\n affinity_cog_slot = [c.AFFINITY_COGNITIVE[i] for i in times]\n affinity_cognitive = (np.array(affinity_cog_task) * np.array(\n affinity_cog_slot)).tolist()\n willpower_task = [self.task_willpower_load[j] for j in 
tasks]\n willpower_cumulative = np.cumsum(willpower_task)\n duration = [self.task_duration[j] for j in tasks]\n duration_realized = [self.task_duration_realized[j] for j in tasks]\n task_names = [self.task_names[j] for j in tasks]\n category_ids = [[l for l, j in enumerate(array) if j != 0] for array in\n [self.task_category[j, :] for j in tasks]]\n category = [\", \".join(\n [self.cat_names[l] for l, j in enumerate(array) if j != 0]) for\n array in [self.task_category[j, :] for j in tasks]]\n data_tooltips = dict(\n chunk_min=chunk_min,\n chunk_max=chunk_max,\n affinity_cognitive=affinity_cognitive,\n affinity_cog_slot=affinity_cog_slot,\n affinity_cog_task=affinity_cog_task,\n willpower_task=willpower_task,\n willpower_cumulative=willpower_cumulative,\n duration=duration,\n duration_realized=duration_realized,\n task_id=tasks,\n task=task_names,\n category=category,\n )\n\n offset = self.num_tasks - self.num_categories\n # Use #deebf7 as placeholder/default event color\n colors = [COLORS[i % len(COLORS)] if i < offset else '#ffffcc' for i in\n tasks]\n data1 = data_tooltips.copy()\n data1.update(dict(\n top=top,\n bottom=bottom,\n left=left,\n right=right,\n colors=colors,\n ))\n source1 = ColumnDataSource(data=data1)\n\n TOOLTIPS = [(\"task\", \"@task\"),\n (\"category\", \"@category\"),\n (\"duration\", \"@duration_realized / @duration\"),\n (\"willpower\", \"@willpower_task\"),\n (\"willpower (cum)\", \"@willpower_cumulative\"),\n (\"chunk_range\", \"(@chunk_min, @chunk_max)\"),\n (\"affinity [slot x task]\", \"@affinity_cognitive = \"\n \"@affinity_cog_slot x \"\n \"@affinity_cog_task\"),\n (\"task_id\", \"@task_id\"),\n (\"index\", \"$index\"),\n (\"(t,l)\", \"(@bottom, @left)\"),\n ]\n\n # [Bokeh] inverted axis range example:\n # https://groups.google.com/a/continuum.io/forum/#!topic/bokeh/CJAvppgQmKo\n yr = Range1d(start=22, end=6)\n # yr = Range1d(start=24.5, end=-0.5)\n xr = Range1d(start=-0.3, end=7.3)\n p = figure(plot_width=1000, plot_height=600, y_range=yr, x_range=xr,\n tooltips=TOOLTIPS,\n title=\"Calendar: {} to {}\".format(start_str, end_str))\n self.p = p\n output_file(\"calendar.html\")\n\n p.xaxis[0].axis_label = 'Weekday ({}-{})'.format(start_weekday_str,\n end_weekday_str)\n p.yaxis[0].axis_label = 'Hour (7AM-9:30PM)'\n\n # Replace default yaxis so that each hour is displayed\n p.yaxis[0].ticker.desired_num_ticks = int(tutil.HOURS_PER_DAY)\n p.yaxis[0].ticker.num_minor_ticks = 4\n p.xaxis[0].ticker.num_minor_ticks = 0\n\n # Display task allocation as colored rectangles\n p.quad(top='top', bottom='bottom', left='left', right='right',\n color='colors', fill_alpha=0.7, line_alpha=0.5, source=source1)\n\n # Pre-process task names for display (no repeats, abbreviated names)\n # FIXME(cathywu) currently assumes that y is in time order, which may\n # not be the case when more task types are incorporated\n task_display = []\n curr_task = \"\"\n for name in task_names:\n if name == curr_task:\n task_display.append(\"\")\n else:\n curr_task = name\n task_display.append(name)\n data2 = data_tooltips.copy()\n data2.update(dict(\n x=left,\n y=top,\n # abbreviated version of task\n task=[k[:19] for k in task_display],\n ))\n source2 = ColumnDataSource(data=data2)\n\n # Annotate rectangles with task name\n # [Bokeh] Text properties:\n # https://bokeh.pydata.org/en/latest/docs/user_guide/styling.html#text-properties\n labels = LabelSet(x='x', y='y', text='task', level='glyph', x_offset=3,\n y_offset=-1, source=source2, text_font_size='7pt',\n render_mode='canvas')\n 
p.add_layout(labels)\n\n # Display cognitive affinity as rectangle to the right of the task\n colors_affinity = np.array(\n np.array(affinity_cognitive) * (NUM_AFFINITY - 1), dtype=int)\n colors_affinity = [COLORS_AFFINITY[NUM_AFFINITY - 1 - i] for i in\n colors_affinity.tolist()]\n data5 = data_tooltips.copy()\n data5.update(dict(\n top=(np.array(top) - 0.05).tolist(),\n bottom=(np.array(bottom) + 0.05).tolist(),\n left=(np.array(right) + 0.12).tolist(),\n right=(np.array(right) + 0.2).tolist(),\n colors=colors_affinity,\n ))\n source5 = ColumnDataSource(data=data5)\n p.quad(top='top', bottom='bottom', left='left', right='right',\n color='colors', source=source5)\n\n # Display willpower balance as rectangle to the right of the task\n colors_will = np.minimum(willpower_cumulative, 2)\n colors_will = np.maximum(colors_will, -2)\n colors_will += 2\n colors_will = np.array(colors_will / 4 * (NUM_WILL - 1), dtype=int)\n colors_will = [COLORS_WILL[i] for i in colors_will.tolist()]\n data6 = data_tooltips.copy()\n data6.update(dict(\n top=top,\n bottom=bottom,\n left=np.array(right) + 0.02,\n right=(np.array(right) + 0.1).tolist(),\n colors=colors_will,\n ))\n source6 = ColumnDataSource(data=data6)\n p.quad(top='top', bottom='bottom', left='left', right='right',\n color='colors', source=source6)\n\n # Display categories as a colored line on the left\n # TODO(cathywu) currently displays only the \"first\" category,\n # add support for more categories\n xs = []\n ys = []\n for y0, y1, x in zip(top, bottom, left):\n xs.append([x, x])\n ys.append([y0, y1])\n colors_cat = [COLORS_CAT[cat_ids[0] % len(COLORS_CAT)] for cat_ids in\n category_ids]\n data3 = data_tooltips.copy()\n data3.update(dict(\n xs=xs,\n ys=ys,\n colors=colors_cat,\n ))\n source3 = ColumnDataSource(data=data3)\n p.multi_line(xs='xs', ys='ys', color='colors', line_width=4,\n source=source3)\n\n # Annotate columns with day of the week\n data4 = data_tooltips.copy()\n data4.update(dict(\n x=[k + 0.1 for k in range(tutil.LOOKAHEAD)],\n y=[6.75 for _ in range(tutil.LOOKAHEAD)],\n weekday=[(c.START + timedelta(k)).strftime(\"%A\") for k in\n range(tutil.LOOKAHEAD)],\n ))\n source4 = ColumnDataSource(data=data4)\n labels2 = LabelSet(x='x', y='y', text='weekday', level='glyph',\n x_offset=3, y_offset=-1, source=source4,\n text_font_size='10pt', render_mode='canvas')\n p.add_layout(labels2)\n\n show(p)", "def plotTotal(self, phys, forces, step): \r\n self.plotQuantity(step, forces.energies.potentialEnergy(phys)+TopologyUtilities.kineticEnergy(phys.myTop, phys.velvec), 'totalenergy')", "def plot_dispatch(pv, demand, E, week=30):\n\n sliced_index = (pv.index.week==week)\n pv_sliced = pv[sliced_index]\n demand_sliced = demand[sliced_index]\n self_consumption = E['inv2load'][sliced_index]\n \n direct_self_consumption = np.minimum(pv_sliced,demand_sliced)# E['inv2load'][sliced_index]\n indirect_self_consumption = self_consumption-direct_self_consumption\n res_pv_sliced = E['res_pv'][sliced_index]\n grid2load_sliced = E['grid2load'][sliced_index]\n store2inv_sliced = E['store2inv'][sliced_index]\n LevelOfCharge = E['LevelOfCharge'][sliced_index]\n inv2grid = E['inv2grid'][sliced_index]\n grid2load = E['grid2load'][sliced_index]\n aux=np.maximum(0,self_consumption)\n\n fig, axes = plt.subplots(nrows=3, ncols=1, sharex=True, figsize=(17, 4*3), frameon=False,\n gridspec_kw={'height_ratios': [3, 1, 1], 'hspace': 0.04})\n\n #fig, ax = plt.subplots(figsize=(17, 4))\n axes[0].plot(demand_sliced.index, demand_sliced, color='black', lw=2,label='demand')\n 
axes[0].plot(pv_sliced.index, pv_sliced, color='black',ls='--', lw=2,label='PV')\n axes[0].fill_between(direct_self_consumption.index, 0, direct_self_consumption, color='orange', alpha=.8, label='DSC')\n axes[0].fill_between(pv_sliced.index, self_consumption, pv_sliced , where=pv_sliced<demand_sliced,color='blue', hatch='//',\n alpha=.3,label='ISC')\n axes[0].fill_between(pv_sliced.index, direct_self_consumption, pv_sliced ,where=pv_sliced>demand_sliced, color='gold', alpha=.3,label='Excess PV')\n\n axes[0].fill_between(grid2load_sliced.index,self_consumption,demand_sliced,color='red',alpha=.2, label='grid2load')\n \n\n #axes[0].plot(grid2load_sliced.index, grid2load_sliced, color='red', ls=\":\", lw=1)\n axes[0].set_ylim([0, axes[0].get_ylim()[1] ])\n axes[0].set_ylabel('Power (kW)')\n\n axes[1].fill_between(LevelOfCharge.index, 0, LevelOfCharge, color='grey', alpha=.2, label='SOC')\n axes[1].set_ylabel('State of Charge (kWh)')\n\n axes[2].fill_between(inv2grid.index, 0, inv2grid, color='green', alpha=.2,label='injected2grid')\n axes[2].fill_between(inv2grid.index, 0, -grid2load, color='red', alpha=.2,label='grid drawn')\n axes[2].set_ylabel('In/out from grid (kW)')\n axes[0].legend()\n axes[1].legend()\n axes[2].legend()\n return", "def plot_tariff(self):\n\t\tplt.figure(1)\n\t\tplt.cla() # clear the plotting window to allow for re-plotting\n\n\t\tif self.chargetypetoplot.get() == 'energy':\n\t\t\ttoplot = []\n\t\t\tenergy_filter = ('energy' == self.data[\"Charge\"]) | ('Energy' == self.data[\"Charge\"])\n\t\t\tfor mo in range(1, 13):\n\t\t\t\tmonth_filter = (mo >= self.data[\"Start Month\"]) & (mo <= self.data[\"End Month\"])\n\t\t\t\ttemp = self.data.loc[(energy_filter & month_filter), :]\n\t\t\t\ttoplot.append([sum(temp.loc[(hr >= temp['Start Time']) & (hr <= temp['End Time']), \"Value\"])\n\t\t\t\t\t\t\t for hr in range(1, 25)])\n\t\t\tim = plt.imshow(toplot, interpolation='nearest')\n\t\t\tplt.xticks(ticks=[i-.5 for i in range(25)],\n\t\t\t\t\t labels=['{}:00'.format(str(j).zfill(2)) for j in range(25)], rotation=45)\n\t\t\tplt.yticks(ticks=[i-0.5 for i in range(13)],\n\t\t\t\t\t labels=['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul',\n\t\t\t\t\t\t\t 'Aug', 'Sep', 'Oct', 'Nov', 'Dec', ''], va='top')\n\n\t\t\t# get the colors of the values, according to the\n\t\t\t# colormap used by imshow\n\t\t\tvalues = np.unique([toplot[i][j] for i in range(12) for j in range(24)])\n\t\t\tcolors = [im.cmap(im.norm(value)) for value in values]\n\t\t\t# create a patch (proxy artist) for every color\n\t\t\tpatches = [mpatches.Patch(color=colors[i], label=\"${:1.5}/kWh\".format(values[i]))\n\t\t\t\t\t for i in range(len(values))]\n\t\t\t# put those patched as legend-handles into the legend\n\t\t\tplt.legend(handles=patches, bbox_to_anchor=(1.05, 1), loc=\"upper right\", borderaxespad=0.)\n\t\t\tplt.title('Energy Price Heatmap')\n\t\telse:\n\t\t\ttoplot = []\n\t\t\tdemand_filter = ('demand' == self.data[\"Charge\"]) | ('Demand' == self.data[\"Charge\"])\n\t\t\tfor mo in range(1, 13):\n\t\t\t\tmonth_filter = (mo >= self.data[\"Start Month\"]) & (mo <= self.data[\"End Month\"])\n\t\t\t\ttemp = self.data.loc[(demand_filter & month_filter), :]\n\t\t\t\ttoplot.append([sum(temp.loc[(hr >= temp['Start Time']) & (hr <= temp['End Time']), \"Value\"]) for hr in\n\t\t\t\t\t\t\t range(1, 25)])\n\t\t\tim = plt.imshow(toplot, interpolation='nearest')\n\t\t\tplt.xticks(ticks=[i - .5 for i in range(25)], labels=['{}:00'.format(str(j).zfill(2)) for j in range(25)],\n\t\t\t\t\t rotation=45)\n\t\t\tplt.yticks(ticks=[i - 
0.5 for i in range(13)],\n\t\t\t\t\t labels=['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec', ''],\n\t\t\t\t\t va='top')\n\n\t\t\t# get the colors of the values, according to the\n\t\t\t# colormap used by imshow\n\t\t\tvalues = np.unique([toplot[i][j] for i in range(12) for j in range(24)])\n\t\t\tcolors = [im.cmap(im.norm(value)) for value in values]\n\t\t\t# create a patch (proxy artist) for every color\n\t\t\tpatches = [mpatches.Patch(color=colors[i], label=\"${:1.5}/kW\".format(values[i])) for i in\n\t\t\t\t\t range(len(values))]\n\t\t\t# put those patched as legend-handles into the legend\n\t\t\tplt.legend(handles=patches, bbox_to_anchor=(1.05, 1), loc=\"upper right\", borderaxespad=0.)\n\t\t\tplt.title('Demand Price Heatmap')\n\n\t\tself.chart_type.draw()\n\n\t\treturn 0", "def main():\n data = load_data()\n # BNO055 absolute orientation sensor\n bno_time = data[2].index / 1e6\n bno_accel_axes = [data[2][bno_str] / 9.8 for bno_str in BNO_ACCEL]\n plot_multi_axis(bno_time, bno_accel_axes,\n labels=['BNO055 Acceleration',\n 'Time (s)', 'Acceleration (G)',\n [*BNO_ACCEL, 'magnitude']],\n fname='bno_accel.html')\n bno_gyro_axes = [data[2][bno_str] for bno_str in BNO_GYRO]\n plot_multi_axis(bno_time, bno_gyro_axes,\n labels=['BNO055 Roll Rate',\n 'Time (s)', 'Roll Rate (deg/s)',\n [*BNO_GYRO, 'magnitude']],\n fname='bno_gyro.html')\n # MMA65XX high-range accelerometer\n mma_time = data[1].index / 1e6\n mma_axes = [data[1][mma_str] / 9.8 for mma_str in MMA]\n plot_multi_axis(mma_time, mma_axes,\n labels=['MMA65XX High-Range Acceleration',\n 'Time (s)', 'Acceleration (G)',\n [*MMA, 'magnitude']],\n fname='mma.html')\n # skybass_sampling_rates(data)", "def visualize(self):\n import matplotlib.pyplot as plt\n import numpy as np\n\n plt.figure()\n sw_ = np.linspace(0.0, 1.0, 50)\n plt.plot(sw_, self.krw(sw_), label=\"Water\")\n plt.plot(sw_, self.kro(sw_), label=\"Oil\")\n plt.xlabel(\"Water saturation\")\n plt.ylabel(\"Relative permeability\")\n plt.legend()", "def print(self):\n print(\"-----\", self.name, \"-----\")\n print(\"Enable index\", self.enable_index)\n print(\"End index:\", self.stop_index)\n print(\"Measurement length (from the very beginning to the end index):\", self.time_vec[self.stop_index])\n print()\n print(\"Start temperature (hot):\", self.temp_hot_start)\n print(\"Start temperature (cold):\", self.temp_cold_start)\n print(\"Start temperature (mean):\", self.temp_start)\n print(\"End temperature (hot):\", self.temp_hot[self.stop_index])\n print(\"End temperature (cold)\", self.temp_cold[self.stop_index])\n print()\n print(\"Max temperature\", self.temp_max)\n print(\"Min temperature\", self.temp_min)\n print()\n print(\"Heat pump\")\n print(\"Energy input:\", self.work_inp)\n print(\"Q_hot\", self.qhot_pump)\n print(\"Q_cold\", self.qcold_pump)\n print(\"Q_cold + W\", self.qcold_pump + self.work_inp)\n print(\"E_lost\", self.qcold_pump + self.work_inp - self.qhot_pump)\n print(\"Coefficient of performance COP_hot\", self.qhot_pump / self.work_inp)\n print(\"Coefficient of performance COP_cold\", self.qcold_pump / self.work_inp)\n print(\"Ideal COP_hot with the setup\", self.qhot_pump/(self.qhot_pump-self.qcold_pump))\n print(\"Ideal COP_cold with the setup\", self.qcold_pump / (self.qhot_pump - self.qcold_pump))\n print(\"Ideal Carnot COP_hot\", (self.temp_max+273.15)/(self.temp_max-self.temp_min))\n print(\"Ideal Carnot COP_cold\", (self.temp_min+273.15)/(self.temp_max-self.temp_min))\n print(\"Efficiency fraction out of ideal Carnot 
cooler\", (self.qcold_pump / self.work_inp)/((self.temp_min+273.15)/(self.temp_max-self.temp_min)))\n\n if self.not_air:\n print(\"Heat transfer through insulator, hot side\", self.heat_loss_pump_hot)\n print(\"Heat transfer through insulator, cold side\", self.heat_loss_pump_cold)\n print(\"Estimated Q_hot with resistor\", self.qhot_resistor)\n else:\n print(\"Estimated Q_hot with resistor (=energy input)\", self.qhot_resistor)\n #\n # I think it should be defined for Q_hot too. Yep, TODO that\n # Also calculate heatloss due to conduction TODO remove these comments when ready\n # Todo implement resistive heater calculations\n print()\n print(\"Heat engine\")\n print(\"Energy generated:\", self.work_gen)\n print(\"Q_hot\", self.qhot_engine)\n print(\"Q_cold\", self.qcold_engine)\n print(\"Q_hot - Q_cold\", self.qhot_engine - self.qcold_engine)\n print(\"E_lost\", -self.qcold_engine - self.work_gen + self.qhot_engine)\n print(\"\\\"Heat transfer efficiency\\\" (%)\", self.work_gen / (self.qhot_engine - self.qcold_engine) * 100)\n print(\"Efficiency e\", self.work_gen / self.qhot_engine)\n print(\"Ideal efficiency with the setup\", 1 - (self.qcold_engine / self.qhot_engine))\n print(\"Ideal Carnot efficiency\", (self.temp_max-self.temp_min)/(self.temp_max+273.15))\n if self.not_air:\n print(\"Heat transfer through insulator, hot side\", self.heat_loss_gen_hot)\n print(\"Heat transfer through insulator, cold side\", self.heat_loss_gen_cold)\n print()\n print(\"Total efficiency of cycle\", self.work_gen/self.work_inp)\n # About the efficiency of peltier elements (#telok@IRCnet, 2016-07-27)\n # 19:10 < AgenttiX> Oletteko kokeilleet TECin ohjaamista Arduinolla? Toimisiko tämä kytkentä? http://garagelab.com/profiles/blogs/how-to-use-a-peltier-with-arduino\n # --\n # 20:21 <@hrst> Ei toimi. 
Peltieriä ei voi ohjata PWM:llä.\n # 20:22 <@hrst> Hyötysuhde on PWM:llä paska, mikä on ongelma koska se on muutenkin liian paska, ja sen lisäksi se hajoaa mekaaniseen värähtelyyn ennemmin tai myöhemmin.\n print(\"-----\\n\")", "def plot_ncmc_work(self, filename):\n with PdfPages(filename) as pdf:\n for envname in self.get_environments():\n modname = 'NCMCEngine'\n work = dict()\n for direction in ['delete', 'insert']:\n varname = '/' + envname + '/' + modname + '/' + 'work_' + direction\n try:\n # TODO: For now, we analyze all but the last sample, so that this can be run on active simulations.\n # Later, we should find some way to omit the last sample only if it is nonsensical.\n work[direction] = self._ncfile[varname][:-1,:]\n print('Found %s' % varname)\n except Exception as e:\n pass\n\n def plot_work_trajectories(pdf, work, title=\"\"):\n \"\"\"Generate figures for the specified switching legs.\n \"\"\"\n plt.figure(figsize=(12, 8))\n\n nrows = 2\n ncols = 6\n workcols = 2\n for (row, direction) in enumerate(['delete', 'insert']):\n #\n # Plot work vs step\n #\n\n col = 0\n plt.subplot2grid((nrows,ncols), (row, col), colspan=(ncols-workcols))\n\n # Plot average work distribution in think solid line\n plt.plot(work[direction].mean(0), 'k-', linewidth=1.0, alpha=1.0)\n # Plot bundle of work trajectories in transparent lines\n plt.plot(work[direction].T, 'k-', linewidth=0.5, alpha=0.3)\n # Adjust axes to eliminate large-magnitude outliers (keep 98% of data in-range)\n workvals = np.ravel(np.abs(work[direction]))\n worklim = np.percentile(workvals, 98)\n nsteps = work[direction].shape[1]\n plt.axis([0, nsteps, -worklim, +worklim])\n # Label plot\n if row == 1: plt.xlabel('steps')\n plt.ylabel('work / kT')\n plt.title(\"%s NCMC in environment '%s' : %s\" % (title, envname, direction))\n plt.legend(['average work', 'NCMC attempts'])\n\n #\n # Plot work histogram\n #\n\n col = ncols - workcols\n plt.subplot2grid((nrows,ncols), (row, col), colspan=workcols)\n\n # Plot average work distribution in think solid line\n #nbins = 40\n workvals = work[direction][:-1,-1]\n #plt.hist(workvals, nbins)\n if workvals.std() != 0.0:\n sns.distplot(workvals, rug=True)\n else:\n print('workvals has stddev of zero')\n print(workvals)\n # Adjust axes to eliminate large-magnitude outliers (keep 98% of data in-range)\n #worklim = np.percentile(workvals, 98)\n #oldaxis = plt.axis()\n #plt.axis([-worklim, +worklim, 0, oldaxis[3]])\n # Label plot\n if row == 1: plt.xlabel('work / kT')\n plt.title(\"total %s work\" % direction)\n\n pdf.savefig() # saves the current figure into a pdf page\n plt.close()\n\n if len(work) > 0:\n # Plot work for all chemical transformations.\n plot_work_trajectories(pdf, work, title='(all transformations)')\n\n # Plot work separated out for each chemical transformation\n #[niterations, nsteps] = work.shape\n #transformations = dict()\n #for iteration in range(niterations):\n # plot_work_trajectories(pdf, work, title='(all transformations)')", "def main():\n data1, data2 = [], []\n\n # Compute results for sizes in the range 0...40,000\n max_size = 40000\n # Sizes used are 0**2 = 0, 20**2 = 400, 40**2 = 1600, 60**2 = 3600, \n # etc. 
up to 200**2 = 40,000\n for size in (x**2 for x in range(0, round(sqrt(max_size)) + 1, 20)):\n compute_time(size, data1, data2)\n\n # Create a plotter object\n plt = Plotter(600, 600, 0, max_size, 0, 120)\n\n # Plot the curves\n plt.pen.width(4)\n plt.plot_data(data1, \"blue\")\n plt.plot_data(data2, \"red\")\n\n # Wait for user interaction\n plt.interact()", "def _plot_metrics(self):\n if len(self._episode_q_means) > 0:\n mean_q = np.asscalar(np.mean(self._episode_q_means))\n self._metrics_writer.write_value('Mean Q per ep.', mean_q, self._num_actions_taken)\n\n if len(self._episode_q_stddev) > 0:\n std_q = np.asscalar(np.mean(self._episode_q_stddev))\n self._metrics_writer.write_value('Mean Std Q per ep.', std_q, self._num_actions_taken)\n\n self._metrics_writer.write_value('Sum rewards per ep.', sum(self._episode_rewards), self._num_actions_taken)", "def temphum_plot(self, kwargs=None):\n\n def valuechange():\n \"\"\"This is the function which is called, when a value is changed in the spin boxes\"\"\"\n\n tempmin.setMaximum(tempmax.value())\n tempmax.setMinimum(tempmin.value())\n hummin.setMaximum(hummax.value())\n hummax.setMinimum(hummin.value())\n\n self.variables.default_values_dict[\"settings\"][\n \"current_tempmin\"\n ] = tempmin.value()\n self.variables.default_values_dict[\"settings\"][\n \"current_tempmax\"\n ] = tempmax.value()\n self.variables.default_values_dict[\"settings\"][\n \"current_hummin\"\n ] = hummin.value()\n self.variables.default_values_dict[\"settings\"][\n \"current_hummax\"\n ] = hummax.value()\n\n max = build_command(\n self.variables.devices_dict[\"temphum_controller\"],\n (\"set_hummax\", hummax.value()),\n )\n min = build_command(\n self.variables.devices_dict[\"temphum_controller\"],\n (\"set_hummin\", hummin.value()),\n )\n\n self.variables.vcw.write(\n self.variables.devices_dict[\"temphum_controller\"], max\n )\n self.variables.vcw.write(\n self.variables.devices_dict[\"temphum_controller\"], min\n )\n\n def dry_air_action():\n if dry_air_btn.isChecked():\n device_dict = self.variables.devices_dict[\"temphum_controller\"]\n try:\n command = build_command(\n device_dict, (\"set_environement_control\", \"ON\")\n )\n answer = self.variables.vcw.write(device_dict, command)\n if not answer:\n self.log.error(\n \"The environement controller did not responsed accordingly. Answer: \"\n + str(answer).strip()\n )\n return 0\n except:\n self.log.error(\n \"An error occured while changing the environement control\"\n )\n return 0\n dry_air_btn.setText(\"Humidity ctl. on\")\n self.variables.default_values_dict[\"settings\"][\n \"humidity_control\"\n ] = True\n\n else:\n device_dict = self.variables.devices_dict[\"temphum_controller\"]\n try:\n command = build_command(\n device_dict, (\"set_environement_control\", \"OFF\")\n )\n answer = self.variables.vcw.write(device_dict, command)\n if not answer:\n self.log.error(\n \"The environement controller did not responsed accordingly. Answer: \"\n + str(answer).strip()\n )\n\n return 0\n except:\n self.log.error(\n \"An error occured while changing the environement control\"\n )\n return 0\n dry_air_btn.setText(\"Humidity ctl. 
off\")\n self.variables.default_values_dict[\"settings\"][\n \"humidity_control\"\n ] = False\n\n def light_action():\n \"\"\"This function is debricated\"\"\"\n if light_btn.isChecked():\n self.variables.default_values_dict[\"settings\"][\"external_lights\"] = True\n else:\n self.variables.default_values_dict[\"settings\"][\n \"external_lights\"\n ] = False\n\n def check_light_state():\n if (\n self.variables.default_values_dict[\"settings\"][\"lights\"]\n and not light_btn.text() == \"Lights on\"\n ): # Checks if the lights are on and the button is off\n light_btn.setText(\"Lights on\")\n light_btn.setStyleSheet(\"background : rgb(0,255,0); border-radius: 5px\")\n elif (\n not self.variables.default_values_dict[\"settings\"][\"lights\"]\n and not light_btn.text() == \"Lights off\"\n ):\n light_btn.setText(\"Lights off\")\n light_btn.setStyleSheet(\"background : rgb(255,0,0); border-radius: 5px\")\n\n def config_plot(plot, plot2, pg):\n plot = plot.plotItem\n plot.setLabel(\"right\", \"humidity\", units=\"%\")\n plot.setLabel(\"bottom\", \"time\")\n plot.setLabel(\"left\", \"temperature\", units=\"Celsius\")\n plot.getAxis(\"left\").setPen(pg.mkPen(color=\"#c4380d\", width=3))\n plot.getAxis(\"right\").setPen(pg.mkPen(color=\"#025b94\", width=3))\n plot.showAxis(\"top\", show=True)\n plot.getAxis(\"top\").setTicks([])\n plot.getAxis(\"bottom\").setScale(1e-9)\n # plot.setRange(yRange=[15, 35])\n\n # For second plot\n plot.scene().addItem(\n plot2\n ) # inserts the second plot into the scene of the first\n plot2.setGeometry(plot.vb.sceneBoundingRect())\n plot.getAxis(\"right\").linkToView(\n plot2\n ) # links the second y axis to the second plot\n plot2.setXLink(plot) # sync the x axis of both plots\n # plot2.setRange(yRange=[0, 50])\n\n def __cut_arrays(data_array, maximum_time, arrays_to_cut):\n \"\"\"This function cuts an array to a maximum time difference\n This function is supposed to be used only for temp and humidity shaped arrays\n \"\"\"\n\n try:\n begin_time = data_array[arrays_to_cut[0]][0][0]\n end_time = data_array[arrays_to_cut[0]][0][-1]\n delta_time = (\n data_array[arrays_to_cut[0]][0][1]\n - data_array[arrays_to_cut[0]][0][0]\n )\n total_time = end_time - begin_time\n if total_time > maximum_time:\n over_time = total_time - maximum_time\n array_elm_to_drop = int(over_time / delta_time)\n for arrays in arrays_to_cut:\n data_array[arrays][0] = data_array[arrays][0][\n array_elm_to_drop:\n ]\n data_array[arrays][1] = data_array[arrays][1][\n array_elm_to_drop:\n ]\n except:\n pass\n\n def update_temphum_plots(kwargs=None):\n # for rooms in self.rooms:\n if self.variables.default_values_dict[\"settings\"][\"new_data\"]:\n temphum_plot.clear() # clears the plot and prevents a memory leak\n hum_plot_obj.clear()\n p1 = temphum_plot.plotItem\n\n ax = p1.getAxis(\"bottom\") # This is the trick\n __cut_arrays(\n self.variables.meas_data,\n float(\n self.variables.default_values_dict[\"settings\"].get(\n \"temp_history\", 3600\n )\n ),\n [\"temperature\", \"humidity\"],\n )\n ax.setTicks(\n [\n get_thicks_for_timestamp_plot(\n self.variables.meas_data[\"temperature\"][0],\n 5,\n self.variables.default_values_dict[\"settings\"][\n \"time_format\"\n ],\n )\n ]\n )\n\n try:\n if len(self.variables.meas_data[\"temperature\"][0]) == len(\n self.variables.meas_data[\"humidity\"][1]\n ): # sometimes it happens that the values are not yet ready\n p1.plot(\n self.variables.meas_data[\"temperature\"][0],\n self.variables.meas_data[\"temperature\"][1],\n pen={\"color\": \"r\", \"width\": 2},\n 
clear=True,\n )\n plot_item = setpg.PlotCurveItem(\n self.variables.meas_data[\"humidity\"][0],\n self.variables.meas_data[\"humidity\"][1],\n pen={\"color\": \"b\", \"width\": 2},\n clear=True,\n )\n hum_plot_obj.addItem(plot_item)\n del plot_item # the plot class needs a plot item which can be rendered, to avoid a mem leak delete the created plot item or 20k ram will be used\n # hum_plot_obj.addItem(setpg.plot(self.variables.meas_data[\"humidity\"][0],self.variables.meas_data[\"humidity\"][1],pen={'color': \"b\", 'width': 2}, clear=True))\n hum_plot_obj.setGeometry(\n p1.vb.sceneBoundingRect()\n ) # resize the second plot!\n except:\n pass\n\n # Create sublayout\n temphum_layout = QGridLayout()\n\n # Frame over the objects\n frame = QLabel()\n frame.setFrameStyle(QFrame.Box | QFrame.Raised)\n frame.setLineWidth(0)\n frame.setMidLineWidth(2)\n\n self.layout.addWidget(\n frame, self.temp_ypos, self.temp_xpos, self.temp_ysize, self.temp_xsize\n )\n\n x = np.zeros(1)\n y = np.zeros(1)\n\n setpg = pq\n # date_axis = CAxisTime(orientation='bottom') # Correctly generates the time axis\n hum_plot_obj = setpg.ViewBox() # generate new plot item\n temphum_plot = pq.PlotWidget()\n config_plot(temphum_plot, hum_plot_obj, setpg) # config the plot items\n\n self.variables.add_update_function(update_temphum_plots)\n\n # Additional Variables will be generated for temp and hum\n # self.variables.default_values_dict[\"settings\"].update({\"lights\": False, \"humidity_control\": True, \"current_tempmin\": 20, \"current_tempmax\": 25, \"current_hummin\": 20,\"current_hummax\": 25})\n\n # Spin Boxes for temp and humidity\n\n tempmin = QSpinBox()\n tempmax = QSpinBox()\n hummin = QSpinBox()\n hummax = QSpinBox()\n\n # Spinbox label\n textbox_temp = QLabel()\n textbox_temp.setText(\"Min temp. Max temp.\")\n textbox_temp.setFont(self.font)\n textbox_hum = QLabel()\n textbox_hum.setText(\"Min hum. Max hum.\")\n textbox_hum.setFont(self.font)\n\n # Config\n\n tempmin.setRange(15, 35)\n tempmin.setValue(\n float(\n self.variables.default_values_dict[\"settings\"].get(\"current_tempmin\", 0)\n )\n )\n tempmax.setRange(15, 35)\n tempmax.setValue(\n float(\n self.variables.default_values_dict[\"settings\"].get(\"current_tempmax\", 0)\n )\n )\n tempmin.valueChanged.connect(valuechange)\n tempmax.valueChanged.connect(valuechange)\n\n hummin.setRange(0, 70)\n hummin.setValue(\n float(\n self.variables.default_values_dict[\"settings\"].get(\"current_hummin\", 0)\n )\n )\n hummax.setRange(0, 70)\n hummax.setValue(\n float(\n self.variables.default_values_dict[\"settings\"].get(\"current_hummax\", 0)\n )\n )\n hummin.valueChanged.connect(valuechange)\n hummax.valueChanged.connect(valuechange)\n\n # Push buttons on the right for humidity control and light control\n\n dry_air_btn = QPushButton(\"Humidity ctl. 
off\")\n self.variables.default_values_dict[\"settings\"][\"humidity_control\"] = False\n dry_air_btn.setCheckable(True)\n dry_air_btn.toggle()\n dry_air_btn.clicked.connect(dry_air_action)\n dry_air_btn.setChecked(False)\n\n light_btn = QLabel()\n light_btn.setText(\"State not defined\")\n light_btn.setAlignment(QtCore.Qt.AlignVCenter | QtCore.Qt.AlignHCenter)\n light_btn.setStyleSheet(\"background : rgb(255,0,0); border-radius: 5px\")\n\n # light_btn.setCheckable(True)\n # light_btn.clicked.connect(light_action)\n\n # Humidity\n # temphum_plot.plot(x,y, pen=\"b\")\n\n # Widgets add\n temphum_layout.addWidget(textbox_temp, 0, 0, 1, 2)\n temphum_layout.addWidget(tempmin, 1, 0)\n temphum_layout.addWidget(tempmax, 1, 1)\n\n temphum_layout.addWidget(textbox_hum, 2, 0, 1, 2)\n temphum_layout.addWidget(hummin, 3, 0)\n temphum_layout.addWidget(hummax, 3, 1)\n\n temphum_layout.addWidget(dry_air_btn, 4, 0, 1, 2)\n temphum_layout.addWidget(light_btn, 5, 0, 3, 2)\n\n temphum_layout.addWidget(temphum_plot, 0, 3, 10, 2)\n\n temphum_layout.setContentsMargins(8, 8, 0, 8) # Makes a margin to the layout\n\n # Add the layout to the main layout\n self.layout.addLayout(\n temphum_layout,\n self.temp_ypos,\n self.temp_xpos,\n self.temp_ysize,\n self.temp_xsize,\n )\n\n def update():\n pass\n\n self.variables.add_update_function(update)\n self.variables.add_update_function(check_light_state)", "def plotImproperEnergy(self, phys, forces, step): \r\n self.plotQuantity(step, phys.app.energies.getTable(5), 'improperenergy')", "def chart(request):\n assert isinstance(request, HttpRequest)\n filename = 'ppg_RawDataSheet13.mat'\n subtitle = 'VerityDB/' + filename\n return render(\n request,\n 'research/chart.html',\n {\n 'title':'Chart',\n 'message':'Highcharts Based',\n 'year':datetime.now().year,\n #'data': content['val'][0:11]\n 'temp': models.load_data(),\n 'test': models.load_data_filename(filename),\n 'subtitle_text': subtitle,\n }\n )" ]
[ "0.64270926", "0.641641", "0.63471955", "0.633336", "0.6203229", "0.6157202", "0.6156514", "0.60662705", "0.60575175", "0.60480624", "0.6023913", "0.5948977", "0.5948531", "0.5910911", "0.589686", "0.5895649", "0.58862346", "0.58814776", "0.5880334", "0.58726776", "0.58645946", "0.58641326", "0.5848611", "0.58399653", "0.5827738", "0.5824148", "0.5818638", "0.58007526", "0.579874", "0.5791964", "0.5788949", "0.57776916", "0.57740486", "0.57717836", "0.5769754", "0.5759166", "0.5758017", "0.57578796", "0.5754341", "0.5752862", "0.57520145", "0.57352823", "0.5732986", "0.57323205", "0.57199466", "0.57159114", "0.57108265", "0.57054394", "0.5701048", "0.569998", "0.5698714", "0.5698658", "0.5688429", "0.56855613", "0.5683892", "0.5678879", "0.5668512", "0.5668202", "0.56678796", "0.56662023", "0.5663295", "0.566068", "0.5656574", "0.5655508", "0.56514716", "0.56480855", "0.5644994", "0.5644361", "0.5643269", "0.5643249", "0.5642927", "0.56376183", "0.56274045", "0.5621693", "0.56205344", "0.56201", "0.56131357", "0.5608235", "0.5598311", "0.5597715", "0.5592407", "0.5588956", "0.5586951", "0.5583804", "0.557865", "0.5578554", "0.5578017", "0.5575636", "0.55724955", "0.55720884", "0.5570215", "0.556845", "0.55680287", "0.555565", "0.55486953", "0.5544096", "0.55366045", "0.55232346", "0.5518295", "0.5510879" ]
0.77813894
0
[input > 32_channel > 64_channel] (lrelu, lrelu) one residual, with spatial conv1 and conv2
    [64_channel > 128_channel > 256_channel] (lrelu, lrelu) one residual, with spatial conv3 and conv4
    [256_channel > 512_channel] (no activation)
    conv5 = weighted_pooled after residual (should halve channels now)
def network_modified(input):
    up6 = upsample_and_concat(conv5, conv4, 256, 512, 'up_conv1')
    conv6 = slim.conv2d(up6, 256, [3, 3], rate=1, activation_fn=lrelu, scope='g_conv6_1')
    conv6 = slim.conv2d(conv6, 256, [3, 3], rate=1, activation_fn=lrelu, scope='g_conv6_2')

    up7 = upsample_and_concat(conv6, conv3, 128, 256, 'up_conv2')
    conv7 = slim.conv2d(up7, 128, [3, 3], rate=1, activation_fn=lrelu, scope='g_conv7_1')
    conv7 = slim.conv2d(conv7, 128, [3, 3], rate=1, activation_fn=lrelu, scope='g_conv7_2')

    up8 = upsample_and_concat(conv7, conv2, 64, 128, 'up_conv3')
    conv8 = slim.conv2d(up8, 64, [3, 3], rate=1, activation_fn=lrelu, scope='g_conv8_1')
    conv8 = slim.conv2d(conv8, 64, [3, 3], rate=1, activation_fn=lrelu, scope='g_conv8_2')

    up9 = upsample_and_concat(conv8, conv1, 32, 64, 'up_conv4')
    conv9 = slim.conv2d(up9, 32, [3, 3], rate=1, activation_fn=lrelu, scope='g_conv9_1')
    conv9 = slim.conv2d(conv9, 32, [3, 3], rate=1, activation_fn=lrelu, scope='g_conv9_2')

    conv10 = slim.conv2d(conv9, 12, [1, 1], rate=1, activation_fn=None, scope='g_conv10')
    out = tf.depth_to_space(conv10, 2)
    return out
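Note: the decoder above is not self-contained. It reads the encoder activations conv1 through conv5 from an enclosing scope and relies on two helpers, lrelu and upsample_and_concat, that the snippet does not define. Below is a minimal sketch of plausible TF1/slim-style definitions for those helpers; the 0.2 leak factor, the 2x transposed-convolution upsampling, and the argument order (deep tensor, skip tensor, output channels, input channels, scope name) are assumptions consistent with common U-Net decoders of this shape, not taken from this record.

# Hedged sketch of the helpers assumed by network_modified (TF1-style).
# These definitions are illustrative assumptions, not part of the record.
import tensorflow as tf

def lrelu(x):
    # Leaky ReLU with an assumed negative slope of 0.2.
    return tf.maximum(x * 0.2, x)

def upsample_and_concat(x1, x2, output_channels, in_channels, scope_name):
    # 2x learned upsampling of the deep feature map x1 via a transposed
    # convolution, followed by channel-wise concatenation with the
    # matching encoder skip tensor x2.
    pool_size = 2
    with tf.variable_scope(scope_name):
        deconv_filter = tf.Variable(
            tf.truncated_normal(
                [pool_size, pool_size, output_channels, in_channels],
                stddev=0.02),
            name='deconv_weights')
        deconv = tf.nn.conv2d_transpose(
            x1, deconv_filter, tf.shape(x2),
            strides=[1, pool_size, pool_size, 1])
        deconv_output = tf.concat([deconv, x2], 3)
        deconv_output.set_shape([None, None, None, output_channels * 2])
    return deconv_output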
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def conv_relu_pool_forward(x, w, b, conv_param, pool_param):\n a, conv_cache = conv_forward_fast(x, w, b, conv_param)\n s, relu_cache = relu_forward(a)\n out, pool_cache = max_pool_forward_fast(s, pool_param)\n cache = (conv_cache, relu_cache, pool_cache)\n return out, cache", "def conv_relu_pool_forward(x, w, b, conv_param, pool_param):\n a, conv_cache = conv_forward_fast(x, w, b, conv_param)\n s, relu_cache = relu_forward(a)\n out, pool_cache = max_pool_forward_fast(s, pool_param)\n cache = (conv_cache, relu_cache, pool_cache)\n return out, cache", "def residual(n_filters, input):\n shape = input.shape\n _, h, w, d = shape\n l1 = Conv2D(n_filters, (5, 5), padding='valid', activation='elu')(input)\n l2 = Conv2D(n_filters, (1, 1), padding='valid', activation='linear')(l1)\n l3 = Cropping2D(cropping=2)(input)\n added = Add()([l2, l3])\n return added", "def __init__(self):\n #conv1\n n = inp_width*inp_height\n #poczatkowe wagi sieci sa ustalane losowo z rozkladu normalnego. Umieszczane sa one na liscie matryc wag\n self.Weights = [np.random.randn(layers[0][1],inp_channels,layers[0][2],layers[0][2])/np.sqrt(n)]\n out_Size = inp_width - layers[0][2] + 1 #zmienna zawiera rozmiar wyjscia danej warstwy\n #inicjalizacja progow \n self.Biases = [initBias*np.ones( layers[0][1] )]\n #przypisanie parametrow warstwie poolingu\n self.poolParams = [(layers[1][1], layers[1][2])]\n out_Size = out_Size/2 \n #conv 2\n n = out_Size*out_Size*layers[0][1]\n self.Weights.append(np.random.randn(layers[2][1],layers[0][1],layers[2][2],layers[2][2])/np.sqrt(n))\n out_Size = out_Size - layers[2][2]+1\n self.Biases.append(initBias*np.ones(layers[2][1]))\n #pool 2\n self.poolParams.append((layers[3][1],layers[3][2]))\n out_Size = out_Size/2 \n #conv 3\n n = out_Size*out_Size*layers[2][1]\n self.Weights.append(np.random.randn(layers[4][1],layers[2][1],out_Size,out_Size)/np.sqrt(n))\n out_Size = 1\n self.Biases.append(initBias*np.ones(layers[4][1]))\n #fully connected 1\n n = layers[4][1]\n self.Weights.append(np.random.randn(layers[5][1],layers[4][1])/np.sqrt(n))\n self.Biases.append(initBias*np.ones(layers[5][1]))\n #fully connected 2\n n = layers[5][1]\n self.Weights.append(np.random.randn(layers[6][1],layers[5][1])/np.sqrt(n))\n self.Biases.append(initBias*np.ones(layers[6][1]))\n\n self.Weights = np.asarray(self.Weights)\n self.Biases = np.asarray(self.Biases)\n \n delta_W = []\n delta_B = []\n for i in range(5):\n delta_W.append(np.zeros(self.Weights[i].shape))\n delta_B.append(np.zeros(self.Biases[i].shape))\n self.delta_W = np.asarray(delta_W)\n self.delta_B = np.asarray(delta_B)", "def conv_relu_pool_forward_naive(x, w, b, conv_param, pool_param):\n\ta, conv_cache = conv_forward_naive(x, w, b, conv_param)\n\ts, relu_cache = relu_forward(a)\n\tout, pool_cache = max_pool_forward_naive(s, pool_param)\n\tcache = (conv_cache, relu_cache, pool_cache)\n\treturn out, cache", "def ResNet(images, device):\n blocksPerSection = [2, 2, 2, 2]\n channelsPerSection = [64, 128, 256, 512]\n channelsPerBlock = [1, 1]\n downsampleSection = [0, 1, 1, 1]\n\n\n x = images\n channelsOut = 64\n\n with tf.device(device):\n\n x = ConvBlock(x, 64, [7,7], 2, '_init')\n x = slim.max_pool2d(x, [3, 3], stride=2, scope='pool_1')\n\n for s in range(len(blocksPerSection)):\n for l in range(blocksPerSection[s]):\n\n # Stride at the beginning of each block\n stride = 1\n if l == 0 and downsampleSection[s]:\n stride = 2\n\n sumInput = x\n\n # 2 conv only\n x = ConvBlock(x, channelsPerSection[s]*channelsPerBlock[1], [3, 3], stride, 
'%d_1_%d'%(s,l))\n x = ConvBlock(x, channelsPerSection[s]*channelsPerBlock[1], [3, 3], 1, '%d_2_%d'%(s,l), False)\n\n if l == 0 and channelsOut != channelsPerSection[s]*channelsPerBlock[1]:\n sumInput = ConvBlock(sumInput, channelsPerSection[s]*channelsPerBlock[1], [1,1], stride, '_sum%d'%(s), False)\n\n channelsOut = channelsPerSection[s]*channelsPerBlock[1]\n x = sumInput + x\n x = tf.nn.relu(x)\n\n with slim.arg_scope([slim.fully_connected],\n activation_fn=None,\n weights_regularizer=slim.l2_regularizer(0.0005),\n biases_regularizer=slim.l2_regularizer(0.0005),\n trainable=True):\n x = tf.reduce_mean(x, [1,2])\n softmax_linear = slim.fully_connected(x, NUM_CLASSES, scope='fc_1')\n\n return softmax_linear", "def resblock(input_tensor, num_channels):\r\n step_1_conv = Conv2D(num_channels, (3, 3), padding='same')(input_tensor)\r\n step_2_relu = Activation('relu')(step_1_conv)\r\n step_3_conv = Conv2D(num_channels, (3, 3), padding='same')(step_2_relu)\r\n output = Add()([input_tensor, step_3_conv])\r\n return Activation('relu')(output)", "def IMPALA_resnet_head(input_shape, l2_weight=0.0):\n # Total number of layers in this resnet. Used to approximiately get the\n # FixUp initialization right\n TOTAL_RESIDUAL_BLOCKS = 6\n\n model = layers.Input(shape=input_shape)\n input_layer = model\n\n for i, (num_channels, num_blocks) in enumerate([[16, 2], [32, 2], [32, 2]]):\n model = layers.Conv2D(\n num_channels, kernel_size=(3, 3), strides=(1, 1), padding=\"same\", activation=None,\n kernel_regularizer=regularizers.l2(l2_weight)\n )(model)\n model = layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2))(model)\n\n for j in range(num_blocks):\n block_input = model\n model = layers.ReLU()(model)\n model = layers.Conv2D(\n num_channels, kernel_size=(3, 3), strides=(1, 1), padding='same', activation=None,\n kernel_regularizer=regularizers.l2(l2_weight),\n kernel_initializer=keras.initializers.VarianceScaling(\n # Scaling is L^(-1/(2m - 2)) . 
In our case m = 2 (two layers in branch),\n # so our rescaling is L^(-1/2) = 1 / sqrt(L)\n scale=1 / np.sqrt(TOTAL_RESIDUAL_BLOCKS)\n )\n )(model)\n model = layers.ReLU()(model)\n model = layers.Conv2D(\n num_channels, kernel_size=(3, 3), strides=(1, 1), padding='same', activation=None,\n kernel_initializer=\"zero\", bias_initializer=\"zero\",\n kernel_regularizer=regularizers.l2(l2_weight)\n )(model)\n model = layers.add([model, block_input])\n\n model = layers.ReLU()(model)\n model = layers.Flatten()(model)\n model = layers.Dense(256, activation=\"relu\")(model)\n\n return input_layer, model", "def __init__(self, in_channels=4, out1_channels=3, out2_channels=1, bn=True):\n super(SRResNet_RGBY, self).__init__()\n \n self.bn = bn\n self.conv_input = nn.Conv2d(in_channels=in_channels, out_channels=64, kernel_size=9, stride=1, padding=9//2, bias=False)\n #self.relu = nn.LeakyReLU(0.2, inplace=True)\n self.relu = nn.PReLU(num_parameters=1, init=0.2)\n \n self.residual = self.make_layer(_Residual_Block, bn, 16)\n\n self.conv_mid = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, bias=False)\n if self.bn:\n self.bn_mid = nn.BatchNorm2d(64)\n\n self.upscale4x = nn.Sequential(\n nn.Conv2d(in_channels=64, out_channels=256, kernel_size=3, stride=1, padding=1, bias=False),\n nn.PixelShuffle(2),\n #nn.LeakyReLU(0.2, inplace=True),\n nn.PReLU(num_parameters=1, init=0.2),\n nn.Conv2d(in_channels=64, out_channels=256, kernel_size=3, stride=1, padding=1, bias=False),\n nn.PixelShuffle(2),\n #nn.LeakyReLU(0.2, inplace=True),\n nn.PReLU(num_parameters=1, init=0.2),\n )\n\n self.conv_output = nn.Conv2d(in_channels=64, out_channels=out1_channels, kernel_size=9, stride=1, padding=4, bias=False)\n self.conv_output2 = nn.Conv2d(in_channels=out1_channels, out_channels=out2_channels, kernel_size=1, stride=1, padding=0, bias=False)\n \n # init the weight of conv2d\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n #init.orthogonal(m.weight, math.sqrt(2))\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2. / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n if m.bias is not None:\n m.bias.data.zero_()", "def resblock(input_tensor, num_channels):\n tensor = Conv2D(filters=num_channels, kernel_size=3, padding='same')(\n input_tensor)\n tensor = Activation('relu')(tensor)\n tensor = Conv2D(filters=num_channels, kernel_size=3, padding='same')(\n tensor)\n tensor = Add()([tensor, input_tensor])\n output_tensor = Activation('relu')(tensor)\n return output_tensor", "def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=3,\n conv_layers=1, use_batchnorm=False, hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0,\n dtype=np.float32):\n self.params = {}\n self.reg = reg\n self.dtype = dtype\n self.conv_layers = conv_layers\n self.num_layers = conv_layers + 2 # Currently conv + affine + softmax\n self.use_batchnorm = use_batchnorm\n\n if self.use_batchnorm:\n self.bn_params = []\n self.bn_params = [{'mode': 'train'} for i in xrange(self.num_layers + 1)]\n\n ############################################################################\n # TODO: Initialize weights and biases for the three-layer convolutional #\n # network. Weights should be initialized from a Gaussian with standard #\n # deviation equal to weight_scale; biases should be initialized to zero. #\n # All weights and biases should be stored in the dictionary self.params. 
#\n # Store weights and biases for the convolutional layer using the keys 'W1' #\n # and 'b1'; use keys 'W2' and 'b2' for the weights and biases of the #\n # hidden affine layer, and keys 'W3' and 'b3' for the weights and biases #\n # of the output affine layer. #\n ############################################################################\n C, H, W = input_dim\n F = num_filters\n HH = filter_size\n WW = filter_size\n\n layer_dim = (F, C, HH, WW)\n\n # Conv - relu - pool weights\n for l in xrange(1, self.conv_layers + 1):\n self.params['W%d' % l] = np.random.normal(loc=0.0, scale=weight_scale, size=layer_dim)\n self.params['b%d' % l] = np.zeros(F)\n if self.use_batchnorm:\n self.params['gamma%d' % l] = np.ones(F)\n self.params['beta%d' % l] = np.zeros(F)\n layer_dim = (F, F, HH, WW)\n\n # Affine - Relu layer\n l = self.conv_layers + 1\n h_shape = ((num_filters * np.prod(input_dim[1:]) / 4**self.conv_layers), hidden_dim)\n self.params['W%d' % l] = np.random.normal(loc=0.0, scale=weight_scale, size=h_shape)\n self.params['b%d' % l] = np.zeros(hidden_dim)\n if self.use_batchnorm:\n self.params['gamma%d' % l] = np.ones(hidden_dim)\n self.params['beta%d' % l] = np.zeros(hidden_dim)\n\n # Final affine layer (hidden layers -> classes)\n l = l + 1\n a_shape = (hidden_dim, num_classes)\n self.params['W%d' % l] = np.random.normal(loc=0.0, scale=weight_scale, size=a_shape)\n self.params['b%d' % l] = np.zeros(num_classes)\n\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n for k, v in self.params.iteritems():\n self.params[k] = v.astype(dtype)", "def residual_block(layer_input, filters):\n d = Conv2D(filters, kernel_size=3, strides=1, padding='same')(layer_input)\n d = Activation('relu')(d)\n d = BatchNormalization(momentum=0.8)(d)\n d = Conv2D(filters, kernel_size=3, strides=1, padding='same')(d)\n d = BatchNormalization(momentum=0.8)(d)\n d = Add()([d, layer_input])\n return d", "def ResUNetPlusPlus(input_size: tuple, test_mode=False):\n assert len(input_size) == 3, \"[ERROR]: Expected tuple of length 3 got {0}\".format(len(input_size))\n\n image_width, image_height, n_channels = input_size\n\n inp = tf.keras.layers.Input(shape=(image_width, image_height, n_channels), dtype=\"float32\", name=\"input_layer\")\n\n # starting conv\n x = layers.Conv2DBN(64, 3, padding=\"same\", activation=\"relu\", name=\"conv_start\")(inp)\n\n # Residual block 1\n x = layers.ResidualBlock(64, 3, activation=\"relu\", name=\"rb_1\")(x)\n skip1 = x\n\n x = tf.keras.layers.Conv2D(128,\n kernel_size=(3, 3),\n strides=(2, 2),\n padding=\"same\",\n activation=\"linear\")(x)\n x = layers.SqueezeExcitationBlock(ratio=1)(x)\n\n # Residual block 2\n x = layers.ResidualBlock(128, 3, activation=\"relu\", name=\"rb_2\")(x)\n skip2 = x\n\n x = tf.keras.layers.Conv2D(256,\n kernel_size=(3, 3),\n strides=(2, 2),\n padding=\"same\",\n activation=\"linear\")(x)\n x = layers.SqueezeExcitationBlock(ratio=2)(x)\n\n # Residual block 3\n x = layers.ResidualBlock(256, 3, activation=\"relu\", name=\"rb_3\")(x)\n skip3 = x\n\n x = tf.keras.layers.Conv2D(512,\n kernel_size=(3, 3),\n strides=(2, 2),\n padding=\"same\",\n activation=\"linear\")(x)\n x = layers.SqueezeExcitationBlock(ratio=4)(x)\n\n # Residual block 4\n x = layers.ResidualBlock(512, 3, activation=\"relu\", name=\"rb_4\")(x)\n skip4 = x\n\n x = tf.keras.layers.Conv2D(1024,\n kernel_size=(3, 3),\n strides=(2, 2),\n padding=\"same\",\n 
activation=\"linear\")(x)\n x = layers.SqueezeExcitationBlock(ratio=8)(x)\n\n # Bottleneck ASPP\n x = layers.ASPP(256, [4, 8, 12], (256, 256), 16, activation=\"relu\", name=\"aspp_bottleneck\")(x)\n x = layers.Conv2DBN(1024, 1, activation=\"relu\")(x)\n\n # Up-sample L4\n x = layers.GlobalAttentionUpsample(name=\"GAU_4\")([skip4, x])\n x = tf.keras.layers.Concatenate(axis=-1)([x, skip4])\n x = tf.keras.layers.Conv2D(512,\n kernel_size=(3, 3),\n padding=\"same\",\n activation=\"linear\")(x)\n x = layers.ResidualBlock(512, 3, activation=\"relu\", activate_begin=True, name=\"u_rb_4\")(x)\n\n # Up-sample L3\n x = layers.GlobalAttentionUpsample(name=\"GAU_3\")([skip3, x])\n x = tf.keras.layers.Concatenate(axis=-1)([x, skip3])\n x = tf.keras.layers.Conv2D(256,\n kernel_size=(3, 3),\n padding=\"same\",\n activation=\"linear\")(x)\n x = layers.ResidualBlock(256, 3, activation=\"relu\", activate_begin=True, name=\"u_rb_3\")(x)\n\n # Up-sample L2\n x = layers.GlobalAttentionUpsample(name=\"GAU_2\")([skip2, x])\n x = tf.keras.layers.Concatenate(axis=-1)([x, skip2])\n x = tf.keras.layers.Conv2D(128,\n kernel_size=(3, 3),\n padding=\"same\",\n activation=\"linear\")(x)\n x = layers.ResidualBlock(128, 3, activation=\"relu\", activate_begin=True, name=\"u_rb_2\")(x)\n\n # Up-sample L1\n x = layers.GlobalAttentionUpsample(name=\"GAU_1\")([skip1, x])\n x = tf.keras.layers.Concatenate(axis=-1)([x, skip1])\n x = tf.keras.layers.Conv2D(64,\n kernel_size=(3, 3),\n padding=\"same\",\n activation=\"linear\")(x)\n x = layers.ResidualBlock(64, 3, activation=\"relu\", activate_begin=True, name=\"u_rb_1\")(x)\n x = layers.Conv2DBN(1, 1, activation=\"sigmoid\")(x)\n\n return tf.keras.Model(inputs=[inp], outputs=[x])", "def resnet_head(input_shape):\n input_layer = layers.Input(shape=input_shape)\n\n model = layers.Conv2D(16, kernel_size=(3, 3), strides=(1, 1), padding='same', activation=None)(input_layer)\n model = layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2))(model)\n model = layers.ReLU()(model)\n model = residual_block(model, 16)\n model = residual_block(model, 16)\n\n model = layers.Conv2D(32, kernel_size=(3, 3), strides=(1, 1), padding='same', activation=None)(model)\n model = layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2))(model)\n model = layers.ReLU()(model)\n model = residual_block(model, 32)\n model = residual_block(model, 32)\n\n model = layers.Conv2D(32, kernel_size=(3, 3), strides=(1, 1), padding='same', activation=None)(model)\n model = layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2))(model)\n model = layers.ReLU()(model)\n model = residual_block(model, 32)\n model = residual_block(model, 32)\n\n model = layers.Flatten()(model)\n\n return input_layer, model", "def resnet_layer(inputs,\n num_filters=16,\n kernel_size=3,\n strides=1,\n activation='relu',\n batch_normalization=False,\n conv_first=True):\n # conv = Conv1D(num_filters,\n # kernel_size=kernel_size,\n # strides=strides,\n # padding='same',\n # kernel_initializer='he_normal',\n # kernel_regularizer=l2(1e-4))\n conv = Conv1D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n )\n x = inputs\n if conv_first:\n x = conv(x)\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n else:\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n x = conv(x)\n return x", "def _make_conv_layers(self):\n conv = nn.Sequential(\n nn.Conv2d(in_channels=3, 
out_channels=64, kernel_size=7, stride=2, padding=1), # padding=3 so, output is 224.\n nn.LeakyReLU(0.1, inplace=True),\n nn.MaxPool2d(kernel_size=2, stride=2),\n\n nn.Conv2d(64, 192, 3, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n nn.MaxPool2d(2,2),\n\n nn.Conv2d(192, 128, 1, padding=1), ## kernel size = 1 이므로 padding = 0(defalut)\n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(128, 256, 3, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(256, 256, 1, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(256, 512, 3, padding=1), \n nn.LeakyReLU(0.1, inplace=True),\n nn.MaxPool2d(2,2),\n\n nn.Conv2d(512, 256, 1, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(256, 512, 3, padding=1), \n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(512, 256, 1, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(256, 512, 3, padding=1), \n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(512, 256, 1, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n nn.Conv2d(256, 512, 3, padding=1), \n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(512, 256, 1, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n nn.Conv2d(256, 512, 3, padding=1), \n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(512, 512, 1, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n nn.Conv2d(512, 1024, 3, padding=1), \n nn.LeakyReLU(0.1, inplace=True),\n nn.MaxPool2d(2,2),\n\n nn.Conv2d(1024, 512, 1, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n nn.Conv2d(512, 1024, 3, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n\n nn.Conv2d(1024, 512, 1, padding=1),\n nn.LeakyReLU(0.1, inplace=True),\n nn.Conv2d(512, 1024, 3, padding=1),\n nn.LeakyReLU(0.1, inplace=True)\n )\n return conv", "def __init__(self, config, input_shp):\n\n # Run initialization for super class\n super(MyNetwork, self).__init__()\n\n # Store configuration\n self.config = config\n\n # Placeholder for layers\n self.layers = {}\n indim = input_shp[0]\n\n # Retrieve Conv, Act, Pool functions from configurations. We'll use\n # these for our code below.\n if config.conv2d == \"torch\":\n self.Conv2d = nn.Conv2d\n elif config.conv2d == \"custom\":\n self.Conv2d = ConvBlock\n self.Activation = getattr(nn, config.activation)\n self.Pool2d = getattr(nn, config.pool2d)\n self.Linear = nn.Linear\n\n # Resnet Blocks, similar to slide 73 of lecture 21. However, for\n # simplicity, we'll make is slightly different. Note that we used\n # nn.Sequential this time.\n self.convs = nn.Sequential()\n cur_h, cur_w = input_shp[-2:]\n for _i in range(config.num_conv_outer):\n #\n # NOTE THE NEW LAYER ON THESE LINES!\n #\n # We have a dedicated 1x1 layer to get more channels. Note also\n # that this is a pure linear convolution layer.\n outdim = config.nchannel_base * 2 ** _i\n self.convs.add_module(\n \"conv_{}_base\".format(_i), nn.Conv2d(indim, outdim, 1, 1, 0))\n indim = outdim\n for _j in range(config.num_conv_inner):\n # We now use our selected convolution layer. 
Note that our\n # resnet implementation will have a different call style to\n # vanilla conv2d of torch, so we'll just do an ugly if-else\n # here.\n if config.conv2d == \"torch\":\n self.convs.add_module(\n \"conv_{}_{}\".format(_i, _j),\n self.Conv2d(indim, outdim, config.ksize, 1, 1))\n self.convs.add_module(\n \"act_{}_{}\".format(_i, _j),\n self.Activation())\n cur_h = cur_h - (config.ksize - 1)\n cur_w = cur_w - (config.ksize - 1)\n elif config.conv2d == \"custom\":\n self.convs.add_module(\n \"conv_{}_{}\".format(_i, _j),\n self.Conv2d(indim, outdim, config.ksize, 1, self.Activation))\n self.convs.add_module(\n \"conv_{}_pool\".format(_i), self.Pool2d(2, 2))\n cur_h = cur_h // 2\n cur_w = cur_w // 2\n\n # Final output layer. We'll assume that conv layer outputs are global\n # average pooled\n self.output = nn.Linear(indim, config.num_class)\n\n print(self)", "def build_resnet(depth, input_layer, n_classes, activation='relu'):\n num_conv_layers = 1\n num_add_layers = 1\n if (depth - 2) % 6 != 0:\n raise ValueError( 'depth should be 6n+2 (eg 20, 32, 44 in [a])' )\n # Start model definition.\n num_filters = 16\n num_res_blocks = int( (depth - 2) / 6 )\n\n inputs = input_layer\n x = resnet_layer( inputs=inputs, layer_num=num_conv_layers )\n num_conv_layers += 1\n # Instantiate the stack of residual units\n for stack in range( 3 ):\n for res_block in range( num_res_blocks ):\n strides = 1\n if stack > 0 and res_block == 0: # first layer but not first stack\n strides = 2 # downsample\n y = resnet_layer( inputs=x,\n num_filters=num_filters,\n activation=activation,\n strides=strides,\n layer_num=num_conv_layers,\n weight_decay=l2(1e-4) )\n\n num_conv_layers += 1\n y = resnet_layer( inputs=y,\n num_filters=num_filters,\n activation=None,\n layer_num=num_conv_layers,\n weight_decay=l2(1e-4) )\n num_conv_layers += 1\n if stack > 0 and res_block == 0: # first layer but not first stack\n # linear projection residual shortcut connection to match\n # changed dims\n x = resnet_layer( inputs=x,\n num_filters=num_filters,\n kernel_size=1,\n strides=strides,\n activation=None,\n batch_normalization=False,\n layer_num=num_conv_layers,\n weight_decay=l2(1e-4) )\n num_conv_layers += 1\n x = keras.layers.add( [x, y], name='add_%d' % num_add_layers )\n num_add_layers += 1\n x = Activation( activation )( x )\n num_filters *= 2\n\n # Add classifier on top.\n x = AveragePooling2D( pool_size=8 )( x )\n y = Flatten()( x )\n outputs = Dense( n_classes,\n activation='softmax',\n kernel_initializer='he_normal',\n name='classification' )( y )\n\n # Instantiate model.\n model = Model( inputs=inputs, outputs=outputs )\n return model", "def PXRCmodel(isize, nc, conv_init, ndf=128, bn=True, se=False):\n \n def squeeze_excite_block(tensor, ratio=16):\n \n init = tensor\n filters = init._keras_shape[3]\n se_shape = (1, 1, filters)\n\n se = GlobalAveragePooling2D()(init)\n se = Reshape(se_shape)(se)\n se = Dense(filters // ratio, activation='relu', kernel_initializer='he_normal', use_bias=False)(se)\n se = Dense(filters, activation='sigmoid', kernel_initializer='he_normal', use_bias=False)(se)\n\n x = multiply([init, se])\n return x\n \n x = inputs = Input(shape=(isize, isize, nc))\n x = Conv2D(filters=ndf, kernel_size=4, strides=1, use_bias=False,\n padding = \"same\", kernel_initializer = conv_init)(x)\n x = Conv2D(filters=ndf, kernel_size=4, strides=2, use_bias=False,\n padding = \"same\", kernel_initializer = conv_init)(x) \n x = LeakyReLU(alpha=0.2)(x)\n \n \n x = Conv2D(filters=ndf*2, kernel_size=4, strides=2, 
use_bias=False,\n padding = \"same\", kernel_initializer = conv_init)(x)\n x = Conv2D(filters=ndf*2, kernel_size=4, strides=1, use_bias=False,\n padding = \"same\", kernel_initializer = conv_init)(x)\n x = LeakyReLU(alpha=0.2)(x)\n \n\n x = Conv2D(filters=ndf*4, kernel_size=4, strides=2, use_bias=False,\n padding = \"same\", kernel_initializer = conv_init)(x)\n x = Conv2D(filters=ndf*4, kernel_size=4, strides=1, use_bias=False,\n padding = \"same\", kernel_initializer = conv_init)(x)\n x = LeakyReLU(alpha=0.2)(x)\n \n \n x = Conv2D(filters=ndf*8, kernel_size=4, strides=2, use_bias=False,\n padding = \"same\", kernel_initializer = conv_init)(x)\n x = Conv2D(filters=ndf*8, kernel_size=4, strides=1, use_bias=False,\n padding = \"same\", kernel_initializer = conv_init)(x)\n x = LeakyReLU(alpha=0.2)(x)\n \n \n y = Conv2D(filters=256, kernel_size=(3, 3), padding='same')(x)\n y = Conv2D(filters=256, kernel_size=(3, 3), padding='same')(y)\n y = Conv2D(filters=256, kernel_size=(3, 3), padding='same')(y)\n \n if (bn==True):\n y = BatchNormalization()(y)\n \n y = LeakyReLU()(y)\n y = MaxPool2D()(y)\n y = LeakyReLU()(y)\n \n ###########\n \n y = Conv2D(filters=128, kernel_size=(3, 3), padding='same')(y)\n y = Conv2D(filters=128, kernel_size=(3, 3), padding='same')(y)\n y = Conv2D(filters=128, kernel_size=(3, 3), padding='same')(y)\n \n if (se==True):\n y = squeeze_excite_block(y)\n \n if (bn==True):\n y = BatchNormalization()(y)\n \n y = LeakyReLU()(y)\n y = MaxPool2D()(y)\n y = LeakyReLU()(y)\n \n \n y = GlobalAveragePooling2D()(y)\n predictions = Dense(2, activation='softmax')(y)\n \n return Model(inputs=inputs, outputs=predictions)", "def resnet_layer(inputs,\n num_filters=16,\n kernel_size=3,\n strides=1,\n activation='relu',\n batch_normalization=True,\n conv_first=True):\n conv = Conv2D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))\n\n x = inputs\n if conv_first:\n x = conv(x)\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n else:\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n x = conv(x)\n\n return x", "def resnet_layer(inputs,\n num_filters=16,\n kernel_size=3,\n strides=1,\n activation='relu',\n batch_normalization=True,\n conv_first=True):\n conv = Conv2D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))\n\n x = inputs\n if conv_first:\n x = conv(x)\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n else:\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n x = conv(x)\n return x", "def resnet_layer(inputs,\n num_filters=16,\n kernel_size=3,\n strides=1,\n activation='relu',\n batch_normalization=True,\n conv_first=True):\n conv = Conv2D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))\n\n x = inputs\n if conv_first:\n x = conv(x)\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n else:\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n x = conv(x)\n return x", "def resnet_layer(inputs,\n num_filters=16,\n 
kernel_size=3,\n strides=1,\n activation='relu',\n batch_normalization=True,\n conv_first=True):\n conv = Conv2D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))\n\n x = inputs\n if conv_first:\n x = conv(x)\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n else:\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n x = conv(x)\n return x", "def resnet_layer(inputs,\n num_filters=16,\n kernel_size=3,\n strides=1,\n activation='relu',\n batch_normalization=True,\n conv_first=True):\n conv = Conv2D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))\n\n x = inputs\n if conv_first:\n x = conv(x)\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n else:\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n x = conv(x)\n return x", "def resnet_layer(inputs,\n num_filters=16,\n kernel_size=3,\n strides=1,\n activation='relu',\n batch_normalization=True,\n conv_first=True):\n conv = Conv2D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))\n\n x = inputs\n if conv_first:\n x = conv(x)\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n else:\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n x = conv(x)\n return x", "def resnet_block(inputs,\n num_filters=16,\n kernel_size=3,\n strides=1,\n activation='relu',\n conv_first=True):\n if conv_first:\n x = Conv2D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))(inputs)\n x = BatchNormalization()(x)\n if activation:\n x = Activation(activation)(x)\n return x\n x = BatchNormalization()(inputs)\n if activation:\n x = Activation('relu')(x)\n x = Conv2D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))(x)\n return x", "def forward(self, x):\n\n x = F.max_pool2d(F.relu(self.batch_norm1(self.conv1(x))), 3, stride=2, padding=1)\n x = F.max_pool2d(F.relu(self.batch_norm2(self.conv2(x))), 3, stride=2, padding=1)\n x = F.max_pool2d(F.relu(self.batch_norm3_b(self.conv3_b(F.relu(self.batch_norm3_a(self.conv3_a(x)))))), 3, stride=2, padding=1)\n x = F.max_pool2d(F.relu(self.batch_norm4_b(self.conv4_b(F.relu(self.batch_norm4_a(self.conv4_a(x)))))), 3, stride=2, padding=1)\n x = F.max_pool2d(F.relu(self.batch_norm5_b(self.conv5_b(F.relu(self.batch_norm5_a(self.conv5_a(x)))))), 3, stride=2, padding=1)\n x = self.avg_pool(x).view(-1,512)\n out = self.linear(x)\n\n return out", "def resnet_v1(input_shape, depth, num_classes=10,input_tensor=None):\n if (depth - 2) % 6 != 0:\n raise ValueError('depth should be 6n+2 (eg 20, 32, 44 in [a])')\n # Start model definition.\n num_filters = 16\n num_res_blocks = int((depth - 2) / 6)\n\n if (input_tensor == None):\n inputs = Input(shape=input_shape)\n else:\n inputs = input_tensor\n x = resnet_layer(inputs=inputs)\n # Instantiate the stack of residual units\n for stack in range(3):\n for 
res_block in range(num_res_blocks):\n strides = 1\n if stack > 0 and res_block == 0: # first layer but not first stack\n strides = 2 # downsample\n y = resnet_layer(inputs=x,\n kernel_size=30,\n num_filters=num_filters,\n strides=strides)\n y = resnet_layer(inputs=y,\n kernel_size=20,\n num_filters=num_filters,\n activation=None)\n if stack > 0 and res_block == 0: # first layer but not first stack\n # linear projection residual shortcut connection to match\n # changed dims\n x = resnet_layer(inputs=x,\n num_filters=num_filters,\n kernel_size=10,\n strides=strides,\n activation=None,\n batch_normalization=False)\n x = keras.layers.add([x, y])\n x = Activation('relu')(x)\n num_filters *= 2\n\n # Add classifier on top.\n # v1 does not use BN after last shortcut connection-ReLU\n x = AveragePooling1D(pool_size=8)(x)\n y = Flatten()(x)\n outputs = Dense(num_classes,\n activation='softmax',\n kernel_initializer='he_normal')(y)\n\n # Instantiate model.\n model = Model(inputs=inputs, outputs=outputs)\n return model", "def resnet_v2(input_shape, depth, num_classes=7):\n if (depth - 2) % 9 != 0:\n raise ValueError('depth should be 9n+2 (eg 56 or 110 in [b])')\n \n num_filters_in = 16\n num_res_blocks = int((depth - 2) / 9)\n \n inputs = Input(shape=input_shape)\n x = resnet_layer(inputs=inputs,\n num_filters=num_filters_in,\n conv_first=True)\n \n for stage in range(3):\n for res_block in range(num_res_blocks):\n activation = 'relu'\n batch_normalization = True\n strides = 1\n # num of param setting \n if stage == 0: # first stage\n num_filters_out = num_filters_in * 4\n if res_block == 0: # first layer & first stage\n activation = None\n batch_normalization = False\n else: # second, third stage\n num_filters_out = num_filters_in * 2\n if res_block == 0: # first layer but no first stage\n strides = 2 # downsample\n y = resnet_layer(inputs=x,\n num_filters=num_filters_in,\n kernel_size=1,\n strides=strides,\n activation=activation,\n batch_normalization=batch_normalization,\n conv_first=False)\n y = resnet_layer(inputs=y,\n num_filters=num_filters_in,\n conv_first=False)\n y = resnet_layer(inputs=y,\n num_filters=num_filters_out,\n kernel_size=1,\n conv_first=False)\n if res_block == 0:\n # Linear projection residual shortcut connection to match\n # changed dims\n # at the first time, make a shortcut origin\n x = resnet_layer(inputs=x,\n num_filters=num_filters_out,\n kernel_size=1,\n strides=strides,\n activation=None,\n batch_normalization=False)\n # and add every reputation\n x = keras.layers.add([x, y])\n \n num_filters_in = num_filters_out\n \n # Add classifier on top\n # v2 has BN_ReLU before Pooling\n x = BatchNormalization()(x)\n x = Activation('relu')(x)\n x = AveragePooling2D(pool_size=8)(x)\n y = Flatten()(x)\n outputs = Dense(num_classes,\n activation='softmax',\n kernel_initializer='he_normal')(y)\n \n # Instantiate model\n model = Model(inputs=inputs, outputs=outputs)\n \n return model", "def resnet_layer(inputs,\r\n num_filters=16,\r\n kernel_size=3,\r\n strides=1,\r\n activation='relu',\r\n batch_normalization=True,\r\n conv_first=True):\r\n conv = Conv2D(num_filters,\r\n kernel_size=kernel_size,\r\n strides=strides,\r\n padding='same',\r\n kernel_initializer='he_normal',\r\n kernel_regularizer=l2(1e-4))\r\n\r\n x = inputs\r\n if conv_first:\r\n x = conv(x)\r\n if batch_normalization:\r\n x = BatchNormalization()(x)\r\n if activation is not None:\r\n x = Activation(activation)(x)\r\n else:\r\n if batch_normalization:\r\n x = BatchNormalization()(x)\r\n if activation is not None:\r\n x 
= Activation(activation)(x)\r\n x = conv(x)\r\n return x", "def resnet_layer(inputs,\r\n num_filters=16,\r\n kernel_size=3,\r\n strides=1,\r\n activation='relu',\r\n batch_normalization=True,\r\n conv_first=True):\r\n conv = Conv2D(num_filters,\r\n kernel_size=kernel_size,\r\n strides=strides,\r\n padding='same',\r\n kernel_initializer='he_normal',\r\n kernel_regularizer=l2(1e-4))\r\n\r\n x = inputs\r\n if conv_first:\r\n x = conv(x)\r\n if batch_normalization:\r\n x = BatchNormalization()(x)\r\n if activation is not None:\r\n x = Activation(activation)(x)\r\n else:\r\n if batch_normalization:\r\n x = BatchNormalization()(x)\r\n if activation is not None:\r\n x = Activation(activation)(x)\r\n x = conv(x)\r\n return x", "def residual_block(layer_input, filters):\n d = Conv2D(filters, kernel_size=3, strides=1, padding='same')(layer_input)\n d = Activation('relu')(d)\n d = BatchNormalization(momentum=0.8)(d)\n d = Conv2D(filters, kernel_size=3, strides=1, padding='same')(d)\n d = BatchNormalization(momentum=0.8)(d)\n d = Add()([d, layer_input])\n return d", "def _first_conv(x: tf.Tensor) -> tf.Tensor:\n with slim.arg_scope([slim.conv2d], activation_fn=None, normalizer_fn=None):\n x = ResNet._conv2d_same(x, 64, 7, stride=2, scope='conv1')\n return slim.max_pool2d(x, [3, 3], stride=2, scope='pool1')", "def WideResNet(input_shape,\n nb_output_nodes,\n output_activation):\n\n # At some point turn these back into input parameters\n N=2\n k=2\n dropout=0.0\n \n channel_axis = 1 if K.image_data_format() == \"channels_first\" else -1\n\n ip = Input(shape=input_shape)\n\n x = initial_conv(ip)\n x = expand_conv(x, 16, k)\n\n for i in range(N - 1):\n x = conv1_block(x, k, dropout)\n\n x = BatchNormalization(axis=channel_axis, momentum=0.1,\n epsilon=1e-5, gamma_initializer='uniform')(x)\n x = Activation('relu')(x)\n\n x = expand_conv(x, 32, k, strides=(2, 2))\n\n for i in range(N - 1):\n x = conv2_block(x, k, dropout)\n\n\n x = BatchNormalization(axis=channel_axis, momentum=0.1,\n epsilon=1e-5, gamma_initializer='uniform')(x)\n x = Activation('relu')(x)\n\n x = expand_conv(x, 64, k, strides=(2, 2))\n\n\n for i in range(N - 1):\n x = conv3_block(x, k, dropout)\n\n\n x = BatchNormalization(axis=channel_axis, momentum=0.1,\n epsilon=1e-5, gamma_initializer='uniform')(x)\n x = Activation('relu')(x)\n\n x = AveragePooling2D((8, 8))(x)\n x = Flatten()(x)\n\n #x = Dense(nb_output_nodes, kernel_regularizer=l2(weight_decay),\n # activation=output_activation)(x)\n\n x = Dense(nb_output_nodes, kernel_regularizer=l2(weight_decay))(x)\n x = Activation(output_activation)(x)\n\n model = Model(ip, x)\n return model", "def ResNet18(input_shape = (28, 28, 1), classes = 24):\n \n # Define the input as a tensor with shape input_shape\n X = X_input = Input(input_shape)\n\n \n # Zero-Padding\n X = ZeroPadding2D((3, 3))(X_input)\n \n # Stage 1\n X = Conv2D(64, (7, 7), strides = (2, 2), name = 'conv1', kernel_initializer = glorot_uniform(seed=0))(X)\n X = BatchNormalization(axis = 3, name = 'bn_conv1')(X)\n X = Activation('relu')(X)\n #X = MaxPooling2D((3, 3), strides=(2, 2))(X)\n\n # Stage 2\n X = convolutional_block(X, [64, 64], stage=2, block='a')\n X = identity_block(X, [64, 64], stage=2, block='b')\n\n # Stage 3\n X = convolutional_block(X, [128, 128], stage=3, block='a')\n X = identity_block(X, [128, 128], stage=3, block='b')\n\n # Stage 4\n X = convolutional_block(X, [256, 256], stage=4, block='a')\n X = identity_block(X, [256, 256], stage=4, block='b')\n\n # Stage 5\n X = convolutional_block(X, [512, 512], stage=5, 
block='a')\n X = identity_block(X, [512, 512], stage=5, block='b')\n\n # AVGPOOL\n # X = AveragePooling2D(pool_size=(2,2), name='avg_pool')(X)\n\n # output layer\n X = Flatten()(X)\n X = Dense(classes, activation='softmax', name='fc' + str(classes), kernel_initializer = glorot_uniform(seed=0))(X)\n \n # Create model\n model = Model(inputs = X_input, outputs = X, name='ResNet18')\n\n return model", "def __init__(self, n_filters = 64,\n n_kernels = 3,\n n_outputs = 10,\n inp_shape = (28,28),\n residual=True,\n regularizer = None,\n intializer = None,\n use_pool= False,\n use_dropout = False,\n use_batchnorm = False\n ):\n super(CNNModel, self).__init__()\n self.conv_dim = len(inp_shape)-1\n self.n_filters = n_filters\n self.initializer = intializer\n self.n_kernels = n_kernels\n self.projection = 3\n self.n_outputs = n_outputs\n self.num_layers = 1\n self.inp_shape = inp_shape\n self.regularizer = regularizer\n self.use_pool = use_pool\n self.residual = residual\n self.use_dropout = use_dropout\n self.use_batchnorm = use_batchnorm\n\n kernel_initializer = initializers.RandomNormal(mean=0.0, stddev=0.05)\n\n if self.conv_dim == 1:\n self.input_layer = layers.Conv1D(self.n_filters, (self.projection),\n activation = \"linear\",\n input_shape = self.inp_shape,\n name ='cnn_input',\n padding = 'same',\n kernel_regularizer = self.regularizer,\n bias_regularizer = self.regularizer,\n kernel_initializer=kernel_initializer,\n bias_initializer=initializers.get(\"zeros\")\n )\n self.output_layer = layers.Conv1D(self.n_kernels, (self.projection),\n activation=\"linear\",\n input_shape=(None, self.inp_shape[0], self.n_filters),\n name='cnn_output',\n padding = 'same',\n kernel_regularizer=self.regularizer,\n bias_regularizer=self.regularizer,\n kernel_initializer=kernel_initializer,\n bias_initializer=initializers.get(\"zeros\")\n )\n if self.use_pool:\n self.pool = layers.MaxPool1D()\n elif self.conv_dim == 2:\n self.input_layer = layers.Conv2D(self.n_filters, (self.projection,self.projection),\n activation=\"linear\",\n input_shape=self.inp_shape,\n name='cnn_input',\n padding = 'same',\n kernel_regularizer=self.regularizer,\n bias_regularizer=self.regularizer,\n kernel_initializer=kernel_initializer,\n bias_initializer=initializers.get(\"zeros\")\n )\n self.output_layer = layers.Conv2D(self.n_kernels, (self.projection, self.projection),\n activation= \"linear\",\n input_shape=(None, self.inp_shape[0],self.inp_shape[1], self.n_filters),\n name=\"cnn_output\",\n padding = 'same',\n kernel_regularizer=self.regularizer,\n bias_regularizer=self.regularizer,\n kernel_initializer=kernel_initializer,\n bias_initializer=initializers.get(\"zeros\")\n )\n if self.use_pool:\n self.pool = layers.MaxPool2D()\n self.list_cnn = [self.input_layer]\n self.flatten = layers.Flatten()\n\n #compute input shape after flatten for the dense layer\n if not self.use_pool:\n self.class_inp = np.prod(self.inp_shape[:-1])*self.n_kernels\n else:\n self.class_inp = np.prod(self.inp_shape[:-1])*self.n_kernels//(2**self.conv_dim)\n # self.classify = MyDenseLayer(\n # self.n_outputs,shape = (None,self.class_inp),\n # layer_name = 'classify',\n # initializer = \"RandomNormal\")\n self.classify = layers.Dense(units = self.n_outputs,\n activation = 'softmax', use_bias = True,\n input_shape = self.class_inp,\n kernel_initializer = kernel_initializer, bias_initializer=initializers.get(\"zeros\"),\n name = 'classification_layer')", "def resnet_layer(inputs,\n num_filters=16,\n kernel_size=3,\n strides=1,\n activation='relu',\n 
weight_decay=l2(1e-4),\n batch_normalization=True,\n conv_first=True,\n layer_num=None):\n conv = Conv2D(num_filters,\n kernel_size=kernel_size,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=weight_decay,\n name='conv2d_%d' % layer_num)\n\n x = inputs\n if conv_first:\n x = conv(x)\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n else:\n if batch_normalization:\n x = BatchNormalization()(x)\n if activation is not None:\n x = Activation(activation)(x)\n x = conv(x)\n return x", "def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=7,\n hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0,\n dtype=np.float32):\n self.params = {}\n self.reg = reg\n self.dtype = dtype\n \n ############################################################################\n # TODO: Initialize weights and biases for the three-layer convolutional #\n # network. Weights should be initialized from a Gaussian with standard #\n # deviation equal to weight_scale; biases should be initialized to zero. #\n # All weights and biases should be stored in the dictionary self.params. #\n ############################################################################\n \n # Store weights and biases for the convolutional layer using the keys 'W1' and 'b1'; \n C, H, W = input_dim\n filter_sizes = (filter_size, filter_size)\n self.params['W1'] = np.random.normal(0, weight_scale, [num_filters, C, filter_sizes[0], filter_sizes[1]])\n self.params['b1'] = np.zeros((num_filters, ))\n\n # use keys 'W2' and 'b2' for the weights and biases of the hidden affine layer;\n # In this case, ConvLayer doesn't reduce the spatial size of the input, (N, C, H, W) -> Conv -> (N, F, H, W)\n # To satisfy this constraint, (W + 2 * pad - filter_size) / stride + 1 = W need to hold, which led to pad = (F - S) / 2 where S == 1\n # (N, C, H, W) -> Conv -> (N, F, H, W) -> Pooling -> (N, F, H/2, W/2)\n # In a FC_NN, FCL weights (input_dim, hidden_dim) where every img is flatten into a 1D array of length D = F * H/2 * W/2.\n self.params['W2'] = np.random.normal(0, weight_scale, [num_filters * (H / 2) * (W / 2), hidden_dim])\n self.params['b2'] = np.zeros((hidden_dim, ))\n\n # And the keys 'W3' and 'b3' for the weights and biases of the output affine layer. 
\n self.params['W3'] = np.random.normal(0, weight_scale, [hidden_dim, num_classes])\n self.params['b3'] = np.zeros((num_classes, ))\n\n \n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n for k, v in self.params.iteritems():\n self.params[k] = v.astype(dtype)", "def forward(self, x):\n # Encoder1 --block1\n encode_block1 = self.conv_encoder1(x)\n if self.residus[0] == 1:\n encode_block1 += self.residual_shortcut1(x)\n encode_pool1 = self.max_pool_encoder1(encode_block1)\n\n # Encoder2 --block2\n encode_block2 = self.conv_encoder2(encode_pool1)\n if self.residus[1] == 1:\n encode_block2 += self.residual_shortcut2(encode_pool1)\n encode_pool2 = self.max_pool_encoder2(encode_block2)\n \n # Encoder3 --block3\n encode_block3 = self.conv_encoder3(encode_pool2)\n if self.residus[2] == 1:\n encode_block3 += self.residual_shortcut3(encode_pool2) ##\n encode_pool3 = self.max_pool_encoder3(encode_block3)\n \n # Encoder4 --block4\n encode_block4 = self.conv_encoder4(encode_pool3) \n if self.residus[3] == 1:\n encode_block4 += self.residual_shortcut4(encode_pool3) ##\n encode_pool4 = self.max_pool_encoder4(encode_block4)\n\n # Transitional block \n encode_block_trans_1 = self.conv_encoder_trans_1(encode_pool4)\n encode_block_trans_1 = self.conv_encoder_trans_2(encode_block_trans_1)\n encode_block_trans_1 = self.conv_encoder_trans_3(encode_block_trans_1)\n \n encode_block_trans_1 = torch.cat((encode_pool4, encode_block_trans_1), 1) # Concatenation\n \n encode_block_trans_2 = self.conv_encoder_trans_4(encode_block_trans_1)\n encode_block_trans_2 = self.conv_encoder_trans_5(encode_block_trans_2)\n encode_block_trans_2 = self.conv_encoder_trans_6(encode_block_trans_2)\n\n middle_block = torch.cat((encode_block_trans_1, encode_block_trans_2), 1) # Concatenation\n \n convTranspose_transitional = self.convTranspose_transitional(middle_block) \n # Decoder4 --block5\n decode_block4 = torch.cat((convTranspose_transitional, encode_block4), 1) \n if self.residus[4] == 1:\n decode_block4 += self.residual_shortcut_encoder_decoder4(encode_block4)\n\n #--block6\n cat_layer3 = self.conv_decoder4(decode_block4) \n if self.residus[5] == 1:\n cat_layer3 += self.residual_shortcut_decoder4(decode_block4)\n convTranspose_decoder4 = self.convTranspose_decoder4(cat_layer3)\n \n \n # Decoder3 --block7\n decode_block3 = torch.cat((convTranspose_decoder4, encode_block3), 1)\n if self.residus[6] == 1:\n decode_block3 += self.residual_shortcut_encoder_decoder3(encode_block3)\n \n #--block8\n cat_layer2 = self.conv_decoder3(decode_block3) \n if self.residus[7] == 1:\n cat_layer2 += self.residual_shortcut_decoder3(decode_block3)\n convTranspose_decoder3 = self.convTranspose_decoder3(cat_layer2)\n \n # Decoder2 --block9\n decode_block2 = torch.cat((convTranspose_decoder3, encode_block2), 1) \n if self.residus[8] == 1:\n decode_block2 += self.residual_shortcut_encoder_decoder2(encode_block2)\n \n #--block10\n cat_layer1 = self.conv_decoder2(decode_block2)\n if self.residus[9] == 1:\n cat_layer1 += self.residual_shortcut_decoder2(decode_block2)\n convTranspose_decoder2 = self.convTranspose_decoder2(cat_layer1)\n \n # Decoder1 --block11\n decode_block1 = torch.cat((convTranspose_decoder2, encode_block1), 1) \n if self.residus[10] == 1:\n decode_block1 += self.residual_shortcut_encoder_decoder1(encode_block1)\n \n #--block12\n final_layer = self.final_layer(decode_block1)\n if self.residus[11] == 1:\n 
final_layer += self.residual_shortcut_final_layer(decode_block1)\n \n \n return final_layer", "def UNet(input_size=(256, 256, 1)):\n inputs = Input(input_size)\n c1 = Conv2D(16, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(inputs)\n c1 = Conv2D(16, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c1)\n p1 = MaxPooling2D((2, 2))(c1)\n\n c2 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(p1)\n c2 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c2)\n p2 = MaxPooling2D((2, 2))(c2)\n\n c3 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(p2)\n c3 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c3)\n p3 = MaxPooling2D((2, 2))(c3)\n\n c4 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(p3)\n c4 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c4)\n p4 = MaxPooling2D(pool_size=(2, 2))(c4)\n\n c5 = Conv2D(256, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(p4)\n c5 = Conv2D(256, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c5)\n\n u6 = Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(c5)\n u6 = concatenate([u6, c4])\n c6 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(u6)\n c6 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c6)\n\n u7 = Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(c6)\n u7 = concatenate([u7, c3])\n c7 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(u7)\n c7 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c7)\n\n u8 = Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(c7)\n u8 = concatenate([u8, c2])\n c8 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(u8)\n c8 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c8)\n\n u9 = Conv2DTranspose(16, (2, 2), strides=(2, 2), padding='same')(c8)\n u9 = concatenate([u9, c1], axis=3)\n c9 = Conv2D(16, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(u9)\n c9 = Conv2D(16, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c9)\n\n outputs = Conv2D(1, (1, 1), activation='sigmoid')(c9)\n model = Model(inputs=[inputs], outputs=[outputs])\n model.compile(optimizer=Adam(lr=1e-4), loss=dice_coef_loss, metrics=['accuracy', dice_coef])\n return model", "def get_resnet_v1(input_shape, depth, num_classes=10):\n if (depth - 2) % 6 != 0:\n raise ValueError('depth should be 6n+2 (eg 20, 32, 44 in [a])')\n # Start model definition.\n num_filters = 16\n num_res_blocks = int((depth - 2) / 6)\n\n inputs = Input(shape=input_shape)\n x = resnet_layer(inputs=inputs)\n # Instantiate the stack of residual units\n for stack in range(3):\n for res_block in range(num_res_blocks):\n strides = 1\n if stack > 0 and res_block == 0: # first layer but not first stack\n strides = 2 # downsample\n y = resnet_layer(inputs=x,\n num_filters=num_filters,\n strides=strides)\n y = resnet_layer(inputs=y,\n num_filters=num_filters,\n activation=None)\n if stack > 0 and res_block == 0: # first layer but not first stack\n # linear projection residual shortcut connection to match\n # changed dims\n x = resnet_layer(inputs=x,\n 
num_filters=num_filters,\n kernel_size=1,\n strides=strides,\n activation=None,\n batch_normalization=False)\n x = add([x, y])\n x = Activation('relu')(x)\n num_filters *= 2\n\n # Add classifier on top.\n # v1 does not use BN after last shortcut connection-ReLU\n x = AveragePooling2D(pool_size=7)(x)\n # x = AveragePooling2D(pool_size=8)(x)\n y = Flatten()(x)\n outputs = Dense(num_classes,\n activation='softmax',\n kernel_initializer='he_normal')(y)\n\n # Instantiate model.\n model = Model(inputs=inputs, outputs=outputs)\n return model", "def forward(self, x):\n\n out = F.relu(self.conv1(x))\n out = F.relu(self.conv2(out))\n\n out = F.relu(self.resnet_block(out))\n\n # 8 x 8 x 64\n noise = self.sample_noise((out.shape[0], self.noise_dim, out.shape[2], out.shape[3]))\n\n # print(noise.shape)\n # print(out.shape)\n\n out = torch.cat([out, noise], dim=1)\n\n # print(out.shape)\n\n out = F.relu(self.deconv1(out))\n out = F.tanh(self.deconv2(out))\n\n return out", "def residual_net(n,bottom,total_depth, nclasses, use_global_stats=False,return_layer=None):\n # figure out network structure\n net_defs = {\n 18:([2, 2, 2, 2], \"standard\"),\n 34:([3, 4, 6, 3], \"standard\"),\n 50:([3, 4, 6, 3], \"bottleneck\"),\n 101:([3, 4, 23, 3], \"bottleneck\"),\n 152:([3, 8, 36, 3], \"bottleneck\"),\n }\n alpha = string.ascii_lowercase\n assert total_depth in net_defs.keys(), \"net of depth:{} not defined\".format(total_depth)\n\n # nunits_list a list of integers indicating the number of layers in each depth.\n nunits_list, unit_type = net_defs[total_depth] \n nouts = [64, 128, 256, 512] # same for all nets\n \n n.conv1, n.bn1, n.scale1 = conv_bn_scale(bottom, ks = 7, \n stride = 2, nout = 64, pad = 3,\n use_global_stats=use_global_stats)\n n.conv1_relu = L.ReLU(n.scale1, in_place=True)\n n.pool1 = L.Pooling(n.conv1_relu, stride = 2, kernel_size = 3, pool=P.Pooling.MAX)\n \n U=n.pool1\n \n # make the convolutional body\n for i,(nout, nunits) in enumerate(zip(nouts, nunits_list)): # for each depth and nunits\n for unit,a in zip(range(1, nunits + 1),alpha): # for each unit. Enumerate from 1.\n# s = str(nout) + '_' + str(unit) + '_' # layer name prefix\n s= 'res{}{}'.format(i+2,a)\n# print(s)\n newdepth = 2 if unit is 1 else 0\n if i is 0 and newdepth:\n newdepth=1\n if unit_type == \"standard\":\n\n U=residual_standard_unit(n,U, nout, s, newdepth = newdepth, use_global_stats=use_global_stats)\n else:\n U=residual_bottleneck_unit(n,U, nout, s, newdepth = newdepth, use_global_stats=use_global_stats)\n\n # add the end layers \n n.global_pool = L.Pooling(U, pooling_param = dict(pool = 1, global_pooling = True))\n setattr(n,'fc'+str(nclasses), L.InnerProduct(n.global_pool, num_output = nclasses,\n param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)]))\n \n if return_layer is None:\n return getattr(n,'fc'+str(nclasses))\n else:\n return getattr(n,return_layer)", "def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=7,\n hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0,\n dtype=np.float32):\n self.params = {}\n self.reg = reg\n self.dtype = dtype\n \n ############################################################################\n # TODO: Initialize weights and biases for the three-layer convolutional #\n # network. Weights should be initialized from a Gaussian with standard #\n # deviation equal to weight_scale; biases should be initialized to zero. #\n # All weights and biases should be stored in the dictionary self.params. 
#\n # Store weights and biases for the convolutional layer using the keys 'W1' #\n # and 'b1'; use keys 'W2' and 'b2' for the weights and biases of the #\n # hidden affine layer, and keys 'W3' and 'b3' for the weights and biases #\n # of the output affine layer. #\n ############################################################################ \n C, H, W = input_dim;\n\n # Dimensions of data output by convolutional layer\n S = 1; pad = (filter_size - 1) / 2; # Stride and image padding\n hconv = (H - filter_size + 2*pad)/S + 1;\n wconv = (W - filter_size + 2*pad)/S + 1;\n\n # Get dimensions of 2x2 max-pool output\n hmp = hconv / 2;\n wmp = wconv / 2;\n\n # Get dimensions of vector fed into affine layer\n # Convert maxpool output by using np.reshape(v1,(N,-1))\n # Recover by using np.reshape(dv1,v1.shape)\n laff = hmp*wmp*num_filters;\n\n # Determine starting weight and bias matrices\n self.params['W1'] = weight_scale * np.random.randn(num_filters, C, filter_size, filter_size);\n self.params['b1'] = np.zeros(num_filters);\n self.params['W2'] = weight_scale * np.random.randn(laff, hidden_dim);\n self.params['b2'] = np.zeros(hidden_dim);\n self.params['W3'] = weight_scale * np.random.rand(hidden_dim,num_classes);\n self.params['b3'] = np.zeros(num_classes);\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n for k, v in self.params.iteritems():\n self.params[k] = v.astype(dtype)", "def conv_and_max_pool(x, conv):\n return F.relu(conv(x).permute(0, 2, 1).max(1)[0])", "def forward(self, x):\n\n ########################\n # PUT YOUR CODE HERE #\n #######################\n\n x = self.pool1(F.relu(self.batch1(self.conv1(x))))\n x = self.pool2(F.relu(self.batch2(self.conv2(x))))\n x = F.relu(self.batch3a(self.conv3a(x)))\n x = self.pool3(F.relu(self.batch3b(self.conv3b(x))))\n x = F.relu(self.batch4a(self.conv4a(x)))\n x = self.pool4(F.relu(self.batch4b(self.conv4b(x))))\n x = F.relu(self.batch5a(self.conv5a(x)))\n x = self.pool5(F.relu(self.batch5b(self.conv5b(x))))\n x = self.avgpool(x)\n x = x.reshape(x.shape[0], -1)\n out = self.fc1(x)\n\n# raise NotImplementedError\n ########################\n # END OF YOUR CODE #\n #######################\n\n return out", "def Unet4(shape, nb_filters=32, exp=1, kernel_size=3, initialization=\"glorot_uniform\", activation=\"relu\", sigma_noise=0, output_channels=1, drop=0.0, regularization=None):\n \n \n input_layer = Input(shape=shape)\n\n conv1 = ConvBlock(input_layer, nb_filters=nb_filters, kernel_size=kernel_size, initializer=initialization, activation=activation, regularization=regularization)\n pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)\n if drop > 0.0: pool1 = Dropout(drop)(pool1)\n\n conv2 = ConvBlock(pool1, nb_filters=nb_filters * 2 **(1 * exp), kernel_size=kernel_size, initializer=initialization, activation=activation, regularization=regularization)\n pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)\n if drop > 0.0: pool2 = Dropout(drop)(pool2)\n\n conv3 = ConvBlock(pool2, nb_filters=nb_filters * 2 **(2 * exp), kernel_size=kernel_size, initializer=initialization, activation=activation, regularization=regularization)\n pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)\n if drop > 0.0: pool3 = Dropout(drop)(pool3)\n\n conv4 = ConvBlock(pool3, nb_filters=nb_filters * 2 **(3 * exp), kernel_size=kernel_size, initializer=initialization, activation=activation, regularization=regularization)\n pool4 = MaxPooling2D(pool_size=(2, 
2))(conv4)\n if drop > 0.0: pool4 = Dropout(drop)(pool4)\n\n deconv5 = DeconvBlock(conv4, residual=conv3, nb_filters=nb_filters * 2 **(2 * exp), kernel_size=kernel_size, regularization=regularization)\n if drop > 0.0: deconv5 = Dropout(drop)(deconv5)\n\n deconv6 = DeconvBlock(deconv5, residual=conv2, nb_filters=nb_filters * 2 **(1 * exp), kernel_size=kernel_size, regularization=regularization)\n if drop > 0.0: deconv6 = Dropout(drop)(deconv6)\n\n deconv7 = DeconvBlock(deconv6, residual=conv1, nb_filters=nb_filters, kernel_size=kernel_size, regularization=regularization)\n if drop > 0.0: deconv7 = Dropout(drop)(deconv7)\n\n if sigma_noise > 0:\n deconv7 = GaussianNoise(sigma_noise)(deconv7)\n\n output_layer = Conv2D(filters=output_channels, kernel_size=(1, 1))(deconv7)\n output_layer = BatchNormalization()(output_layer)\n output_layer = Activation('softmax')(output_layer)\n\n model = Model(inputs=input_layer, outputs=output_layer, name='Unet')\n return model", "def resnet_v1(input_shape, depth, num_classes=100):\n if (depth - 2) % 6 != 0:\n raise ValueError('depth should be 6n+2 (eg 20, 32, 44 in [a])')\n # Start model definition.\n num_filters = 16\n num_res_blocks = int((depth - 2) / 6)\n\n inputs = Input(shape=input_shape)\n x = resnet_layer(inputs=inputs)\n # Instantiate the stack of residual units\n for stack in range(3):\n for res_block in range(num_res_blocks):\n strides = 1\n if stack > 0 and res_block == 0: # first layer but not first stack\n strides = 2 # downsample\n y = resnet_layer(inputs=x,\n num_filters=num_filters,\n strides=strides)\n y = resnet_layer(inputs=y,\n num_filters=num_filters,\n activation=None)\n if stack > 0 and res_block == 0: # first layer but not first stack\n # linear projection residual shortcut connection to match\n # changed dims\n x = resnet_layer(inputs=x,\n num_filters=num_filters,\n kernel_size=1,\n strides=strides,\n activation=None,\n batch_normalization=False)\n x = keras.layers.add([x, y])\n x = Activation('relu')(x)\n num_filters *= 2\n\n # Add classifier on top.\n # v1 does not use BN after last shortcut connection-ReLU\n x = AveragePooling2D(pool_size=8)(x)\n y = Flatten()(x)\n outputs = Dense(num_classes,\n activation='softmax',\n kernel_initializer='he_normal')(y)\n\n # Instantiate model.\n model = Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer=SGD(lr=0.1), loss='categorical_crossentropy', metrics = ['accuracy'])\n return model", "def resnet_v1(input_shape, depth, num_classes=2):\n if (depth - 2) % 6 != 0:\n raise ValueError('depth should be 6n+2 (eg 20, 32, 44 in [a])')\n # Start model definition.\n num_filters = 16\n num_res_blocks = int((depth - 2) / 6)\n\n inputs = Input(shape=input_shape)\n x = resnet_layer(inputs=inputs)\n # Instantiate the stack of residual units\n for stack in range(3):\n for res_block in range(num_res_blocks):\n strides = 1\n if stack > 0 and res_block == 0: # first layer but not first stack\n strides = 2 # downsample\n y = resnet_layer(inputs=x,\n num_filters=num_filters,\n strides=strides)\n y = resnet_layer(inputs=y,\n num_filters=num_filters,\n activation=None)\n if stack > 0 and res_block == 0: # first layer but not first stack\n # linear projection residual shortcut connection to match\n # changed dims\n x = resnet_layer(inputs=x,\n num_filters=num_filters,\n kernel_size=1,\n strides=strides,\n activation=None,\n batch_normalization=False)\n x = tensorflow.keras.layers.add([x, y])\n x = Activation('relu')(x)\n num_filters *= 2\n\n # Add classifier on top.\n # v1 does not use BN after last 
shortcut connection-ReLU\n x = AveragePooling2D(pool_size=8)(x)\n y = Flatten()(x)\n outputs = Dense(num_classes,\n activation='softmax',\n kernel_initializer='he_normal')(y)\n\n # Instantiate model.\n model = Model(inputs=inputs, outputs=outputs)\n return model", "def net(data_path, input_image, reduce=False, reuse=False):\n if not reduce:\n layers = (\n 'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',\n\n 'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',\n\n 'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3',\n 'relu3_3', 'conv3_4', 'relu3_4', 'pool3',\n\n 'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3',\n 'relu4_3', 'conv4_4', 'relu4_4', 'pool4',\n\n 'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3',\n 'relu5_3', 'conv5_4', 'relu5_4'\n )\n # pass the caller's reuse flag through in both branches\n return __net(data_path, input_image, layers, reuse=reuse)\n else:\n layers = (\n 'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',\n\n 'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',\n\n 'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3',\n 'relu3_3', 'conv3_4', 'relu3_4', 'pool3',\n\n 'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3',\n 'relu4_3', 'conv4_4', 'relu4_4', 'pool4'\n )\n return __net(data_path, input_image, layers, reuse=reuse)", "def conv_relu_forward(x, w, b, conv_param):\n a, conv_cache = conv_forward_fast(x, w, b, conv_param)\n out, relu_cache = relu_forward(a)\n cache = (conv_cache, relu_cache)\n return out, cache", "def forward(self, x): # pylint: disable=invalid-name\n\n residual = x\n\n out = self.conv1(x)\n out = self.bn1(out)\n\n out = self.relu(out)\n\n out = self.conv2(out)\n out = self.bn2(out)\n\n out = self.relu(out)\n\n out = self.conv3(out)\n out = self.bn3(out)\n\n if self.downsample is not None:\n residual = self.downsample(x)\n out += residual\n\n out = self.relu(out)\n out = self.maxpool(out)\n\n return out", "def forward(self, x, h, u, time, feat_kernels_enc_conv, feat_bias_enc_conv, feat_kernels_enc_fc, feat_bias_enc_fc, feat_kernels_enc_3dgru, feat_bias_enc_3dgru):\n\n conv1a_wt,conv1b_wt,conv2a_wt,conv2b_wt,conv2c_wt,conv3a_wt,conv3b_wt,conv3c_wt,conv4a_wt,conv4b_wt,conv5a_wt,conv5b_wt,conv5c_wt,conv6a_wt,conv6b_wt = feat_kernels_enc_conv\n conv1a_bias,conv1b_bias,conv2a_bias,conv2b_bias,conv2c_bias,conv3a_bias,conv3b_bias,conv3c_bias,conv4a_bias,conv4b_bias,conv5a_bias,conv5b_bias,conv5c_bias,conv6a_bias,conv6b_bias = feat_bias_enc_conv\n t_x_s_update_fc_layer, t_x_s_update_conv3d, t_x_s_reset_fc_layer, t_x_s_reset_conv3d, t_x_rs_fc_layer, t_x_rs_conv3d = feat_kernels_enc_3dgru\n t_x_s_update_bias, t_x_s_reset_bias, t_x_rs_bias = feat_bias_enc_3dgru\n\n conv1a = F.conv2d(x, conv1a_wt, bias=conv1a_bias, padding=3) #self.conv1a(x)\n rect1a = self.leaky_relu(conv1a)\n conv1b = F.conv2d(rect1a, conv1b_wt, bias=conv1b_bias, padding=1) #self.conv1b(rect1a)\n rect1 = self.leaky_relu(conv1b)\n pool1 = self.pool(rect1)\n\n conv2a = F.conv2d(pool1, conv2a_wt, bias=conv2a_bias, padding=1) #self.conv2a(pool1)\n rect2a = self.leaky_relu(conv2a)\n conv2b = F.conv2d(rect2a, conv2b_wt, bias=conv2b_bias, padding=1) #self.conv2b(rect2a)\n rect2 = self.leaky_relu(conv2b)\n conv2c = F.conv2d(pool1, conv2c_wt, bias=conv2c_bias) #self.conv2c(pool1)\n res2 = conv2c + rect2\n pool2 = self.pool(res2)\n\n conv3a = F.conv2d(pool2, conv3a_wt, bias=conv3a_bias, padding=1) #self.conv3a(pool2)\n 
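# NOTE: the rect* tensors form the conv branch of each stage; conv2c/conv3c/conv5c act as shortcut projections (summed into res2/res3/res5), and stage 6 adds pool5 back as an identity skip.\n 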
rect3a = self.leaky_relu(conv3a)\n conv3b = F.conv2d(rect3a, conv3b_wt, bias=conv3b_bias, padding=1) #self.conv3b(rect3a)\n rect3 = self.leaky_relu(conv3b)\n conv3c = F.conv2d(pool2, conv3c_wt, bias=conv3c_bias) #self.conv3c(pool2)\n res3 = conv3c + rect3\n pool3 = self.pool(res3)\n \n conv4a = F.conv2d(pool3, conv4a_wt, bias=conv4a_bias, padding=1) #self.conv4a(pool3)\n rect4a = self.leaky_relu(conv4a)\n conv4b = F.conv2d(rect4a, conv4b_wt, bias=conv4b_bias, padding=1) #self.conv4b(rect4a)\n rect4 = self.leaky_relu(conv4b)\n pool4 = self.pool(rect4)\n \n \n conv5a = F.conv2d(pool4, conv5a_wt, bias=conv5a_bias, padding=1) #self.conv5a(pool4)\n rect5a = self.leaky_relu(conv5a)\n conv5b = F.conv2d(rect5a, conv5b_wt, bias=conv5b_bias, padding=1) #self.conv5b(rect5a)\n rect5 = self.leaky_relu(conv5b)\n conv5c = F.conv2d(pool4, conv5c_wt, bias=conv5c_bias) #self.conv5c(pool4)\n res5 = conv5c + rect5\n pool5 = self.pool(res5)\n \n \n conv6a = F.conv2d(pool5, conv6a_wt, bias=conv6a_bias, padding=1) #self.conv6a(pool5)\n rect6a = self.leaky_relu(conv6a)\n conv6b = F.conv2d(rect6a, conv6b_wt, bias=conv6b_bias, padding=1) #self.conv6b(rect6a)\n rect6 = self.leaky_relu(conv6b)\n res6 = pool5 + rect6\n pool6 = self.pool(res6)\n \n \n pool6 = pool6.view(pool6.size(0), -1)\n \n \n fc7 = F.linear(pool6, feat_kernels_enc_fc[0], bias=feat_bias_enc_fc[0]) #self.fc7(pool6)\n rect7 = self.leaky_relu(fc7)\n \n t_x_s_update = self.t_x_s_update(rect7, h, t_x_s_update_fc_layer, t_x_s_update_conv3d, t_x_s_update_bias)\n t_x_s_reset = self.t_x_s_reset(rect7, h, t_x_s_reset_fc_layer, t_x_s_reset_conv3d, t_x_s_reset_bias)\n \n update_gate = self.sigmoid(t_x_s_update)\n complement_update_gate = 1 - update_gate\n reset_gate = self.sigmoid(t_x_s_reset)\n \n rs = reset_gate * h\n t_x_rs = self.t_x_rs(rect7, rs, t_x_rs_fc_layer, t_x_rs_conv3d, t_x_rs_bias)\n tanh_t_x_rs = self.tanh(t_x_rs)\n \n gru_out = update_gate * h + complement_update_gate * tanh_t_x_rs\n \n return gru_out, update_gate", "def __init__(self, n_input_channels=3, n_conv_output_channels=16, k=3, s=1, pad=1, p = 0.5):\n super(ModelCNN, self).__init__()\n # 1. Convolutional layers\n # Single image is in shape: 3x96x96 (CxHxW, H==W), RGB images\n self.conv1 = nn.Conv2d(in_channels = n_input_channels, out_channels = n_conv_output_channels, kernel_size = k, stride = s, padding = pad)\n self.bn1 = nn.BatchNorm2d(n_conv_output_channels)\n self.conv2 = nn.Conv2d(in_channels = n_conv_output_channels, out_channels = 2*n_conv_output_channels, kernel_size = k, stride = s, padding = pad)\n self.bn2 = nn.BatchNorm2d(2*n_conv_output_channels)\n self.conv3 = nn.Conv2d(in_channels = 2*n_conv_output_channels, out_channels = 4*n_conv_output_channels, kernel_size = k, stride = s, padding = pad)\n self.bn3 = nn.BatchNorm2d(4*n_conv_output_channels)\n self.conv4 = nn.Conv2d(in_channels = 4*n_conv_output_channels, out_channels = 8*n_conv_output_channels, kernel_size = k, stride = s, padding = pad)\n self.bn4 = nn.BatchNorm2d(8*n_conv_output_channels)\n self.pool = nn.MaxPool2d(kernel_size = k - 1, stride = 2*s, padding = pad - pad)\n \n self.dropout = nn.Dropout(p = p)\n \n # 2. 
FC layers to final output\n self.fc1 = nn.Linear(in_features = 288*n_conv_output_channels, out_features = 32*n_conv_output_channels)\n self.fc_bn1 = nn.BatchNorm1d(32*n_conv_output_channels)\n self.fc2 = nn.Linear(in_features = 32*n_conv_output_channels, out_features = 16*n_conv_output_channels)\n self.fc_bn2 = nn.BatchNorm1d(16*n_conv_output_channels)\n self.fc3 = nn.Linear(in_features = 16*n_conv_output_channels, out_features = 8*n_conv_output_channels)\n self.fc_bn3 = nn.BatchNorm1d(8*n_conv_output_channels)\n self.fc4 = nn.Linear(in_features = 8*n_conv_output_channels, out_features = 1)", "def convolutional_layers():\n x = tf.placeholder(tf.float32, [None, None, None])\n\n # First layer\n W_conv1 = weight_variable([5, 5, 1, 48])\n b_conv1 = bias_variable([48])\n x_expanded = tf.expand_dims(x, 3)\n h_conv1 = tf.nn.relu(conv2d(x_expanded, W_conv1) + b_conv1)\n h_pool1 = max_pool(h_conv1, ksize=(2, 2), stride=(2, 2))\n\n # Second layer\n W_conv2 = weight_variable([5, 5, 48, 64])\n b_conv2 = bias_variable([64])\n\n h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)\n h_pool2 = max_pool(h_conv2, ksize=(2, 1), stride=(2, 1))\n\n # Third layer\n W_conv3 = weight_variable([5, 5, 64, 128])\n b_conv3 = bias_variable([128])\n\n h_conv3 = tf.nn.relu(conv2d(h_pool2, W_conv3) + b_conv3)\n h_pool3 = max_pool(h_conv3, ksize=(2, 2), stride=(2, 2))\n\n return x, h_pool3, [W_conv1, b_conv1,\n W_conv2, b_conv2,\n W_conv3, b_conv3]", "def get_unet0(nClasses, input_height, input_width, nchannel=3):\n\n inputs = Input(shape=(input_height, input_width, nchannel))\n # temp = BatchNormalization()(inputs)\n\n conv1 = Conv2D(32, (3, 3), padding='same', kernel_initializer='he_uniform')(inputs)\n conv1 = BatchNormalization()(conv1)\n conv1 = Activation(\"relu\")(conv1)\n conv1 = Conv2D(32, (3, 3), padding='same', kernel_initializer='he_uniform')(conv1)\n conv1 = BatchNormalization()(conv1)\n conv1 = Activation(\"relu\")(conv1)\n pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)\n\n conv2 = Conv2D(64, (3, 3), padding='same', kernel_initializer='he_uniform')(pool1)\n conv2 = BatchNormalization()(conv2)\n conv2 = Activation(\"relu\")(conv2)\n conv2 = Conv2D(64, (3, 3), padding='same', kernel_initializer='he_uniform')(conv2)\n conv2 = BatchNormalization()(conv2)\n conv2 = Activation(\"relu\")(conv2)\n pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)\n\n conv3 = Conv2D(128, (3, 3), padding='same', kernel_initializer='he_uniform')(pool2)\n conv3 = BatchNormalization()(conv3)\n conv3 = Activation(\"relu\")(conv3)\n conv3 = Conv2D(128, (3, 3), padding='same', kernel_initializer='he_uniform')(conv3)\n conv3 = BatchNormalization()(conv3)\n conv3 = Activation(\"relu\")(conv3)\n pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)\n\n conv4 = Conv2D(256, (3, 3), padding='same', kernel_initializer='he_uniform')(pool3)\n conv4 = BatchNormalization()(conv4)\n conv4 = Activation(\"relu\")(conv4)\n conv4 = Conv2D(256, (3, 3), padding='same', kernel_initializer='he_uniform')(conv4)\n conv4 = BatchNormalization()(conv4)\n conv4 = Activation(\"relu\")(conv4)\n pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)\n\n conv5 = Conv2D(512, (3, 3), padding='same', kernel_initializer='he_uniform')(pool4)\n conv5 = BatchNormalization()(conv5)\n conv5 = Activation(\"relu\")(conv5)\n conv5 = Conv2D(512, (3, 3), padding='same', kernel_initializer='he_uniform')(conv5)\n conv5 = BatchNormalization()(conv5)\n conv5 = Activation(\"relu\")(conv5)\n\n up6 = concatenate([UpSampling2D(size=(2, 2))(conv5), conv4], axis=3)\n conv6 = Conv2D(256, (3, 3), padding='same', 
kernel_initializer='he_uniform')(up6)\n conv6 = BatchNormalization()(conv6)\n conv6 = Activation(\"relu\")(conv6)\n conv6 = Conv2D(256, (3, 3), padding='same', kernel_initializer='he_uniform')(conv6)\n conv6 = BatchNormalization()(conv6)\n conv6 = Activation(\"relu\")(conv6)\n\n up7 = concatenate([UpSampling2D(size=(2, 2))(conv6), conv3], axis=3)\n conv7 = Conv2D(128, (3, 3), padding='same', kernel_initializer='he_uniform')(up7)\n conv7 = BatchNormalization()(conv7)\n conv7 = Activation(\"relu\")(conv7)\n conv7 = Conv2D(128, (3, 3), padding='same', kernel_initializer='he_uniform')(conv7)\n conv7 = BatchNormalization()(conv7)\n conv7 = Activation(\"relu\")(conv7)\n\n up8 = concatenate([UpSampling2D(size=(2, 2))(conv7), conv2], axis=3)\n conv8 = Conv2D(64, (3, 3), padding='same', kernel_initializer='he_uniform')(up8)\n conv8 = BatchNormalization()(conv8)\n conv8 = Activation(\"relu\")(conv8)\n conv8 = Conv2D(64, (3, 3), padding='same', kernel_initializer='he_uniform')(conv8)\n conv8 = BatchNormalization()(conv8)\n conv8 = Activation(\"relu\")(conv8)\n\n up9 = concatenate([UpSampling2D(size=(2, 2))(conv8), conv1], axis=3)\n conv9 = Conv2D(32, (3, 3), padding='same', kernel_initializer='he_uniform')(up9)\n conv9 = BatchNormalization()(conv9)\n conv9 = Activation(\"relu\")(conv9)\n conv9 = Conv2D(32, (3, 3), padding='same', kernel_initializer='he_uniform')(conv9)\n conv9 = BatchNormalization()(conv9)\n conv9 = Activation(\"relu\")(conv9)\n\n conv10 = Conv2D(nClasses, (1, 1), activation='relu', padding='same')(conv9)\n conv10 = Reshape((input_height * input_width, nClasses))(conv10)\n # conv10 = Permute((2, 1))(conv10)\n\n conv11 = Activation('softmax')(conv10)\n\n\n model = Model(inputs=inputs, outputs=conv11)\n\n return model", "def __init__(self, Nchannels, Nsamples, output_units):\n super(conv2DNet_1, self).__init__()\n # Layer 1\n l1_channels = 16 \n self.conv1 = nn.Conv2d(1, l1_channels, (Nchannels, 1), padding = 0)\n self.batchnorm1 = nn.BatchNorm2d(l1_channels, False) # final size bsize x 1 x l1_channels x Nsamples\n\n # Layer 2\n l2_channels = 4\n l2_temp_window = 32\n l2_l1channel_overlap = 2\n self.padding1 = nn.ZeroPad2d((l2_temp_window // 2, l2_temp_window // 2 - 1, l2_l1channel_overlap//2-1, l2_l1channel_overlap//2)) # left, right, top, bottom\n self.conv2 = nn.Conv2d(1, l2_channels, (l2_l1channel_overlap, l2_temp_window)) # does not change size if combined with above padding\n self.batchnorm2 = nn.BatchNorm2d(l2_channels, False)\n self.pooling2 = nn.MaxPool2d((2, 4)) # final size bsize x l2_channels x floor(l1_channels/2) x floor(Nsamples/4)\n\n # Layer 3\n l3_channels = 4\n l3_temp_window = 4\n l3_l2channel_overlap = 8\n self.padding2 = nn.ZeroPad2d((l3_temp_window//2, l3_temp_window//2-1, l3_l2channel_overlap//2, l3_l2channel_overlap//2-1))\n self.conv3 = nn.Conv2d(l2_channels, l3_channels, (l3_l2channel_overlap, l3_temp_window))\n self.batchnorm3 = nn.BatchNorm2d(l3_channels, False)\n self.pooling3 = nn.MaxPool2d((2, 4)) # final size bsize x l3_channels x floor(l1_channels/4) x floor(Nsamples/16)\n\n # FC Layer\n fc_inputs = l3_channels * (l1_channels//4) * (Nsamples//16)\n self.fc1 = nn.Linear(fc_inputs, output_units)", "def __init__(self):\n super(CNN, self).__init__()\n\n self.conv0 = nn.Conv2d(3, 3, kernel_size=5, stride=1, padding=0)\n nn.init.xavier_uniform_(self.conv0.weight)\n\n self.conv1 = nn.Conv2d(3, 30, kernel_size=5, stride=2, padding=0)\n self.conv1.weight = nn.Parameter(get_filters())\n\n self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2, padding=0)\n\n 
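# NOTE: conv1's weights come from get_filters() rather than a random initializer; the 3x3 convs below all use Xavier initialization.\n 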
self.conv2 = nn.Conv2d(30, 16, kernel_size=3, stride=1, padding=0)\n nn.init.xavier_uniform_(self.conv2.weight)\n\n self.conv3 = nn.Conv2d(16, 16, kernel_size=3, stride=1, padding=0)\n nn.init.xavier_uniform_(self.conv3.weight)\n\n self.conv4 = nn.Conv2d(16, 16, kernel_size=3, stride=1, padding=0)\n nn.init.xavier_uniform_(self.conv4.weight)\n\n self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2, padding=0)\n\n self.conv5 = nn.Conv2d(16, 16, kernel_size=3, stride=1, padding=0)\n nn.init.xavier_uniform_(self.conv5.weight)\n\n self.conv6 = nn.Conv2d(16, 16, kernel_size=3, stride=1, padding=0)\n nn.init.xavier_uniform_(self.conv6.weight)\n\n self.conv7 = nn.Conv2d(16, 16, kernel_size=3, stride=1, padding=0)\n nn.init.xavier_uniform_(self.conv7.weight)\n\n self.conv8 = nn.Conv2d(16, 16, kernel_size=3, stride=1, padding=0)\n nn.init.xavier_uniform_(self.conv8.weight)\n\n self.fc = nn.Linear(16 * 5 * 5, 2)\n\n self.drop1 = nn.Dropout(p=0.5) # used only for the NC dataset", "def __init__(self):\n super(DLStudio.ExperimentsWithCIFAR.Net2, self).__init__()\n self.relu = nn.ReLU()\n strides = []\n patch_size = 2\n ## conv1:\n out_ch, ker_size, conv_stride, pool_stride = 128,5,1,2\n self.conv1 = nn.Conv2d(3, out_ch, (ker_size,ker_size), padding=(ker_size-1)//2) \n self.pool1 = nn.MaxPool2d(patch_size, pool_stride) \n strides += (conv_stride, pool_stride)\n ## conv2:\n in_ch = out_ch\n out_ch, ker_size, conv_stride, pool_stride = 128,3,1,2\n self.conv2 = nn.Conv2d(in_ch, out_ch, ker_size, padding=(ker_size-1)//2)\n self.pool2 = nn.MaxPool2d(patch_size, pool_stride) \n strides += (conv_stride, pool_stride)\n ## conv3: \n ## meant for repeated invocation, must have same in_ch, out_ch and strides of 1\n in_ch = out_ch\n out_ch, ker_size, conv_stride, pool_stride = in_ch,2,1,1\n self.conv3 = nn.Conv2d(in_ch, out_ch, ker_size, padding=1)\n self.pool3 = nn.MaxPool2d(patch_size, pool_stride) \n# strides += (conv_stride, pool_stride)\n ## figure out the number of nodes needed for entry into fc:\n in_size_for_fc = out_ch * (32 // np.prod(strides)) ** 2 ## (A)\n self.in_size_for_fc = in_size_for_fc\n self.fc1 = nn.Linear(in_size_for_fc, 150)\n self.fc2 = nn.Linear(150, 100)\n self.fc3 = nn.Linear(100, 10)", "def __create_wide_residual_network(nb_classes, img_input, include_top,\n depth=28, width=8, dropout=0.0):\n N = (depth - 4) // 6\n\n x = __conv1_block(img_input)\n nb_conv = 4\n\n for i in range(N):\n x = __conv2_block(x, width, dropout)\n nb_conv += 2\n\n x = MaxPooling2D((2, 2))(x)\n\n for i in range(N):\n x = __conv3_block(x, width, dropout)\n nb_conv += 2\n\n x = MaxPooling2D((2, 2))(x)\n\n for i in range(N):\n x = ___conv4_block(x, width, dropout)\n nb_conv += 2\n\n x = AveragePooling2D((8, 8))(x)\n\n if include_top:\n x = Flatten()(x)\n x = Dense(nb_classes, activation='softmax')(x)\n\n return x", "def SmallResNet(n = 9, filters = [16, 32, 64],\n include_top=True, weights=None,\n input_tensor=None, input_shape=None,\n pooling='avg', regularizer=regularizers.l2(0.0002), activation = 'relu',\n top_activation='softmax',\n conv_shortcut=False, bn=True,\n classes=100, name=None):\n \n # Determine proper input shape\n if input_shape is None:\n if K.image_data_format() == 'channels_first':\n input_shape = (3, 32, 32) if include_top and pooling is None else (3, None, None)\n else:\n input_shape = (32, 32, 3) if include_top and pooling is None else (None, None, 3)\n\n # Build network\n if input_tensor is None:\n img_input = Input(shape=input_shape)\n else:\n if not K.is_keras_tensor(input_tensor):\n img_input = 
Input(tensor=input_tensor, shape=input_shape)\n else:\n img_input = input_tensor\n if K.image_data_format() == 'channels_last':\n bn_axis = 3\n else:\n bn_axis = 1\n\n x = Conv2D(filters[0], (3, 3), padding='same', name='conv0', kernel_regularizer = regularizer)(img_input)\n if bn:\n x = BatchNormalization(axis=bn_axis, name='bn0')(x)\n x = Activation(activation)(x)\n\n x = unit(x, [filters[0], filters[0]], n, '1-', kernel_size = 3, stride = 1, regularizer=regularizer, activation=activation, conv_shortcut=conv_shortcut, bn=bn)\n for i in range(1, len(filters)):\n x = unit(x, [filters[i-1], filters[i]], n, str(i+1)+'-', kernel_size = 3, stride = 2, regularizer=regularizer, activation=activation, conv_shortcut=conv_shortcut, bn=bn)\n\n if pooling == 'avg':\n x = GlobalAveragePooling2D(name='avg_pool')(x)\n elif pooling == 'max':\n x = GlobalMaxPooling2D(name='max_pool')(x)\n\n if include_top:\n x = Dense(classes, activation=top_activation, name = 'embedding' if top_activation is None else 'prob', kernel_regularizer = regularizer)(x)\n\n # Ensure that the model takes into account\n # any potential predecessors of `input_tensor`.\n if input_tensor is not None:\n inputs = get_source_inputs(input_tensor)\n else:\n inputs = img_input\n # Create model.\n model = Model(inputs, x, name='cifar-resnet{}'.format(2*len(filters)*n) if name is None else name)\n\n # load weights\n if weights is not None:\n model.load_weights(weights)\n\n if K.image_data_format() == 'channels_first' and K.backend() == 'tensorflow':\n warnings.warn('You are using the TensorFlow backend, yet you '\n 'are using the Theano '\n 'image data format convention '\n '(`image_data_format=\"channels_first\"`). '\n 'For best performance, set '\n '`image_data_format=\"channels_last\"` in '\n 'your Keras config '\n 'at ~/.keras/keras.json.')\n return model", "def cifar10_5layers(input_image, keep_prob, init_method=tf.truncated_normal_initializer(stddev=1e-2)):\n with tf.variable_scope(\"conv1\"):\n W1 = tf.get_variable(name=\"W1\", shape=[5,5,3,32], dtype=tf.float32, \\\n initializer=init_method)\n b1 = tf.get_variable(name=\"b1\", shape=[32], dtype=tf.float32, \\\n initializer=tf.constant_initializer(0.01))\n conv1 = conv_pool_relu(input_image, W1, b1)\n with tf.variable_scope(\"conv2\"):\n W2 = tf.get_variable(name=\"W2\", shape=[5,5,32,64], dtype=tf.float32, \\\n initializer=init_method)\n b2 = tf.get_variable(name=\"b2\", shape=[64], dtype=tf.float32, \\\n initializer=tf.constant_initializer(0.01))\n conv2 = conv_pool_relu(conv1, W2, b2)\n conv2 = tf.nn.dropout(conv2, keep_prob)\n with tf.variable_scope(\"conv3\"):\n W3 = tf.get_variable(name=\"W3\", shape=[5,5,64,128], dtype=tf.float32, \\\n initializer=init_method)\n b3 = tf.get_variable(name=\"b3\", shape=[128], dtype=tf.float32, \\\n initializer=tf.constant_initializer(0.01))\n conv3 = conv_pool_relu(conv2, W3, b3)\n conv3 = tf.nn.dropout(conv3, keep_prob)\n with tf.variable_scope(\"fc1\"):\n W4 = tf.get_variable(name=\"W4\", shape=[4*4*128,256], dtype=tf.float32, \\\n initializer=init_method)\n b4 = tf.get_variable(name=\"b4\", shape=[256], dtype=tf.float32, \\\n initializer=tf.constant_initializer(0.01))\n conv3_flat = tf.reshape(conv3, [-1, 4*4*128])\n fc1 = fc_relu(conv3_flat, W4, b4)\n fc1 = tf.nn.dropout(fc1, keep_prob)\n with tf.variable_scope(\"output\"):\n W5 = tf.get_variable(name=\"W5\", shape=[256,10], dtype=tf.float32, \\\n initializer=init_method)\n b5 = tf.get_variable(name=\"b5\", shape=[10], dtype=tf.float32, \\\n initializer=tf.constant_initializer(0.01))\n y_logit = tf.matmul(fc1, W5) + b5\n return y_logit, tf.nn.softmax(y_logit, name=\"softmax\")", "def forward(self, x, pool_size=(2, 2), pool_type=\"avg\"):\n\n x = F.relu_(self.norm1(self.conv1(x)))\n x = F.relu_(self.norm2(self.conv2(x)))\n if pool_type == \"max\":\n x = F.max_pool2d(x, kernel_size=pool_size)\n elif pool_type == \"avg\":\n x = F.avg_pool2d(x, kernel_size=pool_size)\n elif pool_type == \"avg+max\":\n x1 = F.avg_pool2d(x, kernel_size=pool_size)\n x2 = F.max_pool2d(x, kernel_size=pool_size)\n x = x1 + x2\n else:\n raise Exception(\"Incorrect pooling type!\")\n return x", "def ResBlock(input_tensor, filters):\n conv_1 = Conv2D(filters = filters, kernel_size = 3, padding = 'same', kernel_initializer = 'he_normal')\n conv_1a = conv_1(input_tensor) # Shared-weights conv layer\n batch_1 = BatchNormalization()(conv_1a)\n relu_1 = Activation(\"relu\")(batch_1)\n drop_1 = Dropout(drop)(relu_1)\n conv_1b = conv_1(drop_1) # Shared-weights conv layer\n batch_1 = BatchNormalization()(conv_1b)\n return batch_1", "def ResNet20(inputShape):\n inputs = Input(shape=inputShape)\n x = resLayer(inputs) # resLayer1\n\n # resBlocks\n for nStage in range(3):\n for nBlock in range(3):\n x = resBlock(x, nStage, nBlock)\n\n x = AveragePooling2D(pool_size=8)(x)\n y = Flatten()(x)\n outputs = Dense(10, activation='softmax',\n kernel_initializer='he_normal')(y)\n\n # Generate model\n model = Model(inputs=inputs, outputs=outputs)\n return model", "def conv_bn_relu(X,channels,stride,kernel_shape,padding,activation = 'sigmoid',initialize_weights = True,batchnorm = True,*args): # Refactoring conv_batchnorm_relu as one layer\n if initialize_weights: # no of channels in kernel must be the same as that in the input image\n output_1,weights,X_im2col,im,output_shape,bias = conv2D(0,channels,X,stride,kernel_shape,padding,initialize_weights = True,*args) # conv\n else:\n # keep the output_1 name so the batchnorm/activation code below works on this path too\n output_1,X_im2col = conv2D(0,channels,X,stride,kernel_shape,padding,initialize_weights = False,*args) # conv\n if batchnorm:\n output_1 = BatchNorm(output_1)\n if activation == 'sigmoid':\n X_1 = sigmoid(output_1)\n elif activation == 'relu':\n X_1 = relu(output_1)\n else:\n X_1 = output_1\n if initialize_weights:\n weights.requires_grad = False\n return torch.nn.Parameter(X_1),torch.nn.Parameter(weights),torch.nn.Parameter(X_im2col),im,output_shape,bias,activation\n else:\n return X_1,X_im2col", "def build_resnet(self):\r\n\r\n # INPUTS\r\n inputs_data = Input((self.data_rows, self.data_cols, 1),name='inputs_data')\r\n\r\n def residual_block(input, output_channels=64, kernel_size=(3, 3), stride=(1, 1)):\r\n x = Conv2D(output_channels, kernel_size, padding='same', strides=stride)(input)\r\n x = BatchNormalization()(x)\r\n x = Activation('relu')(x)\r\n\r\n x = Conv2D(output_channels, kernel_size, padding='same', strides=stride)(x)\r\n x = BatchNormalization()(x)\r\n x = Activation('relu')(x)\r\n\r\n x = Add()([x, input])\r\n\r\n residual_block.counter += 1\r\n return x\r\n\r\n residual_block.counter = 0\r\n\r\n conv1 = Conv2D(64,(3,3),strides=(1,1),padding='same',activation='relu')(inputs_data)\r\n res_block1 = residual_block(conv1, output_channels=64)\r\n res_block2 = residual_block(res_block1, output_channels=64)\r\n res_block3 = residual_block(res_block2, output_channels=64)\r\n conv2 = Conv2D(1,(3,3),strides=(1,1),padding='same')(res_block3)\r\n outputs = Add()([conv2, inputs_data])\r\n\r\n model = Model(inputs=inputs_data, outputs=outputs)\r\n\r\n return model", "def 
__init__(self, conv_features_sizes, linear_layer_sizes, connector_shape):\n super().__init__()\n \n self.conv = nn.Sequential()\n self.mlp = nn.Sequential()\n self.flat = nn.Flatten()\n\n self.conv.add_module(name=f\"e-fconv{0}\", module=_conv2d_block(1, conv_features_sizes[0], kernel_size=3, padding=1))\n self.conv.add_module(name=f\"e-max{0}\", module=nn.MaxPool2d(2, 2))\n for i, (in_size, out_size) in enumerate(zip(conv_features_sizes[:-1], conv_features_sizes[1:]), 1):\n self.conv.add_module(name=f\"e-fconv{i}\", module=_conv2d_block(in_size, out_size, kernel_size=3, padding=1))\n self.conv.add_module(name=f\"e-max{i}\", module=nn.MaxPool2d(2, 2))\n\n mlp_input_shape = int(reduce((lambda x,y: x * y), connector_shape))\n self.mlp.add_module(name=f\"e-linear{0}\", module=nn.Linear(mlp_input_shape, linear_layer_sizes[0]))\n self.mlp.add_module(name=f\"e-batchnorm{0}\", module=nn.BatchNorm1d(linear_layer_sizes[0]))\n self.mlp.add_module(name=f\"e-relu{0}\", module=nn.ReLU())\n for i, (in_size, out_size) in enumerate(zip(linear_layer_sizes[:-1], linear_layer_sizes[1:]), 1):\n self.mlp.add_module(name=f\"e-linear{i}\", module=nn.Linear(in_size, out_size))\n self.mlp.add_module(name=f\"e-batchnorm{i}\", module=nn.BatchNorm1d(out_size))\n self.mlp.add_module(name=f\"e-relu{i}\", module=nn.ReLU())", "def forward(self, x):\n\n residual = x\n\n out = self.conv1(x)\n out = self.bn1(out)\n out = self.relu(out)\n\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu(out)\n\n out = self.conv3(out)\n out = self.bn3(out)\n\n if self.downsample is not None:\n residual = self.downsample(x)\n\n out += residual\n out = self.relu(out)\n\n return out", "def resnet50():\n\n X = K.Input(shape=(224, 224, 3))\n init = K.initializers.he_normal(seed=None)\n\n conv1 = K.layers.Conv2D(\n filters=64,\n kernel_size=(\n 7,\n 7),\n padding='same',\n strides=2,\n kernel_initializer=init)(X)\n\n bn1 = K.layers.BatchNormalization(axis=3)(conv1)\n\n activation1 = K.layers.Activation('relu')(bn1)\n\n maxpool1 = K.layers.MaxPooling2D(\n pool_size=(\n 3, 3), strides=(\n 2, 2), padding='same',)(activation1)\n\n Projection1 = projection_block(maxpool1, [64, 64, 256], s=1)\n IdenBlock1 = identity_block(Projection1, [64, 64, 256])\n IdenBlock2 = identity_block(IdenBlock1, [64, 64, 256])\n\n Projection2 = projection_block(IdenBlock2, [128, 128, 512])\n IdenBlock3 = identity_block(Projection2, [128, 128, 512])\n IdenBlock4 = identity_block(IdenBlock3, [128, 128, 512])\n IdenBlock5 = identity_block(IdenBlock4, [128, 128, 512])\n\n Projection3 = projection_block(IdenBlock5, [256, 256, 1024])\n IdenBlock6 = identity_block(Projection3, [256, 256, 1024])\n IdenBlock7 = identity_block(IdenBlock6, [256, 256, 1024])\n IdenBlock8 = identity_block(IdenBlock7, [256, 256, 1024])\n IdenBlock9 = identity_block(IdenBlock8, [256, 256, 1024])\n IdenBlock10 = identity_block(IdenBlock9, [256, 256, 1024])\n\n Projection4 = projection_block(IdenBlock10, [512, 512, 2048])\n IdenBlock11 = identity_block(Projection4, [512, 512, 2048])\n IdenBlock12 = identity_block(IdenBlock11, [512, 512, 2048])\n\n avgpool = K.layers.AveragePooling2D(\n pool_size=(\n 1, 1), strides=(\n 7, 7), padding='same',)(IdenBlock12)\n\n SoftMax = K.layers.Dense(\n units=1000,\n kernel_initializer=init,\n activation='softmax',\n )(avgpool)\n\n Keras = K.Model(inputs=X, outputs=SoftMax)\n\n return Keras", "def __init__(self, dropout=0, input_dim=(3, 32, 32), num_filters=32, filter_size=7,\r\n hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0, \r\n 
use_batch_norm=False, dtype=np.float32):\r\n self.use_dropout = dropout > 0\r\n self.use_batch_norm = use_batch_norm\r\n self.params = {}\r\n self.reg = reg\r\n self.num_layers = 3\r\n self.dtype = dtype\r\n self.pool_height = 2\r\n self.pool_width = 2\r\n self.pool_stride = 2\r\n\r\n ############################################################################\r\n # TODO: Initialize weights and biases for the three-layer convolutional #\r\n # network. Weights should be initialized from a Gaussian with standard #\r\n # deviation equal to weight_scale; biases should be initialized to zero. #\r\n # All weights and biases should be stored in the dictionary self.params. #\r\n # Store weights and biases for the convolutional layer using the keys 'W1' #\r\n # and 'b1'; use keys 'W2' and 'b2' for the weights and biases of the #\r\n # hidden affine layer, and keys 'W3' and 'b3' for the weights and biases #\r\n # of the output affine layer. #\r\n ############################################################################\r\n # Number of channels\r\n C, H, W = input_dim\r\n self.params['W1'] = np.random.randn(num_filters, C, filter_size, filter_size) * weight_scale\r\n self.params['b1'] = np.zeros(num_filters)\r\n # integer division so the pooled spatial dims stay ints\r\n H_pool = (H - self.pool_height) // self.pool_stride + 1\r\n W_pool = (W - self.pool_width) // self.pool_stride + 1\r\n self.params['W2'] = np.random.randn(np.prod((num_filters, H_pool, W_pool)), hidden_dim) * weight_scale\r\n self.params['b2'] = np.zeros(hidden_dim)\r\n self.params['W3'] = np.random.randn(hidden_dim, num_classes) * weight_scale\r\n self.params['b3'] = np.zeros(num_classes)\r\n\r\n # Initialize the parameters for batch normalization if necessary\r\n if self.use_batch_norm:\r\n self.params['gamma1'] = np.ones(num_filters)\r\n self.params['beta1'] = np.zeros(num_filters)\r\n self.params['gamma2'] = np.ones(hidden_dim)\r\n self.params['beta2'] = np.zeros(hidden_dim)\r\n\r\n # Set dropout parameters if necessary\r\n self.dropout_param = {}\r\n if self.use_dropout:\r\n self.dropout_param = {'mode': 'train', 'p': dropout}\r\n\r\n self.bn_params = []\r\n if self.use_batch_norm:\r\n self.bn_params = [{'mode': 'train'} for i in range(self.num_layers - 1)]\r\n\r\n ############################################################################\r\n # END OF YOUR CODE #\r\n ############################################################################\r\n\r\n for k, v in self.params.items():\r\n self.params[k] = v.astype(dtype)", "def _init_layers(self):\n self.relu = nn.ReLU(inplace=True)\n self.rpn_convs = nn.ModuleList()\n for i in range(self.stacked_convs):\n chn = self.in_channels if i == 0 else self.feat_channels\n self.rpn_convs.append(\n ConvModule(\n chn,\n self.feat_channels,\n 3,\n stride=1,\n padding=1,\n conv_cfg=self.conv_cfg,\n norm_cfg=self.norm_cfg))\n self.rpn_cls = nn.Conv2d(\n self.feat_channels, self.num_anchors * 1, 3, padding=1)\n self.rpn_reg = nn.Conv2d(\n self.feat_channels, self.num_anchors * 4 * (self.reg_max + 1), 3, padding=1)\n self.rpn_iou = nn.Conv2d(\n self.feat_channels, self.num_anchors * 1, 3, padding=1)\n self.scales = nn.ModuleList(\n [Scale(1.0) for _ in self.anchor_generator.strides])\n\n ##############V2################\n conf_vector = [nn.Conv2d(self.num_anchors * 4 * self.total_dim, self.num_anchors * self.reg_channels, 1)]\n conf_vector += [self.relu]\n conf_vector += [nn.Conv2d(self.num_anchors * self.reg_channels, self.num_anchors, 1), nn.Sigmoid()]\n\n self.reg_conf = nn.Sequential(*conf_vector)\n ##############V2################", "def res_unet_core(vol_size, enc_nf, dec_nf, 
full_size=True, src=None, tgt=None, src_feats=1, tgt_feats=1):\n ndims = len(vol_size)\n assert ndims in [1, 2, 3], \"ndims should be one of 1, 2, or 3. found: %d\" % ndims\n upsample_layer = getattr(KL, 'UpSampling%dD' % ndims)\n# MaxPool_layer = getattr(KL, 'MaxPooling%dD' % ndims)\n# pool_size = tuple([2]*ndims)\n\n # inputs\n if src is None:\n src = Input(shape=[*vol_size, src_feats])\n if tgt is None:\n tgt = Input(shape=[*vol_size, tgt_feats])\n x_in = concatenate([src, tgt])\n \n\n # down-sample path (encoder)\n x_enc = [x_in]\n for i in range(len(enc_nf)):\n convi = res_conv_block(x_enc[-1], enc_nf[i], strides=2, activation=False)\n convi = residual_block(convi, enc_nf[i],batch_activate=False)\n convi = residual_block(convi, enc_nf[i],batch_activate=True)\n x_enc.append(convi)\n# pooli = MaxPool_layer(pool_size)(convi)\n# pooli = Dropout(DropoutRatio/2)(pooli)\n# x_enc.append(pooli)\n\n # up-sample path (decoder)\n# x = conv_block(x_enc[-1], dec_nf[0])\n x = res_conv_block(x_enc[-1], dec_nf[0], activation=False)\n x = residual_block(x,dec_nf[0])\n x = residual_block(x,dec_nf[0], True)\n x = upsample_layer()(x)\n# x = Conv3DTranspose(dec_nf[0], kernel_size=3, strides=2, padding=\"same\")(x)\n x = concatenate([x, x_enc[-2]])\n\t\n# x = conv_block(x, dec_nf[1])\n x = res_conv_block(x,dec_nf[1], activation=False)\n x = residual_block(x,dec_nf[1])\n x = residual_block(x,dec_nf[1], True)\n x = upsample_layer()(x)\n x = concatenate([x, x_enc[-3]])\n# x = conv_block(x, dec_nf[2])\n x = res_conv_block(x,dec_nf[2], activation=False)\n x = residual_block(x,dec_nf[2])\n x = residual_block(x,dec_nf[2], True)\n\t\n x = upsample_layer()(x)\n x = concatenate([x, x_enc[-4]])\n# x = conv_block(x, dec_nf[3])\n x = res_conv_block(x,dec_nf[3], activation=False)\n x = residual_block(x,dec_nf[3])\n x = residual_block(x,dec_nf[3], True)\n\t\n x = conv_block(x, dec_nf[4])\n# x = res_conv_block(x,dec_nf[4], activation=False)\n# x = residual_block(x,dec_nf[4])\n# x = residual_block(x,dec_nf[4], True)\n \n # only upsampleto full dim if full_size\n # here we explore architectures where we essentially work with flow fields \n # that are 1/2 size \n if full_size:\n x = upsample_layer()(x)\n x = concatenate([x, x_enc[0]])\n x = conv_block(x, dec_nf[5])\n\n # optional convolution at output resolution (used in voxelmorph-2)\n if len(dec_nf) == 7:\n x = conv_block(x, dec_nf[6])\n\n return Model(inputs=[src, tgt], outputs=[x])", "def single_residual():\n\n inputs = tf.keras.Input(shape=(16, 16, 3,))\n x = tf.keras.layers.Conv2D(16, (3, 3))(inputs)\n x = tf.keras.layers.BatchNormalization(momentum=.3, epsilon=.65)(x)\n x = tf.nn.relu(x)\n x = tf.keras.layers.MaxPool2D()(x)\n residual = x\n residual = tf.keras.layers.Conv2D(8, (1, 1))(residual)\n residual = tf.nn.relu(residual)\n\n x = tf.keras.layers.Conv2D(8, (1, 1))(x)\n x = tf.keras.layers.Conv2D(8, (1, 1))(x)\n x = tf.keras.layers.BatchNormalization(momentum=.4, epsilon=.25)(x)\n x = tf.add(x, residual)\n x = tf.nn.relu(x)\n\n x = tf.keras.layers.Conv2D(8, (3, 3))(x)\n x = tf.keras.layers.AvgPool2D()(x)\n x = tf.keras.layers.Flatten()(x)\n outputs = tf.keras.layers.Dense(2, activation=tf.nn.softmax, name=\"single_residual\")(x)\n\n return outputs", "def sg_resnet_101(x, opt):\n opt += tf.sg_opt(num_class=1000, conv_only=False, squeeze=True)\n\n # convolution layers ( residual net v2 arch )\n conv = (x\n .sg_conv(dim=64, size=7, stride=2)\n .sg_pool(size=3, stride=2, pad='SAME')\n .sg_resnet_layer(dim=64, num=3, stride=1)\n .sg_resnet_layer(dim=128, num=4, 
stride=2)\n .sg_resnet_layer(dim=256, num=23, stride=2)\n .sg_resnet_layer(dim=512, num=3, stride=2)\n .sg_bypass(act='relu', bn=True)\n .sg_pool(size=7, stride=1, avg=True)) # global average pool\n\n # fully convolution layers\n fc = (conv\n .sg_conv(dim=opt.num_class, size=1, act='linear', bn=False))\n\n if opt.conv_only:\n return conv\n else:\n if opt.squeeze:\n return fc.sg_squeeze(dim=(1, 2))\n else:\n return fc", "def _residual(self, x, in_filter, out_filter, stride, activate_before_residual=False):\n if activate_before_residual: \n with tf.variable_scope('shared_activation'):\n x = self._batch_norm('init_bn', x)\n x = self._relu(x, self.hps.relu_leakiness)\n orig_x = x\n else:\n with tf.variable_scope('residual_only_activation'):\n orig_x = x\n x = self._batch_norm('init_bn', x)\n x = self._relu(x, self.hps.relu_leakiness)\n\n with tf.variable_scope('sub1'):\n x = self._conv('conv1', x, 3, in_filter, out_filter, stride)\n\n with tf.variable_scope('sub2'):\n x = self._batch_norm('bn2', x)\n x = self._relu(x, self.hps.relu_leakiness)\n x = self._conv('conv2', x, 3, out_filter, out_filter, [1, 1, 1, 1])\n\n with tf.variable_scope('sub_add'):\n if in_filter != out_filter:\n orig_x = tf.nn.avg_pool(orig_x, stride, stride, 'VALID')\n orig_x = tf.pad(\n orig_x, [[0, 0], [0, 0], [0, 0],\n [(out_filter-in_filter)//2, (out_filter-in_filter)//2]])\n x += orig_x\n\n tf.logging.debug('image after unit %s', x.get_shape())\n return x", "def convnet_layers( inputs, widths, mode ):\n\n training = (mode == \"train\")\n \n with tf.variable_scope( \"convnet\" ): # h,w\n \n #print(inputs.shape)\n x = conv_layer( inputs, layer_params[0], training ) \n #print(x.shape)\n x = conv_layer( x, layer_params[1], training ) \n #print(x.shape)\n x = pool_layer( x, 2, 'valid', 'pool2' )\n #print(x.shape)\n x = conv_layer( x, layer_params[2], training ) \n x = conv_layer( x, layer_params[3], training )\n #print(x.shape)\n x = pool_layer( x, 2, 'valid', 'pool4' )\n #print(x.shape)\n x = conv_layer( x, layer_params[4], training ) \n x = conv_layer( x, layer_params[5], training )\n #print(x.shape)\n x = pool_layer( x, 2, 'valid', 'pool6') \n #print(x.shape)\n x = conv_layer( x, layer_params[6], training ) \n x = conv_layer( x, layer_params[7], training )\n \n x = tf.layers.max_pooling2d( x, [2, 1], [2, 1], \n padding='valid', \n name='pool8' ) \n\n #print(x.shape)\n\n # squeeze row dim\n x = tf.squeeze( x, axis=1, name='features' )\n\n #print(x.shape)\n\n sequence_length = get_sequence_lengths( widths ) \n\n return x, sequence_length", "def __reslayer_bottleneck(self, inputs, in_filters, out_filters, stride=1):\n with tf.variable_scope('sub1'):\n kernel = tf.get_variable('weights', [1, 1, in_filters, out_filters / 4],\n initializer=xavier_initializer(\n dtype=tf.float32),\n dtype=tf.float32)\n conv = tf.nn.conv2d(inputs, kernel, [1, stride, stride, 1],\n padding='SAME',\n name='conv')\n batch_norm = self.__batch_norm_wrapper(conv, decay=0.9999, shape=[0, 1, 2])\n conv = tf.nn.elu(batch_norm, 'elu')\n\n with tf.variable_scope('sub2'):\n kernel = tf.get_variable('weights',\n [3, 3, out_filters / 4, out_filters / 4],\n initializer=xavier_initializer(\n dtype=tf.float32),\n dtype=tf.float32)\n conv = tf.nn.conv2d(conv, kernel, [1, 1, 1, 1], padding='SAME',\n name='conv1')\n batch_norm = self.__batch_norm_wrapper(conv, decay=0.9999, shape=[0, 1, 2])\n conv = tf.nn.elu(batch_norm, 'elu')\n\n with tf.variable_scope('sub3'):\n kernel = tf.get_variable('weights', [1, 1, out_filters / 4, out_filters],\n 
initializer=xavier_initializer(\n dtype=tf.float32),\n dtype=tf.float32)\n conv = tf.nn.conv2d(conv, kernel, [1, 1, 1, 1],\n padding='SAME',\n name='conv')\n batch_norm = self.__batch_norm_wrapper(conv, decay=0.9999, shape=[0, 1, 2])\n\n with tf.variable_scope('subadd'):\n if in_filters != out_filters:\n kernel = tf.get_variable('weights', [1, 1, in_filters, out_filters],\n initializer=xavier_initializer(\n dtype=tf.float32),\n dtype=tf.float32)\n inputs = tf.nn.conv2d(\n inputs, kernel, [1, stride, stride, 1], padding='SAME')\n batch_norm += inputs\n conv = tf.nn.elu(batch_norm, 'elu')\n\n num = np.power(2, np.floor(np.log2(out_filters) / 2))\n\n grid = self.__put_activations_on_grid(conv, (int(num),\n int(out_filters /\n num)))\n tf.summary.image('sub3/activations', grid, max_outputs=1)\n\n return conv", "def forward(self, x): \n # Layer 1\n x = F.elu(self.conv1(x)) # bsize x l1_channels x 1 x Nsamples\n x = self.batchnorm1(x)\n x = F.dropout(x, 0.25)\n x = x.permute(0, 2, 1, 3) # bsize x 1 x l1_channels x Nsamples\n\n # Layer 2\n x = self.padding1(x)\n x = F.elu(self.conv2(x)) # bsize x l2_channels x l1_channels x Nsamples\n x = self.batchnorm2(x) \n x = F.dropout(x, 0.25)\n x = self.pooling2(x) # bsize x l2_channels x floor(l1_channels/2) x floor(Nsamples/4)\n\n # Layer 3\n x = self.padding2(x)\n x = F.elu(self.conv3(x)) # bsize x l3_channels x floor(l1_channels/2) x floor(Nsamples/4)\n x = self.batchnorm3(x)\n x = F.dropout(x, 0.25)\n x = self.pooling3(x) # bsize x l3_channels x floor(l1_channels/4) x floor(Nsamples/16)\n\n # Fully-connected Layer\n x = x.view(-1, self.fc1.in_features) # bsize x (l3_channels*floor(l1_channels/4)*floor(Nsamples/16))\n x = F.sigmoid(self.fc1(x)) # bisze x self.fc1.out_features \n \n if self.fc1.out_features == 1:\n x = x.view(-1) # bsize (1D if 1 output unit)\n \n return x", "def __init__(self, rng, input, n_in = 0, n_out = 0, \n halfWinSize = 0, activation = T.nnet.relu, mask = None):\n self.input = input\n self.n_in = n_in\n self.n_out = n_out\n\tself.halfWinSize = halfWinSize\n\n windowSize = 2*halfWinSize + 1\n self.filter_size = windowSize\n\n # reshape input to shape (batchSize, n_in, nRows=1, nCols=seqLen) \n in4conv2D = input.dimshuffle(0, 1, 'x', 2)\n\n # initialize the filter\n w_shp = (n_out, n_in, 1, windowSize)\n\tif activation == T.nnet.relu:\n W_values = np.asarray(\n rng.normal(scale = np.sqrt(2. / (n_in*windowSize + n_out)),\n size = w_shp), \n dtype = theano.config.floatX )\n\telse:\n W_values = np.asarray(\n rng.uniform(low = - np.sqrt(6. / (n_in*windowSize + n_out)), \n high = np.sqrt(6. 
/ (n_in*windowSize + n_out)), \n size = w_shp),\n dtype=theano.config.floatX\n )\n if activation == theano.tensor.nnet.sigmoid:\n \tW_values *= 4\n\n self.W = theano.shared(value=W_values, name='ResConv1d_W', borrow=True)\n\n b_shp = (n_out,)\n self.b = theano.shared(\n np.asarray(rng.uniform(low = -.0, high = .0, size = b_shp), \n dtype=input.dtype), \n name ='ResConv1d_b', \n borrow=True)\n\n # conv_out and conv_out_bias have shape (batch_size, n_out, 1, nCols)\n conv_out = T.nnet.conv2d(in4conv2D, self.W, \n filter_shape=w_shp, border_mode='half')\n if activation is not None:\n conv_out_bias = activation(conv_out + \n self.b.dimshuffle('x', 0, 'x', 'x'))\n else:\n conv_out_bias = (conv_out + self.b.dimshuffle('x', 0, 'x', 'x'))\n\n\t## out2 has shape (batchSize, n_out, nCols)\n out2 = conv_out_bias.dimshuffle(0, 1, 3, 2)[:, :, :, 0]\n\n if mask is not None:\n ## since we did zero padding at left side of the input tensor\n ## we need to reset these positions to 0 again after convolution \n ## to avoid introducing noise\n ## mask has shape (batchSize, #positions_to_be_masked)\n\n ##take the subtensor of out2 that needs modification\n out2_sub = out2[:, :, :mask.shape[1] ]\n mask_new = mask.dimshuffle(0, 'x', 1)\n self.output = T.set_subtensor(out2_sub, T.mul(out2_sub, mask_new))\n else:\n self.output = out2\n\n\t##self.output has shape (batchSize, n_out, nCols)\n\n # parameters of the model\n self.params=[self.W, self.b]\n\n self.paramL1 = abs(self.W).sum() + abs(self.b).sum()\n self.paramL2 = (self.W**2).sum() + (self.b**2).sum()", "def get_unet():\n inputs = Input((img_rows, img_cols, 1))\n conv1 = Conv2D(32, (3, 3), activation='relu', padding='same')(inputs)\n conv1 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv1)\n pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)\n\n conv2 = Conv2D(64, (3, 3), activation='relu', padding='same')(pool1)\n conv2 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv2)\n pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)\n\n conv3 = Conv2D(128, (3, 3), activation='relu', padding='same')(pool2)\n conv3 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv3)\n pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)\n\n conv4 = Conv2D(256, (3, 3), activation='relu', padding='same')(pool3)\n conv4 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv4)\n pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)\n\n conv5 = Conv2D(512, (3, 3), activation='relu', padding='same')(pool4)\n conv5 = Conv2D(512, (3, 3), activation='relu', padding='same')(conv5)\n\n up6 = concatenate([Conv2DTranspose(256, (2, 2), strides=(2, 2),\n padding='same')(conv5), conv4], axis=3)\n conv6 = Conv2D(256, (3, 3), activation='relu', padding='same')(up6)\n conv6 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv6)\n\n up7 = concatenate([Conv2DTranspose(128, (2, 2), strides=(2, 2),\n padding='same')(conv6), conv3], axis=3)\n conv7 = Conv2D(128, (3, 3), activation='relu', padding='same')(up7)\n conv7 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv7)\n\n up8 = concatenate([Conv2DTranspose(64, (2, 2), strides=(2, 2),\n padding='same')(conv7), conv2], axis=3)\n conv8 = Conv2D(64, (3, 3), activation='relu', padding='same')(up8)\n conv8 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv8)\n\n up9 = concatenate([Conv2DTranspose(32, (2, 2), strides=(2, 2),\n padding='same')(conv8), conv1], axis=3)\n conv9 = Conv2D(32, (3, 3), activation='relu', padding='same')(up9)\n conv9 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv9)\n\n conv10 = 
Conv2D(1, (1, 1), activation='sigmoid')(conv9)\n\n model = Model(inputs=[inputs], outputs=[conv10])\n\n model.compile(optimizer=Adam(lr=1e-4), loss=dice_coef_loss,\n metrics=[dice_coef])\n\n return model", "def conv_net(x, keep_prob, nconv1, nconv2, nfullyconn, nfullyconn2):\n # TODO: Apply 1, 2, or 3 Convolution and Max Pool layers\n # Play around with different number of outputs, kernel size and stride\n # Function Definition from Above:\n # conv2d_maxpool(x_tensor, conv_num_outputs, conv_ksize, conv_strides, pool_ksize, pool_strides)\n #layer_norm = tflearn.layers.normalization.batch_normalization (x, name='BatchNormalization')\n layer_conv = conv2d_maxpool(x, nconv1, (2,2), (2,2), (2,2), (2,2))\n #layer_conv = tf.nn.dropout(layer_conv, keep_prob)\n layer_conv = tf.layers.batch_normalization (layer_conv, name='BatchNormalization')\n #layer_conv = tf.nn.dropout(layer_conv, keep_prob)\n #print(layer_conv)\n layer_conv = conv2d_maxpool(x, nconv2, (5,5), (2,2), (2,2), (2,2))\n layer_conv = tf.layers.batch_normalization (layer_conv, name='BatchNormalization2')\n # TODO: Apply a Flatten Layer\n # Function Definition from Above:\n # flatten(x_tensor)\n layer_flat = flatten(layer_conv)\n #layer_flat = tflearn.layers.normalization.batch_normalization (layer_flat, name='BatchNormalization')\n \n \n\n # TODO: Apply 1, 2, or 3 Fully Connected Layers\n # Play around with different number of outputs\n # Function Definition from Above:\n # fully_conn(x_tensor, num_outputs)\n #layer_fully_conn = fully_conn(x, nfullyconn)\n layer_fully_conn = fully_conn(layer_flat, nfullyconn)\n #print(\"Fully Connected Outputs: {}\".format(layer_fully_conn.shape[1]))\n #layer_fully_conn = fully_conn(layer_fully_conn, nconv)\n layer_fully_conn = tf.layers.batch_normalization (layer_fully_conn, name='BatchNormalization3')\n layer_flat = flatten(layer_fully_conn)\n layer_fully_conn = fully_conn(layer_flat, nfullyconn2)\n layer_fully_conn = tf.layers.batch_normalization (layer_fully_conn, name='BatchNormalization4')\n layer_flat = flatten(layer_fully_conn)\n layer_fully_conn = tf.nn.dropout(layer_fully_conn, keep_prob)\n #layer_fully_conn = tf.nn.dropout(layer_fully_conn, keep_prob)\n \n # TODO: Apply an Output Layer\n # Set this to the number of classes\n # Function Definition from Above:\n # output(x_tensor, num_outputs)\n layer_final = output(layer_fully_conn, 46)\n \n \n # TODO: return output\n return layer_final", "def build_resnet101(self):\n use_batch_norm = self.use_batch_norm\n\n imgs = tf.placeholder(tf.float32, [self.batch_size]+self.img_shape)\n is_train = tf.placeholder(tf.bool)\n\n conv1_feats = convolution(imgs, 7, 7, 64, 2, 2, 'conv1')\n conv1_feats = batch_norm(conv1_feats, 'bn_conv1', is_train, use_batch_norm)\n conv1_feats = nonlinear(conv1_feats, 'relu')\n pool1_feats = max_pool(conv1_feats, 3, 3, 2, 2, 'pool1')\n\n res2a_feats = self.basic_block(pool1_feats, 'res2a', 'bn2a', is_train, use_batch_norm, 64, 1)\n res2b_feats = self.basic_block2(res2a_feats, 'res2b', 'bn2b', is_train, use_batch_norm, 64)\n res2c_feats = self.basic_block2(res2b_feats, 'res2c', 'bn2c', is_train, use_batch_norm, 64)\n \n res3a_feats = self.basic_block(res2c_feats, 'res3a', 'bn3a', is_train, use_batch_norm, 128) \n temp = res3a_feats\n for i in range(1, 4):\n temp = self.basic_block2(temp, 'res3b'+str(i), 'bn3b'+str(i), is_train, use_batch_norm, 128)\n res3b3_feats = temp\n \n res4a_feats = self.basic_block(res3b3_feats, 'res4a', 'bn4a', is_train, use_batch_norm, 256)\n temp = res4a_feats\n for i in range(1, 23):\n temp = 
self.basic_block2(temp, 'res4b'+str(i), 'bn4b'+str(i), is_train, use_batch_norm, 256)\n res4b22_feats = temp\n\n res5a_feats = self.basic_block(res4b22_feats, 'res5a', 'bn5a', is_train, use_batch_norm, 512)\n res5b_feats = self.basic_block2(res5a_feats, 'res5b', 'bn5b', is_train, use_batch_norm, 512)\n res5c_feats = self.basic_block2(res5b_feats, 'res5c', 'bn5c', is_train, use_batch_norm, 512)\n\n res5c_feats_flat = tf.reshape(res5c_feats, [self.batch_size, 49, 2048])\n self.conv_feats = res5c_feats_flat\n self.conv_feat_shape = [49, 2048]\n self.num_ctx = 49 \n self.dim_ctx = 2048\n\n self.imgs = imgs\n self.is_train = is_train", "def resnet_v2(input_shape, depth, num_classes=10):\n if (depth - 2) % 9 != 0:\n raise ValueError('depth should be 9n+2 (eg 56 or 110 in [b])')\n # Start model definition.\n inputs = Input(shape=input_shape)\n num_filters_in = 16\n num_filters_out = 64\n filter_multiplier = 4\n num_sub_blocks = int((depth - 2) / 9)\n\n # v2 performs Conv2D on input w/o BN-ReLU\n x = Conv2D(num_filters_in,\n kernel_size=3,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))(inputs)\n\n # Instantiate convolutional base (stack of blocks).\n for i in range(3):\n if i > 0:\n filter_multiplier = 2\n num_filters_out = num_filters_in * filter_multiplier\n\n for j in range(num_sub_blocks):\n strides = 1\n is_first_layer_but_not_first_block = j == 0 and i > 0\n if is_first_layer_but_not_first_block:\n strides = 2\n y = resnet_block(inputs=x,\n num_filters=num_filters_in,\n kernel_size=1,\n strides=strides,\n conv_first=False)\n y = resnet_block(inputs=y,\n num_filters=num_filters_in,\n conv_first=False)\n y = resnet_block(inputs=y,\n num_filters=num_filters_out,\n kernel_size=1,\n conv_first=False)\n if j == 0:\n x = Conv2D(num_filters_out,\n kernel_size=1,\n strides=strides,\n padding='same',\n kernel_initializer='he_normal',\n kernel_regularizer=l2(1e-4))(x)\n x = keras.layers.add([x, y])\n\n num_filters_in = num_filters_out\n\n # Add classifier on top.\n # v2 has BN-ReLU before Pooling\n x = BatchNormalization()(x)\n x = Activation('relu')(x)\n x = AveragePooling2D(pool_size=8)(x)\n y = Flatten()(x)\n outputs = Dense(num_classes,\n activation='softmax',\n kernel_initializer='he_normal')(y)\n\n # Instantiate model.\n model = Model(inputs=inputs, outputs=outputs)\n return model", "def __init__(self, n_channels_list, bn_momentum=0.01, activation='relu'):\n super(MultiResolutionLayer, self).__init__()\n self.n_branches = len(n_channels_list)\n self.fuse_layers = nn.ModuleList()\n for branch_i in range(self.n_branches):\n layer = nn.ModuleList()\n for branch_j in range(self.n_branches):\n if branch_i < branch_j:\n # resolution of branch i is greater than branch_j\n # branch_j will be upsample with nearest resize\n layer.append(nn.Sequential(\n nn.Conv2d(in_channels=n_channels_list[branch_j], out_channels=n_channels_list[branch_i],\n kernel_size=1, stride=1, padding=0, bias=False),\n nn.BatchNorm2d(n_channels_list[branch_i], momentum=bn_momentum))\n )\n\n elif branch_i == branch_j:\n # branch i is branch_j\n layer.append(None)\n else:\n # branch_i > branch_j\n # resolution of branch i is greater than branch_j\n # needed to be downsample(stride 2 convolution) branch_i - branch_j times\n downsample_conv = []\n for k in range(branch_i - branch_j):\n if k == branch_i - branch_j - 1:\n downsample_conv.append(\n nn.Sequential(\n nn.Conv2d(\n in_channels=n_channels_list[branch_j],\n out_channels=n_channels_list[branch_i],\n kernel_size=3, stride=2, padding=1,\n 
bias=False),\n nn.BatchNorm2d(n_channels_list[branch_i], momentum=bn_momentum)))\n else:\n downsample_conv.append(\n nn.Sequential(\n nn.Conv2d(in_channels=n_channels_list[branch_j],\n out_channels=n_channels_list[branch_j],\n kernel_size=3, stride=2, padding=1, bias=False),\n nn.BatchNorm2d(n_channels_list[branch_j], momentum=bn_momentum)))\n\n layer.append(nn.Sequential(*downsample_conv))\n self.fuse_layers.append(layer)\n pass", "def __init__(self, rng, input, filter_shape, image_shape, poolsize=(2, 2), non_linear=\"tanh\"):\n\n assert image_shape[1] == filter_shape[1]\n self.input = input\n self.filter_shape = filter_shape\n self.image_shape = image_shape\n self.poolsize = poolsize\n self.non_linear = non_linear\n # there are \"num input feature maps * filter height * filter width\"\n # inputs to each hidden unit\n fan_in = np.prod(filter_shape[1:])\n # each unit in the lower layer receives a gradient from:\n # \"num output feature maps * filter height * filter width\" /\n # pooling size\n fan_out = (filter_shape[0] * np.prod(filter_shape[2:]) /np.prod(poolsize))\n # initialize weights with random weights\n if self.non_linear == \"none\" or self.non_linear == \"relu\":\n self.W = theano.shared(np.asarray(rng.uniform(low=-0.01,high=0.01,size=filter_shape), \n dtype=theano.config.floatX),borrow=True,name=\"W_conv\")\n # self.W = T.as_tensor_variable(np.asarray(rng.uniform(low=-0.01,high=0.01,size=filter_shape), \n # dtype=theano.config.floatX),name=\"W_conv\")\n else:\n W_bound = np.sqrt(6. / (fan_in + fan_out))\n self.W = theano.shared(np.asarray(rng.uniform(low=-W_bound, high=W_bound, size=filter_shape),\n dtype = theano.config.floatX),borrow=True,name=\"W_conv\") \n # self.W = T.as_tensor_variable(np.asarray(rng.uniform(low=-W_bound, high=W_bound, size=filter_shape),\n # dtype = theano.config.floatX),name=\"W_conv\") \n b_values = np.zeros((filter_shape[0],), dtype=theano.config.floatX)\n self.b = theano.shared(value=b_values, borrow=True, name=\"b_conv\")\n # self.b = T.as_tensor_variable(b_values, name=\"b_conv\")\n \n # convolve input feature maps with filters\n conv_out = conv.conv2d(input=input, filters=self.W,filter_shape=self.filter_shape, image_shape=self.image_shape)\n if self.non_linear == \"tanh\":\n conv_out_tanh = T.tanh(conv_out + self.b.dimshuffle('x', 0, 'x', 'x'))\n self.output = downsample.max_pool_2d(input=conv_out_tanh, ds=self.poolsize, ignore_border=True)\n elif self.non_linear == \"relu\":\n conv_out_tanh = ReLU(conv_out + self.b.dimshuffle('x', 0, 'x', 'x'))\n self.output = downsample.max_pool_2d(input=conv_out_tanh, ds=self.poolsize, ignore_border=True)\n else:\n pooled_out = downsample.max_pool_2d(input=conv_out, ds=self.poolsize, ignore_border=True)\n self.output = pooled_out + self.b.dimshuffle('x', 0, 'x', 'x')\n self.params = [self.W, self.b]", "def __init__(self, \n input_dim=(3, 32, 32), \n num_filters = (32, 64), filter_sizes = (7, 7), conv_param = {\"stride\": 1, \"pad\": 3},\n hidden_dim= 100, num_classes=10, weight_scale=1e-3, reg=0.0,\n dtype=np.float32\n ):\n self.params = {}\n self.reg = reg\n self.dtype = dtype\n self.conv_param = conv_param\n self.filter_sizes = filter_sizes\n self.num_layers = 4\n ############################################################################\n # TODO: Initialize weights and biases for the three-layer convolutional #\n # network. Weights should be initialized from a Gaussian with standard #\n # deviation equal to weight_scale; biases should be initialized to zero. 
#\n # All weights and biases should be stored in the dictionary self.params. #\n ############################################################################\n \n C, H, W = input_dim\n filter_size1, filter_size2 = filter_sizes\n num_filters1, num_filters2 = num_filters\n\n # conv layer 1: (N, C, H, W) -> (N, num_filters1, H, W)\n self.params['W1'] = np.random.normal(0, weight_scale, [num_filters1, C, filter_size1, filter_size1]) # square filter\n self.params['b1'] = np.zeros((num_filters1, ))\n self.params[\"sbnGamma1\"] = np.ones((num_filters1, )) # scale parameter one for each color channel during spatial batch norm\n self.params[\"sbnBeta1\"] = np.zeros((num_filters1, )) # shift parameter one for each color channel during spatial batch norm\n\n # conv layer 2: (N, num_filters1, H, W) -> (N, num_filters2, H, W)\n self.params['W2'] = np.random.normal(0, weight_scale, [num_filters2, num_filters1, filter_size2, filter_size2]) # square filter\n self.params['b2'] = np.zeros((num_filters2, ))\n self.params[\"sbnGamma2\"] = np.ones((num_filters2, ))\n self.params[\"sbnBeta2\"] = np.zeros((num_filters2, ))\n\n # (2, 2, 2) maxpool: (N, num_filters2, H, W) -> (N, num_filters2, H/2. W/2)\n # maxpool layer contributes nothing to self.params that need to be updated.\n self.maxpool_params = {\"pool_height\": 2, \"pool_width\": 2, \"stride\": 2}\n\n # affine layer 3: (N, num_filters2, H/2. W/2) -> (N, hidden_dim)\n self.params['W3'] = np.random.normal(0, weight_scale, [num_filters2 * (H / 2) * (W / 2), hidden_dim])\n self.params['b3'] = np.zeros((hidden_dim, ))\n self.params[\"bnGamma3\"] = np.ones((hidden_dim, ))\n self.params[\"bnBeta3\"] = np.zeros((hidden_dim, ))\n\n # output affine - sfmx layer 4: (N, hidden_dim) -> (N, num_classes)\n self.params['W4'] = np.random.normal(0, weight_scale, [hidden_dim, num_classes])\n self.params['b4'] = np.zeros((num_classes, ))\n\n self.bn_params = [{\"mode\": \"train\"} for _ in range(self.num_layers)]\n\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n for k, v in self.params.iteritems():\n self.params[k] = v.astype(dtype)", "def res_net(*inputs, **kwargs):\n inp_shapes = kwargs['inp_shapes']\n out_shapes = kwargs['out_shapes']\n params = kwargs['params']\n layer_width = kwargs['layer_width']\n nblocks = kwargs['nblocks']\n block_size = kwargs['block_size']\n output_args = kwargs['output_args']\n ninputs = len(inp_shapes)\n noutputs = len(out_shapes)\n\n input_width = np.sum([in_shape[1] for in_shape in inp_shapes])\n flat_output_shapes = [np.prod(out_shape[1:]) for out_shape in out_shapes]\n output_width = np.sum(flat_output_shapes)\n print(\"Building resnet with: %s residual blocks of size %s inner width: %s from: %s inputs to %s outputs\" %\n (nblocks, block_size, layer_width, input_width, output_width))\n input_layers = [InputLayer(inp_shapes[i], input_var = inputs[i]) for i in range(len(inputs))]\n\n ## Flatten the input\n reshaped = [ReshapeLayer(inp, ([0], -1)) for inp in input_layers]\n\n net = {}\n net['concat'] = prev_layer = ConcatLayer(reshaped)\n # Projet inner layer down/up to hidden layer width only if necessary\n if layer_width != input_width:\n print(\"Input projection, layer_width: %s input_width: %s\" % (layer_width, input_width))\n wx_sfx = 'wxinpproj'\n wx = batch_norm_params(DenseLayer(prev_layer, layer_width, nonlinearity = rectify,\n W=params['W_%s' % wx_sfx, HeNormal(gain='relu')],\n b=params['b_%s' % 
wx_sfx, Constant(0)]), wx_sfx, params)\n else:\n print(\"Skipping input weight projection, layer_width: %s input_width: %s\" % (layer_width, input_width))\n wx = prev_layer\n\n ## Residual Blocks\n for j in range(nblocks):\n for i in range(block_size):\n sfx = \"%s_%s\" % (j,i)\n net['res2d%s_%s' % (j,i)] = prev_layer = batch_norm_params(\n DenseLayer(prev_layer, layer_width, nonlinearity = rectify,\n W=params['W_%s' % sfx, HeNormal(gain='relu')],\n b=params['b_%s' % sfx, Constant(0)]), sfx, params)\n net['block%s' % j] = prev_layer = wx = lasagne.layers.ElemwiseSumLayer([prev_layer, wx])\n\n ## Project output to correct width\n if layer_width != output_width:\n print(\"Output projection, layer_width: %s output_width: %s\" % (layer_width, output_width))\n wx_sfx = 'wxoutproj'\n net['output'] = wx = batch_norm_params(DenseLayer(prev_layer, output_width, nonlinearity = rectify,\n W=params['W_%s' % wx_sfx, HeNormal(gain='relu')],\n b=params['b_%s' % wx_sfx, Constant(0)]), wx_sfx, params)\n else:\n print(\"Skipping output projection, layer_width: %s output_width: %s\" % (layer_width, output_width))\n net['output'] = prev_layer\n\n # Split up the final layer into necessary parts and reshape\n output_product = lasagne.layers.get_output(net['output'], **output_args)\n outputs = []\n lb = 0\n for i in range(noutputs):\n ub = lb + flat_output_shapes[i]\n out = output_product[:, lb:ub]\n rout = out.reshape((out.shape[0],) + (out_shapes[i][1:]))\n outputs.append(rout)\n lb = ub\n\n params.add_tagged_params(get_layer_params(lasagne.layers.get_all_layers(net['output'])))\n params.check(lasagne.layers.get_all_params(prev_layer))\n return outputs, params", "def model(name, conv_dropout_keep_prob=1.0, fc_dropout_keep_prob=1.0, reuse=None):\n with tf.variable_scope(name, reuse=reuse) as scope:\n # input format\n input_shape = [100, 100, 60] # can't actually get rid of this \n num_labels = 2\n\n # architecture\n k = 5\n depth_1 = 32\n depth_2 = 32\n \n k = 3\n depth_3 = 64\n depth_4 = 64\n \n fc_num_0 = conv_to_fc_size(input_shape, conv_depth=depth_4, pools=2)\n fc_num_1 = 32\n\n # regularizers\n reg_conv = tf.contrib.layers.l2_regularizer(scale=1e-6)\n reg_fc = tf.contrib.layers.l2_regularizer(scale=1e-6)\n\n # 2 convolution and pooling layers\n conv_1 = conv_relu_layer(x_batch,\n 5, 1, depth_1,\n regularizer=reg_conv,\n dropout_keep_prob=conv_dropout_keep_prob,\n name='conv_1')\n# conv_1 = maxpool_layer(conv_1, name='maxpool_1')\n\n conv_2 = conv_relu_layer(conv_1,\n 5, depth_1, depth_2,\n regularizer=reg_conv,\n dropout_keep_prob=conv_dropout_keep_prob,\n name='conv_2')\n conv_2 = maxpool_layer(conv_2, name='maxpool_2')\n\n conv_3 = conv_relu_layer(conv_2,\n 3, depth_2, depth_3,\n regularizer=reg_conv,\n dropout_keep_prob=conv_dropout_keep_prob,\n name='conv_3')\n\n conv_4 = conv_relu_layer(conv_3,\n 3, depth_3, depth_4,\n regularizer=reg_conv,\n dropout_keep_prob=conv_dropout_keep_prob,\n name='conv_4')\n \n conv_4 = maxpool_layer(conv_4, name='maxpool_2')\n\n # 1 fully connected layer\n fc_1 = fc_layer(conv_4, \n fc_num_0, fc_num_1, \n conv_input=True, \n activation=tf.nn.relu,\n regularizer=reg_fc,\n dropout_keep_prob=fc_dropout_keep_prob,\n name='fc_1')\n\n # output\n logits = fc_layer(fc_1,\n fc_num_1, num_labels, \n regularizer=reg_fc,\n dropout_keep_prob=fc_dropout_keep_prob,\n name='out')\n\n return logits", "def forward(self, input_image):\n c1 = self.conv1(input_image)\n c2 = self.conv2_bn(self.conv2(F.leaky_relu(c1, 0.1)))\n c3 = self.conv3_bn(self.conv3(F.leaky_relu(c2, 0.1)))\n c4 = 
self.conv4_bn(self.conv4(F.leaky_relu(c3, 0.1)))\n c5 = self.conv5(F.leaky_relu(c4, 0.1))\n\n if self.noise:\n c5 += Variable((0.01 * (torch.randn(c5.size()))).cuda())\n\n d1 = F.dropout(self.deconv1_bn(self.deconv1(F.leaky_relu(c5, 0.1))), 0.5, training=True)\n if self.residual:\n d1 = torch.cat([d1, c4], 1)\n d2 = F.dropout(self.deconv2_bn(self.deconv2(F.leaky_relu(d1, 0.1))), 0.5, training=True)\n if self.residual:\n d2 = torch.cat([d2, c3], 1)\n d3 = F.dropout(self.deconv3_bn(self.deconv3(F.leaky_relu(d2, 0.1))), 0.5, training=True)\n if self.residual:\n d3 = torch.cat([d3, c2], 1)\n d4 = self.deconv4_bn(self.deconv4(F.leaky_relu(d3, 0.1)))\n if self.residual:\n d4 = torch.cat([d4, c1], 1)\n d5 = self.deconv5(F.leaky_relu(d4, 0.1))\n out = F.tanh(d5)\n\n return out", "def baseUNet(input_shape,conv_depth,n_classes,init_w,dropout):\n inputs = Input(input_shape)\n\n c1=Conv2D(conv_depth,\n (3,3),\n activation='relu',\n padding='same',\n kernel_initializer=init_w)(inputs)\n\n c1=Conv2D(conv_depth,\n (3,3),\n activation='relu',\n padding=\"same\",\n kernel_initializer=init_w)(c1)\n\n # pool down to next layer\n pool1 = MaxPooling2D((2,2),strides = (2,2))(c1)\n\n conv_depth *= 2\n\n # convolute down again\n conv2 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(pool1)\n\n conv2 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv2)\n \n # pool down again\n pool2 = MaxPooling2D((2,2),strides = (2,2))(conv2)\n\n conv_depth *= 2 \n\n # Convolution\n conv3 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(pool2)\n\n conv3 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv3)\n \n # pool down\n pool3 = MaxPooling2D((2,2),strides = (2,2))(conv3)\n\n conv_depth *= 2 \n # Convolution\n conv4 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(pool3)\n\n conv4 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv4)\n \n # pool down \n pool4 = MaxPooling2D((2,2),strides = (2,2))(conv4)\n\n conv_depth *=2 \n\n # Convolution\n conv5 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(pool4)\n\n conv5 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv5)\n\n drop = Dropout(dropout)(conv5)\n\n conv_depth /= 2\n conv_depth = int(conv_depth) \n # do upsampling\n up1 = UpSampling2D(size = (2,2))(drop)\n conv6 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(up1)\n \n # add in skip info\n cat1 = concatenate([conv4,conv6],axis = 3)\n conv6 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(cat1)\n\n conv6 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv6)\n\n conv_depth /= 2\n conv_depth = int(conv_depth)\n # do upsampling\n up2 = UpSampling2D(size = 
(2,2))(conv6)\n conv7 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(up2)\n \n # add in skip info\n cat2 = concatenate([conv3,conv7],axis = 3)\n conv7 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(cat2)\n\n conv7 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv7)\n \n conv_depth /= 2\n conv_depth = int(conv_depth)\n # do upsampling\n up3 = UpSampling2D(size = (2,2))(conv7)\n conv8 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size=(3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(up3)\n \n # add in skip info\n cat3 = concatenate([conv2,conv8],axis = 3)\n conv8 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(cat3)\n\n conv8 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv8)\n \n conv_depth /= 2\n conv_depth = int(conv_depth)\n # do upsampling\n up4 = UpSampling2D(size = (2,2))(conv8)\n conv9 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(up4)\n \n # add in skip info\n cat4 = concatenate([c1,conv9],axis = 3)\n conv9 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(cat4)\n\n conv9 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv9)\n\n outputs = Conv2D(n_classes, 1, activation = 'softmax')(conv9)\n\n return outputs,inputs", "def build_resnet152(self):\n use_batch_norm = self.use_batch_norm\n\n imgs = tf.placeholder(tf.float32, [self.batch_size]+self.img_shape)\n is_train = tf.placeholder(tf.bool)\n\n conv1_feats = convolution(imgs, 7, 7, 64, 2, 2, 'conv1')\n conv1_feats = batch_norm(conv1_feats, 'bn_conv1', is_train, use_batch_norm)\n conv1_feats = nonlinear(conv1_feats, 'relu')\n pool1_feats = max_pool(conv1_feats, 3, 3, 2, 2, 'pool1')\n\n res2a_feats = self.basic_block(pool1_feats, 'res2a', 'bn2a', is_train, use_batch_norm, 64, 1)\n res2b_feats = self.basic_block2(res2a_feats, 'res2b', 'bn2b', is_train, use_batch_norm, 64)\n res2c_feats = self.basic_block2(res2b_feats, 'res2c', 'bn2c', is_train, use_batch_norm, 64)\n \n res3a_feats = self.basic_block(res2c_feats, 'res3a', 'bn3a', is_train, use_batch_norm, 128) \n temp = res3a_feats\n for i in range(1, 8):\n temp = self.basic_block2(temp, 'res3b'+str(i), 'bn3b'+str(i), is_train, use_batch_norm, 128)\n res3b7_feats = temp\n \n res4a_feats = self.basic_block(res3b7_feats, 'res4a', 'bn4a', is_train, use_batch_norm, 256)\n temp = res4a_feats\n for i in range(1, 36):\n temp = self.basic_block2(temp, 'res4b'+str(i), 'bn4b'+str(i), is_train, use_batch_norm, 256)\n res4b35_feats = temp\n\n res5a_feats = self.basic_block(res4b35_feats, 'res5a', 'bn5a', is_train, use_batch_norm, 512)\n res5b_feats = self.basic_block2(res5a_feats, 'res5b', 'bn5b', is_train, use_batch_norm, 512)\n res5c_feats = self.basic_block2(res5b_feats, 'res5c', 'bn5c', is_train, use_batch_norm, 512)\n\n res5c_feats_flat = tf.reshape(res5c_feats, [self.batch_size, 49, 2048])\n self.conv_feats = res5c_feats_flat\n self.conv_feat_shape = [49, 
2048]\n self.num_ctx = 49 \n self.dim_ctx = 2048\n\n self.imgs = imgs\n self.is_train = is_train", "def forward(self, x):\n residual = x\n out = self.conv1(x)\n out = self.bn1(out)\n out += residual\n\n residual = out\n out = self.conv2(out)\n out = self.bn2(out)\n out += residual\n\n residual = out\n out = self.conv3(out)\n out = self.bn3(out)\n out += residual\n if self.apply_activation: out = self.relu(out)\n return out", "def inference(image,norm = True,phase_train = True):\n batch_size = image.shape[0]\n r,g,b = tf.split(axis = 3,num_or_size_splits = 3,value = image)\n p_image = tf.concat([r - 123.68,\n g - 116.78,\n b - 103.94],axis = 3)\n with tf.variable_scope('vgg_16'):\n with tf.variable_scope('conv1'):\n conv1_1 = layer.conv_layer('conv1_1',p_image,[3,3,3,64])\n conv1_2 = layer.conv_layer('conv1_2',conv1_1,[3,3,64,64])\n pool1 = layer.pool_layer('pool1',conv1_2)\n with tf.variable_scope('conv2'):\n conv2_1 = layer.conv_layer('conv2_1',pool1,[3,3,64,128])\n conv2_2 = layer.conv_layer('conv2_2',conv2_1,[3,3,128,128])\n pool2 = layer.pool_layer('pool2',conv2_2)\n with tf.variable_scope('conv3'):\n conv3_1 = layer.conv_layer('conv3_1',pool2,[3,3,128,256])\n conv3_2 = layer.conv_layer('conv3_2',conv3_1,[3,3,256,256])\n conv3_3 = layer.conv_layer('conv3_3',conv3_2,[3,3,256,256])\n pool3 = layer.pool_layer('pool3',conv3_3)\n with tf.variable_scope('conv4'):\n conv4_1 = layer.conv_layer('conv4_1',pool3,[3,3,256,512])\n conv4_2 = layer.conv_layer('conv4_2',conv4_1,[3,3,512,512])\n conv4_3 = layer.conv_layer('conv4_3',conv4_2,[3,3,512,512])\n pool4 = layer.pool_layer('pool4',conv4_3)\n with tf.variable_scope('conv5'):\n conv5_1 = layer.conv_layer('conv5_1',pool4,[3,3,512,512])\n conv5_2 = layer.conv_layer('conv5_2',conv5_1,[3,3,512,512])\n conv5_3 = layer.conv_layer('conv5_3',conv5_2,[3,3,512,512])\n pool5 = layer.pool_layer('pool5',conv5_3,ksize = [1,3,3,1],strides = [1,1,1,1])\n with tf.variable_scope('ssd'):\n conv6 = layer.atrous_conv('conv6',pool5,[3,3,512,1024],rate = 6,\n batch_normalization = norm,phase_train = phase_train)\n conv7 = layer.conv_layer('conv7',conv6,[1,1,1024,1024],\n batch_normalization = norm,phase_train = phase_train)\n with tf.variable_scope('conv8'):\n conv8_1 = layer.conv_layer('conv8_1',conv7,[1,1,1024,256],\n batch_normalization = norm,phase_train = phase_train)\n conv8_2 = layer.conv_layer('conv8_2',conv8_1,[3,3,256,512],\n stride = [1,2,2,1],batch_normalization = norm,phase_train = phase_train)\n with tf.variable_scope('conv9'):\n conv9_1 = layer.conv_layer('conv9_1',conv8_2,[1,1,512,128],\n batch_normalization = norm,phase_train = phase_train)\n conv9_2 = layer.conv_layer('conv9_2',conv9_1,[3,3,128,256],\n stride = [1,2,2,1],batch_normalization = norm,phase_train = phase_train)\n with tf.variable_scope('conv10'):\n conv10_1 = layer.conv_layer('conv10_1',conv9_2,[1,1,256,128],\n batch_normalization = norm,phase_train = phase_train)\n conv10_2 = layer.conv_layer('conv10_2',conv10_1,[3,3,128,256],\n padding = 'VALID',batch_normalization = norm,phase_train = phase_train)\n with tf.variable_scope('conv11'):\n conv11_1 = layer.conv_layer('conv11_1',conv10_2,[1,1,256,128],\n batch_normalization = norm,phase_train = phase_train)\n conv11_2 = layer.conv_layer('conv11_2',conv11_1,[3,3,128,256],\n padding = 'VALID',batch_normalization = norm,phase_train = phase_train)#vgg300\n with tf.variable_scope('multibox'):\n\n l2_conv4_3 = layer.l2_normalization('l2_normalization',conv4_3,scaling = True)\n cls4 = layer.conv_layer('cls4',l2_conv4_3,[3,3,512,84],activation = 
None)\n loc4 = layer.conv_layer('loc4',l2_conv4_3,[3,3,512,16],activation = None)\n\n cls4_reshape = tf.reshape(cls4,[batch_size,-1,21])\n loc4_reshape = tf.reshape(loc4,[batch_size,-1,4])\n\n\n cls7 = layer.conv_layer('cls7',conv7,[3,3,1024,126],activation = None)\n loc7 = layer.conv_layer('loc7',conv7,[3,3,1024,24],activation = None)\n\n cls7_reshape = tf.reshape(cls7,[batch_size,-1,21])\n loc7_reshape = tf.reshape(loc7,[batch_size,-1,4])\n\n cls8 = layer.conv_layer('cls8',conv8_2,[3,3,512,126],activation = None)\n loc8 = layer.conv_layer('loc8',conv8_2,[3,3,512,24],activation = None)\n\n cls8_reshape = tf.reshape(cls8,[batch_size,-1,21])\n loc8_reshape = tf.reshape(loc8,[batch_size,-1,4])\n\n cls9 = layer.conv_layer('cls9',conv9_2,[3,3,256,126],activation = None)\n loc9 = layer.conv_layer('loc9',conv9_2,[3,3,256,24],activation = None)\n\n cls9_reshape = tf.reshape(cls9,[batch_size,-1,21])\n loc9_reshape = tf.reshape(loc9,[batch_size,-1,4])\n\n cls10 = layer.conv_layer('cls10',conv10_2,[3,3,256,84],activation = None)\n loc10 = layer.conv_layer('loc10',conv10_2,[3,3,256,16],activation = None)\n\n cls10_reshape = tf.reshape(cls10,[batch_size,-1,21])\n loc10_reshape = tf.reshape(loc10,[batch_size,-1,4])\n\n cls11 = layer.conv_layer('cls11',conv11_2,[1,1,256,84],activation = None)\n loc11 = layer.conv_layer('loc11',conv11_2,[1,1,256,16],activation = None)\n\n cls11_reshape = tf.reshape(cls11,[batch_size,-1,21])\n loc11_reshape = tf.reshape(loc11,[batch_size,-1,4])\n\n cls_logit = tf.concat([\n cls4_reshape,\n cls7_reshape,\n cls8_reshape,\n cls9_reshape,\n cls10_reshape,\n cls11_reshape\n ],1)\n loc_logit = tf.concat([\n loc4_reshape,\n loc7_reshape,\n loc8_reshape,\n loc9_reshape,\n loc10_reshape,\n loc11_reshape\n ],1)\n \n return cls_logit,loc_logit", "def build_resnet_block(inputres, dim, name=\"resnet\", padding=\"REFLECT\"):\n with tf.variable_scope(name):\n out_res = tf.pad(inputres, [[0, 0], [1, 1], [\n 1, 1], [0, 0]], padding)\n out_res = general_conv2d(\n out_res, dim, 3, 3, 1, 1, 0.02, \"VALID\", \"c1\")\n out_res = tf.pad(out_res, [[0, 0], [1, 1], [1, 1], [0, 0]], padding)\n out_res = general_conv2d(\n out_res, dim, 3, 3, 1, 1, 0.02, \"VALID\", \"c2\", do_relu=False)\n\n return tf.nn.relu(out_res + inputres)", "def resnet50():\n initializer = K.initializers.he_normal(seed=None)\n\n X = K.Input(shape=(224, 224, 3))\n\n # conv1\n layer = K.layers.Conv2D(filters=64,\n kernel_size=(7, 7),\n strides=(2, 2),\n padding='same',\n kernel_initializer=initializer,\n )(X)\n\n layer = K.layers.BatchNormalization(axis=3)(layer)\n\n layer = K.layers.Activation('relu')(layer)\n\n # conv2_x\n layer = K.layers.MaxPool2D(pool_size=(3, 3),\n strides=(2, 2),\n padding='same')(layer)\n\n layer = projection_block(layer, [64, 64, 256], 1)\n for _ in range(2):\n layer = identity_block(layer, [64, 64, 256])\n\n # conv3_x\n layer = projection_block(layer, [128, 128, 512])\n for _ in range(3):\n layer = identity_block(layer, [128, 128, 512])\n\n # conv4_x\n layer = projection_block(layer, [256, 256, 1024])\n for _ in range(5):\n layer = identity_block(layer, [256, 256, 1024])\n\n # conv5_x\n layer = projection_block(layer, [512, 512, 2048])\n for _ in range(2):\n layer = identity_block(layer, [512, 512, 2048])\n\n layer = K.layers.AveragePooling2D(pool_size=(7, 7),\n padding='same')(layer)\n\n layer = K.layers.Dense(units=1000,\n activation='softmax',\n kernel_initializer=initializer,\n )(layer)\n\n model = K.models.Model(inputs=X, outputs=layer)\n return model", "def residual_block(layer_input, 
filters=512, down_filter=False, normalization=False):\n\td1 = Conv2D(filters, kernel_size=3, strides=1, padding='same')(layer_input)\n\tif normalization:\n\t\t# d = InstanceNormalization()(d)\n\t\td1 = BatchNormalization(momentum=0.8)(d1) # 6/6/2018: use it for CT # 6/5/2018: remove it for MNIST\n\td1 = Activation('relu')(d1)\n\td2 = Conv2D(filters, kernel_size=3, strides=1, padding='same')(d1)\n\tif normalization:\n\t\t# d = InstanceNormalization()(d)\n\t\td2 = BatchNormalization(momentum=0.8)(d2) # 6/6/2018: use it for CT # 6/5/2018: remove it for MNIST\n\tif down_filter:\n\t\td = Add()([d1, d2])\n\telse:\n\t\td = Add()([d2, layer_input])\n\treturn d", "def __reslayer(self, inputs, in_filters, out_filters, stride=1):\n with tf.variable_scope('sub1'):\n kernel = tf.get_variable('weights', [3, 3, in_filters, out_filters],\n initializer=xavier_initializer(\n dtype=tf.float32),\n dtype=tf.float32)\n conv = tf.nn.conv2d(inputs, kernel, [1, stride, stride, 1],\n padding='SAME',\n name='conv')\n batch_norm = self.__batch_norm_wrapper(conv, decay=0.9999, shape=[0, 1, 2])\n conv = tf.nn.elu(batch_norm, 'elu')\n\n with tf.variable_scope('sub2'):\n kernel = tf.get_variable('weights',\n [3, 3, out_filters, out_filters],\n initializer=xavier_initializer(\n dtype=tf.float32),\n dtype=tf.float32)\n conv = tf.nn.conv2d(conv, kernel, [1, 1, 1, 1], padding='SAME',\n name='conv1')\n bias = self.__batch_norm_wrapper(conv, decay=0.9999, shape=[0, 1, 2])\n\n with tf.variable_scope('subadd'):\n if in_filters != out_filters:\n kernel = tf.get_variable('weights', [1, 1, in_filters, out_filters],\n initializer=xavier_initializer(\n dtype=tf.float32),\n dtype=tf.float32)\n inputs = tf.nn.conv2d(\n inputs, kernel, [1, stride, stride, 1], padding='SAME')\n bias += inputs\n conv = tf.nn.elu(bias, 'elu')\n\n num = np.power(2, np.floor(np.log2(out_filters) / 2))\n\n grid = self.__put_activations_on_grid(conv, (int(num),\n int(out_filters /\n num)))\n tf.summary.image('sub2/activations', grid, max_outputs=1)\n\n return conv", "def inference(image, keep_prob):\r\n '''\r\n print(\"setting up vgg initialized conv layers ...\")\r\n model_data = utils.get_model_data(FLAGS.model_dir, MODEL_URL)\r\n\r\n mean = model_data['normalization'][0][0][0]\r\n mean_pixel = np.mean(mean, axis=(0, 1))\r\n\r\n weights = np.squeeze(model_data['layers'])\r\n print(\"weights.shape\",weights.shape)\r\n\r\n processed_image = utils.process_image(image, mean_pixel)'''\r\n\r\n with tf.variable_scope(\"inference\"):\r\n pooling_net,conv_final_layer = inference_op(image)\r\n #conv_final_layer = image_net[\"conv5_3\"]\r\n\r\n pool5 = utils.max_pool_2x2(conv_final_layer)\r\n\r\n W6 = utils.weight_variable([7, 7, 512, 4096], name=\"W6\")\r\n b6 = utils.bias_variable([4096], name=\"b6\")\r\n conv6 = utils.conv2d_basic(pool5, W6, b6)\r\n relu6 = tf.nn.relu(conv6, name=\"relu6\")\r\n if FLAGS.debug:\r\n utils.add_activation_summary(relu6)\r\n relu_dropout6 = tf.nn.dropout(relu6, keep_prob=keep_prob)\r\n\r\n W7 = utils.weight_variable([1, 1, 4096, 4096], name=\"W7\")\r\n b7 = utils.bias_variable([4096], name=\"b7\")\r\n conv7 = utils.conv2d_basic(relu_dropout6, W7, b7)\r\n relu7 = tf.nn.relu(conv7, name=\"relu7\")\r\n if FLAGS.debug:\r\n utils.add_activation_summary(relu7)\r\n relu_dropout7 = tf.nn.dropout(relu7, keep_prob=keep_prob)\r\n\r\n W8 = utils.weight_variable([1, 1, 4096, NUM_OF_CLASSESS], name=\"W8\")\r\n b8 = utils.bias_variable([NUM_OF_CLASSESS], name=\"b8\")\r\n conv8 = utils.conv2d_basic(relu_dropout7, W8, b8)\r\n # annotation_pred1 = 
tf.argmax(conv8, dimension=3, name=\"prediction1\")\r\n\r\n        # now to upscale to actual image size\r\n        deconv_shape1 = pooling_net[\"pool4\"].get_shape()\r\n        W_t1 = utils.weight_variable([4, 4, deconv_shape1[3].value, NUM_OF_CLASSESS], name=\"W_t1\")\r\n        b_t1 = utils.bias_variable([deconv_shape1[3].value], name=\"b_t1\")\r\n        # deconvolve (upsample) the layer-8 result; the channel count also changes from NUM_OF_CLASSESS to the channel count of layer 4\r\n        conv_t1 = utils.conv2d_transpose_strided(conv8, W_t1, b_t1, output_shape=tf.shape(pooling_net[\"pool4\"]))\r\n        fuse_1 = tf.add(conv_t1, pooling_net[\"pool4\"], name=\"fuse_1\")\r\n\r\n        deconv_shape2 = pooling_net[\"pool3\"].get_shape()\r\n        W_t2 = utils.weight_variable([4, 4, deconv_shape2[3].value, deconv_shape1[3].value], name=\"W_t2\")\r\n        b_t2 = utils.bias_variable([deconv_shape2[3].value], name=\"b_t2\")\r\n        conv_t2 = utils.conv2d_transpose_strided(fuse_1, W_t2, b_t2, output_shape=tf.shape(pooling_net[\"pool3\"]))\r\n        fuse_2 = tf.add(conv_t2, pooling_net[\"pool3\"], name=\"fuse_2\")\r\n\r\n        shape = tf.shape(image)\r\n        deconv_shape3 = tf.stack([shape[0], shape[1], shape[2], NUM_OF_CLASSESS])\r\n        W_t3 = utils.weight_variable([16, 16, NUM_OF_CLASSESS, deconv_shape2[3].value], name=\"W_t3\")\r\n        b_t3 = utils.bias_variable([NUM_OF_CLASSESS], name=\"b_t3\")\r\n        conv_t3 = utils.conv2d_transpose_strided(fuse_2, W_t3, b_t3, output_shape=deconv_shape3, stride=8)\r\n\r\n        annotation_pred = tf.argmax(conv_t3, dimension=3, name=\"prediction\")\r\n        print(\"annotation_pred.shape\",annotation_pred.shape)\r\n        print(\"conv_t3\",conv_t3)\r\n        print(\"tf.expand_dims(annotation_pred, dim=3)\",tf.expand_dims(annotation_pred, dim=3))\r\n        return tf.expand_dims(annotation_pred, dim=3), conv_t3" ]
[ "0.7122538", "0.7122538", "0.6950416", "0.6885206", "0.6869267", "0.6830916", "0.6811857", "0.6751886", "0.67322737", "0.6657507", "0.6656693", "0.66494304", "0.663799", "0.66285485", "0.66139823", "0.66124654", "0.6612394", "0.66063386", "0.6571061", "0.65616673", "0.65578496", "0.65578496", "0.65578496", "0.65578496", "0.65578496", "0.6546124", "0.65374076", "0.65209436", "0.6511597", "0.64950556", "0.64950556", "0.6489723", "0.6478628", "0.64470714", "0.64412045", "0.6413624", "0.6413448", "0.64117926", "0.6398555", "0.63956195", "0.6391282", "0.6383171", "0.6375937", "0.6363452", "0.6360105", "0.6354448", "0.6322739", "0.6322106", "0.6307013", "0.6296851", "0.6290344", "0.6290344", "0.6289414", "0.6289116", "0.6288351", "0.6286306", "0.62830454", "0.62728167", "0.6270248", "0.626726", "0.62647736", "0.6256348", "0.6254286", "0.62481105", "0.62452525", "0.624348", "0.6242813", "0.62422895", "0.62216306", "0.62194866", "0.6218229", "0.6211739", "0.6206071", "0.6200242", "0.6185371", "0.6177968", "0.6171059", "0.61675954", "0.61597276", "0.61561114", "0.6152964", "0.61509156", "0.61376333", "0.6131082", "0.6127554", "0.61169654", "0.61141443", "0.6106662", "0.61039823", "0.6102567", "0.61014456", "0.6097665", "0.6095125", "0.60902643", "0.60881746", "0.60851836", "0.60826236", "0.60816497", "0.6079041", "0.6072793" ]
0.61448115
82
Creates a new user profile object
def create_user(self, email, name, password=None):
    if not email:
        raise ValueError("Users must have an email address")

    email = self.normalize_email(email)
    user = self.model(email = email, name = name)

    user.set_password(password)
    user.save(using = self._db)

    return user
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_user_profile(instance, created, **_):\n if created:\n Profile.objects.create(user=instance)", "def createUserProfile(user):\n MyProfile.objects.get_or_create(user=user)", "def create_profile(username):\n user = User.objects.create(username=username)\n return Profile.objects.create(user=user)", "def create(self, validated_data):\n request = self.context.get('request')\n profile = Profile(**validated_data)\n profile.user = request.user\n profile.save()\n return profile", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def profile_create(faker_obj=fake_init()):\n profile = faker_obj.simple_profile()\n user = User.objects.create(\n username=profile[\"username\"],\n email=profile[\"mail\"],\n password=profile[\"username\"][::-1],\n )\n return user.id", "def create_profile(sender, **kw):\n user = kw['instance']\n if kw['created']:\n profile = UserProfile(user=user)\n profile.save()", "def create_profile_for_new_user(sender, created, instance, **kwargs):\n if created:\n profile = self.get_model('profile')(user=instance)\n profile.save()", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n user_profile = UserProfile.objects.create(user=instance)", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n profile = UserProfile()\n profile.user = instance\n profile.email=instance.email\n profile.save()", "def create_user(self, *args, **kwargs):\n user = User.objects.create_user(*args, **kwargs)\n return get_profile(user)", "def create_profile(sender, **kwargs):\n user = kwargs[\"instance\"]\n if kwargs[\"created\"]:\n user_profile = Profile(user=user)\n user_profile.save()", "def create_user_profile(sender, instance, created, **kwargs):\n\n if created:\n user_profile = UserProfile.objects.create(user=instance)", "def create_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def create(self, validated_data):\r\n user_data = validated_data.pop('user')\r\n user = UserSerializer.create(UserSerializer(), validated_data = user_data)\r\n profile, created = Profile.objects.update_or_create(user = user,\r\n bio = validated_data.pop('bio'),\r\n location = validated_data.pop('location'),\r\n birth_date = validated_data.pop('birth_date'))\r\n return profile", "def create_profile(sender, instance, created, **kwargs):\n if created: \n profile, new = UserProfile.objects.get_or_create(user=instance)", "def create_profile(self, user, *args, **kwargs):\n salt = hashlib.sha1(str(random.random())).hexdigest()[:5]\n activation_key = hashlib.sha1(salt + user.username).hexdigest()\n return self.create(user=user, activation_key=activation_key, **kwargs)", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n # create new Stellar account\n stellar.api.create_account(user=instance)", "def create_profile(sender, instance, created, **kwargs):\n if created:\n profile, created = UserProfile.objects.get_or_create(user=instance)", "def create_profile(sender, **kwargs):\n\n # I import profile here cause i can't import it right in the top.\n from .profiles import Profile\n\n user = kwargs['instance']\n\n Profile.objects.get_or_create(user=user)", "def create_profile_for_new_users(sender, 
instance, created, **kwargs):\n if not created:\n return\n\n profile = Profile.objects.filter(user=instance).first()\n if profile is None:\n profile = Profile(user=instance)\n profile.save()", "def create_profile(self, user):\n salt = sha.new(str(random.random())).hexdigest()[:5]\n activation_key = sha.new(salt+user.username).hexdigest()\n# prepend \"key_\" to the key_name, because key_names can't start with numbers\n registrationprofile = RegistrationProfile(user=user, activation_key=activation_key)\n db = DB_Session()\n db.add(registrationprofile)\n db.flush()\n db.refresh(registrationprofile)\n db.commit()\n db.close()\n return registrationprofile", "async def create_profile_for_user(self, *, profile_create: ProfileCreate) -> ProfileInDB:\n created_profile = await self.db.fetch_one(query=CREATE_PROFILE_FOR_USER_QUERY, values=profile_create.dict())\n return ProfileInDB(**created_profile)", "def create_user_profile(sender, **kwargs):\n\n if kwargs['created']:\n UserProfile.objects.create(user=kwargs['instance'])", "def create(self, validated_data):\n user_data = validated_data.pop('user')\n user = UserSerializer.create(UserSerializer(), validated_data=user_data)\n profile, created = Profile.objects.update_or_create(\n user=user,\n avatar=validated_data.pop('avatar'),\n biography=validated_data.pop('biography'),\n link=validated_data.pop('link') \n )\n return profile", "def create(self, data):\n # Make User\n username = data['email'].split(\"@\")[0]\n user = User.objects.create_user(**data, username=username, is_verified=False, is_client=True)\n Profile.objects.create(user=user)\n send_confirmation_email.delay(user_pk=user.pk)\n return user", "def create_profile(self, user):\n salt = sha.new(str(random.random())).hexdigest()[:5]\n activation_key = sha.new(salt+user.username).hexdigest()\n return self.create(user=user,\n activation_key=activation_key)", "def create_profile(self, user):\r\n salt = sha.new(str(random.random())).hexdigest()[:5]\r\n activation_key = sha.new(salt+user.username).hexdigest()\r\n return self.create(user=user,\r\n activation_key=activation_key)", "def profile(**kwargs):\n defaults = {'name': 'Test K. 
User', 'bio': 'Some bio.',\n 'website': 'http://support.mozilla.com',\n 'timezone': None, 'country': 'US', 'city': 'Mountain View',\n 'locale': 'en-US'}\n if 'user' not in kwargs:\n u = user(save=True)\n defaults['user'] = u\n defaults.update(kwargs)\n\n p = Profile(**defaults)\n p.save()\n return p", "def createProfile(self):\n if self.profile:\n return\n from soc.modules.gsoc.models.profile import GSoCProfile\n user = self.createUser()\n properties = {'link_id': user.link_id, 'student_info': None, 'user': user,\n 'parent': user, 'scope': self.program, 'status': 'active'}\n self.profile = seeder_logic.seed(GSoCProfile, properties)", "def _profile(user):\n profile = UserProfile()\n profile.user_id = user.id\n profile.save()", "def create(cls, **kwargs):\n if \"user\" not in kwargs:\n with mute_signals(post_save):\n profile = ProfileFactory.create()\n kwargs[\"user\"] = profile.user\n return super().create(**kwargs)", "def create_profile(sender, instance, signal, created, **kwargs):\n \n from phylocommons.models import UserProfile\n \n if created:\n UserProfile(user = instance).save()", "def create (self, validated_data):\n user = models.UserProfile.objects.create_user(\n email = validated_data ['email'],\n name = validated_data ['name'],\n password = validated_data ['password']\n )\n\n return user", "def create(self, validated_data):\n\n user = models.UserProfile(\n email=validated_data['email'],\n name=validated_data['name']\n )\n\n user.set_password(validated_data['password'])\n user.save()\n return user", "def create(self, validated_data):\n\n # Here we actually create a new user.\n user = models.UserProfile(\n email = validated_data['email'],\n name = validated_data['name']\n )\n\n user.set_password(validated_data['password'])\n\n # Here we save the object to the database.\n user.save()\n\n return user", "def profile(user, **kwargs):\n defaults = {'user': user, 'name': 'Test K. 
User', 'bio': 'Some bio.',\n 'website': 'http://support.mozilla.com',\n 'timezone': None, 'country': 'US', 'city': 'Mountain View'}\n defaults.update(kwargs)\n\n p = Profile(**defaults)\n p.save()\n return p", "def create(self, validated_data):\n\n user = models.UserProfile(\n email=validated_data['email'],\n name=validated_data['name']\n )\n\n user.set_password(validated_data['password'])\n user.save()\n\n return user", "def create(self, validated_data):\n\n user = models.UserProfile(\n username=validated_data['username'],\n email=validated_data['email'],\n first_name=validated_data['first_name'],\n mobile_number=validated_data['mobile_number'],\n )\n\n user.set_password(validated_data['password'])\n user.save()\n\n return user", "def create( self , validated_data ) :\n\n user = models.UserProfile(\n email = validated_data[ 'email' ] ,\n name = validated_data[ 'name' ]\n )\n\n user.set_password( validated_data[ 'password' ] )\n user.save( )\n\n return user", "def create(self, email, password=None, *args, **kwargs):\n user = self.model(email=email, **kwargs)\n user.set_password(password)\n user.save()\n profile = Profile(\n user=user,\n )\n profile.save()\n return user", "def create_profile(self,user):\n salt= sha.new(str(random.random())).hexdigest()[:5]\n activation_key = sha.new(salt+user.username).hexdigest()\n\n return RegistrationProfile(user=user,\n activation_key=activation_key)", "def make_profile_for_user(sender, instance, **kwargs):\n if kwargs['created']:\n new_profile = ImagerProfile(user=instance)\n new_profile.save()", "def create(self, validated_data):\n user = UserProfile.objects.create_user(\n email=validated_data[\"email\"],\n name=validated_data[\"name\"],\n password=validated_data[\"password\"]\n )\n\n return user", "def create_profile(sender, instance, signal, created, **kwargs):\n \n from tutablr_app.models import UserProfile\n \n if created:\n UserProfile.objects.get_or_create(user = instance);\n # Do additional stuff here if needed, e.g.\n # create other required related records", "def create(self, validated_data):\n user = UserProfile.objects.create_user(\n email=validated_data['email'],\n first_name = validated_data['first_name'],\n last_name = validated_data['last_name'],\n password = validated_data['password']\n )\n return user", "def create_user_profile_callback(sender, instance, created, **kwargs):\n try:\n instance.get_profile()\n except UserProfile.DoesNotExist:\n UserProfile.objects.create(user=instance)", "def create_user_profile(email, **kwargs): # POST\n user_exists = coll(\"users\").find_one({\"_id\": email})\n\n if user_exists:\n return {\"message\": \"User already exists\"}, 400\n\n # NOTE Doesn't make sense for a person to have prizes only a team should have this\n coll(\"users\").insert_one(\n {\n \"_id\": email,\n \"skills\": kwargs[\"skills\"],\n \"prizes\": kwargs[\"prizes\"],\n \"bio\": kwargs[\"bio\"],\n \"github\": kwargs[\"github\"],\n \"interests\": kwargs[\"interests\"],\n \"seriousness\": kwargs[\"seriousness\"],\n \"team_id\": \"\",\n \"hasateam\": False,\n }\n )\n return {\"message\": \"User profile successfully created\"}, 201", "def create(self, validated_data):\n user = models.UserProfile.objects.create_user(\n email=validated_data['email'],\n username=validated_data['username'],\n password=validated_data['password'],\n\n )\n\n return user", "def create_user(username, password, user_fname, user_lname, email, profile_picture=\"/static/img/profile_pictures/default.png\"):\n\n user = User(username=username, password=password, 
user_fname=user_fname, user_lname=user_lname, profile_picture=profile_picture, email=email)\n\n db.session.add(user)\n db.session.commit()\n\n return user", "def create_player_profile(sender, **kwargs):\n if kwargs.get('created') is True:\n PlayerProfile.objects.create(user=kwargs.get('instance'))", "def manage_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)\n else:\n instance.profile.save()", "def create(self, validated_data):\n ## overriding default create\n\n user = UserProfile.objects.create_user(\n email = validated_data['email'],\n name = validated_data['name'],\n password=validated_data['password']\n )\n \n return user", "def create_or_update_user_profile(sender, instance, created, **kwargs):\n\n # Create profile and set ACTIVE status to account -- TODO : ACTIVE STATUS\n if created:\n Profile.objects.create(user=instance, status=Status.get_or_create_status(strings.ACTIVE_STATUS))\n\n else:\n instance.profile.save()", "def create_or_update_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.get_or_create(user=instance)\n instance.profile.save()", "def create_my_profile(\n body: Optional[UserProfilePrivateCreate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = CreateMyProfile.create(\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def save(self):\n data = self.cleaned_data\n data.pop('password_confirmation')\n user = User.objects.create_user(**data)\n profile = Profile(user=user)\n profile.save()", "def create(self, validated_data):\n user = User.objects.create(\n first_name=validated_data.get('first_name'),\n middle_name=validated_data.get('middle_name'),\n last_name=validated_data.get('last_name'),\n email=validated_data.get('email'),\n username=validated_data.get('username'),\n mobile_number=validated_data.get('mobile_number'),\n gender=validated_data.get('gender'),\n is_active=validated_data.get('is_active'),\n country=validated_data.get('country'),\n address=validated_data.get('address'),\n role=validated_data.get('role'),\n )\n if self.context['request'].data.get('file_profile_picture') is not None:\n user.profile_picture = self.context['request'].data['file_profile_picture']\n if self.context['request'].data.get('file_signature') is not None:\n user.signature = self.context['request'].data['file_signature']\n user.set_password(validated_data.get('password'))\n user.save()\n return user", "def create(self, validated_data):\n # We override the create function by accessing the method 'create_user(args)'\n # defined in 'objects' that is an object\n # which references UserProfileManager class into UserProfile class.\n user = models.UserProfile.objects.create_user(\n email = validated_data['email'],\n name = validated_data['name'],\n password = validated_data['password']\n )\n\n return user", "def create(self, request, *args, **kwargs):\n response = super().create(request, *args, **kwargs)\n profile = response.data\n user_name = profile.get(\"username\")\n cache.set(f\"{USER_PROFILE_PREFIX}{user_name}\", profile)\n return response", "def create_user(request):\n serializer = UserProfileSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data,status=status.HTTP_201_CREATED)\n else:\n return 
Response(serializer.errors, status = status.HTTP_400_BAD_REQUEST)", "def create_user(user, first_name, last_name, major, bio):\n return userAccount.objects.create(user=user, first_name=first_name, last_name=last_name, major=major, bio=bio)", "def post (self):\n\t\tobj = request.get_json()\n\n\t\tif (('username' not in obj) or ('email' not in obj) or ('secret' not in obj)):\n\t\t\treturn {\"status\":\"MISSING_PARAMS\"}\n\t\telif (len(obj['username'])<4 or len(obj['username'])>25):\n\t\t\treturn {\"status\":\"USER_NAME_LENGTH\"}\n\n\t\ttry:\n\t\t\tobj['username'].decode('ascii')\n\t\texcept UnicodeEncodeError:\n\t\t\treturn {'status':'INVALID_NAME'}\n\n\t\tdb = getattr(g, 'db', None)\n\t\twith db as cur:\n\t\t\tqry = \"INSERT INTO profiles VALUES (default, %s, %s, FALSE, %s, '', '', NULL, NULL);\"\n\t\t\ttry:\n\n\t\t\t\tsecret = obj['secret']\n\t\t\t\tif isinstance(secret, unicode):\n\t\t\t\t\tsecret = secret.encode('utf-8')\n\n\t\t\t\thashed = hashpw(secret, gensalt())\n\t\t\t\tcur.execute(qry, (obj['username'],obj['email'], hashed))\n\t\t\t\tdb.commit()\n\t\t\t\treturn {\"status\":\"USER_CREATED\"}\n\t\t\texcept Exception as e:\n\t\t\t\tprint \"Error\", e\n\t\t\t\treturn {\"status\":\"USER_EXISTS\"}", "def create_or_update_user_profile(sender, instance, created, **kwargs):\n _, created = UserProfile.objects.get_or_create(user=instance)\n if created and instance.email != \"\":\n instance.profile.email = instance.email\n instance.profile.save()", "def create(self, validated_data):\n # print('Validated Data',validated_data['profile'].get('group'))\n group = validated_data['profile'].get('group')\n profile_data = validated_data.pop('profile')\n password = validated_data.pop('password')\n user = User(**validated_data)\n user.set_password(password)\n user.save()\n \"\"\"\n After the creation of the user he is added to a particular group.\n \"\"\"\n user.groups.add(Group.objects.get(name=group))\n UserProfile.objects.create(user=user, **profile_data)\n return user", "def save(self, profile_callback=None):\r\n new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],\r\n password=self.cleaned_data['password1'],\r\n email=self.cleaned_data['email'],\r\n profile_callback=profile_callback)\r\n return new_user", "def create_user_object(self, request):\r\n user = {\r\n \"first_name\": request.form.get(\"first_name\"),\r\n \"last_name\": request.form.get(\"last_name\"),\r\n \"age\": request.form.get(\"age\"),\r\n \"cpr_number\": request.form.get(\"CPR\"),\r\n \"email\": request.form.get(\"email\"),\r\n \"phone_number\": request.form.get(\"phone_number\"),\r\n \"password\": PasswordHasher().hash(request.form.get(\"password\")),\r\n \"bank_account\": str(BankAccount(\"Savings\", 1000.00).store_account().inserted_id),\r\n \"crypto_wallet\": str(CryptoWallet(\"Bitcoin\", 0.0045).store_account().inserted_id)\r\n }\r\n return user", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def create_user_profile(IamUserArn=None, SshUsername=None, SshPublicKey=None, 
AllowSelfManagement=None):\n pass", "def test_create_user(profile_data):\n email = \"email@localhost\"\n username = \"username\"\n user = api.create_user(username, email, profile_data, {\"first_name\": \"Bob\"})\n\n assert isinstance(user, User)\n assert user.email == email\n assert user.username == username\n assert user.first_name == \"Bob\"\n\n if \"name\" in profile_data:\n assert user.profile.name == profile_data[\"name\"]\n else:\n assert user.profile.name is None", "def test_user_profile_created(self):\n\t\tself.assertTrue(\n\t\t\thasattr(self.user, 'profile'),\n\t\t\t\"User profile does not exist.\"\n\t\t)\n\n\t\tself.assertTrue(\n\t\t\tisinstance(self.user.profile, UserProfile),\n\t\t\t\"User profile is of type {}, expected to be of type {}\".format(\n\t\t\t\ttype(self.user.profile), type(UserProfile)\n\t\t\t)\n\t\t)", "def create_user(request):\n message = None\n if request.method == 'POST':\n form = UserCreationForm(request.POST)\n if form.is_valid():\n # This creates a new User in the database\n new_user = form.save()\n # now we create a new blank profile, link it to the new user and save it\n new_profile = Profile()\n new_profile.user = new_user\n new_profile.save()\n # Get the user object we just created\n new_user = authenticate(username=form.cleaned_data['username'],\n password=form.cleaned_data['password1'])\n login(request, new_user)\n return HttpResponseRedirect('/index')\n else:\n form = UserCreationForm()\n\n return render(request, 'woofer/show_form.html', {\n 'form' : form,\n 'message' : message,\n 'form_action' : reverse('create-user'),\n 'title' : \"Create Account\"\n })", "def create_pootle_profile(sender, instance, **kwargs):\n try:\n profile = instance.get_profile()\n except PootleProfile.DoesNotExist:\n profile = PootleProfile(user=instance)\n profile.save()", "def save_user_profile(instance, **_):\n instance.profile.save()", "def perform_create(self, serializer):\n serializer.save(user_profile = self.request.user)", "def create_registration_profile(self, user):\n salt = hashlib.sha1(str(random.random())).hexdigest()[:10]\n activation_key = hashlib.sha1(salt + user.username).hexdigest()\n\n return self.create(user=user, activation_key=activation_key)", "def makeProfile(request):\n upr = UserProfile()\n upr.user = request.user\n upr.image = \"images/no-pic.png\"\n upr.save()", "def perform_create(self, serializer):\n print(self.request.user)\n serializer.save(user_profile=self.request.user)", "def create_user():\n first_name = request.form['first_name'].capitalize()\n last_name = request.form['last_name'].capitalize()\n image_url = request.form['image_url']\n\n new_user = User(first_name=first_name, last_name=last_name, image_url=image_url)\n db.session.add(new_user)\n db.session.commit()\n\n return redirect(\"/users\")", "def create_new_user(self):\n username = 'pseudo'\n email = 'carole@tests.com'\n password = '00000000'\n user_created = self.user.objects.create_user(id=1, username=username,\n email=email, password=password)\n HistoryUser.objects.create(user=user_created)\n StatusUser.objects.create(user=user_created)\n\n return user_created", "def perform_create(self, serializer): # this method runs everytime a POST method is called\n serializer.save(user_profile=self.request.user)", "def create_user(self, email, name, phone1, password=None, signed_up=timezone.localtime(),):\n if not email:\n raise ValueError(_('Users must have an email address'))\n\n user = self.model(\n email=self.normalize_email(email),\n name=name,\n phone1=phone1,\n signed_up=signed_up,\n 
)\n\n user.set_password(password)\n user.save(using=self._db)\n MyUserProfile.objects.create(myuser=user) \n NotifClick.objects.create(myuser=user) \n\n return user", "def public_create_user_profile(\n user_id: str,\n body: Optional[UserProfileCreate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicCreateUserProfile.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def new_user(first_name, sur_name, user_name, email, password):\n new_user = User(first_name, sur_name, user_name, email, password)\n return new_user", "def save(self):\n new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],\n password=self.cleaned_data['password1'],\n email=self.cleaned_data['email'],\n firstname=self.cleaned_data['first_name'],\n lastname=self.cleaned_data['last_name'],\n agree=self.cleaned_data['tos'])\n return new_user", "def save(self, commit=True):\n user = super(UserCreationForm, self).save(commit=False)\n user.set_password(self.cleaned_data['password1'])\n\n user.save()\n\n # Making user profile and assigning to CPCESU\n # CPCESU\n #group = Organization.objects.get(name='Colorado Plateau')\n\n # New profile with group\n profile = UserProfile(user=user, first_name=self.cleaned_data.get('first_name'),\n last_name=self.cleaned_data.get('last_name'))\n profile.save()\n\n return user", "def perform_create(self,serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self,serializer):\n serializer.save(user_profile=self.request.user)", "def create_user():\n new_user = User(id=login_session['gplus_id'],\n name=login_session['username'],\n email=login_session['email'],\n picture=login_session['picture'])\n session.add(new_user)\n session.flush()\n session.commit()\n user = session.query(User).filter_by(email=login_session['email']).one()\n return user.id", "def create_user(first_name,last_name,email,password):\n\n\tnew_user = User(first_name,last_name,email,password)\n\treturn new_user", "def test_profile_creation(self):\n new_user = UserModel().objects.create_user(**self.user_info)\n profile = self.registration_profile.objects.create_profile(new_user)\n\n self.assertEqual(self.registration_profile.objects.count(), 1)\n self.assertEqual(profile.user.id, new_user.id)\n self.assertTrue(re.match('^[a-f0-9]{40,64}$', profile.activation_key))\n self.assertEqual(str(profile),\n \"Registration information for alice\")", "def save_user_receiver(sender, instance, created, *args, **kwargs):\n print(\"profile created\", instance)\n if created:\n new_profile = UserProfile.objects.get_or_create(owner=instance)", "def create_user_profile(sender, instance, created, **kwargs):\n if created and instance.pk >= 0:\n UserProfile.objects.create(user=instance)\n \n # get default group, but not for anonymous\n try:\n default_group = Group.objects.get(name = \"default_users\")\n instance.groups.add(default_group)\n except:\n pass", "def create_new_user(first_name, last_name, email, password):\n \n new_user = User(first_name, last_name, email, password)\n db.session.add(new_user)\n db.session.commit()\n \n # link a root storage folder to the user\n root_folder = Folder()\n db.session.add(root_folder)\n db.session.commit()\n new_user.storage_root_id = root_folder.id\n 
new_user.storage_root = root_folder\n db.session.commit()\n\n # link usage tracking to the user\n usage = Usage()\n usage.user_id = new_user.id\n new_user.usage = usage\n db.session.add(usage)\n db.session.commit()\n\n # link a billing address to the user\n billing_address = BillingAddress()\n billing_address.user_id = new_user.id\n new_user.billing_address = billing_address\n db.session.add(billing_address)\n db.session.commit()\n\n # link settings to the User\n settings = Settings()\n settings.user_id = new_user.id\n new_user.settings = settings\n db.session.add(settings)\n db.session.commit()" ]
[ "0.8367903", "0.81304103", "0.81051487", "0.8098728", "0.8076744", "0.8076744", "0.8076744", "0.805672", "0.8050523", "0.80482244", "0.8038606", "0.800379", "0.7968006", "0.79638684", "0.7961977", "0.79449934", "0.7897795", "0.78840756", "0.7868318", "0.786531", "0.7865149", "0.7844125", "0.78215057", "0.7818519", "0.78048044", "0.77965426", "0.7782059", "0.7739201", "0.77357006", "0.773313", "0.77257663", "0.7666976", "0.763165", "0.76266545", "0.7611544", "0.7611496", "0.75858426", "0.75841945", "0.7572226", "0.75609314", "0.7554326", "0.75438845", "0.75415033", "0.7523439", "0.7506414", "0.7495846", "0.74928945", "0.7486313", "0.7465041", "0.7435061", "0.74234974", "0.7421789", "0.74078584", "0.7377419", "0.7376855", "0.7316854", "0.7303552", "0.730217", "0.7297273", "0.72806805", "0.72656715", "0.7232872", "0.7198391", "0.7165518", "0.71447617", "0.7142738", "0.71282804", "0.711605", "0.7102596", "0.7092516", "0.7092516", "0.7092516", "0.7092516", "0.7092516", "0.7092516", "0.7067333", "0.7066723", "0.70586425", "0.70543987", "0.7053952", "0.7049318", "0.7036003", "0.7026296", "0.7023414", "0.70161456", "0.7007824", "0.6991668", "0.69773287", "0.6974231", "0.6961204", "0.69502187", "0.69404274", "0.6937635", "0.6936801", "0.6936801", "0.6932176", "0.6921922", "0.6919056", "0.69133765", "0.69116884", "0.69051534" ]
0.0
-1
Creates and saves a new superuser with given details
def create_superuser(self, email, name, password):
    user = self.create_user(email, name, password)

    user.is_superuser = True
    user.is_staff = True

    user.save(using=self._db)

    return user
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_superuser(self, su_id, first_name, last_name, email, phone_number, password):\n user = self.create_user(\n su_id,\n first_name,\n last_name,\n email,\n phone_number,\n password=password,\n )\n user.is_admin = True\n user.save(using=self._db)\n return user", "def _create_superuser(username, email, password):\n if username and email and password:\n user, created = User.objects.get_or_create(pk=defaults.USERWARE_SUPERUSER_ID)\n if user:\n user.username = username\n user.email = email\n user.set_password(password)\n user.is_staff = True\n user.is_active = True\n user.is_superuser = True\n user.save()\n action = \"Created\" if created else \"Updated\"\n print >> sys.stderr, \"{} Superuser: [username={}, email={}, id={}]\".format(action, username, email, user.id)", "def create_user(self):\n User.objects.create_user('test', 'testing@test.com', 'testing')", "def createsuperuser():\n\n email = prompt('User E-Mail')\n email_confirm = prompt('Confirm E-Mail')\n\n if not email == email_confirm:\n sys.exit('\\nCould not create user: E-Mail did not match')\n\n if not EMAIL_REGEX.match(email):\n sys.exit('\\nCould not create user: Invalid E-Mail addresss')\n\n password = prompt_pass('User password')\n password_confirm = prompt_pass('Confirmed password')\n\n if not password == password_confirm:\n sys.exit('\\nCould not create user: Passwords did not match')\n\n datastore = SQLAlchemyUserDatastore(db, User, Role)\n datastore.create_user(\n email=email,\n password=encrypt_password(password),\n active=True,\n super_user=True)\n\n db.session.commit()", "def create_superuser(self, username, email, password):\n print(\"creating super user....\")\n user = self.create_user(\n\n username=username,\n password=password,\n email = email,\n commit=False,\n )\n user.is_staff = True\n user.is_superuser = True\n user.save(using=self._db)\n return user", "def create_superuser(self, username, firstname, lastname, email, password):\n user = self.create_user(\n username=username,\n firstname=firstname,\n lastname=lastname,\n email=email,\n password=password,\n )\n user.is_admin = True\n user.save(using=self._db)\n return user", "def create_superuser(self, email, name, phone1, password=None, signed_up=timezone.localtime(),):\n user = self.create_user(\n email,\n password=password,\n name=name,\n phone1=phone1,\n signed_up=signed_up,\n )\n user.is_admin = True\n user.save(using=self._db)\n return user", "def create_superuser(self, email, full_name, password=None):\n print(\"Is this the method being called\")\n user = self.create_user(\n email,\n full_name,\n password=password,\n )\n user.staff = True\n user.admin = True\n user.save(using=self._db)\n return user", "def create_superuser(self, username, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError(_('Superuser must have is_staff=True.'))\n if extra_fields.get('is_superuser') is not True:\n raise ValueError(_('Superuser must have is_superuser=True.'))\n user = self.model(username=username, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def create_superuser(self,FirstName,LastName,MobileNo, EmailId, password=None, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n\n if extra_fields.get('is_staff') is not True:\n logger.error('creating super user with is_staff=False.')\n raise 
ValueError('Superuser must have is_staff=True.')\n if extra_fields.get('is_superuser') is not True:\n logger.error('creating super user with is_superuser=False.')\n raise ValueError('Superuser must have is_superuser=True.')\n return self._create_user(FirstName,LastName, EmailId, MobileNo , password, **extra_fields)", "def create_superuser(self, email, password, **extrac_fields):\n\n user = self.create_user(email, password)\n\n user.is_superuser = True\n user.is_staff = True\n user.save(using=self._db)\n return user", "def create_superuser(self,email,name,password):\n\n user = self.create_user(email, name, password)\n user.is_superuser = True\n user.is_staff = True\n\n user.save(using=self._db)\n return user", "def create_user(self):\n return User.objects.create_user(**self.user_data)", "def create(self, validated_data):\n # user = super().create(validated_data)\n # user.set_password(validated_data['password'])\n # user.save()\n\n user = User.objects.create_user(**validated_data)\n return user", "def create(self, validated_data:tuple):\n user = user_details.objects.create(user_name=validated_data[0], email=validated_data[1], password=validated_data[2])\n return user", "def create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n\n return self.create_user(email, password, **extra_fields)", "def save(self):\n data = self.cleaned_data\n del data['password_confirmation']\n return User.objects.create_user(**data)", "def create_superuser(self, username, email, persona_id, nombre_completo, password, **kwargs):\n return self._create_user(username, email, persona_id, nombre_completo, password, True, True, **kwargs)", "def create_superuser(self, first_name, last_name, username, email, date_of_birth, password):\n user = self.model(\n first_name = first_name,\n last_name = last_name,\n username = username,\n email=self.normalize_email(email),\n date_of_birth=date_of_birth,\n password=password,\n\n )\n user.is_admin = True\n user.save(using=self._db)\n return user", "def create(self,validated_data):\n\n user = models.User.object.create_user(\n email = validated_data['email'],\n full_name = validated_data['full_name'],\n phone = validated_data['phone'],\n password = validated_data['password']\n )\n\n #user.set_password(validated_data['password'])\n user.save()\n return user", "def create_superuser(self, username, email, password):\n\t\tuser = self._create_user(username, email, password)\n\t\tuser.is_admin = True\n\t\tuser.is_author = True\n\t\tuser.save(using=self._db)\n\t\treturn user", "def create_superuser(self, email, password, firstname, lastname):\n user = self.create_user(\n firstname,\n lastname,\n email,\n '',\n password=password,\n )\n user.is_staff = True\n user.is_superuser = True\n user.save(using=self._db)\n return user", "def create_superuser(self, username, major, password):\r\n user = self.create_user(username, major, password=password,)\r\n user.is_admin = True\r\n user.save(using=self._db)\r\n return user", "def create_superuser(self,name,email,password):\n\n user = self.Create_user(email,name,password)\n user.is_superuser = True\n user.is_staff = True\n user.save(using=self._db)\n \n return user", "def create_superuser(self, email, name, password):\n\n user = self.create_user(email, name, password)\n\n # Make this user an admin.\n user.is_superuser = True\n user.is_staff = True\n user.save(using=self._db)\n\n return user", "def 
create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n\n\n if extra_fields.get('is_superuser') is False:\n raise ValueError(_('Superuser must have is_superuser=True.'))\n return self._create_user(email, password, **extra_fields)", "def create_new_user(self):\n username = 'pseudo'\n email = 'carole@tests.com'\n password = '00000000'\n user_created = self.user.objects.create_user(id=1, username=username,\n email=email, password=password)\n HistoryUser.objects.create(user=user_created)\n StatusUser.objects.create(user=user_created)\n\n return user_created", "def create_superuser(self, email, rut, nombres, apellidos, password):\n user = self.create_user(\n email,\n rut=rut,\n nombres=nombres,\n apellidos=apellidos,\n password=password,\n )\n user.is_admin = True # Esta es la unica diferencia entre un superusuario y un usuario normal.\n user.save(using=self._db)\n return user", "def create_superuser(self, email, username, first_name, last_name, password):\n\n user = self.create_user(\n email,\n username,\n first_name,\n last_name,\n password\n )\n\n user.is_superuser = True\n user.is_staff = True\n\n user.save(using=self._db)\n\n return user", "def create_superuser(self, email, password):\n user = self.create_user(email, password)\n user.is_staff = True\n user.is_superuser = True\n user.save(using=self._db)\n\n return user", "def create_user(user, first_name, last_name, major, bio):\n return userAccount.objects.create(user=user, first_name=first_name, last_name=last_name, major=major, bio=bio)", "def test_if_created_superusers_permissions(self):\r\n payload = {\r\n \"email\": \"t@t.pl\",\r\n \"password\": \"password\",\r\n \"name\": \"asdasd\",\r\n \"is_superuser\": False,\r\n }\r\n\r\n res = self.client_superuser.post(reverse(CREATE_USER_URL),data=payload)\r\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)", "def create_user(self):\n u = USER.objects.create(username='test_user1',\n email='test_email@example.com', )\n u.set_password('test_password')\n u.save()\n self.user = u\n return u", "def create_superuser(self, username, email, password=None):\n if password is None:\n raise ValueError('Password should not be none')\n\n # Creating user instance and saving it to database\n user = self.create_user(username, email, password)\n\n # Assigning current user as superuser\n user.is_superuser = True\n user.is_staff = True\n\n # Saving the modified data to the database\n user.save()\n\n return user", "def create(self, validated_data):\n ## overriding default create\n\n user = UserProfile.objects.create_user(\n email = validated_data['email'],\n name = validated_data['name'],\n password=validated_data['password']\n )\n \n return user", "def create_user(self, email, password=None, **extra_fields):\n extra_fields.setdefault('is_staff', False)\n extra_fields.setdefault('is_superuser', False)\n return self._create_user(email, password, **extra_fields)", "def create_superuser(self, email, password, **extra_fields):\n user = self.model(\n email = email,\n **extra_fields \n )\n user.set_password(password)\n user.is_admin =True\n user.is_superuser=True\n user.is_staff=True\n user.save(using=self._db)\n return user", "def create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError('Superuser must have 
is_staff=True.')\n if extra_fields.get('is_superuser') is not True:\n raise ValueError('Superuser must have is_superuser=True.')\n\n return self._create_user(email, password, **extra_fields)", "def create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault(\"is_staff\", True)\n extra_fields.setdefault(\"is_superuser\", True)\n extra_fields.setdefault(\"is_active\", True)\n\n if extra_fields.get(\"is_staff\") is not True:\n raise ValueError(_(\"Superuser must have is_staff=True.\"))\n if extra_fields.get(\"is_superuser\") is not True:\n raise ValueError(_(\"Superuser must have is_superuser=True.\"))\n return self.create_user(email, password, **extra_fields)", "def create_superuser(self, email, password):\n user = self.create_user(\n email=email,\n password=password,\n )\n user.is_staff = True\n user.is_teacher = True\n user.save(using=self._db)\n # generate a staff detail with random ID\n from assignments.models import StaffDetail\n StaffDetail.objects.create(staff_id=\"STF-%s\" %(randomString()),\n user=user,\n staff_role='MAN')\n return user", "def create_superuser(self, username, email, password):\n user = self.create_user(\n username,\n email,\n password=password,\n )\n user.admin = True\n user.staff = True\n user.save(using=self._db)\n return user", "def create_superuser(self,email,password):\n user = self.create_user(email,password)\n user.is_staff = True\n user.is_superuser = True\n user.save(using=self._db)\n return user", "def create_superuser(self, email, password, **extra_fields):\n return self._create_user(email, password, True, True, is_active=True,\n **extra_fields)", "def create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_manager', True)\n extra_fields.setdefault('is_active', True)\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError(_('Superuser must have is_staff=True.'))\n if extra_fields.get('is_manager') is not True:\n raise ValueError(_('Superuser must have is_manager=True.'))\n if extra_fields.get('is_superuser') is not True:\n raise ValueError(_('Superuser must have is_superuser=True.'))\n return self.create_user(email, password, **extra_fields)", "def save(self):\n data = self.cleaned_data\n data.pop('password_confirmation')\n user = User.objects.create_user(**data)\n profile = Profile(user=user)\n profile.save()", "def create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError(_('Superuser must have is_staff=True;'))\n if extra_fields.get('is_superuser') is not True:\n raise ValueError(_('Superuser must have is_superuser=True'))\n return self.create_user(email, password, **extra_fields)", "def create(self, validated_data):\n user = super(UserProfileSerializer, self).create(validated_data)\n user.set_password(validated_data['password'])\n user.save()\n\n return user", "def create_superuser(self, email, first_name='', last_name='', password=None, **extra_fields):\n return self._create_user(email, password, first_name, last_name, is_staff=True, is_superuser=True,\n **extra_fields)", "def create_new_user():\n return get_user_model().objects.create_user(\n email='test@gmail.com',\n password='test@londodnjisdjfois',\n username='tempusername'\n )", "def create_superuser(self, username, password, email=None):\n 
user = self.create_user(username, password)\n user.is_staff = True\n user.is_superuser = True\n user.save()\n\n return user", "def create_superuser(self, email, password, **kwargs):\n user = self.create_user(email, password, **kwargs)\n user.is_superuser = True\n user.is_staff = True\n user.save()\n\n return user", "def create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError(_('Superuser must have is_staff=True.'))\n if extra_fields.get('is_superuser') is not True:\n raise ValueError(_('Superuser must have is_superuser=True.'))\n return self.create_user(email, password, **extra_fields)", "def create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError(_('Superuser must have is_staff=True.'))\n if extra_fields.get('is_superuser') is not True:\n raise ValueError(_('Superuser must have is_superuser=True.'))\n return self.create_user(email, password, **extra_fields)", "def create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError(_('Superuser must have is_staff=True.'))\n if extra_fields.get('is_superuser') is not True:\n raise ValueError(_('Superuser must have is_superuser=True.'))\n return self.create_user(email, password, **extra_fields)", "def create_superuser(self, email, password, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n extra_fields.setdefault('type', \"ADMINISTRATOR\")\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError(_('Superuser must have is_staff=True.'))\n if extra_fields.get('is_superuser') is not True:\n raise ValueError(_('Superuser must have is_superuser=True.'))\n return self.create_user(email, password, **extra_fields)", "def create_user(request):\n post = request.POST.dict()\n username = post.get('username')\n if username is None:\n response = {'status':-1, 'status_message':'No username specified.'}\n return HttpResponse(json.dumps(response))\n password = post.get('password')\n if password is None:\n response = {'status': -1, 'status_message': 'No password specified.'}\n return HttpResponse(json.dumps(response))\n user_obj = User(\n username=username,\n first_name=post.get('first_name'),\n last_name=post.get('last_name'),\n email=post.get('email'),\n is_superuser=json.loads(post.get('is_admin', 'false')),\n is_active=json.loads(post.get('is_enabled', 'false'))\n )\n user_obj.set_password(password)\n user_obj.save()\n response = {'status':1, 'status_message':'Success'}\n return HttpResponse(json.dumps(response))", "def create_superuser(self, email, password, **extra_fields):\n return self.create_user(email, password, is_staff=True,\n is_superuser=True, **extra_fields)", "def create(self, data):\n # ensure 'create()' calls the specific 'create_user()' method\n # note that the 'data' gets validated\n user = get_user_model().objects.create_user(**data)\n return user", "def new_user(request):\r\n rdict = request.params\r\n\r\n u = User()\r\n\r\n 
u.username = unicode(rdict.get('username'))\r\n if u.username:\r\n u.username = u.username.lower()\r\n u.email = unicode(rdict.get('email')).lower()\r\n passwd = get_random_word(8)\r\n u.password = passwd\r\n u.activated = True\r\n u.is_admin = False\r\n u.api_key = User.gen_api_key()\r\n\r\n try:\r\n DBSession.add(u)\r\n DBSession.flush()\r\n # We need to return the password since the admin added the user\r\n # manually. This is only time we should have/give the original\r\n # password.\r\n ret = dict(u)\r\n ret['random_pass'] = passwd\r\n return _api_response(request, ret)\r\n\r\n except IntegrityError, exc:\r\n # We might try to add a user that already exists.\r\n LOG.error(exc)\r\n request.response.status_int = 400\r\n return _api_response(request, {\r\n 'error': 'Bad Request: User exists.',\r\n })", "def create_user(self, data):\n return self.client.post(\n path='/api/v2/auth/signup/', data=json.dumps(data), content_type='application/json')", "def create (self, validated_data):\n user = models.UserProfile.objects.create_user(\n email = validated_data ['email'],\n name = validated_data ['name'],\n password = validated_data ['password']\n )\n\n return user", "def create_superuser(self, email, first_name, last_name, password):\n user = self.create_user(email, password=password, first_name=first_name, last_name=last_name)\n user.is_admin = True\n user.save(using=self._db)\n return user", "def create_superuser(self, username, email, date_of_birth, full_name, password):\n user = self.create_user(\n username=username,\n full_name=full_name,\n email=UserManager.normalize_email(email),\n password=password,\n date_of_birth=date_of_birth,\n is_staff=True\n )\n user.is_superuser = True\n user.save(using=self._db)\n return user", "def create_superuser(self, *args, **kwargs):\n password = kwargs.pop('password', '')\n email = kwargs.pop('email', '')\n user = self.model(email=self.normalize_email(email), **kwargs)\n user.set_password(password)\n user.is_superuser = True\n user.is_staff = True\n user.save()\n\n return user", "def save(self, commit=True):\n user = super(UserCreationForm, self).save(commit=False)\n user.set_password(self.cleaned_data['password1'])\n\n user.save()\n\n # Making user profile and assigning to CPCESU\n # CPCESU\n #group = Organization.objects.get(name='Colorado Plateau')\n\n # New profile with group\n profile = UserProfile(user=user, first_name=self.cleaned_data.get('first_name'),\n last_name=self.cleaned_data.get('last_name'))\n profile.save()\n\n return user", "def create_superuser(self, email, password, full_name=None):\n user = self.create_user(\n email,\n password=password,\n )\n user.staff = True\n user.admin = True\n user.save(using=self._db)\n return user", "def create(self, validated_data):\n user = UserProfile.objects.create_user(\n email=validated_data['email'],\n first_name = validated_data['first_name'],\n last_name = validated_data['last_name'],\n password = validated_data['password']\n )\n return user", "def create_new_user(first_name, last_name, email, password):\n \n new_user = User(first_name, last_name, email, password)\n db.session.add(new_user)\n db.session.commit()\n \n # link a root storage folder to the user\n root_folder = Folder()\n db.session.add(root_folder)\n db.session.commit()\n new_user.storage_root_id = root_folder.id\n new_user.storage_root = root_folder\n db.session.commit()\n\n # link usage tracking to the user\n usage = Usage()\n usage.user_id = new_user.id\n new_user.usage = usage\n db.session.add(usage)\n db.session.commit()\n\n # link 
a billing address to the user\n billing_address = BillingAddress()\n billing_address.user_id = new_user.id\n new_user.billing_address = billing_address\n db.session.add(billing_address)\n db.session.commit()\n\n # link settings to the User\n settings = Settings()\n settings.user_id = new_user.id\n new_user.settings = settings\n db.session.add(settings)\n db.session.commit()", "def create_superuser(self, username, email, password):\n if password is None:\n raise TypeError('Superusers must have a password.')\n\n id_number = self.create_id_number()\n user = self.create_user(\n username=username,\n email=email,\n password=password,\n id_number=id_number\n )\n user.is_superuser = True\n user.is_active = True\n user.is_staff = True\n user.save()\n\n return user", "def create_superuser(self, username, email, password):\n return self.create_user(username, email, password, is_staff = True, is_superuser= True)", "def create_superuser(self, email, first_name, last_name, password):\n user = self.create_user(email, first_name, last_name, password)\n\n user.is_superuser = True\n user.is_staff = True\n user.save(using=self._db)\n\n return user", "def create(self, validated_data):\n username = validated_data.get('username')\n email = validated_data.get('email')\n password = validated_data.get('password')\n first_name = validated_data.get('first_name', '')\n last_name = validated_data.get('last_name', '')\n return User.objects.create_user(username, email, password, first_name=first_name,\n last_name=last_name)", "def create_superuser(self, email, name, password):\n employee = self.create_user(email, name, password)\n employee.is_superuser = True\n employee.is_staff = True\n employee.save(using=self._db)\n\n return employee", "def create_user(email, password, f_name, l_name):\n pass", "def create(self, validated_data):\n user = UserProfile.objects.create_user(\n email=validated_data[\"email\"],\n name=validated_data[\"name\"],\n password=validated_data[\"password\"]\n )\n\n return user", "def create_superuser(self, email, password=None, **extra_fields):\n user = self.create_user(email, password)\n user.is_staff = True\n user.is_superuser = True\n user.save(using=self._db)\n\n return user", "def create_user(self, password=None, phone=None, **extra_fields):\n extra_fields.setdefault('is_staff', False)\n extra_fields.setdefault('is_superuser', False)\n\n return self._create_user(password=password, phone=phone, **extra_fields)", "def create_superuser(self, username, email, password):\n if password is None:\n raise TypeError('Superusers must have a password.')\n\n user = self.create_user(username, email, password)\n user.is_superuser = True\n user.is_staff = True\n user.save()\n\n return user", "def create_superuser(self, username, email, password):\n user = self.create_user(\n username,\n email,\n password=password,\n )\n user.is_admin = True\n user.save(using=self._db)\n return user", "def create(self, request, *args, **kwargs):\n user = request.user\n if user.is_authenticated and not user.has_perm(\"users.add_user\"):\n self.permission_denied(request, message=_(\"You cannot create users.\"))\n return super().create(request, *args, **kwargs)", "def create_user():\n username = request.get_json().get(\"name\", None)\n role = request.get_json().get(\"role\", None)\n email = request.get_json().get(\"email\", None)\n return jsonify(\n admin.create_user(current_app.scoped_session(), username, role, email)\n )", "def create_superuser(self, username, password, cpf, **extra_fields):\n 
extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError(_('Superuser must have is_staff=True.'))\n if extra_fields.get('is_superuser') is not True:\n raise ValueError(_('Superuser must have is_superuser=True.'))\n return self.create_user(username, password,cpf, **extra_fields)", "def create_user(self, email, mobile_number, password, **extra_fields):\n extra_fields.setdefault('is_staff', False)\n extra_fields.setdefault('is_superuser', False)\n return self._create_user(email, mobile_number, password , **extra_fields)", "def create_superuser(self, phone, password):\n user = self.model(phone=phone)\n user.set_password(password)\n user.save(using=self._db)\n user.is_staff = True\n user.is_superuser = True\n user.save(using=self._db)\n\n return user", "def create_superuser(self, username, email, password):\n user = self.create_user(\n username,\n email,\n password=password,\n\n )\n\n user.is_admin = True\n user.is_staff = True\n user.save(using=self._db)\n return user", "def create_superuser(self, email, name, password):\n user = self.create_user(email, name, password)\n user.is_superuser = True\n user.is_staff = True\n user.save(using=self._db)\n return user", "def create(self, validated_data):\n user = get_user_model().objects.create(\n username=validated_data['username'],\n )\n user.set_password(validated_data['password'])\n user.save()\n return user", "def create_superuser(self, username, email, password):\n user = self.create_user(username, email,\n password=password\n )\n user.is_admin = True\n user.is_active = True\n user.is_superuser = True\n user.save(using=self._db)\n return user", "def createsuperuser(request):\r\n\r\n user = models.User()\r\n user.username = 'admin' # change later\r\n user.email = 'mayaankvad@gmail.com'\r\n user.set_password(\"qazwsxed\")\r\n user.is_staff = True\r\n user.is_superuser = True\r\n\r\n if models.User.objects.filter(username=user.username).exists():\r\n return redirect('/')\r\n else:\r\n user.save()\r\n return redirect('/console')", "def create_superuser(self, username, email, password=None):\n\n user = self.create_user(\n username,\n email,\n password=password,\n )\n user.is_admin = True\n user.save(using=self._db)\n return user", "def create_superuser(self, email, password):\n if password is None:\n raise TypeError('Superusers must have a password.')\n user = self.create_user(email, password)\n user.is_superuser = True\n user.is_staff = True\n user.save()\n return user", "def create_superuser(self, email, date_of_birth, password):\n user = self.create_user(email,\n password=password,\n date_of_birth=date_of_birth\n )\n user.is_admin = True\n user.save()\n return user", "def create_superuser(self, username, email: str = None, password: str = None, **kwargs):\n kwargs.setdefault('is_staff', True)\n kwargs.setdefault('is_superuser', True)\n return self._create_user(username, email=email, password=password, **kwargs)", "def save(self):\n new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],\n password=self.cleaned_data['password1'],\n email=self.cleaned_data['email'],\n firstname=self.cleaned_data['first_name'],\n lastname=self.cleaned_data['last_name'],\n agree=self.cleaned_data['tos'])\n return new_user", "def create_superuser(self, email, name, password):\n # Create a normal user first, then change it to the super user\n user = self.create_user(email, name, 
password)\n user.is_superuser = True\n user.is_staff = True\n user.save(using=self._db)\n\n return user", "def create_superuser(self, username, password):\n user = self.create_user(username=username, password=password)\n user.is_superuser = True\n user.is_staff = True\n user.save()\n\n return user", "def create_superuser(self, name, email, password):\n user = self.create_user(name, email, password)\n user.is_superuser = True\n user.is_staff = True\n user.save(using=self._db)\n return user", "def _create_user(self, new_user):\n new_user = User(user_name=new_user['user_name'], pin=new_user['pin'], user_type='customer')\n self.session.output(new_user.get_user_info(), '\\n[ New user created ]')", "def create_superuser(self, phone=None,\n password=None, **extra_fields):\n extra_fields.setdefault('is_staff', True)\n extra_fields.setdefault('is_superuser', True)\n extra_fields.setdefault('is_active', True)\n\n if extra_fields.get('is_staff') is not True:\n raise ValueError('Superuser must have is_staff=True')\n\n if extra_fields.get('is_superuser') is not True:\n raise ValueError('Superuser must have is_superuser=True')\n\n return self._create_user(phone, password, **extra_fields)", "def create(self, validated_data):\n password = validated_data.pop('password')\n new_user = User.objects.create(**validated_data)\n new_user.set_password(password)\n new_user.save()\n return new_user", "def create(self, data):\n # Make User\n username = data['email'].split(\"@\")[0]\n user = User.objects.create_user(**data, username=username, is_verified=False, is_client=True)\n Profile.objects.create(user=user)\n send_confirmation_email.delay(user_pk=user.pk)\n return user" ]
[ "0.7452185", "0.7432198", "0.7297633", "0.7279892", "0.72240347", "0.7219963", "0.7207689", "0.71808976", "0.7158941", "0.71064526", "0.7101526", "0.70998144", "0.7079699", "0.7065048", "0.70579106", "0.70458865", "0.7034362", "0.7034056", "0.70265925", "0.70232207", "0.70158947", "0.70151025", "0.7010647", "0.7000515", "0.6999792", "0.6983223", "0.6976148", "0.69751114", "0.6973439", "0.6968055", "0.69660604", "0.6956265", "0.69526803", "0.69514847", "0.6948165", "0.69479", "0.694599", "0.6945557", "0.69443005", "0.69282985", "0.6922043", "0.6919716", "0.6903643", "0.6903238", "0.6901136", "0.69006824", "0.6900516", "0.6896192", "0.68955976", "0.6894126", "0.68900216", "0.6889669", "0.6889669", "0.6889669", "0.6889542", "0.688652", "0.68855643", "0.6873864", "0.6873829", "0.68653464", "0.6864882", "0.68643534", "0.6863922", "0.68613476", "0.68567634", "0.68531173", "0.68528634", "0.6848222", "0.68455565", "0.6822369", "0.6821458", "0.68132234", "0.6810037", "0.68096805", "0.6799099", "0.6798871", "0.6796934", "0.6792308", "0.67896926", "0.6784178", "0.6778898", "0.6775158", "0.6772013", "0.6769043", "0.67663515", "0.67649525", "0.67628795", "0.67584044", "0.6751785", "0.6744863", "0.6736457", "0.67341244", "0.67315745", "0.67308635", "0.6730807", "0.6726685", "0.6726362", "0.67261994", "0.6717937", "0.67176384", "0.67135394" ]
0.0
-1
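The negatives in the record above are all variants of Django's custom-user create_superuser pattern. For reference, a minimal sketch of the shape most of them share, assuming a custom AbstractBaseUser model whose USERNAME_FIELD is "email" (the manager and model names here are illustrative, not taken from any single variant):

from django.contrib.auth.base_user import BaseUserManager

class AppUserManager(BaseUserManager):
    def create_user(self, email, password=None, **extra_fields):
        if not email:
            raise ValueError("Users must have an email address.")
        user = self.model(email=self.normalize_email(email), **extra_fields)
        user.set_password(password)  # stores a salted hash, never the raw password
        user.save(using=self._db)
        return user

    def create_superuser(self, email, password, **extra_fields):
        # Force the admin flags on, and fail loudly if a caller tries
        # to create a "superuser" without them.
        extra_fields.setdefault("is_staff", True)
        extra_fields.setdefault("is_superuser", True)
        if extra_fields.get("is_staff") is not True:
            raise ValueError("Superuser must have is_staff=True.")
        if extra_fields.get("is_superuser") is not True:
            raise ValueError("Superuser must have is_superuser=True.")
        return self.create_user(email, password, **extra_fields)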
Used to get a user's full name
def get_full_name(self): return self.name
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_full_name(self):\n full_name = '%s %s' % (self.user.first_name.strip(), self.user.last_name.strip())\n if len(full_name.strip()) == 0:\n full_name = self.user.username\n return full_name.strip()", "def get_full_name(self):\n full_name = f'{self.first_name} {self.last_name}' if self.first_name and self.last_name else self.username\n return full_name.strip()", "def get_full_name(self):\n return self.username", "def get_full_name(self):\n return self.username", "def ldap_get_fullname(self, user):\n result = super(Auth42, self)._search_not_empty(user)\n if result is not None:\n fullname = (result.get(\"first-name\")[0], result.get(\"last-name\")[0])\n return ' '.join(str(name) for name in fullname)\n\n return None", "def get_full_name(self):\n # The user is identified by their email address\n return self.first_name+' '+self.last_name", "def get_user_name(self):\n full_name = f'{self.f_name} {self.l_name}'\n return full_name", "def full_name(self):\n return \"{} {}\".format(self.user.first_name, self.user.last_name)", "def full_name(self):\n return self.user.get_full_name() or None", "def get_full_name(self):\n return self.name+self.last_name", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = u'%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(cls, user_id):\n\n u = cls.query.get_or_404(user_id)\n u_first = u.first_name\n u_last = u.last_name \n\n u_full = f\"{u_first} {u_last}\"\n\n return u_full", "def get_full_name(self):\r\n full_name = '%s %s' % (self.first_name, self.last_name)\r\n return full_name.strip()", "def get_full_name(self):\r\n full_name = '%s %s' % (self.first_name, self.last_name)\r\n return full_name.strip()", "def full_name(self, obj: User) -> str:\n return obj.get_full_name()", "def get_user_fullname(self):\n member = self.get_user()\n if member:\n return member.getProperty('fullname')", "def get_full_name(self):\n\t\tfull_name = '%s %s' % (self.first_name, self.last_name)\n\t\treturn full_name.strip()", "def get_full_name(self):\n return self.last_name + self.first_name", "def full_name_short(self):\n return \"{}. 
{}\".format(str(self.user.first_name)[:1], self.user.last_name)", "def get_short_name(self):\n return f\"{self.first_name} {self.last_name[:1]}\" if self.first_name else self.username", "def _get_full_name(self):\n if self.middle_name:\n return u'%s %s %s' % (self.first_name, self.middle_name,\n self.last_name)\n else:\n return u'%s %s' % (self.first_name, self.last_name)", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '{0} {1} {2}'.format(self.last_name, self.first_name, self.patronymic)\n return 
full_name.strip()", "def get_full_name(self):\n return self.name + \" \" + self.email", "def get_user_fullname(self):\n return self.applicant.userprofile.display_name()", "def get_full_name(self):\n\t\treturn self.email", "def get_full_name(self):\n full_name = \"%s %s\" % (self.firstname, self.lastname)\n return full_name.strip()", "def get_full_name(self):\n return self.first_name + ' ' + self.last_name", "def get_displayname(self):\n return self.full_name or self.user.username", "def get_full_name(self):\n\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = '{} {}'.format(self.first_name, self.last_name)\n return full_name.strip()", "def get_username(self):\n full_name = '%s %s' % (self.user.first_name.strip(), self.user.last_name.strip()[0:1])\n if len(full_name.strip()) == 0:\n full_name = self.user.username\n return full_name.strip()", "def get_full_name(self):\n return u'%s %s' % (self.first_name, self.last_name)", "def full_name(self):\n return self.first_name + \" \" + self.last_name", "def get_full_name(self):\n return \"{0} {1}\".format(self.first_name, self.last_surname)", "def get_full_name(self):\n return self.first_name+\" \"+self.last_name", "def get_full_name(self):\r\n full_name = '%s' % (self.name)\r\n return full_name.strip()", "def get_short_name(self):\n # The user is identified by their email address\n return self.first_name", "def get_full_name(self):\n return \"%s %s\" % (self._first_name, self._last_name)", "def get_full_name(self):\n return f\"{self.first_name} {self.last_name}\"", "def get_full_name(self):\n return f\"{self.first_name} {self.last_name}\"", "def get_full_name(self):\n return f'{self.first_name} {self.last_name}'", "def full_name(self):\n return u\"{} {}\".format(self.pref_first_name(), self.last_name)", "def fullname(self):\n parts = []\n self.lastname and parts.append(self.lastname)\n self.firstname and parts.append(self.firstname)\n len(parts) == 0 and parts.append(self.userid)\n\n return ' '.join(parts)", "def get_full_name(self) -> str:\n return f\"{self.first_name} {self.last_name}\"", "def full_name(self):\n \tif self.first_name and self.last_name:\n \t\treturn \"{} {}\".format(self.first_name, self.last_name)", "def get_user_display_name(self):\n return self.user.get_full_name() or self.user.get_username()", "def get_name(self):\n user = self.user\n name = \"%s %s\" % (user.first_name, user.last_name)\n name = name.strip()\n\n return self.display_name or name or user.email or user.username", "def get_full_name(self):\n return \"{} {}\".format(self.first_name, self.last_name)", "def full_name(self):\n return f\"{self.first_name} {self.last_name}\"", "def full_name(self):\n return f\"{self.first_name} {self.last_name}\"", "def full_name(self):\n return f\"{self.first_name} {self.last_name}\"", "def get_user_name(user: User) -> str:\n user_name = user.get(\"display_name\")\n if not user_name:\n user_name = user[\"fullname\"]\n if not user_name:\n user_name = user[\"name\"]\n return user_name", "def get_full_name(self):\n\n return self.name", "def full_name(first_name, last_name):\n\t\n\treturn first_name + \" \" + last_name", "def get_full_name(self, include_title: bool = True) -> str:\n\n full_name = self.user.get_full_name()\n if include_title and self.title:\n title = self.get_title_repr()\n return f\"{full_name}, {title}\"\n return full_name", "def 
get_full_name(self):\n return self.name #self is base and it hits name filed", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n if self.patronymic_name:\n return '{} {} {}'.format(\n self.first_name,\n self.patronymic_name,\n self.last_name,\n )\n\n return '{} {}'.format(\n self.first_name,\n self.last_name,\n )", "def __full_name_for_user(self, username):\n\n # First check our cache of previous hits.\n if username in self.username_map:\n return self.username_map[username]\n\n # Fall back to p4gf_usermap, p4 users.\n user_3tuple = self.usermap.lookup_by_p4user(username)\n if user_3tuple:\n user = p4gf_usermap.tuple_to_P4User(user_3tuple)\n else:\n user = None\n fullname = ''\n if user:\n # remove extraneous whitespace for consistency with Git\n fullname = ' '.join(user.full_name.split())\n self.username_map[username] = fullname\n return fullname", "def FullName(self, default=None):\n return self.data.get('full_name', default)", "def FullName(self, default=None):\n return self.data.get('full_name', default)", "def full_name(self) -> str:\r\n\t\tname = f'{self.last_name} {self.first_name}'\r\n\t\tif self.middle_name:\r\n\t\t\tname += ' ' + self.middle_name\r\n\t\treturn name", "def full_name(first_name, last_name):\n return first_name + \" \" + last_name", "def full_name(self):\n return self._full_name", "def full_name(self,first_name):\n full_name = self.first_name + ' ' + self.last_name\n return full_name" ]
[ "0.8447406", "0.84173375", "0.840863", "0.840863", "0.8322008", "0.83157456", "0.8233342", "0.8191648", "0.81704843", "0.80976194", "0.8085201", "0.8049897", "0.80166596", "0.8014928", "0.8014928", "0.80035484", "0.7998611", "0.79971266", "0.79911834", "0.79884243", "0.7980313", "0.7965516", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7910768", "0.7906633", "0.7891431", "0.78816074", "0.7874174", "0.78692186", "0.786576", "0.78540266", "0.78404254", "0.78404254", "0.7835792", "0.78311026", "0.7800933", "0.77859634", "0.7760608", "0.7757169", "0.77552783", "0.77548134", "0.7745102", "0.7730422", "0.7730422", "0.7712868", "0.77082807", "0.7695994", "0.76898813", "0.7668", "0.76552725", "0.7632437", "0.76053417", "0.75991064", "0.75991064", "0.75991064", "0.7599052", "0.7593109", "0.75505245", "0.75352883", "0.7465039", "0.744284", "0.744284", "0.744284", "0.7439436", "0.7439436", "0.7439436", "0.7439436", "0.7439436", "0.7433863", "0.742393", "0.7410808", "0.7410808", "0.7378634", "0.7372484", "0.7366494", "0.73653924" ]
0.0
-1
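The positive document above returns self.name directly; most of the negatives instead combine first and last name and fall back to the username. A runnable sketch of that common fallback behaviour (the field names are assumptions, not taken from the record):

class User:
    def __init__(self, first_name="", last_name="", username=""):
        self.first_name = first_name
        self.last_name = last_name
        self.username = username

    def get_full_name(self):
        """Used to get a user's full name, falling back to the username."""
        full_name = f"{self.first_name} {self.last_name}".strip()
        return full_name or self.username

print(User("Ada", "Lovelace").get_full_name())  # -> Ada Lovelace
print(User(username="ada").get_full_name())     # -> ada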
Used to get the user's short name
def get_short_name(self): return self.name
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n # The user is identified by their email address\n return self.first_name", "def get_short_name(self):\n return f\"{self.first_name} {self.last_name[:1]}\" if self.first_name else self.username", "def get_short_name(self):\r\n return self.first_name", "def get_short_name(self) -> str:\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n # The user is identified by the email address\n return self.email", "def get_short_name(self):\n\n return self.first_name", "def get_short_name(self):\n\t\treturn self.email", "def short_name(self):\n return self.get(\"short_name\", decode=True)", "def get_short_name(self):\n return self.full_name.split(' ')[0]", "def full_name_short(self):\n return \"{}. 
{}\".format(str(self.user.first_name)[:1], self.user.last_name)", "def shortname(self):\n return self.get(\"shortName\")", "def get_full_name(self):\n return self.username", "def get_full_name(self):\n return self.username", "def get_short_name(self):\n return self.last_name", "def getShortName(self) -> str:\n return self.short_name", "def get_displayname(self):\n return self.full_name or self.user.username", "def short_name(self) -> str:\r\n\t\treturn f'{self.last_name} {self.first_name}'", "def get_short_name(self):\n\n return self.email", "def short_displayname(self):\n return self.get_short_displayname()", "def get_user_fullname(self):\n return self.applicant.userprofile.display_name()", "def get_short_name(self):\r\n return self.name", "def get_short_name(self):\n\n return self.name", "def get_user_name(self):\n full_name = f'{self.f_name} {self.l_name}'\n return full_name", "def full_name(self):\n return self.user.get_full_name() or None", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_user_display_name(self):\n return self.user.get_full_name() or self.user.get_username()", "def get_short_name(self):\n return self.firstname\n\n # allows assignment property to the image value\n # from the view", "def get_user_name(user: User) -> str:\n user_name = user.get(\"display_name\")\n if not user_name:\n user_name = user[\"fullname\"]\n if not user_name:\n user_name = user[\"name\"]\n return user_name", "def get_full_name(self):\n full_name = '%s %s' % (self.user.first_name.strip(), self.user.last_name.strip())\n if len(full_name.strip()) == 0:\n full_name = self.user.username\n return full_name.strip()", "def get_full_name(self):\n # The user is identified by their email address\n return self.first_name+' '+self.last_name", "def get_full_name(self):\n\n return self.name", "def full_name(self, obj: User) -> str:\n return obj.get_full_name()", "def get_full_name(self):\n full_name = f'{self.first_name} {self.last_name}' if self.first_name and self.last_name else self.username\n return full_name.strip()", "def full_name(self):\n return \"{} {}\".format(self.user.first_name, self.user.last_name)", "def display_name(self):\n return self.settings['displayName']", "def test_get_short_name_should_return_first_name(self):\n email = 'example@example.com'\n password = 'password'\n first_name = 'Example'\n last_name = 'User'\n user = MyUser(\n email=email,\n password=password,\n first_name=first_name,\n last_name=last_name\n )\n short_name = user.get_short_name()\n self.assertEqual(short_name, first_name)", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_short_name(self):\n last_name = self.last_name\n first_name = self.first_name\n if (not (last_name and not last_name.isspace())):\n \"\"\" If last name is empty or none then return first name\"\"\"\n return first_name\n else:\n return last_name", "def user_name(self):\n return lamin_user_settings().name", "def get_user_fullname(self):\n member = self.get_user()\n if member:\n return member.getProperty('fullname')", "def 
get_full_name(self):\n\t\treturn self.email", "def get_full_name(self):\n return self.name + \" \" + self.email", "def get_full_name(self):\n return self.name+self.last_name", "def get_full_name(self):\n return self.name #self is base and it hits name filed", "def short_name_or_full(self):\n return self.short_name or self.title", "def full_name(self):\n return u\"{} {}\".format(self.pref_first_name(), self.last_name)", "def get_full_name(self):\n full_name = '{0} {1} {2}'.format(self.last_name, self.first_name, self.patronymic)\n return full_name.strip()", "def get_full_name(self):\r\n full_name = '%s' % (self.name)\r\n return full_name.strip()", "def get_full_name(cls, user_id):\n\n u = cls.query.get_or_404(user_id)\n u_first = u.first_name\n u_last = u.last_name \n\n u_full = f\"{u_first} {u_last}\"\n\n return u_full", "def get_full_name(self):\n return u'%s %s' % (self.first_name, self.last_name)", "def full_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"full_name\")", "def ldap_get_fullname(self, user):\n result = super(Auth42, self)._search_not_empty(user)\n if result is not None:\n fullname = (result.get(\"first-name\")[0], result.get(\"last-name\")[0])\n return ' '.join(str(name) for name in fullname)\n\n return None", "def friendly_name(self) -> str:\n return pulumi.get(self, \"friendly_name\")", "def get_full_name(self):\n full_name = '%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n full_name = u'%s %s' % (self.first_name, self.last_name)\n return full_name.strip()", "def get_full_name(self):\n\n return self.name", "def get_full_name(self):\n\n return self.name", "def display_name(self) -> str:\n return self.requester.username", "def get_real_name(self):\n return self.get_display_name()", "def get_username(self):\n full_name = '%s %s' % (self.user.first_name.strip(), self.user.last_name.strip()[0:1])\n if len(full_name.strip()) == 0:\n full_name = self.user.username\n return full_name.strip()", "def get_full_name(self):\n return self.last_name + self.first_name", "def user_display_name(self):\n return self.key.id()", "def __str__(self):\n return self.user.get_full_name()", "def full_name(self):\n return self._full_name", "def get_fulll_name(self):\n return self.name", "def full_name(self) -> Optional[str]:\n return pulumi.get(self, \"full_name\")", "def short_uid(self):\n return self.uid.split(\"-\")[0]" ]
[ "0.8692162", "0.8692162", "0.8692162", "0.8419169", "0.835935", "0.8267483", "0.8214984", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.8179056", "0.81158876", "0.8013196", "0.79904336", "0.7989155", "0.79555285", "0.7938165", "0.79132426", "0.7891928", "0.7891928", "0.78409505", "0.7688494", "0.7659017", "0.760529", "0.7602975", "0.7592388", "0.7577724", "0.75404483", "0.75202286", "0.7500807", "0.74970376", "0.74893343", "0.74893343", "0.74893343", "0.74893343", "0.74893343", "0.74893343", "0.74893343", "0.74893343", "0.7441893", "0.7397847", "0.7363409", "0.73334163", "0.73262143", "0.7324104", "0.73135775", "0.7298369", "0.72895426", "0.7235009", "0.7232697", "0.7227596", "0.7227596", "0.7227596", "0.7227596", "0.7227596", "0.7219678", "0.719706", "0.71844006", "0.71766335", "0.7158975", "0.7144317", "0.7120825", "0.71163994", "0.71131057", "0.71026146", "0.7101894", "0.70718426", "0.7059208", "0.7052736", "0.70485777", "0.70458525", "0.7034802", "0.703375", "0.7030191", "0.7030191", "0.7028188", "0.70123047", "0.7003766", "0.6998595", "0.69972146", "0.699147", "0.69901", "0.6988645", "0.6980595", "0.69787955" ]
0.73236275
60
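As with the full-name record, the short-name variants above mostly return first_name, sometimes guarding against it being empty. A minimal sketch of that guard (attribute names are assumptions):

class User:
    def __init__(self, first_name="", username=""):
        self.first_name = first_name
        self.username = username

    def get_short_name(self):
        """Used to get the user's short name; prefer first_name, else username."""
        return self.first_name or self.username

print(User("Grace", "ghopper").get_short_name())  # -> Grace
print(User(username="ghopper").get_short_name())  # -> ghopper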
Django uses this when it needs to convert the object into a string
def __str__(self): return self.email
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n return str(self.obj)", "def str_(object_):\n return str(object_)", "def u(obj):\n return obj if isinstance(obj, str) else str(obj)", "def __str__(self):\n return str(self.__dict__['_obj'])", "def format_item(self,obj):\n return unicode(obj)", "def format_result(self,obj):\n return unicode(obj)", "def get_result(self, obj):\n return str(obj)", "def get_result(self, obj):\n return str(obj)", "def _tostr(obj): # pragma: no cover\n return obj if isinstance(obj, str) else obj.decode()", "def _object2string(self,param_name,obj,replace=True):\n self.debug(\"object2string(%s,%s)\"%(param_name,obj))\n translator = self.translators[param_name]\n\n if not replace:\n translator=copy.copy(translator)\n\n return translator.object2string(obj)", "def __unicode__(self):\n return unicode(self.obj)", "def get_string_value(self, obj, field):\n return smart_unicode(field.value_to_string(obj))", "def value_to_string(self, obj):\n value = self._get_val_from_obj(obj)\n return self.get_prep_value(value)", "def value_to_string(self, obj):\n value = self._get_val_from_obj(obj)\n return self.get_prep_value(value)", "def value_to_string(self, obj):\n value = self._get_val_from_obj(obj)\n return self.get_prep_value(value)", "def get_str(self, obj):\n if self.pretty:\n return pprint.pformat(obj)\n else:\n return str(obj)", "def value_to_string(self, obj):\n value = self.value_from_object(obj)\n return value", "def __str__(self):\n return json.dumps(self.obj)", "def asString(obj):\n if type(obj) in _STR_TYPES:\n return obj\n return str(obj)", "def __str__(self) -> str:\n return str(self.getvalue())", "def __str__(self) -> str:\n return str(self.getvalue())", "def __str__(self) -> str:\n return str(self.getvalue())", "def serialize(self, obj):\n return obj", "def __str__(self):\n return str(self.serialize())", "def __str__(self):\n return self.__unicode__().encode('utf-8').decode()", "def __str__(self):\n return self.get_str()", "def _transform(obj):\n\n if isinstance(obj, date) or isinstance(obj, time) or isinstance(obj, datetime):\n return str(obj)\n if isinstance(obj, decimal):\n return str(float(obj))\n if obj == None: \n return 'null'\n return str(obj)", "def stringify(obj):\n tp = type(obj)\n if issubclass(tp, basestring):\n return obj\n elif hasattr(tp, '__unicode__'):\n s = tp.__unicode__(obj)\n if not isinstance(s, basestring):\n raise TypeError('__unicode__ did not return a string')\n return s\n elif hasattr(tp, '__str__'):\n s = tp.__str__(obj)\n if not isinstance(s, basestring):\n raise TypeError('__str__ did not return a string')\n return s\n else:\n return str(obj)", "def __str__(self):\n\n if compat.PY3:\n return self.__unicode__()\n return self.__bytes__()", "def __str__(self):\n return self.get_string()", "def __str__(self):\n return self.asJsonString()", "def __str__(self):\n return self.as_json_string()", "def __str__(self):\n return self.data.__str__()", "def __str__(self):\n return self.data.__str__()", "def __str__(self):\n return str(self.__dict__)", "def __str__(self):\n return str(self.__dict__)", "def __str__(self):\n return self._str", "def _stringify(obj):\r\n if isinstance(obj, unicode):\r\n return obj.encode('utf-8')\r\n elif isinstance(obj, str):\r\n return obj\r\n else:\r\n raise TypeError('Object is not a string.')", "def format(self, obj):\n pass", "def format(self, obj):\n pass", "def __str__(self):\n raise ValueError('Should not convert Entity directly to string')", "def to_str(self) -> str:", "def serialize(self, obj):\n return dill.dumps(obj, 
0).decode('latin-1')", "def __str__(self):\n return self.toString()", "def myconverter(o: object):\n if isinstance(o, datetime.datetime):\n return o.__str__()", "def __str__(self):\n return self.string", "def to_string(self):\r\n return self.__str__()", "def __str__(self):\n return self.AsJsonString()", "def __str__(self):\n return bytes_to_str(bytes(self))", "def __str__(self):\n return unicode(self).encode('utf-8')", "def __str__(self) -> str:\n return self.encode()", "def _sanitizer(self, obj):\n if isinstance(obj, datetime.datetime):\n return obj.isoformat()\n if hasattr(obj, \"to_dict\"):\n return obj.to_dict()\n return obj", "def __str__(self):\n return str(self.GetString())", "def _tostr(t):\n\treturn t.__unicode__()", "def __str__(self):\n return str(self.__data)", "def __str__(self):\n outstr = self._field1\n return outstr", "def serialize(self, obj):\n pass", "def __str__(self):\n return self.make_flat()", "def __str__(self):\n\n return self.raw_field", "def __str__(self):\n return str(self.get_data())", "def __unicode__(self):\n return str(self).decode('ascii')", "def stringify(obj):\n tp = type(obj)\n if issubclass(tp, basestring):\n return obj\n elif hasattr(tp, '__unicode__'):\n s = tp.__unicode__(obj)\n if not isinstance(s, basestring):\n raise TypeError, '__unicode__ did not return a string'\n return s\n elif hasattr(tp, '__str__'):\n s = tp.__str__(obj)\n if not isinstance(s, basestring):\n raise TypeError, '__str__ did not return a string'\n return s\n else:\n return str(obj)", "def __str__(self):\n return str(self.__s)", "def __str__(self):\n\n return self.toString()", "def default(self, obj):\n \n if isinstance(obj, np.ndarray):\n return list(obj)\n\n if isinstance(obj, uuid.UUID):\n return str(obj)\n\n if isinstance(obj, datetime.datetime):\n return obj.isoformat()\n \n if isinstance(obj,TPC):\n return obj._so()\n \n # No special handling called for; pass through\n return json.JSONEncoder.default(self, obj)", "def safeToString():", "def __str__(self):\n str(self.data)\n return str", "def __str__(self):\r\n return Assert(self.obj.__str__())", "def __str__(self):\n\n return '__str__ for Object'", "def to_python(self, value):\n if isinstance(value, models.CharField):\n # If an instance, just return the instance.\n return value\n if value is None:\n # If db has NULL, convert it to UNKNOWN.\n return UNKNOWN\n\n # Otherwise, just return the value.\n return value", "def __str__(self):\n return self.s", "def serialize(self):\n\n\t\treturn str(self)", "def __str__(self): # pragma: nocover\n return str(self.value)", "def transform_python(self, value):\n return str(value)", "def get_notification_string(self, obj):\n return str(obj)", "def __str__(self):\n return super().__str__()", "def __unicode__(self):\n # Should be overwritten by base classes\n return object.__repr__(self)", "def alchemyencoder(obj):\n if isinstance(obj, datetime.datetime):\n return obj.isoformat(' ')\n elif isinstance(obj, datetime.date):\n return obj.isoformat()\n elif isinstance(obj, decimal.Decimal):\n return float(obj)\n return None", "def convert_for_json(obj):\n if isinstance(obj, datetime.datetime):\n return obj.__str__()\n return obj", "def __unicode__(self):\n return unicode(self.asPyDict())", "def __pout__(self):\n return self.__str__()", "def __str__(self):\n return str(self._data)", "def __str__(self):\n\n\t\treturn str(self.__value)", "def as_string(self, value, context=None):\n return str(value)", "def str(self):\n return self", "def __str__ (self) :\n\n return self.as_string()", "def 
_make_serializable(self, field):\n if isinstance(field, datetime):\n return str(field)\n elif isinstance(field, Decimal):\n return float(field)\n else:\n return field", "def _serialize(self, instance, owner):\n val = instance.__dict__[self._name]\n if val is None: return None\n return str(val)", "def _convert_to_str(self, data):\n raise NotImplementedError()", "def __str__(self):\n return str(self.data)", "def __str__(self):\n return str(self.data)", "def __str__(self):\n return str(self.data)", "def __str__(self):\n return str(self.data)", "def __str__(self):\n return str(self.data)", "def __str__(self):\n return bytes_to_string(self._bytes)", "def __str__(self) -> str:\n # noinspection PyUnresolvedReferences\n opts = self._meta\n if self.name_field:\n result = str(opts.get_field(self.name_field).value_from_object(self))\n else:\n model_fields = get_model_fields(\n opts.model,\n foreign=False,\n m2m=False,\n exclude=self.exclude_from_str\n )\n # TODO: replace the above with the below to remove the get_model_fields call:\n # model_fields = [\n # f for f in opts.get_fields()\n # if f.concrete\n # and not (f.primary_key or f.is_relation or f.name in self.exclude_from_str)\n # ]\n result = \" \".join(\n [\n str(fld.value_from_object(self))\n for fld in model_fields\n if fld.value_from_object(self)\n ]\n )\n return result.strip() or super().__str__()", "def admin_content_object(self,obj):\n try:\n if obj.content_object:\n return unicode(obj.content_object)\n except:\n pass\n return None", "def serialize(self):\n\n return str(self)", "def ToString(self):\r\n pass", "def ToString(self):\r\n pass", "def __unicode__(self):\r\n return unicode(repr(self))" ]
[ "0.7571222", "0.74968106", "0.7390877", "0.73628175", "0.73334724", "0.7276389", "0.7255745", "0.7255745", "0.7237711", "0.72124106", "0.71872014", "0.7185711", "0.71037436", "0.71037436", "0.71037436", "0.7084364", "0.7082783", "0.7033254", "0.7020016", "0.69618905", "0.69618905", "0.69618905", "0.6936576", "0.68954235", "0.68837494", "0.68645936", "0.6848298", "0.6826807", "0.68182206", "0.6750843", "0.6733729", "0.67336136", "0.6731106", "0.6731106", "0.67128634", "0.67128634", "0.67030096", "0.66944313", "0.6668004", "0.6668004", "0.6664335", "0.66544634", "0.66530836", "0.66418874", "0.66411424", "0.66345316", "0.6632965", "0.6632126", "0.6617048", "0.6613929", "0.6605071", "0.66043115", "0.6597792", "0.65839326", "0.6579114", "0.657292", "0.6568755", "0.6556681", "0.6555511", "0.6553242", "0.65521353", "0.65388095", "0.65352935", "0.6533093", "0.6530919", "0.6516647", "0.6490363", "0.64888126", "0.64784855", "0.64621264", "0.6460439", "0.64598376", "0.6453105", "0.6451109", "0.6450938", "0.64436686", "0.6440766", "0.6436381", "0.64326143", "0.64290255", "0.64251906", "0.64173925", "0.63910204", "0.6385612", "0.63747543", "0.6374288", "0.6369652", "0.63683885", "0.6366288", "0.6355251", "0.6355251", "0.6355251", "0.6355251", "0.6355251", "0.6354576", "0.6353607", "0.634572", "0.6334924", "0.6323454", "0.6323454", "0.6321198" ]
0.0
-1
Return the model as a string.
def __str__(self): return self.status_text
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n return super().__str__() + self.model.__str__()", "def model_info(self) -> str:\n return self._model_info(self.model).decode(\"utf-8\")", "def get_cpo_model_string(self):\n # Build string\n self._build_cpo_model_string()\n\n # Publish model\n self._publish_model()\n\n # Return\n return self.cpostr", "def __str__(self):\n return '%s%s' % (self.name, ' - %s' % self.model if self.model else '')", "def __str__(self):\n model = self._meta.verbose_name.title()\n return f\"{model:s}: {self.name:s}\"", "def __str__(self):\n model = self._meta.verbose_name.title()\n return f\"{model:s}: {self.name:s}\"", "def __repr__(self):\n\n mod = f\"{self.__class__.__name__} Model\"\n try:\n mod += f': {self.filename}'\n except AttributeError:\n pass\n s = [mod]\n for name, v in self.metadata.items():\n s += [f\"{name:16} : {v}\"]\n return '\\n'.join(s)", "def __str__(self) -> str:\n # noinspection PyUnresolvedReferences\n opts = self._meta\n if self.name_field:\n result = str(opts.get_field(self.name_field).value_from_object(self))\n else:\n model_fields = get_model_fields(\n opts.model,\n foreign=False,\n m2m=False,\n exclude=self.exclude_from_str\n )\n # TODO: replace the above with the below to remove the get_model_fields call:\n # model_fields = [\n # f for f in opts.get_fields()\n # if f.concrete\n # and not (f.primary_key or f.is_relation or f.name in self.exclude_from_str)\n # ]\n result = \" \".join(\n [\n str(fld.value_from_object(self))\n for fld in model_fields\n if fld.value_from_object(self)\n ]\n )\n return result.strip() or super().__str__()", "def __str__(self):\n return f\"model {self._name}\"", "def model(self) -> str:\n ...", "def get_model(self) -> str:\n return self._get_string(openvr.Prop_ModelNumber_String)", "def __str__(self):\n \n res = ['>>> Model %(model_name)s <<<']\n res.append('')\n res.append('Independent parameters:')\n res.append('-----------------------')\n res.append('')", "def to_string(self):\r\n return self.__str__()", "def __str__(self):\n model = self._meta.verbose_name.title()\n title = self.extended_object.get_title()\n return f\"{model:s}: {title:s}\"", "def __str__(self) -> str:\n model_str = [\"\\nModel info:\\n\", \" Unimodal encoder:\\n\"]\n\n for modality in range(self.num_modalities):\n model_str.append(f\" ({modality + 1}) {self.unimodal_encoder[modality]}\")\n\n model_str.append(\"\\n\\n Unimodal decoder:\\n\")\n for modality in range(self.num_modalities):\n model_str.append(f\" ({modality + 1}) {self.unimodal_decoder[modality]}\")\n\n if self.multimodal_decoder is not None:\n model_str.append(\"\\n\\n Multimodal decoder:\\n\")\n model_str.append(f\" {self.multimodal_decoder}\")\n\n return \"\".join(model_str)", "def to_str(self) -> str:", "def __str__(self):\n model = self._meta.verbose_name.title()\n return f\"{model:s}: {self.licence.name:s}\"", "def __str__ (self) :\n\n return self.as_string()", "def _get_model(self) -> str:\n return str(self.hass.data[DOMAIN][self._config_entry.entry_id][ATTR_MODEL])", "def __repr__(self):\n return grid_search_to_str(self.model)", "def print_model(self, model):\n return \"null\"", "def __str__(self):\n return self.get_str()", "def __repr__(self):\n s = 'text model name: ' + self.name + '\\n'\n s += ' number of words: ' + str(len(self.words)) + '\\n'\n s += ' number of word lengths: ' + str(len(self.word_lengths)) + '\\n'\n s += ' number of stems: ' + str(len(self.stems)) + '\\n'\n s += ' number of sentence lengths: ' + str(len(self.sentence_lengths)) + '\\n'\n s += ' most common words: ' + 
str(self.common_word) + '\\n'\n\n return s", "def __str__(self):\n return str(self.obj)", "def __str__(self):\n return self.make_flat()", "def __str__(self):\n return self.get_string()", "def get_model_name(self) -> str:\n return self._get_string(openvr.Prop_RenderModelName_String)", "def __str__(self):\n return str(self.__dict__['_obj'])", "def __str__(self):\n return str(self.serialize())", "def __str__(self):\n model = self._meta.verbose_name.title()\n title = self.title or str(_(\"Empty title\"))\n\n return f\"{model:s}: {title:s}\"", "def convert_model_to_string(model):\n # type: (Any) -> str\n if type(model).__name__ == \"Graph\": # Tensorflow Graph Definition\n try:\n from google.protobuf import json_format\n\n graph_def = model.as_graph_def()\n model = json_format.MessageToJson(graph_def, sort_keys=True)\n except Exception:\n LOGGER.warning(\"Failed to convert Tensorflow graph to JSON\", exc_info=True)\n\n if hasattr(model, \"to_json\"):\n # First, try with sorted keys:\n try:\n model = model.to_json(sort_keys=True)\n except Exception:\n model = model.to_json()\n elif hasattr(model, \"to_yaml\"):\n model = model.to_yaml()\n\n try:\n return str(model)\n except Exception:\n LOGGER.warning(\"Unable to convert model to a string\")\n return \"Unable to convert model to a string\"", "def __repr__(self):\n \n s = 'text model name: ' + self.name + '\\n' \n s += ' number of words: ' + str(len(self.words)) + '\\n'\n s += ' number of word lengths: ' + str(len(self.word_lengths)) + '\\n'\n s += ' number of sentence lengths: ' + str(len(self.sentence_lengths)) + '\\n'\n s += ' number of word stems: ' + str(len(self.stems)) + '\\n'\n s += ' number of commas counts: ' + str(len(self.commas_per_sentence)) + '\\n'\n return s", "def __str__(self):\n return str(self.__dict__)", "def __str__(self):\n return str(self.__dict__)", "def __str__(self):\n return self.toString()", "def to_representation(self) -> str:\n raise NotImplementedError()", "def name(self) -> str:\n return self._name(self.model).decode(\"utf-8\")", "def __repr__(self):\n s = 'text model name: ' + self.name + '\\n'\n s += ' number of words: ' + str(len(self.words)) + '\\n'\n s += ' number of word lengths: ' + str(len(self.word_lengths)) + '\\n'\n s += ' number of stems: ' + str(len(self.stems)) + '\\n'\n s += ' number of sentence lengths: ' + str(len(self.sentence_lengths))\\\n + '\\n'\n s += ' number of punctuation types: ' + str(len(self.punctuation))\n return s", "def __str__(self):\n\n return self.toString()", "def __str__(self):\n return \"DataModel(name={},attributes={},description={})\".format(\n self.name, {a.name: str(a) for a in self.attributes}, self.description\n )", "def __str__(self):\n return str(self.get_data())", "def serialize(self):\n\n\t\treturn str(self)", "def __repr__(self):\n return '<ModelSignature(model_name=%r)>' % self.model_name", "def __repr__(self):\n return '<ModelSignature(model_name=%r)>' % self.model_name", "def __str__(self):\n msg = [\n f'{self.model=}',\n f'{self.field=}',\n f'{self.fxx=}',\n f'{self.date=}',\n f'{self.priority=}',\n ]\n return '\\n'.join(msg)", "def serialize(self):\n\n return str(self)", "def dump_model(self):", "def __str__(self):\n tablename = self.tablename()\n attrs = {}\n if Registry.SCHEMAS.has_key(tablename):\n for key in Registry.SCHEMAS[tablename]:\n attrs[key] = getattr(self, key, None)\n return \"<%s object: %s>\" % (self.__class__.__name__, str(attrs))", "def __str__(self):\n return self.s", "def __str__(self):\n return self.format()", "def model_repr(model):\n if 
model.is_saved():\n key = model.key()\n return '<%s: %s>' % (key.kind(), key.id() or repr(key.name()))\n else:\n return '<%s: unsaved>' % model.kind()\n\n # Use a dummy password when connecting to a development app server.\n password = (address == 'localhost' and 'foo') or None", "def __str__(self):\n return self._str", "def __repr__(self):\n\n # info string\n info = self.model.__repr__()\n info += \"\\n=========================\\n\"\n info += f\"Train data length:\\t\\t{ len(self.train_dataset) }\\n\"\n info += f\"Eval sata length:\\t\\t{ len(self.eval_dataset) }\\n\"\n info += f\"Optimizer:\\t\\t\\t\\t{ str(self.optimizer).split('(')[0] }\\n\"\n info += f\"Criterion:\\t\\t\\t\\t{ str(self.criterion).split('(')[0] }\\n\"\n info += f\"Training Environment:\\t{ self.device.type }\\n\"\n info += f\"Show information:\\t\\t{ 'True' if self.info else 'False' }\\n\"\n info += \"=========================\\n\"\n\n return info", "def get_string(self):\n return self.__str", "def toString(self) -> str:\n raise NotImplementedError", "def get_model_info(self, model, mode='docs'):\n\n if mode == 'props':\n return dal._properties_to_json(model)\n elif mode == 'docs':\n return '\\n'.join(model.docs)\n else:\n output = dal._properties_to_json(model)\n output += '\\n' + '\\n'.join(model.docs)\n return output", "def get_model_name(self) -> str:\n raise NotImplementedError", "def __str__(self):\n return self._metadata.__str__()", "def as_string(self):\n return self.__repr__()", "def __str__(self) -> str:\n if self.name_field:\n return str(getattr(self, self.name_field))\n # noinspection PyUnresolvedReferences\n data = [\n # Collect the string representations of related objects.\n # getattr(self, fk_field.attname) and\n # fk_field.value_from_object(self) would only return the primary\n # key of the related object.\n str(getattr(self, fk_field.name))\n for fk_field in get_model_fields(\n self._meta.model, base=False, foreign=True, m2m=False\n )\n if not fk_field.null\n ]\n if len(data) < 2:\n # Cannot build a more meaningful representation than the default.\n return super().__str__()\n else:\n template = \"{}\" + \" ({})\" * (len(data) - 1)\n return template.format(*data)", "def __repr__(self):\n s= 'text model name: ' + self.name + '\\n'\n s+= 'number of words: ' + str(len(self.words)) + '\\n'\n s+='number of word lengths: ' + str(len(self.word_lengths))+'\\n'\n s+='number of word stems: ' + str(len(self.stems)) + '\\n'\n s+='number of sentence lengths: ' + str(len(self.sentence_lengths)) +'\\n'\n s+='number of word suffixes: '+ str(len(self.endings))\n \n return s", "def __str__(self):\n return self.__repr__()", "def __str__(self):\n return self.__repr__()", "def __str__(self):\n return self.__repr__()", "def __str__(self):\n return self.__repr__()", "def __str__(self):\n return self.__repr__()", "def __str__(self):\n return self.__unicode__().encode('utf-8').decode()", "def __str__(self):\n\n if compat.PY3:\n return self.__unicode__()\n return self.__bytes__()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def 
__repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()" ]
[ "0.80334365", "0.7646696", "0.7642632", "0.7509533", "0.74480665", "0.74480665", "0.74373275", "0.74339825", "0.74090093", "0.7385252", "0.73606116", "0.730152", "0.7259204", "0.72199345", "0.7141298", "0.7138032", "0.7135251", "0.7126956", "0.7084482", "0.7061448", "0.7044826", "0.7024524", "0.7021991", "0.702071", "0.6973154", "0.69568986", "0.69546103", "0.69335794", "0.69285184", "0.69207996", "0.69138604", "0.6888685", "0.6887571", "0.6887571", "0.6885295", "0.6868837", "0.686396", "0.6856058", "0.6844763", "0.6822968", "0.6819199", "0.6811635", "0.6804996", "0.6804996", "0.67783874", "0.6770449", "0.67576534", "0.6738823", "0.67346185", "0.67297214", "0.6718303", "0.6714054", "0.67080754", "0.67054296", "0.67022943", "0.6700204", "0.6697949", "0.6697508", "0.6693749", "0.6673282", "0.66628146", "0.6653355", "0.6653355", "0.6653355", "0.6653355", "0.6653355", "0.66504264", "0.66494584", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954", "0.66437954" ]
0.0
-1
Simplifies the display of the object's description
def __unicode__(self): return self.title
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def description(self):", "def description():", "def __str__(self):\n return str(self.description)[:10]", "def Description(self) -> str:", "def Description(self) -> str:", "def __str__(self):\n return \"{0} : {1}\".format(self.name, self.description)", "def description(self):\n pass", "def description(self):\n pass", "def get_description(self):", "def __str__(self) -> str:\n return self.description", "def __str__(self):\n return self.description", "def __str__(self):\n return self.description", "def __str__(self):\n\n return (self._display_name + \"\\n\\t\" + self._description + \"\\n\")", "def __str__(self):\n return f\"{self._desc:16s}\"", "def __repr__(self):\n return self.description", "def __str__(self):\n if self.__description:\n return self.__description\n return repr(self)", "def __str__(self):\n return self._name+self._description", "def describe(self) -> Text:\n return self.__repr__()", "def __str__(self):\n description = \"Object class Critter.\"\n description += \"\\nName: \" + self.name + \"\\nMood: \" + self.mood + \"\\nHunger: \" + str(self.hunger) + \"\\nBoredom: \" + str(self.boredom) + \"\\n\"\n return description", "def _repr_(self):\n return self._description", "def description(self) -> str:\n pass", "def __str__(self):\n return \"Description(values={},data_model={})\".format(\n self._values, self.data_model\n )", "def get_description(self):\n pass", "def description(self):\n desc = self.title\n ops = []\n for attribute in self.attributes.all():\n value = attribute.value\n if isinstance(value, list):\n ops.append(\n \"%s = '%s'\" % (attribute.type, (\", \".join([str(v) for v in value])))\n )\n else:\n ops.append(\"%s = '%s'\" % (attribute.type, value))\n if ops:\n desc = \"%s (%s)\" % (desc, \", \".join(ops))\n return desc", "def describe(self):\n return str(self)", "def _generateDescription(self, obj, **args):\n result = []\n if obj.description:\n label = self._script.utilities.displayedLabel(obj) or \"\"\n name = obj.name or \"\"\n desc = obj.description.lower()\n if not (desc in name.lower() or desc in label.lower()):\n result.append(obj.description)\n return result", "def describe(self) -> str:", "def describe(self):\n return ''", "def __str__(self):\n\n desc = self.description\n if desc is not None:\n return str(desc)\n\n desc = self.debugDescription\n if desc is not None:\n return str(desc)\n\n return repr(self)", "def describe(self):\n print(self.description)", "def describe(self):\n print(self.description)", "def __str__(self):\n return \"<%s: %s>\" % (self.__class__, self.describe())", "def get_description(self) -> str:\n pass", "def description(self) -> str:\n raise NotImplementedError", "def description(self) -> str:\n raise NotImplementedError", "def description(self) -> str:\n raise NotImplementedError", "def description(self) -> str:\r\n raise NotImplementedError", "def description(self) -> str:\r\n raise NotImplementedError", "def description(self) -> str:\r\n raise NotImplementedError", "def _description(self):\n return None", "def description(self):\n\n\t\treturn \"%d %s %s\" % (self.vintage, self.winery.name, self.name)", "def get_description(self):\n raise NotImplementedError", "def description(self):\n return ' '.join(self._description)", "def get_description(self):\r\n return self.__description", "def description(self):\n\t\treturn \"%s, %s\" % (self.name, self.country)", "def __str__(self) -> str:\n string = fr\"{self.id}\\. 
`{self.content}`\"\n if self.description:\n string += f\" - {self.description}\"\n return string", "def get_descriptive_name(self):\n description = (f\"{self.year} {self.manufacturer.title()} \"\n f\"{self.model.title()}\")\n\n return description", "def toString(self):\n\t\ts = \"A %s titled '%s':\\n\\n\" % (self.getSpecString(), self.getName())\n\t\ts += \"It's summary reads: %s\\n\\n\" % (self.getDescription())\n\t\ts += \"~~\\n%s\\n~~\" % (self.getAllItemsStr())\n\t\treturn s", "def __str__(self):\n description = \"-------- %s --------\" % (self.name)\n description += \"\\nnmax = %i\" % (self.nmax)\n description += \"\\nnslots = %i\" % (self.nslots)\n description += \"\\nbonus_power = %i\" % (self.bonus_power)\n description += \"\\nbonus_initiative = %.1f\" % (self.bonus_initiative)\n description += \"\\nneeds_drive = %i\" % (self.needs_drive)\n description += \"\\nis_mobile = %i\" % (self.is_mobile)\n description += \"\\n----- Default Parts -----\"\n for i in range(len(self.default_parts)):\n description += \"\\n%i) %s\" % (i + 1, self.default_parts[i].name)\n return description", "def short_description(self):\n return self.name", "def get_description():\n raise NotImplementedError", "def describe(self):\r\n print( self.name + \" is here!\" )\r\n print( self.description )", "def get_description(self):\n return DisplayText(self._description)", "def __str__(self):\n if len(self.label) > 0:\n descr = [\"'%s', target='%s' [%s]\" % (self.label, self.target.name, self.target.body_type)]\n else:\n descr = [\"target='%s' [%s]\" % (self.target.name, self.target.body_type)]\n if self.baseline:\n descr[0] += ', initial baseline offset=%f' % (self.baseline.poly[-1],)\n if self.beam:\n descr[0] += ', beam height=%f' % (self.beam.height,)\n for scan_ind, scan in enumerate(self.scans):\n descr.append('%4d: %s' % (scan_ind, str(scan)))\n return '\\n'.join(descr)", "def __repr__(self) -> str:\n return f\"<Doc[{self.desc}]>\"", "def __str__(self):\n txt = (self.name, self.description)\n return txt", "def get_description(self):\n return \"-\".join(\n map(str, (self.release, self.chromosome, self.start, self.reference, self.alternative))\n )", "def description(self):\n return \"Le célèbre \" + self._nom + \" se trouve devant vous.\"", "def __str__(self):\n return self.summary()", "def description(self):\n return self.visual_desc", "def _get_description(self):\n return self.__description", "def _get_description(self):\n return self.__description", "def get_description(self):\n return self.__description", "def __str__(self):\n\n descr = \"You are in the \" + self.name + \"\\n\"\n for key in self.exits:\n descr += \"You can go \" + key + \" to the \" + self.exits[key].name + \"\\n\"\n for item in self.inventory:\n descr += \"There is a \" + item.name + \" here.\\n\"\n for item in self.objects:\n descr += item.name + \" is here.\"\n return descr", "def shortDescription(self):\n return None", "def get_description(self):\n return re.sub('\\n\\W+',' ', self.__doc__)", "def describe(self) -> str:\n return (\n \"{name} {surname} è nata/o a {birth_municipality} ({birth_province_code}) il {birthdate}.\"\n \" Ora vive a {municipality} ({province_code}) in {address} {house_number}.\"\n ).format(**self._data)", "def __repr__(self):\n return f\"{self.number} {self.name}: {self.desc}\"", "def description(self):\n return (self.__doc__ or \"\").strip()", "def __description__(self):\r\n return id(self)", "def ObjectDescription(object_id):\n rhobj = rhutil.coercerhinoobject(object_id, True, True)\n return 
rhobj.ShortDescription(False)", "def descString(self):\n return \"\".join ([self.Name, \" (AR \", str(self.AR), \", Max DEX \"\\\n , str(self.MaxDEXMod), \") - \", str(self.Value), \" gp\"])", "def __str__(self):\n return self.__class__.__name__ + '\\n' + self.__class__.__doc__", "def get_description(cls) -> str:\n return cls.__doc__ or \"\"", "def __str__(self):\n model = self._meta.verbose_name.title()\n title = self.extended_object.get_title()\n return f\"{model:s}: {title:s}\"", "def descString(self):\n return \"\".join ([self.Name, \" (\", str(self.RollCount), \"d\"\\\n , str(self.RollMax), \"; \", str(self.CritRollMin), \"-\"\\\n , str(self.CritRollMax), \"x\", str (self.CritRollMult)\\\n , \") - \", str(self.Value), \" gp\"])", "def description(self, description: str):\n return self.swag({\n 'description': normalize_indent(description),\n })", "def complete_alt_title(self, obj):\n return str(obj)", "def description(self):\n\t\tif self._record is not None:\n\t\t return self._record.description\n\t\telse:\n\t\t return \"\"", "def __str__(self) -> str:\n if len(self.saliva_data) > 0:\n return \"\"\"{}\n Saliva Type(s): {}\n Saliva Sample Times: {}\n Structure: {}\n \"\"\".format(\n self.name, self.saliva_types, self.sample_times, self.structure\n )\n return \"\"\"{}\n Structure: {}\"\"\".format(\n self.name, self.structure\n )", "def __repr__(self):\n return '{cls}(name={x.name!r}, description={x.description!r})'.format(cls=self.__class__.__name__, x=self)", "def short_desc(self):\n return str(self.id)", "def _short_info(self) -> str:\n nullable = \"Nullable \" if self._is_nullable else \"\"\n\n # Good candidate for python pattern matching once <3.10 support no longer required\n num_metadata_items = len(self.__metadata)\n if num_metadata_items == 0:\n metadata = \"\"\n elif num_metadata_items == 1:\n metadata = f\" [with {num_metadata_items} metadata item]\"\n else:\n metadata = f\" [with {num_metadata_items} metadata items]\"\n\n return f\"<{nullable}{self.__class__.__name__}{metadata}: {self._resolve_field_name()}>\"", "def description(self, newDescription=None):\n pass", "def get_description(self):\n return self.description", "def get_description(self):\n return self.description", "def get_description(self):\n return self.description", "def get_description(self):\n return self.description", "def _repr_(self):\n if self._is_identity:\n description = \"Identity map \"\n else:\n description = \"Automorphism \"\n if self._name is not None:\n description += self._name + \" \"\n description += \"of the {}\".format(self._fmodule)\n return description", "def __str__(self):\n if __debug__:\n description = ('CM' in debug.active)\n else:\n description = False\n return self.asstring(short=False, header=True, summary=True,\n description=description)", "def description(self) :\n\t\ttry :\n\t\t\treturn self._description\n\t\texcept Exception as e:\n\t\t\traise e", "def description(self) :\n\t\ttry :\n\t\t\treturn self._description\n\t\texcept Exception as e:\n\t\t\traise e", "def summary_string(self) -> str:", "def get_description(obj):\n if not isinstance(obj.data, dict):\n return \"No description found.\"\n abstract = \"\"\n authors = []\n categories = []\n final_identifiers = []\n\n # Get identifiers\n dois = get_value(obj.data, \"dois.value\", [])\n if dois:\n final_identifiers.extend(dois)\n\n system_no = get_value(obj.data, \"external_system_numbers.value\", [])\n if system_no:\n final_identifiers.extend(system_no)\n\n # Get subject categories, adding main one first. 
Order matters here.\n record_categories = get_value(obj.data, \"arxiv_eprints.categories\", []) + \\\n get_value(obj.data, \"subject_terms.term\", [])\n for category_list in record_categories:\n if isinstance(category_list, list):\n categories.extend(category_list)\n else:\n categories.append(category_list)\n categories = list(OrderedDict.fromkeys(categories)) # Unique only\n abstract = get_value(obj.data, \"abstracts.value\", [\"\"])[0]\n authors = obj.data.get(\"authors\", [])\n return render_template('inspire_workflows/styles/harvesting_record.html',\n object=obj,\n authors=authors,\n categories=categories,\n abstract=abstract,\n identifiers=final_identifiers)", "def get_describe_name(self):\n long_name = str(self.year)+ ' ' + self.make.title()+ ' ' +self.model.title()\n return long_name", "def __repr__(self):\n indent = len(self.type) + 2\n jstr = ',\\n' + ' ' * indent\n\n props = self._display_properties()\n\n params = jstr.join('{:}={:}'.format(p, summary(self[p],\n indent=indent))\n for (p, dp) in props)\n return '<{}({:})>'.format(self.type, params)", "def short_description(self):\n return self._short_description", "def short_description(self):\n return self._short_description", "def __repr__(self) -> str:\n desc: List[str] = []\n if classname := self['classname']:\n desc.append(classname)\n desc.append('Entity')\n if name := self['targetname']:\n desc.append(f'\"{name}\"({classname})')\n else:\n desc.append(classname)\n if hammerid := self['hammerid']:\n desc.append(f'#{hammerid}')\n if origin := self['origin']:\n desc.append(f'@ ({origin})')\n return f'<{\" \".join(desc)}>'", "def summary(self):\r\n return '%s%s: %s%s %s%s' % (BLUE, self.title,\r\n GREEN, self.description,\r\n NORMAL, self.link)", "def _get_desc(self):\n return self.__desc" ]
[ "0.80118245", "0.7948067", "0.77770305", "0.77731", "0.77731", "0.7686482", "0.7643417", "0.7643417", "0.763395", "0.7609585", "0.75963145", "0.75963145", "0.7564732", "0.7494345", "0.74726653", "0.74393976", "0.738546", "0.7361387", "0.7352297", "0.7344379", "0.73168623", "0.7306112", "0.73039144", "0.73027706", "0.7297566", "0.7284567", "0.72767526", "0.7275305", "0.7232894", "0.72325236", "0.72325236", "0.72266495", "0.72053957", "0.7183095", "0.7183095", "0.7183095", "0.71822953", "0.71822953", "0.71822953", "0.716475", "0.71234804", "0.7103459", "0.7068572", "0.70670307", "0.7034467", "0.70146096", "0.70125234", "0.7011311", "0.70070624", "0.7004163", "0.6990304", "0.6978259", "0.69769365", "0.69682205", "0.6966544", "0.696423", "0.6944381", "0.6932077", "0.6920855", "0.69205654", "0.6916492", "0.6916492", "0.69112056", "0.6908907", "0.6899296", "0.6897129", "0.6887552", "0.6869183", "0.68616736", "0.6860223", "0.68578285", "0.6857478", "0.6853065", "0.68428415", "0.6831206", "0.68186873", "0.6817746", "0.68026835", "0.6800119", "0.6794996", "0.67780477", "0.6774085", "0.67639875", "0.67517704", "0.67493254", "0.67493254", "0.67493254", "0.67493254", "0.6747445", "0.6745568", "0.6745301", "0.6745301", "0.6745249", "0.6742896", "0.67324275", "0.67293346", "0.6727295", "0.6727295", "0.6726438", "0.67256707", "0.6723856" ]
0.0
-1
Returns the object as a string
def __unicode__(self): return unicode(self.user)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_str(self) -> str:", "def toString():", "def to_string(self):\r\n return self.__str__()", "def toString(self) -> unicode:\n ...", "def toString(self) -> unicode:\n ...", "def safeToString():", "def toString(self) -> str:\n raise NotImplementedError", "def get_string(self):\n return self.__str", "def __str__(self):\n buf = StringIO()\n self.write_to(buf)\n return buf.getvalue()", "def valueToString():", "def __str__(self) -> str:\n return str(self.getvalue())", "def __str__(self) -> str:\n return str(self.getvalue())", "def __str__(self) -> str:\n return str(self.getvalue())", "def __pout__(self):\n return self.__str__()", "def to_string(self):\r\n return self.command()", "def __str__ (self) :\n\n return self.as_string()", "def as_str(self):\n return self.as_type(str)", "def __str__(self):\n return self.get_string()", "def __str__(self):\n return str(self.get_data())", "def string(self):\n return self._my_string", "def ToString(self):\r\n pass", "def ToString(self):\r\n pass", "def __str__(self):\n # print(self.get_string())\n return self.get_string()", "def __str__(self):\n # print(self.get_string())\n return self.get_string()", "def format(self) -> str:", "def __str__(self):\n return self.string", "def __str__(self):\n return str(self.GetString())", "def asString(self):\n\n res = []\n for v in list(self.vars.values()):\n res.append(v.asString())\n res.append('')\n for e in list(self.enums.values()):\n res.append(e.asString())\n res.append('')\n for s in list(self.structs.values()):\n res.append(s.defAsString())\n res.append('')\n for s in list(self.structs.values()):\n res.append(s.dataAsString())\n\n return '\\n'.join(res)", "def as_string(self):\n return self.__repr__()", "def to_str(self) -> str:\n fields: t.Tuple[dataclasses.Field, ...] 
= dataclasses.fields(self)\n items: t.List[str] = [\"# Concepts for getting assets from the API\"]\n for field in fields:\n value: AssetsHelper = getattr(self, field.name)\n items.append(value.to_str().strip())\n value: str = \"\\n\\n\".join(items)\n return value", "def __str__(self):\n buf = io.StringIO()\n args.output.write(buf, self.root, self.headings)\n return buf.getvalue()", "def _tostr(t):\n\treturn t.__unicode__()", "def __str__(self):\n return self.get_str()", "def get_string2(self):\n pass", "def __str__(self) -> str:\n return self.encode()", "def to_string(self, name, value):\r\n \r\n return str(value)", "def _to_string(self):\r\n parts = []\r\n if self.offering:\r\n parts.extend([self.org, self.offering])\r\n if self.branch:\r\n parts.append(u\"{prefix}+{branch}\".format(prefix=self.BRANCH_PREFIX, branch=self.branch))\r\n if self.version_guid:\r\n parts.append(u\"{prefix}+{guid}\".format(prefix=self.VERSION_PREFIX, guid=self.version_guid))\r\n return u\"+\".join(parts)", "def get_string(self, **kwargs):\n ...", "def __str__(self):\n return self.format()", "def __str__(self) -> str:", "def __str__(self) -> str:", "def __str__(self) -> str:", "def __str__(self) -> str:", "def __str__(self) -> str:\n return str(self.data)", "def __str__(self):\n st=\"\"\n for g in self:\n st+=g.fasta()\n st+=\"\\n\"\n return st", "def to_string(self, increment):\n\n raise Exception(\"Not implemented!\"+self.__class__)", "def __str__(self):\n str(self.data)\n return str", "def __str__(self):\n return str(self.__s)", "def __str__(self):\n return ''.join(self.contents)", "def getString(self):\n return \"\".join(self.data)", "def __str__(self):\n return _libsbml.string___str__(self)", "def as_string(self, value, context=None):\n return str(value)", "def toString(self): #$NON-NLS-1$\r", "def dumps(self) -> str:\n ...", "def __str__(self):\n return str(self._data)", "def text(self) -> str:", "def __str__(self):\n return self.printable()", "def __str__(self):\n return ''.join(self)", "def str_info(self):\n return \"\"", "def __str__(self):\n return str(self.__data)", "def __str__(self):\n slist = self.buildstrings()\n local_s = ''\n for slistsub in range(0, len(slist)):\n local_s += slist[slistsub]\n if slistsub != len(slist)-1:\n local_s += '\\n'\n return local_s", "def c_str(self):\n return _libsbml.string_c_str(self)", "def __str__(self):\n return str(self.data)", "def __str__(self):\n return str(self.data)", "def __str__(self):\n return str(self.data)", "def __str__(self):\n return str(self.data)", "def __str__(self):\n return str(self.data)", "def str_(object_):\n return str(object_)", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def 
to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())", "def to_str(self):\n return pformat(self.to_dict())" ]
[ "0.8379665", "0.81285405", "0.7657668", "0.7572911", "0.7572911", "0.75156456", "0.7466699", "0.74074817", "0.7362591", "0.72081953", "0.72052455", "0.72052455", "0.72052455", "0.71933657", "0.7128041", "0.71150506", "0.7070591", "0.7048586", "0.70474184", "0.7038429", "0.70101076", "0.70101076", "0.69861406", "0.69861406", "0.6974091", "0.6973168", "0.6967204", "0.6949086", "0.6948101", "0.69369584", "0.692505", "0.68854266", "0.6879101", "0.68576044", "0.6855386", "0.6855161", "0.685509", "0.68537015", "0.6851764", "0.6851526", "0.6851526", "0.6851526", "0.6851526", "0.68431294", "0.68377435", "0.6826933", "0.6823366", "0.68212306", "0.6817462", "0.68152356", "0.6815098", "0.68140733", "0.68113065", "0.6789854", "0.6789462", "0.677926", "0.67676103", "0.676561", "0.6763676", "0.6758876", "0.6752915", "0.67520076", "0.6743653", "0.6743653", "0.6743653", "0.6743653", "0.6743653", "0.67316073", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016", "0.67243016" ]
0.0
-1
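Each record above pairs a natural-language query with a positive code snippet, a list of negative snippets, and a parallel list of relevance scores, followed by an overall score and rank for the positive document. Below is a minimal sketch of how one such record might be consumed. The keys "query", "document", and "negatives" come from the objective metadata shown in the records; the score-list key ("negative_scores"), the one-JSON-object-per-line layout, and the file name are assumptions for illustration only.

import json

# A minimal sketch, not part of the dump: assumes each record is one JSON
# object per line; "negative_scores" and the file name are hypothetical.
def hardest_negative(record):
    negatives = record["negatives"]  # candidate code strings
    # Scores appear quoted in the dump, so cast them to float before comparing.
    scores = [float(s) for s in record["negative_scores"]]
    best = max(range(len(scores)), key=lambda i: scores[i])
    return scores[best], negatives[best]

if __name__ == "__main__":
    with open("triplets.jsonl") as fh:  # hypothetical file name
        for line in fh:
            record = json.loads(line)
            score, snippet = hardest_negative(record)
            print(f"{record['query']!r}: hardest negative at {score:.4f}")

The highest-scoring negative is the one most similar to the query, which is why it serves as the "hardest" distractor for a retrieval model.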
Download and unpack the Zenodo-minted data for the current stitches distribution.
def fetch_zenodo(self): # full path to the stitches root directory where the example dir will be stored if self.data_dir is None: data_directory = pkg_resources.resource_filename('stitches', 'data') else: data_directory = self.data_dir # build needed subdirectories if they do not already exist tas_data_path = os.path.join(data_directory, "tas-data") temp_data_path = os.path.join(data_directory, "temp-data") if not os.path.exists(tas_data_path): os.mkdir(tas_data_path) if not os.path.exists(temp_data_path): os.mkdir(temp_data_path) # get the current version of stitches that is installed current_version = pkg_resources.get_distribution('stitches').version try: data_link = InstallPackageData.DATA_VERSION_URLS[current_version] except KeyError: msg = f"Link to data missing for current version: {current_version}. Using default version: {InstallPackageData.DEFAULT_VERSION}" data_link = InstallPackageData.DEFAULT_VERSION print(msg) # retrieve content from URL print("Downloading example data for stitches version {}. This may take a few minutes...".format(current_version)) response = requests.get(data_link) with zipfile.ZipFile(BytesIO(response.content)) as zipped: # extract each file in the zipped dir to the project for f in zipped.namelist(): extension = os.path.splitext(f)[-1] # Extract only the csv and nc files if all([len(extension) > 0, extension in (".csv", ".nc")]): basename = os.path.basename(f) # Check to see if tas-data is in the file path if "tas-data" in f: basename = os.path.join("tas-data", basename) out_file = os.path.join(data_directory, basename) # extract to a temporary directory to be able to only keep the file out of the dir structure with tempfile.TemporaryDirectory() as tdir: # extract file to temporary directory zipped.extract(f, tdir) # construct temporary file full path with name tfile = os.path.join(tdir, f) print(f"Unzipped: {out_file}") # transfer only the file sans the parent directory to the data package shutil.copy(tfile, out_file)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch_zenodo(self):\n\n # retrieve content from URL\n try:\n logging.info(f\"Downloading example data from {self.url}\")\n r = requests.get(self.url, stream=True)\n with io.BytesIO() as stream:\n with tqdm.wrapattr(\n stream,\n 'write',\n file=sys.stdout,\n miniters=1,\n desc=self.url,\n total=int(r.headers.get('content-length', 0))\n ) as file:\n for chunk in r.iter_content(chunk_size=4096):\n file.write(chunk)\n with zipfile.ZipFile(stream) as zipped:\n # extract each file in the zipped dir to the project\n for f in zipped.namelist():\n logging.info(\"Unzipped: {}\".format(os.path.join(self.destination, f)))\n zipped.extract(f, self.destination)\n\n logging.info(\"Download and install complete.\")\n\n self.close_logger()\n\n except requests.exceptions.MissingSchema:\n msg = f\"Unable to download data from {self.url}\"\n logging.exception(msg)\n self.close_logger()\n raise", "def fetch_the_data():\n subprocess.run([\"wget\", \"https://storage.googleapis.com/recipe-box/recipes_raw.zip\"])\n subprocess.run([\"unzip\", \"recipes_raw.zip\", \"-d\", RECIPES_DIRPATH])\n subprocess.run([\"rm\", \"recipes_raw.zip\"])", "def x_download():\n\t#_loadconfig()\n\tconf = _get_config()\n\t#print conf['xplane']\n\tdownload_url = conf['xplane']['download']\n\tlocal(\"wget -P %s %s\" % (navimport.conf.work_dir(\"/xplane_zips\"), download_url))", "def download_data():\n url = 'https://www.dropbox.com/s/h9ubx22ftdkyvd5/ml-latest-small.zip?dl=1'\n urllib.request.urlretrieve(url, 'ml-latest-small.zip')\n zfile = zipfile.ZipFile('ml-latest-small.zip')\n zfile.extractall()\n zfile.close()", "def fetch_taiwan_ntu_dsi():\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n uraw = 'http://dl.dropbox.com/u/2481924/taiwan_ntu_dsi.nii.gz'\n ubval = 'http://dl.dropbox.com/u/2481924/tawian_ntu_dsi.bval'\n ubvec = 'http://dl.dropbox.com/u/2481924/taiwan_ntu_dsi.bvec'\n ureadme = 'http://dl.dropbox.com/u/2481924/license_taiwan_ntu_dsi.txt'\n folder = pjoin(dipy_home, 'taiwan_ntu_dsi')\n\n md5_list = ['950408c0980a7154cb188666a885a91f', # data\n '602e5cb5fad2e7163e8025011d8a6755', # bval\n 'a95eb1be44748c20214dc7aa654f9e6b', # bvec\n '7fa1d5e272533e832cc7453eeba23f44'] # license\n\n url_list = [uraw, ubval, ubvec, ureadme]\n fname_list = ['DSI203.nii.gz', 'DSI203.bval', 'DSI203.bvec', 'DSI203_license.txt']\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading raw DSI data (91MB)...')\n\n for i in range(len(md5_list)):\n _get_file_data(pjoin(folder, fname_list[i]), url_list[i])\n check_md5(pjoin(folder, fname_list[i]), md5_list[i])\n\n print('Done.')\n print('Files copied in folder %s' % folder)\n print('See DSI203_license.txt for LICENSE.')\n print('For the complete datasets please visit :')\n print('http://dsi-studio.labsolver.org')\n\n else:\n print('Dataset is already in place. If you want to fetch it again, please first remove the folder %s ' % folder)", "def download():\n base_loc = DATA_DIR + '/raw/human_activity'\n loc = base_loc + '/human_activity.zip'\n if os.path.exists(loc):\n print('Path already exists at {}. 
If you wish to re-download you must delete this folder.'.format(loc))\n return\n if not os.path.exists(base_loc):\n os.mkdir(base_loc)\n\n url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/00341/HAPT%20Data%20Set.zip'\n urllib.request.urlretrieve(url, loc)\n\n with zipfile.ZipFile(loc, 'r') as zip_ref:\n zip_ref.extractall(base_loc)", "def download():\n basedir = os.path.dirname(os.path.dirname(__file__))\n print(basedir)\n datadir = os.path.join(basedir,\"data/NeonTreeEvaluation/\")\n print(\"Downloading data files to {}\".format(datadir)) \n eval_url = zenodo_url(concept_rec_id=\"3723356\", datadir=datadir)", "def electroweakinos_likelihoods_download():\n oneLbb_HEPData_URL = \"https://www.hepdata.net/record/resource/1267798?view=true\"\n targz_filename = \"oneLbb_workspaces.tar.gz\"\n response = requests.get(oneLbb_HEPData_URL, stream=True)\n assert response.status_code == 200\n with open(targz_filename, \"wb\") as file:\n file.write(response.content)\n assert (\n hashlib.sha256(open(targz_filename, \"rb\").read()).hexdigest()\n == \"64bbbef9f1aaf9e30d75c8975de4789484329b2b825d89331a6f2081661aa728\"\n )\n # Open as a tarfile\n yield tarfile.open(targz_filename, \"r:gz\")\n os.remove(targz_filename)", "def download():\n toydata = requests.get(DATA_URL).json()\n return toydata", "def download_data(origin_time, net, sta, loc, chan):\n \n dataDir_get = '/import/netapp-m-02-bay200/mseed_online/archive/'\n \n fileName = \".\".join((net, sta, \".\" + chan + \".D\",\n origin_time.strftime(\"%Y.%j\")))\n filePath = os.path.join(dataDir_get, origin_time.strftime(\"%Y\"),\n net, sta, chan + '.D', fileName)\n o_time2 = origin_time + 86400\n fileName2 = \".\".join((net, sta, \".\" + chan + \".D\",\n o_time2.strftime(\"%Y.%j\")))\n filePath2 = os.path.join(dataDir_get, o_time2.strftime(\"%Y\"),\n net, sta, chan + '.D', fileName2)\n\n if os.path.isfile(filePath):\n if origin_time.hour > 21:\n st = Stream()\n st.extend(read(filePath, starttime = origin_time - 180,\n endtime = origin_time + 3 * 3600))\n st.extend(read(filePath2, \n starttime = UTCDateTime(o_time2.year, o_time2.month, \n o_time2.day, 0, 0),\n endtime = origin_time + 3 * 3600))\n st.merge(method=-1)\n else:\n st = read(filePath, starttime = origin_time - 180,\n endtime = origin_time + 3 * 3600)\n else:\n print \"++++ cannot find the following file: \\n %s \\n++++\" % filePath\n\n if not st:\n raise RotationalProcessingException('Data not available for this'\n ' event...')\n st.trim(starttime=origin_time-180, endtime=origin_time+3*3600)\n\n print 'Download of', st[0].stats.station, st[0].stats.channel, \\\n 'data successful!'\n\n return st", "def download(root: str) -> None:\n for ix in [1, 2]:\n fn = f\"lizard_images{ix}.zip\"\n url = f\"https://warwick.ac.uk/fac/cross_fac/tia/data/lizard/{fn}\"\n SimpleDownloader.download(url, root)\n\n url = \"https://warwick.ac.uk/fac/cross_fac/tia/data/lizard/lizard_labels.zip\"\n SimpleDownloader.download(url, root)\n LizardDataModule.extract_zips(root, rm=True)", "def download_data():\n url = 'https://www.dropbox.com/s/xk4glpk61q3qrg2/imdb.tgz?dl=1'\n urllib.request.urlretrieve(url, 'imdb.tgz')\n tar = tarfile.open(\"imdb.tgz\")\n tar.extractall()\n tar.close()", "def maybe_download():\n\n print(\"Downloading Inception 5h Model ...\")\n download.maybe_download_and_extract(url=data_url, download_dir=data_dir)", "def download_mission(self):\n cmds = self.vehicle.commands\n cmds.download()\n # Wait until download is complete.\n cmds.wait_valid()", "def download_data():\n url = 
'https://www.dropbox.com/s/8oehplrobcgi9cq/imdb.tgz?dl=1'\n urllib.request.urlretrieve(url, 'imdb.tgz')\n tar = tarfile.open(\"imdb.tgz\")\n tar.extractall()\n tar.close()", "def download_data():\r\n print('Downloading cifar-10 data...')\r\n request.urlretrieve(dataurl)\r\n print('Done')\r\n print('Please unzip files. command is:')\r\n print('gzip -d cifar-10-python.tar.gz')\r\n print('tar -xf cifar-10-python.tar')\r\n exit()", "def manually_download_MNIST(DATASET_DIR):\n\n output_path = os.path.join(DATASET_DIR, \"MNIST.zip\")\n if not os.path.exists(DATASET_DIR):\n os.mkdir(DATASET_DIR)\n url = \"https://github.com/vandedok/IIC_tutorial/releases/download/v0.2/MNIST.zip\"\n print(\"Downloading MNIST...\", end=\" \")\n urllib.request.urlretrieve(url, output_path)\n print(\"Done!\")\n\n with zipfile.ZipFile(output_path, \"r\") as zip_ref:\n zip_ref.extractall(DATASET_DIR)", "def _download(self):\n self._system.download(\"http://geant4.web.cern.ch/geant4/support/source/\" + self._tar_name)", "def download_data():\n # Download Unihan meta data for radical-stroke analysis\n os.system(' mkdir Unihan')\n os.system(' curl -O http://unicode.org/Public/UCD/latest/ucd/Unihan.zip')\n os.system(' apt-get -y install unzip')\n os.system(' unzip Unihan.zip -d Unihan/')\n os.system(' rm Unihan.zip')\n\n data_path = 'Unihan/Unihan_RadicalStrokeCounts.txt'\n assert(os.path.isfile(data_path))\n\n return data_path", "def download():\r\n reader = GSODDataReader()\r\n year_list = range(2001, 2012)\r\n austin = reader.collect_data(year_list, exact_station=True,\r\n station_name='AUSTIN CAMP MABRY', state='TX', country='US')\r\n houston = reader.collect_data(year_list, exact_station=True,\r\n station_name='HOUSTON/D.W. HOOKS', state='TX', country='US')\r\n new_york = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEW YORK/LA GUARDIA', state='NY', country='US')\r\n newark = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEWARK INTL AIRPORT', state='NJ', country='US')\r\n punta_arenas = reader.collect_data(year_list, exact_station=True,\r\n station_name='PUNTA ARENAS', country='CH')\r\n wellington = reader.collect_data(year_list, exact_station=True,\r\n station_name='WELLINGTON AIRPORT', country='NZ')\r\n store = HDFStore('weather.h5')\r\n store['austin'] = austin\r\n store['houston'] = houston\r\n store['nyc'] = new_york\r\n store['newark'] = newark\r\n store['punta_arenas'] = punta_arenas\r\n store['wellington'] = wellington\r\n store.close()", "def _download_chieffi04():\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FApJ%2F608%2F405'\n import urllib\n print('Downloading Chieffi 04 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()", "def _download_chieffi04():\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FApJ%2F608%2F405'\n import urllib\n print('Downloading Chieffi 04 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()", "def cli(date, path, mission):\n download.main(path, mission, date)", "def do_download() -> None:\n\n asset_dir = pycozmo.util.get_cozmo_asset_dir()\n resource_file = asset_dir / \"obb.zip\"\n\n # Check 
whether resources have already been downloaded.\n if os.path.exists(asset_dir / \"resources.txt\"):\n print(f\"Resources already available in {asset_dir}\")\n sys.exit(1)\n\n # Create directory structure.\n try:\n os.makedirs(asset_dir)\n except FileExistsError:\n pass\n\n print(\"Downloading...\")\n\n res = download(resource_file)\n if not res:\n print(\"ERROR: Download failed.\")\n sys.exit(2)\n\n print(\"Extracting...\")\n\n res = extract(\n resource_file,\n asset_dir / \"obb\")\n if not res:\n print(\"ERROR: Extraction failed.\")\n sys.exit(3)\n os.remove(str(resource_file))\n\n res = extract(\n asset_dir / \"obb\" / \"Android\" / \"obb\" / \"com.anki.cozmo\" / \"main.1204.com.anki.cozmo.obb\",\n asset_dir / \"..\")\n if not res:\n print(\"ERROR: Secondary extraction failed.\")\n sys.exit(4)\n shutil.rmtree(asset_dir / \"obb\")\n\n res = extract(\n asset_dir / \"cozmo_resources\" / \"sound\" / \"AudioAssets.zip\",\n asset_dir / \"cozmo_resources\" / \"sound\")\n if not res:\n print(\"ERROR: Sound extraction failed.\")\n sys.exit(5)\n\n print(f\"Resources downloaded successfully in {asset_dir}\")", "def download_data():\n urllib.request.urlretrieve('http://cs.iit.edu/~culotta/cs579/a1/edges.txt.gz', 'edges.txt.gz')", "def download(self, args):\n\n\t\t\"\"\" Default argument for Architecture \"\"\"\n\t\tif len(args) >= 4:\n\t\t\tarch = args[3]\n\t\telse:\n\t\t\tarch = platform.processor()\n\n\t\t\"\"\" Default argument for Version \"\"\"\n\t\tif len(args) >= 3:\n\t\t\tif args[2] == \"latest\":\n\t\t\t\tversion = \"Latest\"\n\t\t\telse:\n\t\t\t\tversion = args[2]\n\t\telse:\n\t\t\tversion = \"Latest\"\n\n\t\t\"\"\" Find package path from package list, based on prev. arguments \"\"\"\n\t\tif len(args) >= 2:\n\t\t\tpackage = args[1]\n\t\t\tfilename = False\n\t\t\t\n\t\t\tversions = self.master.Dump(package)\n\t\t\tfor d in versions:\n\t\t\t\tif d[\"Version\"] == version:\n\t\t\t\t\tif d[\"Version\"] != \"Latest\" and d[\"Architecture\"] == arch:\n\t\t\t\t\t\tfilename = d[\"Filename\"]\n\t\t\t\t\telse:\n\t\t\t\t\t\tfor e in versions:\n\t\t\t\t\t\t\tif e[\"Version\"] == d[\"LatestVersion\"] and e[\"Architecture\"] == arch:\n\t\t\t\t\t\t\t\tfilename = e[\"Filename\"]\n\t\t\t\t\t\t\t\tversion = d[\"LatestVersion\"];\n\t\t\tif not filename:\n\t\t\t\tself.write_line(\"ERROR XXX: Package not found.\")\n\t\t\t\treturn\n\n\t\t\t\"\"\" Find chunks to download \"\"\"\n\t\t\tid = 0\n\t\t\tto_download = False\n\t\t\tfor f in self.torrent_info.files():\n\t\t\t\tprint(f.path.replace(\"packages/\", \"\") + \" = \" + filename);\n\t\t\t\tif f.path.replace(\"packages/\", \"\") == filename:\n\t\t\t\t\tto_download = f\n\t\t\t\t\tbreak;\n\t\t\t\tid += 1\n\t\t\tif not to_download:\n\t\t\t\tprint(\"ERROR XXX: dunno\")\n\t\t\t\treturn\n\n\t\t\t\"\"\" Set chunks priority to 7? 
(download max priority) \"\"\"\n\t\t\tpr = self.torrent_info.map_file(id, 0, to_download.size);\n\t\t\tn_pieces = math.ceil(pr.length / self.torrent_info.piece_length() + 1);\n\n\t\t\tfor i in range(self.torrent_info.num_pieces()):\n\t\t\t\tif i in range(pr.piece, pr.piece + n_pieces):\n\t\t\t\t\tself.handler.piece_priority(i, 7)\n\n\n\t\t\t\"\"\" Print download of package status \"\"\"\n\t\t\tself.print_status(id, pr, package, version, filename)\n\t\t\t\t\n\t\t\t\"\"\" Check the server for hash validation \"\"\"\n\t\t\tif self.valid_tpkg_file(to_download.path):\n\t\t\t\tself.write_line(\"DONE {0} {1} {2} {3}\".format(package, version, arch, self.config[\"daemon\"][\"rootdir\"] + \"/\" + to_download.path).replace('//', '/'))\n\t\t\telse:\n\t\t\t\tself.write_line(\"ERROR XXX: Hash verification failed.\")\n\t\telse:\n\t\t\tself.write_line(\"INVALID ARGUMENTS\");", "def _download(self) -> None:\n download_url(\n self.url,\n self.root,\n filename=self.data_dir,\n md5=self.md5 if self.checksum else None,\n )\n self._extract()", "def download():\n env_banner()\n\n download_data = Download()\n download_data()\n click.echo('Download done.')", "def download_and_extract(down_dir=download_dir, url=tuda_url):\n\n wget.download(url, down_dir) \n tar_filepath = os.path.join(down_dir, \"german-speechdata-package-v2.tar.gz\")\n #with tarfile.open(tar_filepath, \"r\") as tar:\n # tar.extractall(down_dir)", "def fetch_syn_data():\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n url = 'https://dl.dropboxusercontent.com/u/5918983/'\n t1 = url + 't1.nii.gz'\n b0 = url + 'b0.nii.gz'\n \n folder = pjoin(dipy_home, 'syn_test')\n\n md5_list = ['701bda02bb769655c7d4a9b1df2b73a6', # t1\n 'e4b741f0c77b6039e67abb2885c97a78'] # b0\n\n url_list = [t1, b0]\n fname_list = ['t1.nii.gz', 'b0.nii.gz']\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading t1 and b0 volumes from the same session (12MB)...')\n\n for i in range(len(md5_list)):\n _get_file_data(pjoin(folder, fname_list[i]), url_list[i])\n check_md5(pjoin(folder, fname_list[i]), md5_list[i])\n\n print('Done.')\n print('Files copied in folder %s' % folder)\n else:\n print('Dataset is already in place. If you want to fetch it again, please first remove the folder %s ' % folder)", "def download_hess_dr1_data():\n download_data_files(FILENAMES_HESS_DR1)", "def download(self):\n \n if not os.path.exists(self.directory):\n os.mkdir(self.directory)\n if not os.path.exists(self.fullPath):\n os.mkdir(self.fullPath)\n \n dm = pymodis.downmodis.downModis(self.fullPath, self.password, self.username, self.url, self.tiles, self.path, self.dataset, \n self.today, self.enddate, jpg = False, debug = True, timeout = 30)\n dm.connect()\n self.filelist = dm.getListDays() \n self.observations = len(dm.getListDays()) \n \n if self.dataset != 'MOD13Q1.005':\n if self.observations % 2 != 0:\n raise IOError(\"The total number of observations through time must be an even number. Please add or remove an observation before or after %s\" % str(self.filelist[0]))\n \n dm.downloadsAllDay()\n logger.log('SUCCESS', 'Downloading is complete! 
%d HDF files of %s data for tiles %s were downloaded for the following days: %s' % (self.observations*len(self.tiles), str(self.dataset), str(self.tiles), str(self.filelist)))", "def _download_sst(cls):\n path = Path(cls.dataset_path)\n if path.exists():\n return\n\n path.mkdir(parents=True, exist_ok=True)\n generic_download(\n url=\"https://s3.amazonaws.com/enso-data/SST-binary.csv\",\n text_column=\"Text\",\n target_column=\"Target\",\n filename=SST_FILENAME\n )", "def _download_karakas():\n #url = 'http://zenodo.org/record/12800/files/dartmouth.h5'\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FMNRAS%2F403%2F1413'\n import urllib\n print('Downloading Karakas 2010 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()", "def _download_karakas():\n #url = 'http://zenodo.org/record/12800/files/dartmouth.h5'\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FMNRAS%2F403%2F1413'\n import urllib\n print('Downloading Karakas 2010 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()", "def _download_data(self):\n logger.info('Downloading ChemIDplus data...')\n outfile_path = self._src_data_dir / self._src_fname\n\n self._ftp_download(self._src_server,\n self._src_dir_path,\n self._src_data_dir,\n self._src_fname)\n\n parser = ET.iterparse(outfile_path, ('start', 'end'))\n date = next(parser)[1].attrib['date']\n version = date.replace('-', '')\n outfile_path.rename(self._src_data_dir / f'chemidplus_{version}.xml')\n logger.info('Finished downloading ChemIDplus data')", "def _download(self) -> None:\n if self._check_integrity():\n print(\"Files already downloaded and verified\")\n return\n\n download_and_extract_archive(\n self.url,\n self.root,\n filename=self.filename,\n md5=self.md5 if self.checksum else None,\n )\n\n # Generate train/val/test splits\n # Always check the sha256 of this file before executing\n # to avoid malicious code injection\n with working_dir(self.root):\n with open(\"split.py\") as f:\n split = f.read().encode(\"utf-8\")\n assert hashlib.sha256(split).hexdigest() == self.sha256\n exec(split)", "def download(self):\n pass", "def download(self):\n pass", "def download(self, root='./'):\n dir = os.path.join(root, 'tiny-imagenet-200')\n dir_train = os.path.join(dir, 'train')\n if os.path.exists(dir) and os.path.exists(dir_train):\n print('==> Already downloaded.')\n return\n\n path = Path(os.path.join(root, 'tiny-imagenet-200.zip'))\n if not os.path.exists(path):\n os.makedirs(path.parent, exist_ok=True)\n\n print('==> Downloading TinyImagenet200...')\n with urllib.request.urlopen(self.url) as response, \\\n open(str(path), 'wb') as out_file:\n shutil.copyfileobj(response, out_file)\n\n print('==> Extracting TinyImagenet200...')\n with zipfile.ZipFile(str(path)) as zf:\n zf.extractall(root)", "def download_dataset(dataset):\n\n if dataset not in URLS:\n print(f\"unknown dataset {dataset}\")\n sys.exit(0)\n\n filename = f'{dataset}.tar.gz'\n url = URLS[dataset]\n\n if not os.path.exists(filename):\n print(f'downloading dataset \"{dataset}\"')\n os.system(f'curl \"{url}\" -o {filename}')\n else:\n print(f'zipfile \"{filename}\" already exists, 
remove it if you want to re-download.')\n\n if not os.path.exists(dataset):\n print(f'extracting \"{filename}\"')\n os.system(f'tar -xvf {filename}')\n else:\n print(f'folder \"{dataset}\" already exists, remove it if you want to re-create.')\n\n image_chips = f'{dataset}/image-chips'\n label_chips = f'{dataset}/label-chips'\n if not os.path.exists(image_chips) and not os.path.exists(label_chips):\n print(\"creating chips\")\n libs.images2chips.run(dataset)\n else:\n print(f'chip folders \"{image_chips}\" and \"{label_chips}\" already exist, remove them to recreate chips.')", "def download():\n raise NotImplementedError", "def download(self):\n\n with open(self.dataset_path) as dataset_file:\n dataset = json.load(dataset_file)\n\n path = \"\".join([POST_HIT_PATH, dataset[\"dataset\"][\"data_path\"]])\n if not os.path.exists(path):\n os.makedirs(path)\n\n protocole = dataset[\"dataset\"][\"protocole\"]\n\n download_links = []\n\n for resource in dataset[\"dataset\"][\"resources\"]:\n file_path = \"\".join([path, resource[\"filename\"]])\n\n #Check if the download link has not been used before (One download link for all)\n if resource[\"download_link\"] not in download_links:\n \n print(\"DOWNLOADING : {}\".format(resource[\"filename\"]))\n f = urllib.request.urlopen(resource[\"download_link\"])\n data = f.read()\n with open(file_path, \"wb\") as download_file:\n download_file.write(data)\n\n download_links.append(resource[\"download_link\"])\n\n \n #Extract all files from the tar archives if necessary\n if tarfile.is_tarfile(file_path):\n tf = tarfile.open(file_path)\n tf.extractall()", "def download():\n try:\n cli.run(\n [URL, '--output', TEMP_DIR],\n )\n except SystemExit:\n return None", "def download():\n \"\"\"\n \"The book p.79 has an error.\n \"https://github.com/login/oauth/authorize?client_id=7e0a3cd836d3e544dbd9&redirect_uri=https%3A%2F%2Fgist.github.com%2Fauth%2Fgithub%2Fcallback%3Freturn_to%3Dhttps%253A%252F%252Fgist.github.com%252Fyoungsoul%252Ffc69665c5d08e189c57c0db0e93017a6&response_type=code&state=9b385430ee7cd1a75ca91c1d1cb6c565111f6b81e54a71f42ae9b22035241b9b\n \"\"\"\n subprocess.call([\n 'wget',\n 'https://github.com/amplab/datascience-sp14/raw/master/lab7/mldata/mnist-original.mat', \n '-P',\n 'origin_data/'\n ])\n logger.info('Download success!')", "def download_and_prepare(self):\n self._download_and_prepare()", "def download_dailydialog(daily_raw_fname: str, data_path: str):\n wget.download(daily_raw_fname, data_path)\n # Manually unzip the train/dev/test files", "def download(all):\n print(\"Downloading\")", "def download_kitti(\n root_dir: str,\n keep_zip: bool = True\n) -> None:\n \n # URLs to .zip files\n kitti_raw_lidar_fname = 'data_depth_velodyne.zip'\n kitti_depth_annot_fname = 'data_depth_annotated.zip'\n kitti_test_set_fname = 'data_depth_selection.zip'\n kitti_calib_fnames = ['2011_09_26_calib.zip',\n '2011_09_28_calib.zip',\n '2011_09_29_calib.zip', \n '2011_09_30_calib.zip', \n '2011_10_03_calib.zip']\n \n kitti_base_url = 'http://s3.eu-central-1.amazonaws.com/avg-kitti'\n kitti_raw_data_base_url = '/'.join([kitti_base_url, 'raw_data'])\n kitti_raw_lidar_url = '/'.join([kitti_base_url, kitti_raw_lidar_fname])\n kitti_depth_annot_url = '/'.join([kitti_base_url, kitti_depth_annot_fname])\n kitti_test_set_url = '/'.join([kitti_base_url, kitti_test_set_fname])\n\n # Local path\n dest_dir = os.path.join(root_dir, 'kitti_depth_completion')\n\n # Check if output directory exists\n if not(os.path.isdir(dest_dir)):\n os.makedirs(dest_dir, exist_ok=True)\n 
print('Destination directory does not exist, created new directory at: {}'.format(dest_dir))\n\n # Make temporary directory\n tmp_dir = '/'.join([dest_dir, 'tmp'])\n if not(os.path.isdir(tmp_dir)):\n os.makedirs(tmp_dir, exist_ok=True)\n\n # Directories name\n # lidar_raw_dir = 'velodyne_raw'\n # ground_truth_dir = 'groundtruth'\n rgb_dir = 'image'\n intrinsics_dir = 'intrinsics'\n \n # Raw lidar\n dest_zip_file = '/'.join([tmp_dir, kitti_raw_lidar_fname])\n\n if not os.path.isfile(dest_zip_file):\n # Check in case the file was downloaded and a previous run didn't complete.\n download_file(kitti_raw_lidar_url, dest_zip_file, verbose=True)\n else:\n print('Destination file already exists, skipping download')\n\n unzip_file(dest_zip_file, dest_dir, keep_zip=keep_zip)\n \n # Groundtruth (annotations)\n dest_zip_file = '/'.join([tmp_dir, kitti_depth_annot_fname])\n\n if not os.path.isfile(dest_zip_file):\n # Check in case the file was downloaded and a previous run didn't complete.\n download_file(kitti_depth_annot_url, dest_zip_file, verbose=True)\n else:\n print('Destination file already exists, skipping download')\n\n unzip_file(dest_zip_file, dest_dir, keep_zip=keep_zip)\n\n # Test/val selection data\n dest_zip_file = '/'.join([tmp_dir, kitti_test_set_fname])\n if not os.path.isfile(dest_zip_file):\n # Check in case the file was downloaded and a previous run didn't complete.\n download_file(kitti_test_set_url, dest_zip_file, verbose=True)\n else:\n print('Destination file already exists, skipping download')\n\n unzip_file(dest_zip_file, dest_dir, keep_zip=keep_zip)\n \n # Move test set and validation select sets to place\n depth_sel_path = \"/\".join([dest_dir, 'depth_selection'])\n shutil.move(\"/\".join([depth_sel_path, \"val_selection_cropped\"]), \"/\".join([dest_dir, \"val_selection_cropped\"]))\n shutil.move(\"/\".join([depth_sel_path, \"test_depth_completion_anonymous\"]), \"/\".join([dest_dir, \"test_depth_completion_anonymous\"]))\n shutil.rmtree(depth_sel_path)\n\n # Calibration files\n calib_dest_dir = '/'.join([dest_dir, intrinsics_dir])\n\n for calib_file in kitti_calib_fnames:\n calib_file_url = '/'.join([kitti_raw_data_base_url, calib_file])\n dest_zip_file = '/'.join([tmp_dir, calib_file])\n if not os.path.isfile(dest_zip_file):\n download_file(calib_file_url, dest_zip_file, verbose=True)\n else:\n print('Destination file already exists')\n\n unzip_file(dest_zip_file, calib_dest_dir, keep_zip=keep_zip)\n \n # RGB images for train and validation \n # (test and val select are downloaded with RGB images)\n print('Downloading RGB images\\n')\n for split in ['train', 'val']:\n # List all depth maps\n split_root_path = '/'.join([dest_dir, split])\n depth_map_list = os.listdir(split_root_path)\n print(f'-Split type: {split}, Split size: {len(depth_map_list)} [depth map]\\n')\n\n # Iterate over depth maps in split\n for depth_map_name in depth_map_list[0:1]:\n # Initialize\n zip_file_name = depth_map_name + '.zip'\n orig_file_url = '/'.join([kitti_raw_data_base_url, depth_map_name[:-5], zip_file_name])\n rgb_dest_path = '/'.join([split_root_path, depth_map_name, rgb_dir])\n dest_zip_file = '/'.join([tmp_dir, zip_file_name])\n tmp_depth_map_path = '/'.join([tmp_dir, depth_map_name])\n tmp_depth_map_img_path = '/'.join([tmp_depth_map_path, rgb_dir])\n\n # Check if destination file or .zip file already exists\n if os.path.isdir(rgb_dest_path):\n print('{} : destination files already exist, skipping\\n'.format(depth_map_name))\n continue\n\n # Check if it already exists\n if 
os.path.exists(dest_zip_file): \n print('{} : file already exists, skipping\\n'.format(depth_map_name))\n else:\n # Download\n download_file(orig_file_url, dest_zip_file, verbose=True)\n \n # Unzip\n unzip_file(dest_zip_file, tmp_dir, rgb_raw_zip=True, keep_zip=keep_zip)\n\n # Move images to correct directory\n shutil.move(tmp_depth_map_img_path, rgb_dest_path)\n shutil.rmtree(tmp_depth_map_path)\n \n # Remove temporary directory\n if not keep_zip:\n shutil.rmtree(tmp_dir)", "def download_and_unzip_data(\n url=\"https://storage.googleapis.com/simpeg/em_examples/tdem_groundedsource/tdem_groundedsource.tar\",\n):\n # download the data\n downloads = utils.download(url)\n\n # directory where the downloaded files are\n directory = downloads.split(\".\")[0]\n\n # unzip the tarfile\n tar = tarfile.open(downloads, \"r\")\n tar.extractall()\n tar.close()\n\n return downloads, directory", "def run(self):\n download(self.attempt)", "def download_data():\n\n if not os.path.exists(zipfile_path):\n print(f'Downloading {config.download_url} to {zipfile_path}')\n urlretrieve(config.download_url, zipfile_path)\n print(f'Successfully downloaded {zipfile_path}')\n\n zip_ref = ZipFile(zipfile_path, 'r')\n zip_ref.extractall(config.raw_data_dir)\n zip_ref.close()\n\n os.rename(f\"{config.raw_data_dir}/cornell movie-dialogs corpus\", extracted_dir)", "def downloadFile() -> None:\n logging.info(f\"Downloading current data set {getTime()}\")\n with open(DATA_FILE,\"wb\") as f:\n f.write(get(\"https://covid.ourworldindata.org/data/owid-covid-data.csv\").text.encode())\n logging.info(f\"Finished Downloading current data set {getTime()}\")", "def download(self):\n if not self.url:\n raise RuntimeError(self.tips)\n\n download_file_name = os.path.join(\n self.raw_path, os.path.splitext(os.path.basename(self.url))[0]\n )\n file_format = self.url.split(\".\")[-1]\n if \"amazon\" in self.url:\n raw_file_path = os.path.join(\n self.raw_path, f\"{self.dataset_name}.json.{file_format}\"\n )\n else:\n raw_file_path = os.path.join(\n self.raw_path, f\"{self.dataset_name}.{file_format}\"\n )\n if \"1drv.ms\" in self.url:\n file_format = \"zip\"\n raw_file_path = os.path.join(\n self.raw_path, f\"{self.dataset_name}.{file_format}\"\n )\n if not os.path.exists(raw_file_path):\n print(f\"download_file: url: {self.url}, raw_file_path: {raw_file_path}\")\n download_file(self.url, raw_file_path)\n if \"amazon\" in raw_file_path:\n # amazon dataset do not unzip\n print(\"amazon dataset do not decompress\")\n return\n elif file_format == \"gz\":\n file_name = raw_file_path.replace(\".gz\", \"\")\n with gzip.open(raw_file_path, \"rb\") as fin:\n with open(file_name, \"wb\") as fout:\n shutil.copyfileobj(fin, fout)\n else:\n shutil.unpack_archive(\n raw_file_path, self.raw_path, format=get_format(file_format)\n )\n\n if not os.path.exists(download_file_name):\n return\n elif os.path.isdir(download_file_name):\n os.rename(\n download_file_name, os.path.join(self.raw_path, self.dataset_name)\n )\n else:\n os.rename(\n download_file_name,\n os.path.join(\n self.raw_path,\n f'{self.dataset_name}.{download_file_name.split(\".\")[-1]}',\n ),\n )", "def download_and_preprocess(self):\n print('Preparing steering angle database.')\n print('Downloading...')\n self.download()\n print('Preprocessing...')\n self.preprocess()", "def main():\n # the url for african daily and global daily\n african_dialy_url = \"https://data.chc.ucsb.edu/products/CHIRPS-2.0/africa_daily/tifs/p25/\"\n global_daily_url = 
\"https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_daily/tifs/p25/\"\n\n\n each_year_list = GetRasterYears(url=african_dialy_url)\n new_path = makenewdir(each_year_list)\n years_new_list = fecthrasterurl(url=african_dialy_url)\n downloadwithwget(each_year_list, years_new_list, new_path)", "def maybe_download_and_extract():\n dest_directory = FLAGS.model_dir\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (\n filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n print()\n statinfo = os.stat(filepath)\n print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.')\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def fetch_stanford_hardi():\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n url = 'https://stacks.stanford.edu/file/druid:yx282xq2090/'\n uraw = url + 'dwi.nii.gz'\n ubval = url + 'dwi.bvals'\n ubvec = url + 'dwi.bvecs'\n folder = pjoin(dipy_home, 'stanford_hardi')\n\n md5_list = ['0b18513b46132b4d1051ed3364f2acbc', # data\n '4e08ee9e2b1d2ec3fddb68c70ae23c36', # bval\n '4c63a586f29afc6a48a5809524a76cb4'] # bvec\n\n url_list = [uraw, ubval, ubvec]\n fname_list = ['HARDI150.nii.gz', 'HARDI150.bval', 'HARDI150.bvec']\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading raw HARDI data (87MB)...')\n\n for i in range(len(md5_list)):\n _get_file_data(pjoin(folder, fname_list[i]), url_list[i])\n check_md5(pjoin(folder, fname_list[i]), md5_list[i])\n\n print('Done.')\n print('Files copied in folder %s' % folder)\n else:\n print('Dataset is already in place. If you want to fetch it again, please first remove the folder %s ' % folder)", "def download_demo_files():\n\n master_zip_url = 'https://github.com/%s/archive/%s.zip' % \\\n (sample_data_gh_repo, sample_data_gh_commit)\n ofile = os.path.join(cache_dir,\n 'salem-sample-data-%s.zip' % sample_data_gh_commit)\n odir = os.path.join(cache_dir)\n\n # download only if necessary\n if not os.path.exists(ofile):\n print('Downloading salem-sample-data...')\n _urlretrieve(master_zip_url, ofile)\n\n # Trying to make the download more robust\n try:\n with zipfile.ZipFile(ofile) as zf:\n zf.extractall(odir)\n except zipfile.BadZipfile:\n # try another time\n if os.path.exists(ofile):\n os.remove(ofile)\n _urlretrieve(master_zip_url, ofile)\n with zipfile.ZipFile(ofile) as zf:\n zf.extractall(odir)\n\n # list of files for output\n out = dict()\n for root, directories, filenames in os.walk(sample_data_dir):\n for filename in filenames:\n out[filename] = os.path.join(root, filename)\n\n return out", "def dascasi_download():\n p = argparse.ArgumentParser(description=\"download DASC all-sky camera data\")\n p.add_argument(\"site\", choices=[\"EAA\", \"FYU\", \"KAK\", \"PKR\", \"TOO\", \"VEE\"])\n p.add_argument(\n \"startend\", help=\"start/end times UTC e.g. 
2012-11-03T06:23 2012-11-03T07\", nargs=2\n )\n p.add_argument(\"odir\", help=\"directory to write downloaded FITS to\")\n p.add_argument(\"-w\", \"--wavelen\", help=\"request specific wavelength(s)\", nargs=\"+\")\n p.add_argument(\"-host\", default=\"ftp://optics.gi.alaska.edu\")\n p = p.parse_args()\n\n # host = \"ftp://mirrors.arsc.edu/AMISR/PKR/DASC/RAW/\"\n download(p.startend, p.site, p.odir, p.host, p.wavelen)", "def extract(*args):\r\n bank_rut= args[0]\r\n bank_id= args[1]\r\n\r\n while True:\r\n try:\r\n print(\"Downloading file for...\" + str(args[0]),end=\"\\n\")\r\n myfile = requests.get(\"https://www.sbif.cl/sbifweb/internet/bancos/balances/\"+str(YEAR)+\"/\"+bank_id+\".zip\", allow_redirects=True)\r\n time.sleep(rd.randint(4,7))\r\n break\r\n except:\r\n print(\"request failed\")\r\n pass\r\n \r\n open(str(PATH.joinpath(\"./data_banks/\"+bank_id+\".zip\")), 'wb').write(myfile.content)\r\n time.sleep(rd.randint(1,2))\r\n \r\n yield (bank_rut,bank_id)", "def download_dataset(self):\n dataset_name = ADE20K_URL.split(\"/\")[-1].split(\".\")[0]\n req = urllib.request.Request(ADE20K_URL, method=\"HEAD\")\n size_file = urllib.request.urlopen(req).headers[\"Content-Length\"]\n download = \"n\"\n while download != \"y\":\n if not self.yes_all:\n download = input(f\"You are about to download {dataset_name} ({size_file} bytes) to the temporary folder {self.tmp_path}. Do you want to continue? [y/n] \\n\")\n if self.yes_all or download == \"y\":\n logger.info(f\"Downloading dataset {dataset_name} at {ADE20K_URL} to temporary folder {self.tmp_path}...\")\n zip_path, hdrs = urllib.request.urlretrieve(ADE20K_URL, f\"{self.tmp_path}/{dataset_name}.zip\")\n logger.info(f\"Extracting {zip_path} to temporary folder {self.tmp_path}...\")\n with zipfile.ZipFile(f\"{zip_path}\", 'r') as z:\n z.extractall(f\"{self.tmp_path}\")\n self.input_data_path = zip_path[:-4]\n break\n elif download == \"n\":\n logger.error(f\"Cannot pursue without downloading the dataset.\")\n sys.exit()\n else:\n logger.error(\"Please enter a valid answer (y or n).\")", "def __download(self, year, month, day):\n print 'Download...'\n logging.info('[download]->Download...')\n t = datetime.datetime(year, month, day)\n spdata.download(stime=t, stations=self.aodSetting.stations, ftp_dir=self.aodSetting.ftp_root, data_dir=self.aodSetting.dd_dir, ftp_ip=self.aodSetting.ftp_ip,\n user=self.aodSetting.ftp_user, pword=self.aodSetting.ftp_psw)\n print 'Download Done!'\n logging.info('[download]->Download Done!')", "def fetch(self,url=URL):\n\t\tlog.info('downloading latest PHE case data')\n#\t\tself.data=lookup_json(url)\n\t\tself.fetch_csv() #JSON discontinued; switched back to CSV\n\t\tself.edition=self.latest_samples\n\t\tlog.info(f'Last samples from {self.edition}')", "def download_http(self, url):\n\n # Set things up.\n # ==============\n\n out = None\n headers = {}\n if (url.username is not None) and (url.password is not None):\n tmp = base64.b64encode(':'.join([url.username, url.password]))\n headers['Authorization'] = \"Basic %s\" % tmp\n\n\n # Toe the waters.\n # ===============\n # We start with an HTTP HEAD request to check the status.\n\n conn = httplib.HTTPConnection(url.netloc)\n conn.request(\"HEAD\", url.path, '', headers)\n r = conn.getresponse()\n conn.close()\n if self.verbose:\n print >> sys.stderr, url, r.status, ''\n\n\n # Bail.\n # =====\n # Short-cut when we just care whether it's a package.\n\n if url.path.endswith('/'):\n out = r.status == 200\n\n\n elif r.status == 200:\n\n # Wade in.\n # ========\n # If 
the status is positive we check to see if we've already\n # downloaded the latest copy.\n\n etag = r.getheader('etag', '')\n lm = r.getheader('last-modified', '')\n key = sha.new(str(url) + etag + lm).hexdigest()\n\n if not self.cachedir:\n raise ValueError(\"netimp.importer.cachedir not set\")\n if not os.path.isdir(self.cachedir):\n raise IOError( \"netimp.importer.cachedir not found \"\n + \"(%s)\" % self.cachedir\n )\n\n path = join(self.cachedir, key)\n if os.path.isfile(path):\n out = open(path, 'rb')\n else:\n\n # Dive in!\n # ========\n # We don't have this module locally yet: download it for real.\n\n conn = httplib.HTTPConnection(url.netloc)\n conn.request(\"GET\", url.path, '', headers)\n r = conn.getresponse()\n if r.status == 200: # just in case!\n fp = open(path, 'w+b')\n fp.write(r.read())\n fp.flush()\n fp.close()\n out = open(path, 'rb')\n conn.close()\n\n return out", "def maybe_download_and_extract():\n dest_directory = FLAGS.model_dir\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (\n filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath,\n reporthook=_progress)\n print()\n statinfo = os.stat(filepath)\n print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.')\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def dowload_vt():\n print get_date_time_now() + \" ==> Download VT Samples started!\"\n print get_date_time_now() + \" ==> Nothing downloaded\"", "def _download(self, path):\n self.logger.info('Getting Million Song Dataset...')\n self.logger.info('Downloading Echo Nest Taste Subprofile train data...')\n base_url = 'http://millionsongdataset.com/sites/default/files/challenge/'\n\n download_dataset(\n base_url + 'train_triplets.txt.zip',\n join(self.data_folder, 'train.zip')\n )\n rename(join(self.data_folder, 'train'), path)\n\n self.logger.info('Downloading evaluation data for MSD Challenge...')\n download_dataset(\n base_url + 'EvalDataYear1MSDWebsite.zip',\n join(path, 'eval.zip')\n )\n rename(\n join(path, 'EvalDataYear1MSDWebsite'),\n join(path, 'evaluation')\n )\n\n self.logger.info('Downloading list of matching errors...')\n url = 'http://millionsongdataset.com/sites/default/files/tasteprofile/sid_mismatches.txt'\n download_url(url, join(path, 'sid_mismatches.txt'))", "def maybe_download_and_extract():\n dest_directory = FLAGS.model_dir\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (\n filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n print()\n statinfo = os.stat(filepath)\n print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def _download(self):\n self._system.download_file(\"http://curl.haxx.se/download/\" + self._tar_name)", "def download_archive(self):\n\n def time_convert(structure):\n \"\"\"\n :param structure: tuple representation of time\n :return: GitHub 
archive time\n \"\"\"\n \n \n join_number_to_zero = lambda number: (\"\" if number > 9 else \"0\") + str(number)\n\n return \"%s-%s-%s-%s\" % (\n structure.tm_year, join_number_to_zero(structure.tm_mon), join_number_to_zero(structure.tm_mday),\n structure.tm_hour)\n\n current_time = self.get_time()\n self.logger.debug(__name__ + \": \" + \"current time: \" + str(gmtime(current_time)))\n\n difference = -25200\n #timezone difference in seconds between GMT and west coast of USA\n\n downloading_time = int(timegm(self.config[\"last_connection_time\"])) + 3600\n self.logger.debug(__name__ + \": \" + \"downloading time: \" + str(gmtime(downloading_time)))\n\n if downloading_time > current_time - 7200:\n self.logger.info(__name__ + \": \" + \"unable to download file (time limiting).\")\n return\n\n downloading_time += difference\n\n json_file_name = self.download_file(time_convert(gmtime(downloading_time)))\n\n self.config[\"last_connection_time\"] = gmtime(downloading_time - difference)\n self.logger.debug(__name__ + \": \" + \"last_connection_time: \" + str(self.config[\"last_connection_time\"]))\n\n return json_file_name", "def _Download( self ):\n self._DownloadPipe += PackageUtil.ExecuteSimpleCommand( \"git\", [\"clone\", \"git@github.com:mastbaum/avalanche.git\", self.GetInstallPath()], None, os.getcwd() )\n return", "def download_JSON(sets_to_reference):\n print(\"- Downloading data from MTGAJSON -\")\n for setname in tqdm(sets_to_reference) :\n r = requests.get('https://mtgjson.com/api/v5/'+ setname +'.json.zip')\n zipfile = ROOT_DIR + 'data/sets/' +setname +\".zip\"\n with open(zipfile, \"wb\") as data:\n data.write(r.content)\n with ZipFile(zipfile, 'r') as zipObj:\n # Extract all the contents of zip file in current directory\n zipObj.extractall(path=ROOT_DIR + 'data/sets/')\n ## If file exists, delete it ##\n if os.path.isfile(zipfile):\n os.remove(zipfile)\n else: ## Show an error ##\n print(\"Error: %s file not found\" % zipfile)", "def maybe_download_and_extract():\n dest_directory = FLAGS.data_dir\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n r = requests.Session().get(DATA_URL)\n with open(filepath, 'wb') as fd:\n for chunk in r.iter_content(500):\n fd.write(chunk)\n statinfo = os.stat(filepath)\n print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')\n extracted_dir_path = os.path.join(dest_directory, 'cifar-10-batches-bin')\n if not os.path.exists(extracted_dir_path):\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def _download(self, variables):\n required_vars = ['container', 'src', 'object']\n variables_dict = self._get_vars(variables, required=required_vars)\n\n container_name = variables_dict.pop('container')\n object_name = variables_dict.pop('object')\n src_path = variables_dict.pop('src')\n\n with open(src_path, 'wb') as f:\n f.write(\n self.swift.get_object(\n container_name, object_name, resp_chunk_size=204800\n )\n )\n\n self.state_change = True", "def unzip() -> None:\n logger = logging.getLogger(__name__)\n logger.info('Download the dataset')\n\n # define the destination\n destination = project_dir / 'data' / 'raw'\n\n # extract zip\n zip_file = ZipFile(destination / \"original.zip\")\n zip_file.extractall(destination)", "def fetch_isbi2013_2shell():\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n url = 'https://dl.dropboxusercontent.com/u/2481924/isbi2013_merlet/'\n uraw = url + 
'2shells-1500-2500-N64-SNR-30.nii.gz'\n ubval = url + '2shells-1500-2500-N64.bval'\n ubvec = url + '2shells-1500-2500-N64.bvec'\n folder = pjoin(dipy_home, 'isbi2013')\n\n md5_list = ['42911a70f232321cf246315192d69c42', # data\n '90e8cf66e0f4d9737a3b3c0da24df5ea', # bval\n '4b7aa2757a1ccab140667b76e8075cb1'] # bvec\n\n url_list = [uraw, ubval, ubvec]\n fname_list = ['phantom64.nii.gz', 'phantom64.bval', 'phantom64.bvec']\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading raw 2-shell synthetic data (20MB)...')\n\n for i in range(len(md5_list)):\n _get_file_data(pjoin(folder, fname_list[i]), url_list[i])\n check_md5(pjoin(folder, fname_list[i]), md5_list[i])\n\n print('Done.')\n print('Files copied in folder %s' % folder)\n else:\n print('Dataset is already in place. If you want to fetch it again, please first remove the folder %s ' % folder)", "def download_all_data(self) -> None:\n print(\"Download in progress.\")\n self.download_data(os.environ[\"NC_TOKEN_TRAIN_CHARACTERS\"], \"nextcloud\")\n self.download_data(os.environ[\"NC_TOKEN_TRAIN_FRAGMENTS\"], \"nextcloud\")\n self.download_data(os.environ[\"HABBAKUK_URL\"], \"generic_url\")\n print(\"Download complete!\")", "def download(directory: str) -> None:\n path = f'{directory}/m5/datasets'\n if not os.path.exists(path):\n download_file(directory=path,\n source_url=M5.source_url,\n decompress=True)", "def fetch_housing_data(housing_url=HOUSING_URL, housing_path=HOUSING_PATH):\n logging.info(\"Fetch housing data.....\")\n os.makedirs(housing_path, exist_ok=True)\n tgz_path = os.path.join(housing_path, \"housing.tgz\")\n urllib.request.urlretrieve(housing_url, tgz_path)\n housing_tgz = tarfile.open(tgz_path)\n housing_tgz.extractall(path=housing_path)\n housing_tgz.close()", "def download_compressed_dataset(url):\n raise NotImplementedError", "def download():\n response = requests.get(URL, stream=True)\n\n file = open(FILE_NAME, 'wb')\n file.write(response.content)\n\n with zipfile.ZipFile(FILE_NAME, 'r') as zip_ref:\n zip_ref.extractall()\n\n file.close()\n os.remove(FILE_NAME)", "def __download(self):\n\n if self.__check_exists():\n return\n\n print(\"Downloading AudioMNIST dataset\")\n\n # download files\n try:\n os.makedirs(self.__path)\n except OSError as e:\n if e.errno == errno.EEXIST:\n pass\n else:\n raise\n\n if not os.path.exists(os.path.join(self.__path, 'AudioMNIST-master.zip')):\n url = 'https://github.com/soerenab/AudioMNIST/archive/master.zip'\n wget_data = wget.download(url, out=self.__path)\n\n archive = zipfile.ZipFile(wget_data)\n\n for file in archive.namelist():\n if file.startswith('AudioMNIST-master/data/'):\n archive.extract(file, self.__path)\n\n print(\"Download successful\")\n\n audio_mnist_src = os.path.join(self.__path, 'AudioMNIST-master/data/')\n data = np.array(glob.glob(os.path.join(audio_mnist_src, \"**/*.wav\")))\n\n train_images = []\n train_labels = []\n test_images = []\n test_labels = []\n\n # first 5-cross-validation set from https://github.com/soerenab/AudioMNIST/blob/master/preprocess_data.py\n train_folders = [28, 56, 7, 19, 35, 1, 6, 16, 23, 34, 46, 53, 36, 57, 9, 24, 37, 2,\n 8, 17, 29, 39, 48, 54, 43, 58, 14, 25, 38, 3, 10, 20, 30, 40, 49, 55,\n 12, 47, 59, 15, 27, 41, 4, 11, 21, 31, 44, 50]\n test_folders = [26, 52, 60, 18, 32, 42, 5, 13, 22, 33, 45, 51]\n\n print(\"Converting audio to images\")\n # create train and test folders and save audios as images\n for filepath in tqdm(data):\n # the last one is just a counter for 
repeat of each digit, e.g. say zero once, twice, third time..\n\n dig, vp, rep = filepath.rstrip(\".wav\").split(\"/\")[-1].split(\"_\")\n\n # according to https://github.com/soerenab/AudioMNIST/blob/master/preprocess_data.py\n fs, data = wavf.read(filepath)\n\n # resample\n data = librosa.core.resample(y=data.astype(np.float32), orig_sr=fs, target_sr=8000, res_type=\"scipy\")\n # zero padding\n if len(data) > 8000:\n raise ValueError(\"data length cannot exceed padding length.\")\n elif len(data) < 8000:\n embedded_data = np.zeros(8000)\n offset = np.random.randint(low=0, high=8000 - len(data))\n embedded_data[offset:offset + len(data)] = data\n elif len(data) == 8000:\n # nothing to do here\n embedded_data = data\n pass\n\n # 1. fourier transform\n # stft, with selected parameters, spectrogram will have shape (228, 230)\n f, t, zxx = scipy.signal.stft(embedded_data, 8000, nperseg=455, noverlap=420, window='hann')\n # get amplitude\n zxx = np.abs(zxx[0:227, 2:-1])\n\n # if not 2, then convert to decibel\n zxx = librosa.amplitude_to_db(zxx, ref=np.max)\n\n # normalize from range -80,0 to 0,1\n zxx = (zxx - zxx.min()) / (zxx.max() - zxx.min())\n\n zxx = zxx[::-1] # reverse the order of frequencies to fit the images in the paper\n zxx = np.atleast_3d(zxx).transpose(2, 0, 1) # reshape to (1, img_dim_h, img_dim_w)\n\n # decide to which list to add (train or test)\n if int(vp) in train_folders:\n train_images.append(zxx)\n train_labels.append(int(dig))\n elif int(vp) in test_folders:\n test_images.append(zxx)\n test_labels.append(int(dig))\n else:\n raise Exception('Person neither in train nor in test set!')\n\n train_images = torch.Tensor(train_images).float()\n train_labels = torch.Tensor(train_labels).long()\n test_images = torch.Tensor(test_images).float()\n test_labels = torch.Tensor(test_labels).long()\n\n torch.save(train_images, os.path.join(self.__path, 'train_images_tensor.pt'))\n torch.save(train_labels, os.path.join(self.__path, 'train_labels_tensor.pt'))\n torch.save(test_images, os.path.join(self.__path, 'test_images_tensor.pt'))\n torch.save(test_labels, os.path.join(self.__path, 'test_labels_tensor.pt'))\n\n print('Done!')", "def get_obscode(self, dest_list):\n OBSCODE_SRC = \"http://www.minorplanetcenter.net/iau/lists/ObsCodes.html\"\n OBSCODE_NAME = \"OBSCODE.dat\"\n \n out_fh = None\n in_fh = None \n \n # get observatory codes from http://www.minorplanetcenter.net/iau/lists/ObsCodes.html\n obscode_file = self.get_file(OBSCODE_SRC)\n if (obscode_file == None): return MOPSUpdater.FAIL\n \n try:\n # remove the first two and last line from obscode.\n in_fh = open(obscode_file, \"r\")\n lines = in_fh.readlines()\n out_fh = open(os.path.join(self._workDir, OBSCODE_NAME), \"w\")\n out_fh.writelines(lines[2:-1])\n except Exception, e:\n self._logger.error(\"UPDATE_MOPS_DATA: %s\" % (str(e)))\n return MOPSUpdater.FAIL\n finally:\n if (in_fh): in_fh.close()\n if (out_fh): out_fh.close()\n # <-- end try\n \n try:\n # Install OBSCODE.dat in mops\n for d in dest_list:\n # Verify destination directory\n if (not os.path.exists(os.path.dirname(d))):\n self._logger.error(\"UPDATE_MOPS_DATA: The destination directory %s does not exist.\" % (os.path.dirname(d)))\n continue\n # <-- end if \n shutil.copyfile(os.path.join(self._workDir, OBSCODE_NAME),d)\n # <-- end for \n except Exception, e:\n self._logger.error(\"UPDATE_MOPS_DATA: %s\" % (str(e))) \n return MOPSUpdater.FAIL\n else:\n self._logger.info(\"UPDATE_MOPS_DATA: obscode.dat file update complete.\") \n 
self._logger.debug(\"UPDATE_MOPS_DATA: obscode.dat file contents\")\n self._logger.debug(\"%s\" % (lines[2:-1]))\n return MOPSUpdater.SUCCESS\n # <-- end try", "def get_pronto_data():\n download_if_needed(\"https://s3.amazonaws.com/pronto-data/open_data_year_one.zip\",\n \"open_data_year_one.zip\")", "def _download_to_flc(self):\n self.communicator.download_to_flc()", "def download_and_update():\n with tempfile.TemporaryDirectory(dir=TEMP_DOWNLOAD_DIR) as temp_dir:\n reistijden_jsonfile = os.path.join(temp_dir, 'reistijdenAmsterdam.geojson')\n\n r = requests.get(REISTIJDEN_TARGET_URL)\n with open(reistijden_jsonfile, 'w') as f:\n f.write(r.text)\n\n _parse_and_store_geojson(reistijden_jsonfile)", "def download_dataset():\n \n ID = \"1-3_oB5iSF-c_V65-uSdUlo024NzlgSYZ\"\n script1 = f\"\"\"\n wget --load-cookies /tmp/cookies.txt \"https://docs.google.com/uc?export=download&confirm=$(wget --quiet --save-cookies /tmp/cookies.txt --keep-session-cookies --no-check-certificate 'https://docs.google.com/uc?export=download&id='{ID} -O- | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/\\1\\n/p')&id=\"{ID} -O Data.zip && rm -rf /tmp/cookies.txt\n \"\"\"\n script2 = \"\"\"unzip Data.zip\"\"\"\n\n os.system(script1)\n os.system(script2)", "def download_optional_inputs(flywheel_basedir, sub_dir, ses_dir, rootdir):\n print('Looking for manifest-defined anatomical files')\n t1_anat_dir = os.path.join(flywheel_basedir, 'input', 't1w_anatomy')\n if os.path.isdir(t1_anat_dir):\n t1_file = os.listdir(t1_anat_dir)\n if t1_file:\n t1_file = os.path.join(t1_anat_dir, t1_file[0])\n anat_dir = os.path.join(rootdir, sub_dir, ses_dir, 'anat')\n if not os.path.isdir(anat_dir):\n os.mkdir(anat_dir)\n dest_file = os.path.join(anat_dir, sub_dir + '_' + ses_dir + '_T1w.nii.gz')\n if os.path.exists(dest_file):\n print('Found downloaded T1 file - overwriting!')\n os.remove(dest_file)\n os.remove(dest_file.replace('.nii.gz', '.json'))\n shutil.copyfile(t1_file, dest_file)\n\n t2_anat_dir = os.path.join(flywheel_basedir, 'input', 't2w_anatomy')\n if os.path.isdir(t2_anat_dir):\n t2_file = os.listdir(t2_anat_dir)\n if t2_file:\n anat_dir = os.path.join(rootdir, sub_dir, ses_dir, 'anat')\n if not os.path.isdir(anat_dir):\n os.mkdir(anat_dir)\n t2_file = os.path.join(t2_anat_dir, t2_file[0])\n dest_file = os.path.join(anat_dir, sub_dir + '_' + ses_dir + '_T2w.nii.gz')\n if os.path.exists(dest_file):\n print('Found downloaded T2 file - overwriting!')\n os.remove(dest_file)\n os.remove(dest_file.replace('.nii.gz', '.json'))\n shutil.copyfile(t2_file, dest_file)", "def download(self):\n file_url = posixpath.join(self.mirrors, self.resources)\n _urlretrieve(file_url, os.path.join(self.root, self.resources))", "def download(self):\n file_url = posixpath.join(self.mirrors, self.resources)\n _urlretrieve(file_url, os.path.join(self.root, self.resources))", "def fetch(self, url) -> bytes:\n buffer = self.download(url)\n zfs = ZipFileSystem(buffer, \"r\")\n return zfs.open(zfs.glob(\"*\")[0]).read()", "def fmarket_download():\r\n name = request.args[\"address\"]\r\n markets = get_zipcode_markets(name)\r\n\r\n return Response(markets.to_json(), 200, mimetype=\"application/json\")", "def download(self, verbose):\n # Download datasets\n if verbose:\n print(\"Retrieving datasets from Our World In Data https://github.com/owid/covid-19-data/\")\n # Vaccinations\n v_rec_cols = [\n \"date\", \"location\", \"iso_code\", \"total_vaccinations\", \"people_vaccinated\", \"people_fully_vaccinated\"]\n v_rec_df = pd.read_csv(self.URL_V_REC, 
usecols=v_rec_cols)\n v_loc_df = pd.read_csv(self.URL_V_LOC, usecols=[\"location\", \"vaccines\"])\n v_df = v_rec_df.merge(v_loc_df, how=\"left\", on=\"location\")\n # Tests\n pcr_rec_cols = [\"ISO code\", \"Date\", \"Daily change in cumulative total\", \"Cumulative total\"]\n pcr_df = pd.read_csv(self.URL_P_REC, usecols=pcr_rec_cols)\n pcr_df = pcr_df.rename(columns={\"ISO code\": \"iso_code\", \"Date\": \"date\"})\n pcr_df[\"cumsum\"] = pcr_df.groupby(\"iso_code\")[\"Daily change in cumulative total\"].cumsum()\n pcr_df = pcr_df.assign(tests=lambda x: x[\"Cumulative total\"].fillna(x[\"cumsum\"]))\n # Combine data (vaccinations/tests)\n df = v_df.set_index([\"iso_code\", \"date\"])\n df = df.combine_first(pcr_df.set_index([\"iso_code\", \"date\"]).loc[:, [\"tests\"]])\n df = df.reset_index()\n # Location (country/province)\n df[\"location\"] = df[\"location\"].replace(\n {\n # COG\n \"Congo\": \"Republic of the Congo\",\n }\n )\n df = df.loc[~df[\"iso_code\"].str.contains(\"OWID_\")]\n df[\"location\"] = df.groupby(\"iso_code\")[\"location\"].bfill()\n df.loc[df[\"location\"] == df[\"iso_code\"], \"location\"] = None\n df.loc[df[\"location\"].isna(), \"location\"] = df.loc[df[\"location\"].isna(), \"iso_code\"].apply(\n lambda x: coco.convert(x, to=\"name_short\", not_found=None))\n df[self.PROVINCE] = self.UNKNOWN\n return df", "def maybe_download_and_extract():\n dest_directory = FLAGS['model_dir']\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' %\n (filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n print()\n statinfo = os.stat(filepath)\n print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')\n\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def generic_download(self, data_set, scene, output_dir, chunk_size=1024):\n\n ### LANDSAT DOWNLOAD ###\n if is_product_id(scene['displayId']):\n filename = self.download(scene['displayId'], output_dir)\n\n ### NON-LANDSAT ###\n else:\n filename = self.download(scene['displayId'], output_dir, data_set=data_set)\n\n return filename", "def fetch(thread=False):\r\n if thread:\r\n Fetch.start()\r\n else:\r\n urlretrieve(OBSURL,ZFILE)", "def download_tzdata(self):\n tzdata_download_path = tempfile.mktemp(\".tar.gz\", prefix=\"zones\")\n sys.stderr.write(\"Downloading tzdata-latest.tar.gz from\" \\\n \" ftp.iana.org to %s\\n\"\"\" % tzdata_download_path)\n ftp = ftplib.FTP(\"ftp.iana.org\")\n ftp.login()\n ftp.retrbinary(\"RETR /tz/tzdata-latest.tar.gz\", open(tzdata_download_path, \"wb\").write)\n ftp.quit()\n\n self.tzdata_path = tempfile.mkdtemp(prefix=\"zones\")\n sys.stderr.write(\"Extracting %s to %s\\n\" % (tzdata_download_path, self.tzdata_path))\n tarfile.open(tzdata_download_path).extractall(path=self.tzdata_path)\n os.unlink(tzdata_download_path)", "def download_and_extract(self, package_name):\n self.download(package_name)\n self.extract(package_name)", "def download():\n try:\n response = send_from_directory(\n app.config.get(\"DATA_DIR\"), \"whiteboard.zip\", as_attachment=True\n )\n\n # change headers to stop browser from delivering cached version\n response.headers[\"Last-Modified\"] = datetime.now()\n response.headers[\n \"Cache-Control\"\n ] = 
\"no-store, no-cache, must-revalidate, post-check=0, pre-check=0, max-age=0\"\n response.headers[\"Pragma\"] = \"no-cache\"\n response.headers[\"Expires\"] = \"-1\"\n\n return response\n\n except:\n return traceback.format_exc()" ]
[ "0.66224813", "0.63485897", "0.6268055", "0.6250528", "0.60906255", "0.6000069", "0.59467095", "0.5897437", "0.5875298", "0.5873493", "0.5796746", "0.5769636", "0.57534075", "0.57515734", "0.5747376", "0.5746502", "0.5727455", "0.5726162", "0.5718221", "0.57016945", "0.5694621", "0.5694621", "0.56852186", "0.56740814", "0.56583333", "0.5636215", "0.56280816", "0.56214005", "0.5591625", "0.55885893", "0.5570598", "0.5535615", "0.5527967", "0.5492164", "0.5492164", "0.54902446", "0.5488321", "0.54828", "0.54828", "0.5477756", "0.5473602", "0.5466477", "0.544928", "0.5420022", "0.5414538", "0.5401263", "0.5398896", "0.53983647", "0.5393658", "0.5389108", "0.5377083", "0.5345121", "0.5344235", "0.53206515", "0.5309994", "0.53088075", "0.53054607", "0.53032124", "0.5293019", "0.5285462", "0.5280499", "0.52750736", "0.52734774", "0.5267713", "0.52673495", "0.526586", "0.52468854", "0.52460897", "0.5245898", "0.5242977", "0.5242116", "0.52375686", "0.52363366", "0.52290887", "0.5220056", "0.5219202", "0.52169895", "0.5206586", "0.52010876", "0.51993084", "0.51971346", "0.5190866", "0.5185076", "0.51827055", "0.5178318", "0.5177109", "0.51585597", "0.5137086", "0.5136724", "0.5129462", "0.5129462", "0.5124314", "0.5119638", "0.5119357", "0.51128906", "0.5109781", "0.5106489", "0.5104269", "0.5090067", "0.5088678" ]
0.7480389
0
Download and unpack Zenodo-minted stitches package data that matches the currently installed stitches distribution.
def install_package_data(data_dir: str = None):
    zen = InstallPackageData(data_dir=data_dir)
    zen.fetch_zenodo()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch_zenodo(self):\n\n # full path to the stitches root directory where the example dir will be stored\n if self.data_dir is None:\n data_directory = pkg_resources.resource_filename('stitches', 'data')\n else:\n data_directory = self.data_dir\n\n # build needed subdirectories if they do not already exist\n tas_data_path = os.path.join(data_directory, \"tas-data\")\n temp_data_path = os.path.join(data_directory, \"temp-data\")\n if not os.path.exists(tas_data_path):\n os.mkdir(tas_data_path)\n if not os.path.exists(temp_data_path):\n os.mkdir(temp_data_path)\n\n # get the current version of stitches that is installed\n current_version = pkg_resources.get_distribution('stitches').version\n\n try:\n data_link = InstallPackageData.DATA_VERSION_URLS[current_version]\n\n except KeyError:\n msg = f\"Link to data missing for current version: {current_version}. Using default version: {InstallPackageData.DEFAULT_VERSION}\"\n\n data_link = InstallPackageData.DEFAULT_VERSION\n\n print(msg)\n\n # retrieve content from URL\n print(\"Downloading example data for stitches version {}. This may take a few minutes...\".format(current_version))\n response = requests.get(data_link)\n\n with zipfile.ZipFile(BytesIO(response.content)) as zipped:\n\n # extract each file in the zipped dir to the project\n for f in zipped.namelist():\n\n extension = os.path.splitext(f)[-1]\n\n # Extract only the csv and nc files\n if all([len(extension) > 0, extension in (\".csv\", \".nc\")]):\n\n basename = os.path.basename(f)\n\n # Check to see if tas-data is in the file path\n if \"tas-data\" in f:\n basename = os.path.join(\"tas-data\", basename)\n\n out_file = os.path.join(data_directory, basename)\n\n # extract to a temporary directory to be able to only keep the file out of the dir structure\n with tempfile.TemporaryDirectory() as tdir:\n\n # extract file to temporary directory\n zipped.extract(f, tdir)\n\n # construct temporary file full path with name\n tfile = os.path.join(tdir, f)\n\n print(f\"Unzipped: {out_file}\")\n # transfer only the file sans the parent directory to the data package\n shutil.copy(tfile, out_file)", "def fetch_the_data():\n subprocess.run([\"wget\", \"https://storage.googleapis.com/recipe-box/recipes_raw.zip\"])\n subprocess.run([\"unzip\", \"recipes_raw.zip\", \"-d\", RECIPES_DIRPATH])\n subprocess.run([\"rm\", \"recipes_raw.zip\"])", "def x_download():\n\t#_loadconfig()\n\tconf = _get_config()\n\t#print conf['xplane']\n\tdownload_url = conf['xplane']['download']\n\tlocal(\"wget -P %s %s\" % (navimport.conf.work_dir(\"/xplane_zips\"), download_url))", "def download_data():\n url = 'https://www.dropbox.com/s/h9ubx22ftdkyvd5/ml-latest-small.zip?dl=1'\n urllib.request.urlretrieve(url, 'ml-latest-small.zip')\n zfile = zipfile.ZipFile('ml-latest-small.zip')\n zfile.extractall()\n zfile.close()", "def download(self, args):\n\n\t\t\"\"\" Default argument for Architecture \"\"\"\n\t\tif len(args) >= 4:\n\t\t\tarch = args[3]\n\t\telse:\n\t\t\tarch = platform.processor()\n\n\t\t\"\"\" Default argument for Version \"\"\"\n\t\tif len(args) >= 3:\n\t\t\tif args[2] == \"latest\":\n\t\t\t\tversion = \"Latest\"\n\t\t\telse:\n\t\t\t\tversion = args[2]\n\t\telse:\n\t\t\tversion = \"Latest\"\n\n\t\t\"\"\" Find package path from package list, based on prev. 
arguments \"\"\"\n\t\tif len(args) >= 2:\n\t\t\tpackage = args[1]\n\t\t\tfilename = False\n\t\t\t\n\t\t\tversions = self.master.Dump(package)\n\t\t\tfor d in versions:\n\t\t\t\tif d[\"Version\"] == version:\n\t\t\t\t\tif d[\"Version\"] != \"Latest\" and d[\"Architecture\"] == arch:\n\t\t\t\t\t\tfilename = d[\"Filename\"]\n\t\t\t\t\telse:\n\t\t\t\t\t\tfor e in versions:\n\t\t\t\t\t\t\tif e[\"Version\"] == d[\"LatestVersion\"] and e[\"Architecture\"] == arch:\n\t\t\t\t\t\t\t\tfilename = e[\"Filename\"]\n\t\t\t\t\t\t\t\tversion = d[\"LatestVersion\"];\n\t\t\tif not filename:\n\t\t\t\tself.write_line(\"ERROR XXX: Package not found.\")\n\t\t\t\treturn\n\n\t\t\t\"\"\" Find chunks to download \"\"\"\n\t\t\tid = 0\n\t\t\tto_download = False\n\t\t\tfor f in self.torrent_info.files():\n\t\t\t\tprint(f.path.replace(\"packages/\", \"\") + \" = \" + filename);\n\t\t\t\tif f.path.replace(\"packages/\", \"\") == filename:\n\t\t\t\t\tto_download = f\n\t\t\t\t\tbreak;\n\t\t\t\tid += 1\n\t\t\tif not to_download:\n\t\t\t\tprint(\"ERROR XXX: dunno\")\n\t\t\t\treturn\n\n\t\t\t\"\"\" Set chunks priority to 7? (download max priority) \"\"\"\n\t\t\tpr = self.torrent_info.map_file(id, 0, to_download.size);\n\t\t\tn_pieces = math.ceil(pr.length / self.torrent_info.piece_length() + 1);\n\n\t\t\tfor i in range(self.torrent_info.num_pieces()):\n\t\t\t\tif i in range(pr.piece, pr.piece + n_pieces):\n\t\t\t\t\tself.handler.piece_priority(i, 7)\n\n\n\t\t\t\"\"\" Print download of package status \"\"\"\n\t\t\tself.print_status(id, pr, package, version, filename)\n\t\t\t\t\n\t\t\t\"\"\" Check the server for hash validation \"\"\"\n\t\t\tif self.valid_tpkg_file(to_download.path):\n\t\t\t\tself.write_line(\"DONE {0} {1} {2} {3}\".format(package, version, arch, self.config[\"daemon\"][\"rootdir\"] + \"/\" + to_download.path).replace('//', '/'))\n\t\t\telse:\n\t\t\t\tself.write_line(\"ERROR XXX: Hash verification failed.\")\n\t\telse:\n\t\t\tself.write_line(\"INVALID ARGUMENTS\");", "def download():\n base_loc = DATA_DIR + '/raw/human_activity'\n loc = base_loc + '/human_activity.zip'\n if os.path.exists(loc):\n print('Path already exists at {}. 
If you wish to re-download you must delete this folder.'.format(loc))\n return\n if not os.path.exists(base_loc):\n os.mkdir(base_loc)\n\n url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/00341/HAPT%20Data%20Set.zip'\n urllib.request.urlretrieve(url, loc)\n\n with zipfile.ZipFile(loc, 'r') as zip_ref:\n zip_ref.extractall(base_loc)", "def _download(self):\n self._system.download(\"http://geant4.web.cern.ch/geant4/support/source/\" + self._tar_name)", "def download_hess_dr1_data():\n download_data_files(FILENAMES_HESS_DR1)", "def swissprot_dat_downloader(database_directory: Path) -> Path:\n # Create dat files folder\n temp_dat_files = database_directory / \"temp_swissprot_dat_files\"\n temp_dat_files.mkdir(parents=True, exist_ok=True)\n # Overwrite existing temporal files\n output_dat_file = temp_dat_files / \"uniprot_sprot.dat.gz\"\n if output_dat_file.is_file():\n output_dat_file.unlink()\n # Download swissprot and trembl dat files\n logger.info(\"Downloading Swissprot .dat files\")\n dat_url = (\n f\"ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/\"\n f\"knowledgebase/complete/uniprot_sprot.dat.gz\"\n )\n wget.download(dat_url, out=str(output_dat_file))\n logger.info(\"Finished\")\n\n return output_dat_file", "def get_software(self):\n\n logging.info('downloading OCP 4.3 software bits into {}'.format(self.software_dir))\n for url_key in self.ocp_urls.keys():\n url = self.ocp_urls[url_key]\n dest_name = url.split('/')[-1]\n dest_path = self.software_dir + '/' + dest_name\n dest_path_exist = check_path(dest_path, isfile=True)\n url_check = ''\n if dest_path_exist:\n logging.info('file {} already exists in {}'.format(dest_name, self.software_dir))\n self.inventory_dict['csah']['vars'][url_key] = dest_name\n else:\n url_check = validate_url(url)\n if url_check == '':\n logging.error('file {} in {} is not available'.format(dest_name, url_key))\n self.inventory_dict['csah']['vars'][url_key] = ''\n\n if url_check != '' and url_check.code == 200:\n logging.info('downloading {}'.format(dest_name))\n urlretrieve('{}'.format(url),'{}/{}'.format(self.software_dir, dest_name))\n self.inventory_dict['csah']['vars'][url_key] = dest_name", "def download_data():\n # Download Unihan meta data for radical-stroke analysis\n os.system(' mkdir Unihan')\n os.system(' curl -O http://unicode.org/Public/UCD/latest/ucd/Unihan.zip')\n os.system(' apt-get -y install unzip')\n os.system(' unzip Unihan.zip -d Unihan/')\n os.system(' rm Unihan.zip')\n\n data_path = 'Unihan/Unihan_RadicalStrokeCounts.txt'\n assert(os.path.isfile(data_path))\n\n return data_path", "def download():\r\n reader = GSODDataReader()\r\n year_list = range(2001, 2012)\r\n austin = reader.collect_data(year_list, exact_station=True,\r\n station_name='AUSTIN CAMP MABRY', state='TX', country='US')\r\n houston = reader.collect_data(year_list, exact_station=True,\r\n station_name='HOUSTON/D.W. 
HOOKS', state='TX', country='US')\r\n new_york = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEW YORK/LA GUARDIA', state='NY', country='US')\r\n newark = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEWARK INTL AIRPORT', state='NJ', country='US')\r\n punta_arenas = reader.collect_data(year_list, exact_station=True,\r\n station_name='PUNTA ARENAS', country='CH')\r\n wellington = reader.collect_data(year_list, exact_station=True,\r\n station_name='WELLINGTON AIRPORT', country='NZ')\r\n store = HDFStore('weather.h5')\r\n store['austin'] = austin\r\n store['houston'] = houston\r\n store['nyc'] = new_york\r\n store['newark'] = newark\r\n store['punta_arenas'] = punta_arenas\r\n store['wellington'] = wellington\r\n store.close()", "def download_and_extract(down_dir=download_dir, url=tuda_url):\n\n wget.download(url, down_dir) \n tar_filepath = os.path.join(down_dir, \"german-speechdata-package-v2.tar.gz\")\n #with tarfile.open(tar_filepath, \"r\") as tar:\n # tar.extractall(down_dir)", "def download_data(origin_time, net, sta, loc, chan):\n \n dataDir_get = '/import/netapp-m-02-bay200/mseed_online/archive/'\n \n fileName = \".\".join((net, sta, \".\" + chan + \".D\",\n origin_time.strftime(\"%Y.%j\")))\n filePath = os.path.join(dataDir_get, origin_time.strftime(\"%Y\"),\n net, sta, chan + '.D', fileName)\n o_time2 = origin_time + 86400\n fileName2 = \".\".join((net, sta, \".\" + chan + \".D\",\n o_time2.strftime(\"%Y.%j\")))\n filePath2 = os.path.join(dataDir_get, o_time2.strftime(\"%Y\"),\n net, sta, chan + '.D', fileName2)\n\n if os.path.isfile(filePath):\n if origin_time.hour > 21:\n st = Stream()\n st.extend(read(filePath, starttime = origin_time - 180,\n endtime = origin_time + 3 * 3600))\n st.extend(read(filePath2, \n starttime = UTCDateTime(o_time2.year, o_time2.month, \n o_time2.day, 0, 0),\n endtime = origin_time + 3 * 3600))\n st.merge(method=-1)\n else:\n st = read(filePath, starttime = origin_time - 180,\n endtime = origin_time + 3 * 3600)\n else:\n print \"++++ cannot find the following file: \\n %s \\n++++\" % filePath\n\n if not st:\n raise RotationalProcessingException('Data not available for this'\n ' event...')\n st.trim(starttime=origin_time-180, endtime=origin_time+3*3600)\n\n print 'Download of', st[0].stats.station, st[0].stats.channel, \\\n 'data successful!'\n\n return st", "def download_and_install(self, version=\"latest\", os_name=None, bitness=None, show_progress_bar=True):\n filename_with_path = self.download(version,\n os_name=os_name,\n bitness=bitness,\n show_progress_bar=show_progress_bar)\n filename = os.path.split(filename_with_path)[1]\n if filename.lower().endswith(\".tar.gz\"):\n extract_dir = os.path.join(self.get_download_path(version), filename[:-7])\n elif filename.lower().endswith(\".zip\"):\n extract_dir = os.path.join(self.get_download_path(version), filename[:-4])\n else:\n error_message = \"Unknown archive format: {0}\".format(filename)\n logger.error(error_message)\n raise RuntimeError(error_message)\n if not os.path.isdir(extract_dir):\n os.makedirs(extract_dir)\n logger.debug(\"Created directory: {0}\".format(extract_dir))\n if filename.lower().endswith(\".tar.gz\"):\n with tarfile.open(os.path.join(self.get_download_path(version), filename), mode=\"r:*\") as tar:\n tar.extractall(extract_dir)\n logger.debug(\"Extracted files: {0}\".format(\", \".join(tar.getnames())))\n elif filename.lower().endswith(\".zip\"):\n with zipfile.ZipFile(os.path.join(self.get_download_path(version), filename), 
mode=\"r\") as driver_zipfile:\n driver_zipfile.extractall(extract_dir)\n driver_filename = self.get_driver_filename(os_name=os_name)\n for root, dirs, files in os.walk(extract_dir):\n for curr_file in files:\n if curr_file == driver_filename:\n actual_driver_filename = os.path.join(root, curr_file)\n break\n if os_name is None:\n os_name = platform.system()\n if os_name in ['Darwin', 'Linux']:\n symlink_src = actual_driver_filename\n symlink_target = os.path.join(self.link_path, driver_filename)\n if os.path.islink(symlink_target):\n if os.path.samefile(symlink_src, symlink_target):\n logger.info(\"Symlink already exists: {0} -> {1}\".format(symlink_target, symlink_src))\n return tuple([symlink_src, symlink_target])\n else:\n logger.warning(\"Symlink {0} already exists and will be overwritten.\".format(symlink_target))\n os.unlink(symlink_target)\n os.symlink(symlink_src, symlink_target)\n logger.info(\"Created symlink: {0} -> {1}\".format(symlink_target, symlink_src))\n st = os.stat(symlink_src)\n os.chmod(symlink_src, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)\n return tuple([symlink_src, symlink_target])\n elif os_name == \"Windows\":\n src_file = actual_driver_filename\n dest_file = os.path.join(self.link_path, driver_filename)\n if os.path.isfile(dest_file):\n logger.info(\"File {0} already exists and will be overwritten.\".format(dest_file))\n shutil.copy2(src_file, dest_file)\n return tuple([src_file, dest_file])", "def download_data():\r\n print('Downloading cifar-10 data...')\r\n request.urlretrieve(dataurl)\r\n print('Done')\r\n print('Please unzip files. command is:')\r\n print('gzip -d cifar-10-python.tar.gz')\r\n print('tar -xf cifar-10-python.tar')\r\n exit()", "def fetch_taiwan_ntu_dsi():\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n uraw = 'http://dl.dropbox.com/u/2481924/taiwan_ntu_dsi.nii.gz'\n ubval = 'http://dl.dropbox.com/u/2481924/tawian_ntu_dsi.bval'\n ubvec = 'http://dl.dropbox.com/u/2481924/taiwan_ntu_dsi.bvec'\n ureadme = 'http://dl.dropbox.com/u/2481924/license_taiwan_ntu_dsi.txt'\n folder = pjoin(dipy_home, 'taiwan_ntu_dsi')\n\n md5_list = ['950408c0980a7154cb188666a885a91f', # data\n '602e5cb5fad2e7163e8025011d8a6755', # bval\n 'a95eb1be44748c20214dc7aa654f9e6b', # bvec\n '7fa1d5e272533e832cc7453eeba23f44'] # license\n\n url_list = [uraw, ubval, ubvec, ureadme]\n fname_list = ['DSI203.nii.gz', 'DSI203.bval', 'DSI203.bvec', 'DSI203_license.txt']\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading raw DSI data (91MB)...')\n\n for i in range(len(md5_list)):\n _get_file_data(pjoin(folder, fname_list[i]), url_list[i])\n check_md5(pjoin(folder, fname_list[i]), md5_list[i])\n\n print('Done.')\n print('Files copied in folder %s' % folder)\n print('See DSI203_license.txt for LICENSE.')\n print('For the complete datasets please visit :')\n print('http://dsi-studio.labsolver.org')\n\n else:\n print('Dataset is already in place. 
If you want to fetch it again, please first remove the folder %s ' % folder)", "def maybe_download():\n\n print(\"Downloading Inception 5h Model ...\")\n download.maybe_download_and_extract(url=data_url, download_dir=data_dir)", "def download():\n try:\n response = send_from_directory(\n app.config.get(\"DATA_DIR\"), \"whiteboard.zip\", as_attachment=True\n )\n\n # change headers to stop browser from delivering cached version\n response.headers[\"Last-Modified\"] = datetime.now()\n response.headers[\n \"Cache-Control\"\n ] = \"no-store, no-cache, must-revalidate, post-check=0, pre-check=0, max-age=0\"\n response.headers[\"Pragma\"] = \"no-cache\"\n response.headers[\"Expires\"] = \"-1\"\n\n return response\n\n except:\n return traceback.format_exc()", "def download_dataset(dataset):\n\n if dataset not in URLS:\n print(f\"unknown dataset {dataset}\")\n sys.exit(0)\n\n filename = f'{dataset}.tar.gz'\n url = URLS[dataset]\n\n if not os.path.exists(filename):\n print(f'downloading dataset \"{dataset}\"')\n os.system(f'curl \"{url}\" -o {filename}')\n else:\n print(f'zipfile \"{filename}\" already exists, remove it if you want to re-download.')\n\n if not os.path.exists(dataset):\n print(f'extracting \"{filename}\"')\n os.system(f'tar -xvf {filename}')\n else:\n print(f'folder \"{dataset}\" already exists, remove it if you want to re-create.')\n\n image_chips = f'{dataset}/image-chips'\n label_chips = f'{dataset}/label-chips'\n if not os.path.exists(image_chips) and not os.path.exists(label_chips):\n print(\"creating chips\")\n libs.images2chips.run(dataset)\n else:\n print(f'chip folders \"{image_chips}\" and \"{label_chips}\" already exist, remove them to recreate chips.')", "def download_dailydialog(daily_raw_fname: str, data_path: str):\n wget.download(daily_raw_fname, data_path)\n # Manually unzip the train/dev/test files", "def download(self, version_build=None):\n if version_build is not None:\n self.version_build = version_build\n # Skip download if packages is already downloaded\n if has_dot_net4_dot_5(self.version_build):\n expected_binary = \"{}/couchbase-lite-net-mono-{}-liteserv/net45/LiteServ.exe\".format(BINARY_DIR, self.version_build)\n else:\n expected_binary = \"{}/couchbase-lite-net-mono-{}-liteserv/LiteServ.exe\".format(BINARY_DIR, self.version_build)\n\n if os.path.isfile(expected_binary):\n log_info(\"Package already downloaded: {}\".format(expected_binary))\n return\n\n version, build = version_and_build(self.version_build)\n download_url = \"{}/couchbase-lite-net/{}/{}/LiteServ.zip\".format(LATEST_BUILDS, version, build)\n\n downloaded_package_zip_name = \"couchbase-lite-net-mono-{}-liteserv.zip\".format(self.version_build)\n log_info(\"Downloading {} -> {}/{}\".format(download_url, BINARY_DIR, downloaded_package_zip_name))\n resp = requests.get(download_url)\n resp.raise_for_status()\n with open(\"{}/{}\".format(BINARY_DIR, downloaded_package_zip_name), \"wb\") as f:\n f.write(resp.content)\n\n extracted_directory_name = downloaded_package_zip_name.replace(\".zip\", \"\")\n with ZipFile(\"{}/{}\".format(BINARY_DIR, downloaded_package_zip_name)) as zip_f:\n zip_f.extractall(\"{}/{}\".format(BINARY_DIR, extracted_directory_name))\n\n # Remove .zip\n os.remove(\"{}/{}\".format(BINARY_DIR, downloaded_package_zip_name))\n\n # HACK - To get around https://github.com/couchbase/couchbase-lite-net/issues/672\n # This is fixed 1.4+ but need to keep it around to allow running against older versions of LiteServ\n if version.startswith(\"1.2\") or version.startswith(\"1.3\"):\n 
shutil.rmtree(\"{}/{}/x64\".format(BINARY_DIR, extracted_directory_name))\n shutil.rmtree(\"{}/{}/x86\".format(BINARY_DIR, extracted_directory_name))", "def fetch_zenodo(self):\n\n # retrieve content from URL\n try:\n logging.info(f\"Downloading example data from {self.url}\")\n r = requests.get(self.url, stream=True)\n with io.BytesIO() as stream:\n with tqdm.wrapattr(\n stream,\n 'write',\n file=sys.stdout,\n miniters=1,\n desc=self.url,\n total=int(r.headers.get('content-length', 0))\n ) as file:\n for chunk in r.iter_content(chunk_size=4096):\n file.write(chunk)\n with zipfile.ZipFile(stream) as zipped:\n # extract each file in the zipped dir to the project\n for f in zipped.namelist():\n logging.info(\"Unzipped: {}\".format(os.path.join(self.destination, f)))\n zipped.extract(f, self.destination)\n\n logging.info(\"Download and install complete.\")\n\n self.close_logger()\n\n except requests.exceptions.MissingSchema:\n msg = f\"Unable to download data from {self.url}\"\n logging.exception(msg)\n self.close_logger()\n raise", "def fetch_nspl():\n nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\"\n\n if os.path.exists(nspl_target) is True:\n logging.info(\"Already collected NSPL\")\n else:\n os.makedirs(nspl_target, exist_ok=True)\n req = requests.get(nspl_url)\n zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target)", "def fetch_syn_data():\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n url = 'https://dl.dropboxusercontent.com/u/5918983/'\n t1 = url + 't1.nii.gz'\n b0 = url + 'b0.nii.gz'\n \n folder = pjoin(dipy_home, 'syn_test')\n\n md5_list = ['701bda02bb769655c7d4a9b1df2b73a6', # t1\n 'e4b741f0c77b6039e67abb2885c97a78'] # b0\n\n url_list = [t1, b0]\n fname_list = ['t1.nii.gz', 'b0.nii.gz']\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading t1 and b0 volumes from the same session (12MB)...')\n\n for i in range(len(md5_list)):\n _get_file_data(pjoin(folder, fname_list[i]), url_list[i])\n check_md5(pjoin(folder, fname_list[i]), md5_list[i])\n\n print('Done.')\n print('Files copied in folder %s' % folder)\n else:\n print('Dataset is already in place. 
If you want to fetch it again, please first remove the folder %s ' % folder)", "def download_and_extract(self, package_name):\n self.download(package_name)\n self.extract(package_name)", "def download(root: str) -> None:\n for ix in [1, 2]:\n fn = f\"lizard_images{ix}.zip\"\n url = f\"https://warwick.ac.uk/fac/cross_fac/tia/data/lizard/{fn}\"\n SimpleDownloader.download(url, root)\n\n url = \"https://warwick.ac.uk/fac/cross_fac/tia/data/lizard/lizard_labels.zip\"\n SimpleDownloader.download(url, root)\n LizardDataModule.extract_zips(root, rm=True)", "def download_stage0(self):\n rustc_channel = self.rustc_channel\n cargo_channel = self.cargo_channel\n\n if self.rustc().startswith(self.bin_root()) and \\\n (not os.path.exists(self.rustc()) or\n self.program_out_of_date(self.rustc_stamp())):\n if os.path.exists(self.bin_root()):\n shutil.rmtree(self.bin_root())\n filename = \"rust-std-{}-{}.tar.gz\".format(\n rustc_channel, self.build)\n pattern = \"rust-std-{}\".format(self.build)\n self._download_stage0_helper(filename, pattern)\n\n filename = \"rustc-{}-{}.tar.gz\".format(rustc_channel, self.build)\n self._download_stage0_helper(filename, \"rustc\")\n self.fix_executable(\"{}/bin/rustc\".format(self.bin_root()))\n self.fix_executable(\"{}/bin/rustdoc\".format(self.bin_root()))\n with output(self.rustc_stamp()) as rust_stamp:\n rust_stamp.write(self.date)\n\n # This is required so that we don't mix incompatible MinGW\n # libraries/binaries that are included in rust-std with\n # the system MinGW ones.\n if \"pc-windows-gnu\" in self.build:\n filename = \"rust-mingw-{}-{}.tar.gz\".format(\n rustc_channel, self.build)\n self._download_stage0_helper(filename, \"rust-mingw\")\n\n if self.cargo().startswith(self.bin_root()) and \\\n (not os.path.exists(self.cargo()) or\n self.program_out_of_date(self.cargo_stamp())):\n filename = \"cargo-{}-{}.tar.gz\".format(cargo_channel, self.build)\n self._download_stage0_helper(filename, \"cargo\")\n self.fix_executable(\"{}/bin/cargo\".format(self.bin_root()))\n with output(self.cargo_stamp()) as cargo_stamp:\n cargo_stamp.write(self.date)", "def download_and_unzip_data(\n url=\"https://storage.googleapis.com/simpeg/em_examples/tdem_groundedsource/tdem_groundedsource.tar\",\n):\n # download the data\n downloads = utils.download(url)\n\n # directory where the downloaded files are\n directory = downloads.split(\".\")[0]\n\n # unzip the tarfile\n tar = tarfile.open(downloads, \"r\")\n tar.extractall()\n tar.close()\n\n return downloads, directory", "def download():\n toydata = requests.get(DATA_URL).json()\n return toydata", "def download_and_extract(self, package_name):\n raise NotImplementedError('Implement this method.')", "def sdssDownload(band, location, size, path):\n\n debug = 0\n\n \n # Build the URL to get image metadata\n \n url = \"http://montage.ipac.caltech.edu/cgi-bin/ArchiveList/nph-archivelist?survey=SDSSDR7+\" \\\n + urllib.parse.quote_plus(band) \\\n + \"&location=\" \\\n + urllib.parse.quote_plus(location) \\\n + \"&size=\" \\\n + str(size) + \"&units=deg&mode=JSON\"\n \n if debug:\n print('DEBUG> url = \"' + url + '\"')\n \n \n # Retrieve the image metadata and convert\n # the JSON to a Python dictionary\n \n fjson = urllib.request.urlopen(url)\n \n data = json.load(fjson)\n \n if debug:\n print(\"DEBUG> data: \")\n print(data)\n \n nimages = len(data)\n \n if debug:\n print(\"DEBUG> nimages = \" + str(nimages))\n \n \n # We need to check the given directory, \n # whether it exists, whether it is writeable,\n # etc. 
We'll do it by trying to create it,\n # then trying to write the image data it.\n \n rtn = {} \n \n try:\n \n if not os.path.exists(path):\n os.makedirs(path)\n \n except:\n rtn['status'] = 1\n rtn['msg' ] = 'Cannot create output directory.'\n return rtn \n \n \n # Retrieve all the images into the data directory\n\n try:\n for index in range(0,nimages):\n \n datafile = path + \"/\" + data[index]['file']\n url = data[index]['url']\n archivefile = url\n archivefile = archivefile.replace('http://das.sdss.org','/home/idies/workspace/sdss_das/das2')\n\n if debug:\n print('copy file ' + archivefile + ' to ' + datafile)\n\n copyfile(archivefile, datafile)\n\n except:\n \n rtn['status'] = 1\n rtn['msg' ] = 'Error reading or writing data'\n return rtn\n \n \n # Success\n \n rtn['status'] = 0\n rtn['count' ] = nimages\n return rtn", "def download():\n env_banner()\n\n download_data = Download()\n download_data()\n click.echo('Download done.')", "def download():\n try:\n cli.run(\n [URL, '--output', TEMP_DIR],\n )\n except SystemExit:\n return None", "def _Download( self ):\n self._DownloadPipe += PackageUtil.ExecuteSimpleCommand( \"git\", [\"clone\", \"git@github.com:mastbaum/avalanche.git\", self.GetInstallPath()], None, os.getcwd() )\n return", "def download(self, tag):\n pkg = PACKAGES.get(tag, None)\n if not pkg:\n print \"Could not find package {}\".format(tag)\n sys.exit(1)\n\n pth = self._ensure_relative_directory('data/{}'.format(tag))\n pkg['download'](pth)", "def download_data(dataset: str, destination: str = None, manifest: str = pkg_resources.resource_filename('mosartwmpy', 'data_manifest.yaml')) -> None:\n\n data_dictionary = benedict(manifest, format='yaml')\n \n if not data_dictionary.get(dataset, None):\n raise Exception(f'Dataset \"{dataset}\" not found in the manifest ({manifest}).')\n \n get = InstallSupplement(\n url=data_dictionary.get(f'{dataset}.url'),\n destination=destination if destination is not None else Path(data_dictionary.get(f'{dataset}.destination', './'))\n )\n get.fetch_zenodo()", "def fetch_scil_b0():\n zipname = 'datasets_multi-site_all_companies'\n url = 'http://scil.dinf.usherbrooke.ca/wp-content/data/'\n uraw = url + zipname + '.zip'\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n folder = pjoin(dipy_home, zipname)\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading SCIL b=0 datasets from multiple sites and multiple companies (9.2MB)...')\n opener = urlopen(uraw)\n open(folder+'.zip', 'wb').write(opener.read())\n\n print('Unziping '+folder+'.zip ...')\n zip = zipfile.ZipFile(folder+'.zip', 'r')\n zip.extractall(dipy_home)\n\n print('Done.')\n print('Files copied in folder %s' % dipy_home)\n else:\n print('Dataset already in place. 
If you want to fetch again please first remove folder %s ' % dipy_home)", "def fetch_isbi2013_2shell():\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n url = 'https://dl.dropboxusercontent.com/u/2481924/isbi2013_merlet/'\n uraw = url + '2shells-1500-2500-N64-SNR-30.nii.gz'\n ubval = url + '2shells-1500-2500-N64.bval'\n ubvec = url + '2shells-1500-2500-N64.bvec'\n folder = pjoin(dipy_home, 'isbi2013')\n\n md5_list = ['42911a70f232321cf246315192d69c42', # data\n '90e8cf66e0f4d9737a3b3c0da24df5ea', # bval\n '4b7aa2757a1ccab140667b76e8075cb1'] # bvec\n\n url_list = [uraw, ubval, ubvec]\n fname_list = ['phantom64.nii.gz', 'phantom64.bval', 'phantom64.bvec']\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading raw 2-shell synthetic data (20MB)...')\n\n for i in range(len(md5_list)):\n _get_file_data(pjoin(folder, fname_list[i]), url_list[i])\n check_md5(pjoin(folder, fname_list[i]), md5_list[i])\n\n print('Done.')\n print('Files copied in folder %s' % folder)\n else:\n print('Dataset is already in place. If you want to fetch it again, please first remove the folder %s ' % folder)", "def _download_pkg(self, context):\n pkg_url = context.package.arg\n dst_file_path = context.package.full_path\n log.debug('downloading {0} to {1}'.format(pkg_url, dst_file_path))\n download_file(pkg_url, dst_file_path, context.package.get('timeout', 1), verify_https=context.get('verify_https', False))", "def download(self):\n if not os.path.exists(self.pkg_dir):\n os.makedirs(self.pkg_dir)\n\n url = self.metadata_pkg[\"url\"]\n\n # Download modelpkg only if not already downloaded.\n if os.path.exists(self.file_path):\n self.is_downloaded = True\n else:\n print(f\"Fetching {os.path.basename(self.file_path)} model package from {url} to {self.file_path}\", flush=True)\n r = requests.get(url, stream=True)\n with open(self.file_path, \"wb\") as file_out:\n for chunk in r.iter_content(chunk_size=2048):\n file_out.write(chunk)\n r.close()\n self.is_downloaded = True", "def main():\n get_obofoundry(force_download=True)", "def download_and_unpack_model(model_name, model_date='20200711'):\n\n # pretrained models\n pretrained_models_dir = os.path.join(ROOT_DIR, 'pre-trained-models')\n create_directory_if_not_exists(pretrained_models_dir)\n\n # pretrained model archives\n model_archive_dir = os.path.join(pretrained_models_dir, 'archives')\n create_directory_if_not_exists(model_archive_dir)\n\n # determine the (expected) path of the downloaded model\n model_file = f'{model_name}.tar.gz'\n model_archive_path = os.path.join(model_archive_dir, model_file)\n\n # if the tar.gz exists, try to unpack it\n retry = False\n if os.path.exists(model_archive_path):\n logger.info(f'Attempting to unpack {full_path(model_archive_path)}...')\n try:\n shutil.unpack_archive(model_archive_path, os.path.dirname(model_archive_dir))\n except EOFError:\n logger.info('Cannot unpack. Archive is corrupt. 
Attempting to retry...')\n retry = True\n\n # if the tar.gz does not exist or is corrupt (unpacking failed), download it, then unpack it\n if not os.path.exists(model_archive_path) or retry:\n base_url = 'http://download.tensorflow.org/models/object_detection/tf2/'\n url = base_url + model_date + '/' + model_file\n logger.info(f'Downloading from {url}...')\n\n # download file as stream\n response = requests.get(url, stream=True)\n with open(model_archive_path, 'wb') as handle:\n progress_bar = tqdm(unit=\"B\", total=int(response.headers['Content-Length']), unit_scale=True, unit_divisor=1024)\n for data in response.iter_content(chunk_size=8192):\n progress_bar.update(len(data))\n handle.write(data)\n progress_bar.close()\n\n # try to unpack tar.gz\n logger.info(f'Attempting to unpack {full_path(model_archive_path)}...')\n try:\n shutil.unpack_archive(model_archive_path, os.path.dirname(model_archive_dir))\n except EOFError:\n # give up if unpacking failed\n logger.info('Archive cannot be unpacked')\n sys.exit(1)\n\n logger.info('Successfully downloaded and unpacked model')", "def download_software(\n self,\n options=None,\n os_list=None,\n service_pack=None):\n\n # To set the default value if option is none\n if options is None:\n options = 'latest service pack'\n\n if DownloadOptions.LATEST_SERVICEPACK.value == options:\n self.update_option = {\n 'SPName': 'latest',\n 'IsSPName': False,\n 'isSpDelayedDays': True,\n 'isHotfixesDownload': False\n }\n elif DownloadOptions.LATEST_HOTFIXES.value == options:\n self.update_option = {\n 'SPName': 'hotfix',\n 'IsSPName': False,\n 'isSpDelayedDays': True,\n 'isHotfixesDownload': True\n }\n else:\n if service_pack is None:\n raise SDKException('Download', '102')\n\n self.update_option = {\n 'SPName': 'SP{0}'.format(service_pack),\n 'IsSPName': True,\n 'isSpDelayedDays': False,\n 'isHotfixesDownload': False\n }\n\n # to set the default value if os_list is none\n if os_list is None:\n os_list = ['Windows(X64)']\n\n request_json = {\n \"taskInfo\": {\n \"task\": {\n \"taskType\": 1,\n \"initiatedFrom\": 2,\n \"policyType\": 0,\n \"alert\": {\n \"alertName\": \"\"\n },\n \"taskFlags\": {\n \"isEdgeDrive\": False,\n \"disabled\": False\n }\n },\n \"subTasks\": [\n {\n \"subTaskOperation\": 1,\n \"subTask\": {\n \"subTaskType\": 1,\n \"operationType\": 4019\n },\n \"options\": {\n \"adminOpts\": {\n \"updateOption\": {\n \"syncUpdateCaches\": True,\n \"spName\": self.update_option['SPName'],\n \"isWindows\": True,\n \"majorOnly\": False,\n \"isSpName\": self.update_option['IsSPName'],\n \"copyUpdates\": True,\n \"isHotfixesDownload\": self.update_option['isHotfixesDownload'],\n \"isSpDelayedDays\": self.update_option['isSpDelayedDays'],\n \"copySoftwareAndUpdates\": False,\n \"isUnix\": True,\n \"unixDownloadPackages\": {\n \"linuxosX64\": 'Linux X86_64' in os_list,\n \"solarisosX64\": 'Solaris X86_64' in os_list,\n \"solsparcos\": 'Solaris-SPARC-X86' in os_list,\n \"freeBSDos\": 'Freebsd X86' in os_list,\n \"linuxos\": 'Linux X86' in os_list,\n \"linuxosPPC64le\": 'Linux PPC64le' in os_list,\n \"freeBSDosX64\": 'Freebsd X86_64' in os_list,\n \"solarisos\": 'Solaris SPARC' in os_list,\n \"linuxs390os\": 'Linux-S390-31' in os_list,\n \"darwinos\": 'macOS' in os_list,\n \"linuxosS390\": 'Linux-S390' in os_list,\n \"aixppcos\": 'Aix-PPC-32' in os_list,\n \"linuxosPPC64\": 'Linux-PPC-64' in os_list,\n \"aixos\": 'Aix PPC' in os_list,\n \"hpos\": 'HP IA64' in os_list,\n \"solos\": 'Solaris X86' in os_list\n },\n \"windowsDownloadPackages\": {\n \"windowsX64\": 
'Windows(X64)' in os_list,\n \"windows32\": 'Windows(32)' in os_list\n },\n \"clientAndClientGroups\": [\n {\n \"_type_\": 2\n }\n ],\n \"downloadUpdatesJobOptions\": {\n \"downloadSoftware\": True\n }\n }\n }\n }\n }\n ]\n }\n }\n\n flag, response = self._cvpysdkcommcell_object.make_request(\n 'POST', self._services['CREATE_TASK'], request_json\n )\n\n if flag:\n if response.json():\n if \"jobIds\" in response.json():\n return Job(self.commcell_object, response.json()['jobIds'][0])\n\n else:\n raise SDKException('Download', '101')\n\n else:\n raise SDKException('Response', '102')\n else:\n raise SDKException('Response', '101')", "def download_and_prepare(self):\n self._download_and_prepare()", "def _download(self) -> None:\n download_url(\n self.url,\n self.root,\n filename=self.data_dir,\n md5=self.md5 if self.checksum else None,\n )\n self._extract()", "def fetch_stanford_hardi():\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n url = 'https://stacks.stanford.edu/file/druid:yx282xq2090/'\n uraw = url + 'dwi.nii.gz'\n ubval = url + 'dwi.bvals'\n ubvec = url + 'dwi.bvecs'\n folder = pjoin(dipy_home, 'stanford_hardi')\n\n md5_list = ['0b18513b46132b4d1051ed3364f2acbc', # data\n '4e08ee9e2b1d2ec3fddb68c70ae23c36', # bval\n '4c63a586f29afc6a48a5809524a76cb4'] # bvec\n\n url_list = [uraw, ubval, ubvec]\n fname_list = ['HARDI150.nii.gz', 'HARDI150.bval', 'HARDI150.bvec']\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading raw HARDI data (87MB)...')\n\n for i in range(len(md5_list)):\n _get_file_data(pjoin(folder, fname_list[i]), url_list[i])\n check_md5(pjoin(folder, fname_list[i]), md5_list[i])\n\n print('Done.')\n print('Files copied in folder %s' % folder)\n else:\n print('Dataset is already in place. 
If you want to fetch it again, please first remove the folder %s ' % folder)", "def download():\n raise NotImplementedError", "def download_stewicombo_from_remote(name):\n meta = set_stewicombo_meta(name, category='')\n log.info(f'attempting download of {name} from {paths.remote_path}')\n download_from_remote(meta, paths)", "def maybe_download_and_extract():\n dest_directory = FLAGS.model_dir\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (\n filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n print()\n statinfo = os.stat(filepath)\n print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.')\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def _download(self):\n self._system.download_file(\"http://curl.haxx.se/download/\" + self._tar_name)", "def download_http(self, url):\n\n # Set things up.\n # ==============\n\n out = None\n headers = {}\n if (url.username is not None) and (url.password is not None):\n tmp = base64.b64encode(':'.join([url.username, url.password]))\n headers['Authorization'] = \"Basic %s\" % tmp\n\n\n # Toe the waters.\n # ===============\n # We start with an HTTP HEAD request to check the status.\n\n conn = httplib.HTTPConnection(url.netloc)\n conn.request(\"HEAD\", url.path, '', headers)\n r = conn.getresponse()\n conn.close()\n if self.verbose:\n print >> sys.stderr, url, r.status, ''\n\n\n # Bail.\n # =====\n # Short-cut when we just care whether it's a package.\n\n if url.path.endswith('/'):\n out = r.status == 200\n\n\n elif r.status == 200:\n\n # Wade in.\n # ========\n # If the status is positive we check to see if we've already\n # downloaded the latest copy.\n\n etag = r.getheader('etag', '')\n lm = r.getheader('last-modified', '')\n key = sha.new(str(url) + etag + lm).hexdigest()\n\n if not self.cachedir:\n raise ValueError(\"netimp.importer.cachedir not set\")\n if not os.path.isdir(self.cachedir):\n raise IOError( \"netimp.importer.cachedir not found \"\n + \"(%s)\" % self.cachedir\n )\n\n path = join(self.cachedir, key)\n if os.path.isfile(path):\n out = open(path, 'rb')\n else:\n\n # Dive in!\n # ========\n # We don't have this module locally yet: download it for real.\n\n conn = httplib.HTTPConnection(url.netloc)\n conn.request(\"GET\", url.path, '', headers)\n r = conn.getresponse()\n if r.status == 200: # just in case!\n fp = open(path, 'w+b')\n fp.write(r.read())\n fp.flush()\n fp.close()\n out = open(path, 'rb')\n conn.close()\n\n return out", "def do_pack():\n try:\n if not os.path.exists(\"versions\"):\n local(\"mkdir versions\")\n date = datetime.now()\n date = date.strftime(\"%Y%m%d%H%M%S\")\n new_versions = \"versions/web_static_{}.tgz\".format(date)\n local(\"tar -cvzf {} web_static\".format(new_versions))\n return new_versions\n except:\n return None", "def download_data():\n urllib.request.urlretrieve('http://cs.iit.edu/~culotta/cs579/a1/edges.txt.gz', 'edges.txt.gz')", "def download(self):\n opsys, machine = get_platform()\n _platform = f\"{opsys}_{machine}\"\n\n plugin_dir = f\"{self._temp_dir}/terraform-plugins\"\n\n if not os.path.isdir(plugin_dir):\n os.mkdir(plugin_dir)\n for name, details in self._plugins.items():\n uri = get_url(name, details)\n file_name = 
uri.split(\"/\")[-1]\n\n click.secho(\n f\"getting plugin: {name} version {details['version']} from {uri}\",\n fg=\"yellow\",\n )\n\n with urllib.request.urlopen(uri) as response, open(\n f\"{plugin_dir}/{file_name}\", \"wb\"\n ) as plug_file:\n shutil.copyfileobj(response, plug_file)\n with zipfile.ZipFile(f\"{plugin_dir}/{file_name}\") as zip_file:\n zip_file.extractall(f\"{plugin_dir}/{_platform}\")\n os.remove(f\"{plugin_dir}/{file_name}\")\n\n files = glob.glob(f\"{plugin_dir}/{_platform}/terraform-provider*\")\n for afile in files:\n os.chmod(afile, 0o755)\n filename = os.path.basename(afile)\n if self._tf_version_major >= 13:\n source = PluginSource(name, details)\n host_dir = os.path.join(plugin_dir, source.host)\n namespace_dir = os.path.join(host_dir, source.namespace)\n provider_dir = os.path.join(namespace_dir, name)\n version_dir = os.path.join(provider_dir, details[\"version\"])\n platform_dir = os.path.join(version_dir, _platform)\n os.makedirs(platform_dir, exist_ok=True)\n os.rename(afile, os.path.join(platform_dir, filename))\n else:\n os.rename(afile, f\"{plugin_dir}/{filename}\")\n\n click.secho(f\"plugin installed to: {plugin_dir}/{_platform}/\", fg=\"yellow\")", "def fetch(self, url) -> bytes:\n buffer = self.download(url)\n zfs = ZipFileSystem(buffer, \"r\")\n return zfs.open(zfs.glob(\"*\")[0]).read()", "def download_file(self, packageName):\n detail = self.details(packageName)\n price = detail.docV2.offer[0].formattedAmount.lower()\n is_purchased = False\n if price != 'free':\n is_purchased = True\n version_code = detail.docV2.details.appDetails.versionCode\n offer_type = detail.docV2.offer[0].offerType\n data = self.download(packageName, version_code, offer_type,\n is_purchased=is_purchased)\n\n pkg_filename = \"{}.pkg\".format(packageName)\n zf = zipfile.ZipFile(pkg_filename, \"w\")\n zf.writestr(data['apk']['filename'], data['apk']['data'])\n\n for obb in data['obb']:\n zf.writestr(obb['filename'], obb['data'])\n\n zf.close()\n return pkg_filename\n print \"pkg file {} downloaded.\".format(pkg_filename)", "def download_data():\n\n if not os.path.exists(zipfile_path):\n print(f'Downloading {config.download_url} to {zipfile_path}')\n urlretrieve(config.download_url, zipfile_path)\n print(f'Successfully downloaded {zipfile_path}')\n\n zip_ref = ZipFile(zipfile_path, 'r')\n zip_ref.extractall(config.raw_data_dir)\n zip_ref.close()\n\n os.rename(f\"{config.raw_data_dir}/cornell movie-dialogs corpus\", extracted_dir)", "def download(self):\n if not self.url:\n raise RuntimeError(self.tips)\n\n download_file_name = os.path.join(\n self.raw_path, os.path.splitext(os.path.basename(self.url))[0]\n )\n file_format = self.url.split(\".\")[-1]\n if \"amazon\" in self.url:\n raw_file_path = os.path.join(\n self.raw_path, f\"{self.dataset_name}.json.{file_format}\"\n )\n else:\n raw_file_path = os.path.join(\n self.raw_path, f\"{self.dataset_name}.{file_format}\"\n )\n if \"1drv.ms\" in self.url:\n file_format = \"zip\"\n raw_file_path = os.path.join(\n self.raw_path, f\"{self.dataset_name}.{file_format}\"\n )\n if not os.path.exists(raw_file_path):\n print(f\"download_file: url: {self.url}, raw_file_path: {raw_file_path}\")\n download_file(self.url, raw_file_path)\n if \"amazon\" in raw_file_path:\n # amazon dataset do not unzip\n print(\"amazon dataset do not decompress\")\n return\n elif file_format == \"gz\":\n file_name = raw_file_path.replace(\".gz\", \"\")\n with gzip.open(raw_file_path, \"rb\") as fin:\n with open(file_name, \"wb\") as fout:\n shutil.copyfileobj(fin, 
fout)\n else:\n shutil.unpack_archive(\n raw_file_path, self.raw_path, format=get_format(file_format)\n )\n\n if not os.path.exists(download_file_name):\n return\n elif os.path.isdir(download_file_name):\n os.rename(\n download_file_name, os.path.join(self.raw_path, self.dataset_name)\n )\n else:\n os.rename(\n download_file_name,\n os.path.join(\n self.raw_path,\n f'{self.dataset_name}.{download_file_name.split(\".\")[-1]}',\n ),\n )", "def maybe_download_and_extract():\n dest_directory = FLAGS.model_dir\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (\n filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath,\n reporthook=_progress)\n print()\n statinfo = os.stat(filepath)\n print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.')\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def maybe_download_and_extract():\n dest_directory = FLAGS.model_dir\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (\n filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n print()\n statinfo = os.stat(filepath)\n print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def manually_download_MNIST(DATASET_DIR):\n\n output_path = os.path.join(DATASET_DIR, \"MNIST.zip\")\n if not os.path.exists(DATASET_DIR):\n os.mkdir(DATASET_DIR)\n url = \"https://github.com/vandedok/IIC_tutorial/releases/download/v0.2/MNIST.zip\"\n print(\"Downloading MNIST...\", end=\" \")\n urllib.request.urlretrieve(url, output_path)\n print(\"Done!\")\n\n with zipfile.ZipFile(output_path, \"r\") as zip_ref:\n zip_ref.extractall(DATASET_DIR)", "def download(data_root, version):\n if version not in GroceriesReal.GROCERIES_REAL_DATASET_TABLES.keys():\n raise ValueError(\n f\"A valid dataset version is required. Available versions are:\"\n f\"{GroceriesReal.GROCERIES_REAL_DATASET_TABLES.keys()}\"\n )\n dest_path = os.path.join(\n data_root, GroceriesReal.LOCAL_PATH, f\"{version}.zip\"\n )\n expected_checksum = GroceriesReal.GROCERIES_REAL_DATASET_TABLES[\n version\n ].checksum\n extract_folder = os.path.join(data_root, GroceriesReal.LOCAL_PATH)\n if os.path.exists(dest_path):\n logger.info(\"The dataset file exists. Skip download.\")\n try:\n validate_checksum(dest_path, expected_checksum)\n except ChecksumError:\n logger.info(\n \"The checksum of the previous dataset mismatches. 
\"\n \"Delete the previously downloaded dataset.\"\n )\n os.remove(dest_path)\n if not os.path.exists(dest_path):\n source_uri = GroceriesReal.GROCERIES_REAL_DATASET_TABLES[\n version\n ].source_uri\n GroceriesReal._download_http(source_uri, dest_path, version)\n GroceriesReal._extract_file(dest_path, extract_folder)", "def _download_and_extract_popt_devel(self):\n raise NotImplementedError('Implement this method.')", "def unzip() -> None:\n logger = logging.getLogger(__name__)\n logger.info('Download the dataset')\n\n # define the destination\n destination = project_dir / 'data' / 'raw'\n\n # extract zip\n zip_file = ZipFile(destination / \"original.zip\")\n zip_file.extractall(destination)", "def download_goes_hotspot_characterization(folder, start, end, satellite=\"G17\", full_disk=False):\n if full_disk:\n product = 'ABI-L2-FDCF'\n else:\n product = 'ABI-L2-FDCC'\n \n return download_goes_data(folder, start, end, product, satellite)", "def download_demo_files():\n\n master_zip_url = 'https://github.com/%s/archive/%s.zip' % \\\n (sample_data_gh_repo, sample_data_gh_commit)\n ofile = os.path.join(cache_dir,\n 'salem-sample-data-%s.zip' % sample_data_gh_commit)\n odir = os.path.join(cache_dir)\n\n # download only if necessary\n if not os.path.exists(ofile):\n print('Downloading salem-sample-data...')\n _urlretrieve(master_zip_url, ofile)\n\n # Trying to make the download more robust\n try:\n with zipfile.ZipFile(ofile) as zf:\n zf.extractall(odir)\n except zipfile.BadZipfile:\n # try another time\n if os.path.exists(ofile):\n os.remove(ofile)\n _urlretrieve(master_zip_url, ofile)\n with zipfile.ZipFile(ofile) as zf:\n zf.extractall(odir)\n\n # list of files for output\n out = dict()\n for root, directories, filenames in os.walk(sample_data_dir):\n for filename in filenames:\n out[filename] = os.path.join(root, filename)\n\n return out", "def _download_spin_data(system_name, boundary_condition, nspins, data_dir):\n # Set default storage location.\n if data_dir is None:\n data_dir = os.path.expanduser(\"~/tfq-datasets\")\n\n # Use Keras file downloader.\n file_path = tf.keras.utils.get_file(\n fname=system_name + '.zip',\n cache_dir=data_dir,\n cache_subdir='spin_systems',\n origin=\"https://storage.googleapis.com/download\"\n \".tensorflow.org/data/quantum/\"\n \"spin_systems/\" + system_name + \".zip \",\n extract=True)\n\n file_path = os.path.splitext(file_path)[0]\n\n data_path = os.path.join(file_path, boundary_condition, str(nspins))\n return data_path", "def dascasi_download():\n p = argparse.ArgumentParser(description=\"download DASC all-sky camera data\")\n p.add_argument(\"site\", choices=[\"EAA\", \"FYU\", \"KAK\", \"PKR\", \"TOO\", \"VEE\"])\n p.add_argument(\n \"startend\", help=\"start/end times UTC e.g. 
2012-11-03T06:23 2012-11-03T07\", nargs=2\n )\n p.add_argument(\"odir\", help=\"directory to write downloaded FITS to\")\n p.add_argument(\"-w\", \"--wavelen\", help=\"request specific wavelength(s)\", nargs=\"+\")\n p.add_argument(\"-host\", default=\"ftp://optics.gi.alaska.edu\")\n p = p.parse_args()\n\n # host = \"ftp://mirrors.arsc.edu/AMISR/PKR/DASC/RAW/\"\n download(p.startend, p.site, p.odir, p.host, p.wavelen)", "def download_package(package, filepath, config):\n pkgtype = package['type']\n\n if pkgtype in ['dsc']:\n result = download_srcpackage(package, filepath, config)\n else:\n result = download_binpackage(package, filepath, config)\n\n return result", "def download_mission(self):\n cmds = self.vehicle.commands\n cmds.download()\n # Wait until download is complete.\n cmds.wait_valid()", "def _download_sst(cls):\n path = Path(cls.dataset_path)\n if path.exists():\n return\n\n path.mkdir(parents=True, exist_ok=True)\n generic_download(\n url=\"https://s3.amazonaws.com/enso-data/SST-binary.csv\",\n text_column=\"Text\",\n target_column=\"Target\",\n filename=SST_FILENAME\n )", "def _download_chieffi04():\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FApJ%2F608%2F405'\n import urllib\n print('Downloading Chieffi 04 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()", "def _download_chieffi04():\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FApJ%2F608%2F405'\n import urllib\n print('Downloading Chieffi 04 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()", "def goes_download_wrapper_satpy(smoke_plume_data, temp_data_path, save_data_path, extra_desc, bounds, bands, curr_dir_files=None):\n if curr_dir_files is None:\n curr_dir_files = [file[11:-5] for file in os.listdir('../data/img/') if (file[0:4] == 'true')]\n\n # create s3 client\n config = Config(retries=dict(max_attempts=20))\n s3 = boto3.client('s3', config=config, region_name='us-east-1')\n\n for row in tqdm(smoke_plume_data.itertuples()):\n \n # set file name\n file_name = '{0}_doy{5}_s{1}{2}{3}{4}_e{1}{2}{3}'.format('G16', row.year, str(row.month).zfill(2),\n str(row.day).zfill(2),\n row.conus_time, str(row.doy).zfill(3))\n\n # get filenames for the current files already in the directory\n file_seen = any(file_name in files for files in curr_dir_files)\n\n # only try to download the files that have not been downloaded yet\n if file_seen:\n print(\"skipping {} | doy: {} | conus_time: {}\".format(file_name,str(row.doy), str(row.conus_time)))\n continue\n\n else:\n print(\"doy: {} | conus_time: {}\".format(str(row.doy), str(row.conus_time)))\n for band in bands:\n\n # download goes imagery, crop, and save in temporary directory\n aws_goes_download_satpy(year=row.year, doy=row.doy, hour=row.conus_time[0:2], minute=row.conus_time[2:4], \n band=band, view=\"C\", extra_desc=extra_desc, bounds=bounds, \n save_data_path=temp_data_path, client=s3)\n\n try:\n # process using satpy to generate true color image\n dp.generate_satpy_nc_tiff(bounds=bounds, save_nc_path=save_data_path, base_dir=temp_data_path,\n #width=4800, height=2700, desc='',\n width=1200, height=1200, desc='',\n 
proj_desc='Geodetic Projection', datasets=['true_color', 'C07', 'C11'])\n\n except ValueError:\n print('No files for SatPy to open')\n\n except KeyError:\n print('Missing files; moving to next file')\n\n curr_dir_files.append(file_name)\n\n # delete original .nc files\n for file in glob.glob(temp_data_path+'*.nc'):\n os.remove(file)", "def _download(data_folder): # pragma: no cover\n\n logger.info(f\"Downloading {SOURCE_URL}.\")\n\n with urlopen(SOURCE_URL) as zipresp:\n with zipfile.ZipFile(io.BytesIO(zipresp.read())) as zfile:\n zfile.extractall(data_folder)", "def _fetch_full():\n resource(\n target=data_path(\"eeg\", \"eeg_full.tar\"),\n url=\"https://kdd.ics.uci.edu/databases/eeg/eeg_full.tar\",\n )\n dependency(\n target=data_path(\"eeg\", \"full\"),\n source=data_path(\"eeg\", \"eeg_full.tar\"),\n commands=[\n \"mkdir full\",\n \"tar xf eeg_full.tar -C full\",\n \"ls full | grep gz$ | xargs -I {} tar xzf full/{} -C full\",\n \"ls full | grep gz$ | xargs -I {} rm full/{}\",\n \"find full | grep gz$ | xargs gunzip\",\n ],\n )", "def _get_nessus_file(online=True, save=False):\n ns_download = downloader.NessusDownloader()\n if online:\n if save:\n file_path, file_data = ns_download.download()\n else:\n file_name, file_data = ns_download.open()\n # either download (with local saving) or just \"open\" (in memory) - we should check the file_data\n if file_data is None:\n LOG.error(f'Failed to download file from {ns_download.url}')\n exit(consts.ERROR.DOWNLOAD_ERROR)\n stream = io.BytesIO(file_data)\n else:\n # \"offline\" - use a mock file for testing\n stream = io.open(\"../app/mock/nessus.json.zip\", \"rb\")\n\n return stream", "def fetch_housing_data(housing_url=HOUSING_URL, housing_path=HOUSING_PATH):\n logging.info(\"Fetch housing data.....\")\n os.makedirs(housing_path, exist_ok=True)\n tgz_path = os.path.join(housing_path, \"housing.tgz\")\n urllib.request.urlretrieve(housing_url, tgz_path)\n housing_tgz = tarfile.open(tgz_path)\n housing_tgz.extractall(path=housing_path)\n housing_tgz.close()", "def download_data():\n url = 'https://www.dropbox.com/s/xk4glpk61q3qrg2/imdb.tgz?dl=1'\n urllib.request.urlretrieve(url, 'imdb.tgz')\n tar = tarfile.open(\"imdb.tgz\")\n tar.extractall()\n tar.close()", "def _download_to_flc(self):\n self.communicator.download_to_flc()", "def download_data():\n url = 'https://www.dropbox.com/s/8oehplrobcgi9cq/imdb.tgz?dl=1'\n urllib.request.urlretrieve(url, 'imdb.tgz')\n tar = tarfile.open(\"imdb.tgz\")\n tar.extractall()\n tar.close()", "def maybe_download_and_extract():\n dest_directory = FLAGS['model_dir']\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' %\n (filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n print()\n statinfo = os.stat(filepath)\n print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')\n\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def UpgradeDUT(self, build):\n elapsed = None\n factory_id = False\n upgrade_id = True\n list_of_connected_aps = list()\n build_stream = build.build_stream.name\n build_version = str(build_stream.split(\"_\")[1])\n bno = build.number\n zd_model_num = build_stream.split(\"_\")[0]\n base_build_project_num = 
build_stream.split(\"_\")[1]\n mesh_enabled = self.is_mesh_enabled_in_testbed()\n ap_upgrade_timeout = 1500\n \n build_url = build.URL\n byte = None\n mb = None\n tb_config = self.config\n \n #get the switch component object\n if 'L3Switch' in self.components.keys():\n l3switch = self.components['L3Switch']\n \n #because upgrade to the base build, waiting time too long, \n #the station sockect connection break, so quit the station at first,\n #after the upgrage zd, recreate the station object:\n for station in self.components['Station']:\n station.__del__()\n del(self.components['Station'])\n \n #set the image file name.\n if server_url_map.SAVE_REPOSITORY.has_key(zd_model_num):\n filename = zd_model_num + \"_\" + base_build_project_num + \".\" + str(bno) + \".tar.gz\"\n if os.path.isdir(server_url_map.SAVE_REPOSITORY[zd_model_num]['share_folder_path']):\n full_fname = server_url_map.SAVE_REPOSITORY[zd_model_num]['share_folder_path'] + filename\n else:\n full_fname = server_url_map.SAVE_REPOSITORY[zd_model_num]['local_path'] + filename\n \n #if no the image file in the target folder,\n #the script will be downloaded it from the build server\n #if the image file is in the target folder,\n #the script will upgrade zd to the base build which is used the image. \n if os.path.isfile(full_fname):\n pass\n elif os.path.isdir(full_fname):\n logging.info(\"Please remove the folder of %s\" % filename)\n raise Exception(\"This is a folder, instead of a file.\")\n else:\n build_url = ih.get_build_url(build_stream, bno)\n if 'http' in build_url:\n if '.img' in build_url:\n filename = re.findall(r'^.*ZD\\d+\\w+/*(.*)', build_url)[0]\n if os.path.isdir(server_url_map.SAVE_REPOSITORY[zd_model_num]['share_folder_path']):\n full_fname = server_url_map.SAVE_REPOSITORY[zd_model_num]['share_folder_path'] + filename\n else:\n full_fname = server_url_map.SAVE_REPOSITORY[zd_model_num]['local_path'] + filename\n fin = ih.download_build_v2(build_url, full_fname)\n if fin:\n pass\n else:\n raise Exception(\"downloaded is not successufully.\")\n else:\n full_fname = build_url\n \n logging.info(\"Waiting all aps join in zd...\")\n if not self.dut.wait_aps_join_in_zd_with_the_expect_status(self.config['ap_mac_list'], self.config['ap_sym_dict']):\n logging.info(\"ap rejoin in zd failed, enable all aps's switch ports\")\n for ap_mac in self.config['ap_mac_to_port'].keys():\n l3switch.enable_interface(self.config['ap_mac_to_port'][ap_mac])\n \n (elapsed, factory_id) = self.dut.upgrade_sw(full_fname, False, True, build_version, False, mesh_enabled)\n \n if factory_id:\n logging.info(\"ZD be setted factory default, so enable all switch ports of the aps.\")\n for ap_mac in self.config['ap_mac_to_port'].keys():\n l3switch.enable_interface(self.config['ap_mac_to_port'][ap_mac])\n \n logging.info(\"Waiting 2 minutes, let ZD all service module enabled.\")\n time.sleep(120)\n\n logging.info(\"Waiting for APs to be upgraded and reconnect. This process takes some minutes. Please wait... \")\n ap_upgrade_start_time = time.time()\n list_of_connected_aps = list()\n for associated_ap in self.config['ap_mac_list']:\n while True:\n if (time.time() - ap_upgrade_start_time) > ap_upgrade_timeout:\n raise Exception(\"Error: AP upgrading failed. 
Timeout\")\n \n si_ap_info = self.dut._get_ap_info(associated_ap)\n status = si_ap_info['status']\n logging.info('ap %s status is %s'%(associated_ap, status))\n if status.lower().startswith(\"connected\"):\n list_of_connected_aps.append(si_ap_info)\n break\n \n return upgrade_id, factory_id, list_of_connected_aps", "def do_pack():\n now = datetime.now()\n file_name = \"web_static_{}{}{}{}{}{}.tgz\".format(\n now.year,\n now.month,\n now.day,\n now.hour,\n now.minute,\n now.second\n )\n try:\n local(\"sudo tar -cvzf {} ./web_static\".format(file_name))\n local(\"sudo mkdir -p versions\")\n local(\"sudo mv ./{} versions/\".format(file_name))\n except:\n return (None)\n return (\"versions/{}\".format(file_name))", "def _download_karakas():\n #url = 'http://zenodo.org/record/12800/files/dartmouth.h5'\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FMNRAS%2F403%2F1413'\n import urllib\n print('Downloading Karakas 2010 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()", "def _download_karakas():\n #url = 'http://zenodo.org/record/12800/files/dartmouth.h5'\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FMNRAS%2F403%2F1413'\n import urllib\n print('Downloading Karakas 2010 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()", "def repack_wheel(data: bytes):\n new_data = BytesIO()\n with ZipFile(BytesIO(data)) as existing_zip:\n with ZipFile(new_data, mode=\"w\") as new_zip:\n for zipinfo in existing_zip.infolist():\n if re.search(r\"pip-.+\\.dist-info/\", zipinfo.filename):\n continue\n new_zip.writestr(zipinfo, existing_zip.read(zipinfo))\n\n return new_data.getvalue()", "def download(self):\n \n if not os.path.exists(self.directory):\n os.mkdir(self.directory)\n if not os.path.exists(self.fullPath):\n os.mkdir(self.fullPath)\n \n dm = pymodis.downmodis.downModis(self.fullPath, self.password, self.username, self.url, self.tiles, self.path, self.dataset, \n self.today, self.enddate, jpg = False, debug = True, timeout = 30)\n dm.connect()\n self.filelist = dm.getListDays() \n self.observations = len(dm.getListDays()) \n \n if self.dataset != 'MOD13Q1.005':\n if self.observations % 2 != 0:\n raise IOError(\"The total number of observations through time must be an even number. Please add or remove an observation before or after %s\" % str(self.filelist[0]))\n \n dm.downloadsAllDay()\n logger.log('SUCCESS', 'Downloading is complete! 
%d HDF files of %s data for tiles %s were downloaded for the following days: %s' % (self.observations*len(self.tiles), str(self.dataset), str(self.tiles), str(self.filelist)))", "def download():\n basedir = os.path.dirname(os.path.dirname(__file__))\n print(basedir)\n datadir = os.path.join(basedir,\"data/NeonTreeEvaluation/\")\n print(\"Downloading data files to {}\".format(datadir)) \n eval_url = zenodo_url(concept_rec_id=\"3723356\", datadir=datadir)", "def _download_data(self):\n logger.info('Downloading ChemIDplus data...')\n outfile_path = self._src_data_dir / self._src_fname\n\n self._ftp_download(self._src_server,\n self._src_dir_path,\n self._src_data_dir,\n self._src_fname)\n\n parser = ET.iterparse(outfile_path, ('start', 'end'))\n date = next(parser)[1].attrib['date']\n version = date.replace('-', '')\n outfile_path.rename(self._src_data_dir / f'chemidplus_{version}.xml')\n logger.info('Finished downloading ChemIDplus data')", "def download_data(self):\n headers = {'User-Agent': 'Mozilla/5.0',}\n\n #Request for html data of url page\n r = requests.get(self.url, headers = headers, allow_redirects=True)\n soup = BeautifulSoup(r.text, \"html.parser\")\n\n #Checking if folder path exists, if not, creats it\n i=0\n while i<len(self.folder)-1:\n if self.folder[i] == '/':\n if not os.path.isdir(self.folder[:i]):\n os.mkdir(self.folder[:i])\n i+=1\n if i==len(self.folder)-1:\n if not os.path.isdir(self.folder):\n os.mkdir(self.folder)\n\n # if not os.path.isdir(self.folder):\n # os.mkdir(self.folder)\n\n #Gets every href to zip file with data\n entries = []\n for link in soup.find_all('a'):\n if re.search(\"^data/.*.zip\", link.get('href')):\n entries.append(link.get('href'))\n\n #Gets the newest dataset\n self.getCurrentData(entries)\n\n i=0\n #Saves each file in dataset\n for list in self.ListOfZipFiles:\n if not os.path.isfile(self.folder+list[4:]):\n r = requests.get(self.url+list)\n open(self.folder+list[4:], 'wb').write(r.content)\n #deletes prefix \"data/\"\n self.ListOfZipFiles[i] = list[4:]\n i+=1", "def download_source():\n \n #if os.path.exists(UNFCC_FILE): \n # os.rename(UNFCC_FILE,'old_'+UNFCC_FILE)\n #if os.path.exists(EBAL_FILE):\n # os.rename(EBAL_FILE,'old_'+EBAL_FILE)\n\n try:\n unsd = sdmx.Request('UNSD')\n sdmx.logger.setLevel(logging.INFO)\n \n logger.info('Loading UNFCC Data')\n resp_unfcc = unsd.data('DF_UNData_UNFCC')\n\n logger.info('Loading UN Energy Balance Data')\n resp_ebal = unsd.data('DF_UNData_EnergyBalance')\n except Exception as e:\n logger.error('Error!! Please look at SDMX logs to troubleshoot' + str(e))\n traceback.print_exc(file = sys.stdout)\n\n try:\n df_ebal = resp_ebal.to_pandas()\n df_unfcc = resp_unfcc.to_pandas()\n\n df_unfcc.reset_index().to_csv(UNFCC_FILE,index=False)\n logger.info('UNFCC Greenhouse Data stored as {}'.format(UNFCC_FILE))\n\n df_ebal.reset_index().to_csv(EBAL_FILE,index=False)\n logger.info('UN Energy Balance Data stored as {}'.format(EBAL_FILE))\n except Exception as e:\n logger.error('Error!! 
While saving data from SDMX to CSV ' + str(e))\n traceback.print_exc(file = sys.stdout)", "def fetch_sherbrooke_3shell():\n dipy_home = pjoin(os.path.expanduser('~'), '.dipy')\n url = 'https://dl.dropboxusercontent.com/u/2481924/sherbrooke_data/'\n uraw = url + '3shells-1000-2000-3500-N193.nii.gz'\n ubval = url + '3shells-1000-2000-3500-N193.bval'\n ubvec = url + '3shells-1000-2000-3500-N193.bvec'\n folder = pjoin(dipy_home, 'sherbrooke_3shell')\n\n md5_list = ['0b735e8f16695a37bfbd66aab136eb66', # data\n 'e9b9bb56252503ea49d31fb30a0ac637', # bval\n '0c83f7e8b917cd677ad58a078658ebb7'] # bvec\n\n url_list = [uraw, ubval, ubvec]\n fname_list = ['HARDI193.nii.gz', 'HARDI193.bval', 'HARDI193.bvec']\n\n if not os.path.exists(folder):\n print('Creating new directory %s' % folder)\n os.makedirs(folder)\n print('Downloading raw 3-shell data (184MB)...')\n\n for i in range(len(md5_list)):\n _get_file_data(pjoin(folder, fname_list[i]), url_list[i])\n check_md5(pjoin(folder, fname_list[i]), md5_list[i])\n\n print('Done.')\n print('Files copied in folder %s' % folder)\n else:\n print('Dataset is already in place. If you want to fetch it again, please first remove the folder %s ' % folder)", "def download_vctk(destination, tmp_dir=None, device=\"cpu\"):\r\n dataset_name = \"noisy-vctk-16k\"\r\n if tmp_dir is None:\r\n tmp_dir = tempfile.gettempdir()\r\n final_dir = os.path.join(tmp_dir, dataset_name)\r\n\r\n if not os.path.isdir(tmp_dir):\r\n os.mkdir(tmp_dir)\r\n\r\n if not os.path.isdir(final_dir):\r\n os.mkdir(final_dir)\r\n\r\n prefix = \"https://datashare.is.ed.ac.uk/bitstream/handle/10283/2791/\"\r\n noisy_vctk_urls = [\r\n prefix + \"clean_testset_wav.zip\",\r\n prefix + \"noisy_testset_wav.zip\",\r\n prefix + \"testset_txt.zip\",\r\n prefix + \"clean_trainset_28spk_wav.zip\",\r\n prefix + \"noisy_trainset_28spk_wav.zip\",\r\n prefix + \"trainset_28spk_txt.zip\",\r\n ]\r\n\r\n zip_files = []\r\n for url in noisy_vctk_urls:\r\n filename = os.path.join(tmp_dir, url.split(\"/\")[-1])\r\n zip_files.append(filename)\r\n if not os.path.isfile(filename):\r\n logger.info(\"Downloading \" + url)\r\n with urllib.request.urlopen(url) as response:\r\n with open(filename, \"wb\") as tmp_file:\r\n logger.info(\"... 
to \" + tmp_file.name)\r\n shutil.copyfileobj(response, tmp_file)\r\n\r\n # Unzip\r\n for zip_file in zip_files:\r\n logger.info(\"Unzipping \" + zip_file)\r\n shutil.unpack_archive(zip_file, tmp_dir, \"zip\")\r\n os.remove(zip_file)\r\n\r\n # Move transcripts to final dir\r\n shutil.move(os.path.join(tmp_dir, \"testset_txt\"), final_dir)\r\n shutil.move(os.path.join(tmp_dir, \"trainset_28spk_txt\"), final_dir)\r\n\r\n # Downsample\r\n dirs = [\r\n \"noisy_testset_wav\",\r\n \"clean_testset_wav\",\r\n \"noisy_trainset_28spk_wav\",\r\n \"clean_trainset_28spk_wav\",\r\n ]\r\n\r\n downsampler = Resample(orig_freq=48000, new_freq=16000)\r\n\r\n for directory in dirs:\r\n logger.info(\"Resampling \" + directory)\r\n dirname = os.path.join(tmp_dir, directory)\r\n\r\n # Make directory to store downsampled files\r\n dirname_16k = os.path.join(final_dir, directory + \"_16k\")\r\n if not os.path.isdir(dirname_16k):\r\n os.mkdir(dirname_16k)\r\n\r\n # Load files and downsample\r\n for filename in get_all_files(dirname, match_and=[\".wav\"]):\r\n signal, rate = torchaudio.load(filename)\r\n downsampled_signal = downsampler(signal.view(1, -1).to(device))\r\n\r\n # Save downsampled file\r\n torchaudio.save(\r\n os.path.join(dirname_16k, filename[-12:]),\r\n downsampled_signal[0].cpu(),\r\n sample_rate=16000,\r\n channels_first=False,\r\n )\r\n\r\n # Remove old file\r\n os.remove(filename)\r\n\r\n # Remove old directory\r\n os.rmdir(dirname)\r\n\r\n logger.info(\"Zipping \" + final_dir)\r\n final_zip = shutil.make_archive(\r\n base_name=final_dir,\r\n format=\"zip\",\r\n root_dir=os.path.dirname(final_dir),\r\n base_dir=os.path.basename(final_dir),\r\n )\r\n\r\n logger.info(f\"Moving {final_zip} to {destination}\")\r\n shutil.move(final_zip, os.path.join(destination, dataset_name + \".zip\"))", "def maybe_download_and_extract():\n dest_directory = MODEL_DIR\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (\n filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n print()\n statinfo = os.stat(filepath)\n print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def get_snodas_ds(dem_dt, code=1036):\n import tarfile\n import gzip\n snodas_ds = None\n snodas_url_str = None\n\n outdir = os.path.join(datadir, 'snodas')\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n\n #Note: unmasked products (beyond CONUS) are only available from 2010-present\n if dem_dt >= datetime(2003,9,30) and dem_dt < datetime(2010,1,1):\n snodas_url_str = 'ftp://sidads.colorado.edu/DATASETS/NOAA/G02158/masked/%Y/%m_%b/SNODAS_%Y%m%d.tar'\n tar_subfn_str_fmt = 'us_ssmv1%itS__T0001TTNATS%%Y%%m%%d05HP001.%s.gz'\n elif dem_dt >= datetime(2010,1,1):\n snodas_url_str = 'ftp://sidads.colorado.edu/DATASETS/NOAA/G02158/unmasked/%Y/%m_%b/SNODAS_unmasked_%Y%m%d.tar'\n tar_subfn_str_fmt = './zz_ssmv1%itS__T0001TTNATS%%Y%%m%%d05HP001.%s.gz'\n else:\n print(\"No SNODAS data available for input date\")\n\n if snodas_url_str is not None:\n snodas_url = dem_dt.strftime(snodas_url_str)\n snodas_tar_fn = iolib.getfile(snodas_url, outdir=outdir)\n print(\"Unpacking\")\n tar = tarfile.open(snodas_tar_fn)\n #gunzip to extract 
both dat and Hdr files, tar.gz\n for ext in ('dat', 'Hdr'):\n tar_subfn_str = tar_subfn_str_fmt % (code, ext)\n tar_subfn_gz = dem_dt.strftime(tar_subfn_str)\n tar_subfn = os.path.splitext(tar_subfn_gz)[0]\n print(tar_subfn)\n if outdir is not None:\n tar_subfn = os.path.join(outdir, tar_subfn)\n if not os.path.exists(tar_subfn):\n #Should be able to do this without writing intermediate gz to disk\n tar.extract(tar_subfn_gz)\n with gzip.open(tar_subfn_gz, 'rb') as f:\n outf = open(tar_subfn, 'wb')\n outf.write(f.read())\n outf.close()\n os.remove(tar_subfn_gz)\n\n #Need to delete 'Created by module comment' line from Hdr, can contain too many characters\n bad_str = 'Created by module comment'\n snodas_fn = tar_subfn\n f = open(snodas_fn)\n output = []\n for line in f:\n if not bad_str in line:\n output.append(line)\n f.close()\n f = open(snodas_fn, 'w')\n f.writelines(output)\n f.close()\n\n #Return GDAL dataset for extracted product\n snodas_ds = gdal.Open(snodas_fn)\n return snodas_ds", "def package(request, name):\n return HttpResponse(get_koji_download_url(name), mimetype='application/json')", "def package(request, name):\n return HttpResponse(get_koji_download_url(name), mimetype='application/json')", "def download_fabric_factory():\n local('hg clone http://bitbucket.org/yml/fabric_factory/')" ]
[ "0.76543707", "0.60858387", "0.6030473", "0.6029866", "0.5999589", "0.5853508", "0.57119447", "0.5664164", "0.5609213", "0.5573679", "0.55025905", "0.54998595", "0.54737514", "0.54679334", "0.5459293", "0.5448745", "0.5415353", "0.54141283", "0.5396013", "0.53918666", "0.53849125", "0.5373254", "0.5368611", "0.53562164", "0.5354774", "0.5354501", "0.53220093", "0.5297231", "0.5296136", "0.52907294", "0.5273362", "0.52632844", "0.52500844", "0.523791", "0.5234924", "0.5232899", "0.5211932", "0.5210898", "0.51992273", "0.51987344", "0.5191365", "0.51885164", "0.5167756", "0.51641196", "0.5160513", "0.51579916", "0.51502466", "0.51448435", "0.5141664", "0.51358676", "0.5134599", "0.5131375", "0.51307267", "0.5129356", "0.5125004", "0.5123534", "0.5119977", "0.51178044", "0.5114032", "0.5112258", "0.5103757", "0.50984997", "0.509495", "0.50886726", "0.5083764", "0.50799316", "0.5065957", "0.50655496", "0.50577706", "0.50513345", "0.50512785", "0.50446624", "0.50392425", "0.50392425", "0.50368977", "0.50362545", "0.50354224", "0.5031162", "0.5027416", "0.50267345", "0.5012284", "0.5007108", "0.50033045", "0.499772", "0.4995676", "0.49926543", "0.49926543", "0.49925795", "0.49905738", "0.4990518", "0.49862722", "0.49788675", "0.49653587", "0.49616328", "0.49484918", "0.49480817", "0.4943635", "0.49330804", "0.49330804", "0.4932988" ]
0.52912843
29
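For this record the rank field can be reproduced from the scores above: 29 of the negative_scores exceed document_score (0.52912843), which matches document_rank. A sketch of that inferred convention; this is a guess from the numbers in this record, not a documented rule, and it ignores ties:

def infer_rank(document_score, negative_scores):
    # Rank = number of negatives scoring strictly higher than the document.
    # Scores appear in the dump as strings, hence the float() conversion.
    return sum(float(s) > document_score for s in negative_scores)

# infer_rank(0.52912843, ["0.76543707", "0.60858387", ...]) -> 29 for this record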
Get all morbidities by war era name.
from flask import request

# BadRequestError and datasources_service are assumed to be supplied by the
# surrounding application; they are not standard Flask names.
def get_morbidities_for_war_era():
    """Return all morbidities for the war era named in the `warEra` query parameter."""
    war_era_name = request.args.get('warEra')
    if not war_era_name:
        raise BadRequestError("warEra parameter is missing")
    return datasources_service.get_morbidities_for_war_era(war_era_name)
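A client-side sketch of how the endpoint above would be exercised. The route path and the example era name are assumptions, since the record omits the route decorator:

import requests

resp = requests.get(
    "http://localhost:5000/morbidities",   # hypothetical route and host
    params={"warEra": "World War II"},     # hypothetical era name
)
resp.raise_for_status()
print(resp.json())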
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_war_eras():\n return datasources_service.get_war_eras()", "def get_movies(self):\n worlds = ['cinemaworld',\n 'filmworld']\n\n pool = Pool(2)\n movies_world = pool.map(self.get_movies_list, worlds)\n pool.close()\n pool.join()\n\n for m_world in movies_world:\n world_type = list(m_world)[0]\n if world_type == \"cinemaworld\":\n cinemaworld_movies = m_world[world_type]\n elif world_type == \"filmworld\":\n filmworld_movies = m_world[world_type]\n\n return cinemaworld_movies, filmworld_movies", "def get_movies_list(self, world):\n api_url = self.api_url_base + '/api/{}/movies'.format(world)\n movies_dict = self.get_dict_from_apis(api_url)\n ret_dict = {world: None}\n if movies_dict is not None:\n ret_dict[world] = movies_dict['Movies']\n return ret_dict", "def get_weathers():\n names = [\n name for name in dir(carla.WeatherParameters)\n if re.match('[A-Z].+', name)\n ]\n weathers = {x: getattr(carla.WeatherParameters, x) for x in names}\n return weathers", "def _getAllMinistries(date):\n session = Session()\n mfilter=sql.or_( \n sql.between(date, schema.groups.c.start_date, schema.groups.c.end_date),\n sql.and_(\n (schema.groups.c.start_date < date ),\n (schema.groups.c.end_date == None)\n )\n )\n query = session.query(domain.Ministry).filter(mfilter)\n return query.all()", "def load_towns():\n if not hasattr(g, 'towns'):\n #g.towns = run_query('select id, name from municipios')\n g.towns = get_towns()\n return g.towns", "def query_interstate_wars(req_war_id):\n\tthis_query = Query('interstate', req_war_id)\n\tthis_query.send_query()\n\tresponse = this_query.pull_result()\n\treturn jsonify(response)\n\t#return render_template('response.html', response=response)", "def pharmacies(self) -> PharmasiesList:\n data = self.get(\"minhealth_pharmacies\")\n \n ls = [Pharmacies(**pharm) for pharm in data]\n return PharmasiesList(items=ls)", "def mlbwar(self, irc, msg, args, opttype):\n \n opttype = opttype.lower()\n \n wartypelist = ['overall','pitching','offense','fielding']\n \n if opttype not in wartypelist:\n irc.reply(\"WAR type must be one of: %s\" % wartypelist)\n return\n \n url = self._b64decode('aHR0cDovL2VzcG4uZ28uY29tL21sYi8=')\n\n try:\n req = urllib2.Request(url)\n html = (urllib2.urlopen(req)).read()\n except: \n irc.reply(\"Failed to open: %s\" % url)\n return\n \n soup = BeautifulSoup(html)\n regexString = 'war' + opttype + '.*?' 
# build regex ourselves for searching.\n div = soup.find('div', attrs={'id':re.compile(regexString)})\n\n table = div.find('table')\n rows = table.findAll('tr')[1:] # skip header.\n\n append_list = []\n\n for row in rows:\n rank = row.find('td')\n player = rank.findNext('td')\n team = player.findNext('td')\n war = team.findNext('td')\n append_list.append(ircutils.bold(player.getText()) + \" (\" + team.getText() + \") \" + war.getText())\n\n descstring = string.join([item for item in append_list], \" | \")\n output = \"{0} {1} :: {2}\".format(ircutils.mircColor(\"WAR Leaders for:\", 'red'), ircutils.underline(opttype.title()), descstring)\n \n irc.reply(output)", "def get(self, cityname):\n response = hereService.getWeatherByCity(cityname)\n return response", "def get_marcels(goalies, date, df):\n goalies_marcels = []\n for goalie in goalies:\n goalie_marcels = marcels_players(goalie, date, df)\n goalies_marcels.append({\"goalie\": goalie, \"adj_fsv\": goalie_marcels['fsv'], \"gp\": goalie_marcels['gp']})\n\n return goalies_marcels", "def get_all(self, name):\n\t\tpass", "def get_boards(trello_client, board_filter=None, board_name=None):\n all_boards = trello_client.list_boards(board_filter=board_filter)\n\n if board_name is not None:\n all_boards = [b for b in all_boards if board_name == b.name]\n\n logger.debug(\"{} boards loaded using '{}' filter\".format(len(all_boards), board_filter))\n\n return all_boards", "def get_all_movies(self):\n cinemaworld_movies, filmworld_movies = self.get_movies()\n\n if cinemaworld_movies is not None:\n self.get_title_map(cinemaworld_movies, \"cinemaworld\")\n if filmworld_movies is not None:\n self.get_title_map(filmworld_movies, \"filmworld\")\n\n return self.title_map", "def effect_list(self):\n moods = []\n for mood in self._moodlist:\n if \"name\" in mood:\n moods.append(mood['name'])\n return moods", "def get_wrf_stations(pool):\n\n wrfv3_stations = {}\n\n connection = pool.connection()\n try:\n with connection.cursor() as cursor:\n sql_statement = \"SELECT `id`, `name` FROM `station` WHERE `id` like %s\"\n row_count = cursor.execute(sql_statement, \"11_____\")\n if row_count > 0:\n results = cursor.fetchall()\n for dict in results:\n wrfv3_stations[dict.get(\"name\")] = dict.get(\"id\")\n return wrfv3_stations\n else:\n return None\n except Exception as exception:\n error_message = \"Retrieving wrf stations failed\"\n logger.error(error_message)\n traceback.print_exc()\n raise exception\n finally:\n if connection is not None:\n connection.close()", "def fetch_mines(self, planet=None):\n return self.fetch_levels(\"resources\", planet, codes.mines)", "def get_all_masses(self):\n allMasses = set()\n for interval in self.mz_tree:\n allMasses.add( interval.data[\"mass\"] )\n\n return allMasses", "def warping_grp(self, run_idx):\n return self.records_grp(run_idx, WARPING)", "def get_mke_scores():\n _scores = {k:[] for k in time_str_to_time.keys()} \n _scores['all'] = [] # add key for all milwaukee and all time zones\n for zip_ in zip_populations.keys(): \n res = query_all_crimes(zip_=zip_)\n print(f'[PROCESSING] {zip_}')\n crimes = to_df(res)\n create_crime_cat(crimes)\n integrate_weight_to_df(crimes)\n for time_sl in time_str_to_time.keys():\n sub = extract_crimes_by_sl(crimes, time_str_to_time[time_sl]) \n cas = compute_crime_score(sub, zip_) \n _scores[time_sl].append(cas)\n _scores['all'].append(cas)\n return _scores", "def query_intrastate_wars(req_war_id):\n\tthis_query = Query('intrastate', req_war_id)\n\tthis_query.send_query()\n\tresponse = 
this_query.pull_result()\n\treturn jsonify(response)\n\t#return render_template('response.html', response=response)", "def get_matching_war(self, clan, war=None):\n\n if war and war.date_start <= self.time <= war.date_end:\n return war\n\n try:\n return ClanWar.objects.get(\n clan=clan,\n date_start__lte=self.time,\n date_end__gte=self.time\n )\n except ClanWar.DoesNotExist:\n return None\n except ClanWar.MultipleObjectsReturned:\n return None", "def get_all_boards():\n return [board for board in GRAPH_DB.find(\"board\")]", "def mlbweather(self, irc, msg, args, optteam):\n \n optteam = optteam.upper().strip()\n\n if optteam not in self._validteams():\n irc.reply(\"Team not found. Must be one of: %s\" % self._validteams())\n return\n \n url = self._b64decode('aHR0cDovL3d3dy5wYXJrZmFjdG9ycy5jb20v')\n\n try:\n req = urllib2.Request(url)\n html = (urllib2.urlopen(req)).read()\n except:\n irc.reply(\"Failed to open: %s\" % url)\n return\n \n if \"an error occurred while processing this directive\" in html:\n irc.reply(\"Something broke with parkfactors. Check back later.\")\n return\n \n html = html.replace('&amp;','&').replace('ARZ','ARI').replace('CHW','CWS').replace('WAS','WSH').replace('MLW','MIL') # need some mangling.\n\n soup = BeautifulSoup(html)\n h3s = soup.findAll('h3')\n\n object_list = []\n\n for h3 in h3s:\n park = h3.find('span', attrs={'style':'float: left;'})\n factor = h3.find('span', attrs={'style': re.compile('color:.*?')})\n matchup = h3.findNext('h4').find('span', attrs={'style':'float: left;'})\n winddir = h3.findNext('img', attrs={'class':'rose'})\n windspeed = h3.findNext('p', attrs={'class':'windspeed'}).find('span')\n weather = h3.findNext('h5', attrs={'class':'l'})\n if weather.find('img', attrs={'src':'../images/roof.gif'}):\n weather = \"[ROOF] \" + weather.text \n else:\n weather = weather.text.strip()\n\n d = collections.OrderedDict()\n d['park'] = park.renderContents().strip()\n d['factor'] = factor.renderContents().strip()\n d['matchup'] = matchup.renderContents().strip()\n d['winddir'] = str(''.join(i for i in winddir['src'] if i.isdigit()))\n d['windspeed'] = windspeed.renderContents().strip()\n d['weather'] = weather.replace('.Later','. 
Later').replace('&deg;F','F ')\n object_list.append(d)\n\n output = False \n \n for each in object_list:\n if optteam in each['matchup']:\n output = \"{0} at {1}({2}) Weather: {3} Wind: {4}mph ({5}deg)\".format(ircutils.underline(each['matchup']),\\\n each['park'], each['factor'], each['weather'], each['windspeed'], each['winddir'])\n \n if not output:\n irc.reply(\"No match-up found for: %s\" % optteam)\n return\n else:\n irc.reply(output)", "def query_extrastate_wars(req_war_id):\n\tthis_query = Query('extrastate', req_war_id)\n\tthis_query.send_query()\n\tresponse = this_query.pull_result()\n\t#response = json.dumps(response)\n\treturn jsonify(response)", "def list_missions(self):\n\n # getting all the histogram information\n service = \"Mast.Caom.All\"\n params = {}\n response = self.service_request_async(service, params, format='extjs')\n jsonResponse = response[0].json()\n\n # getting the list of missions\n histData = jsonResponse['data']['Tables'][0]['Columns']\n for facet in histData:\n if facet['text'] == \"obs_collection\":\n missionInfo = facet['ExtendedProperties']['histObj']\n missions = list(missionInfo.keys())\n missions.remove('hist')\n return missions", "def get_cities(self, city_name: str = None):", "def getMyArmies(self):\n r = []\n for army in self.__armies:\n if (army.getOwner() == 1):\n r.append(army)\n return r", "def find_all(self, params={}, **options):\n return self.client.get_collection(\"/workspaces\", params, **options)", "def list_mc_servers(self, by_name=False, all_data=False):\n status, data, errors, messages = self._make_get_request(MCAPIRoutes.LIST)\n \n if status == 200:\n if by_name:\n y = 0\n returnData = dict()\n for items in data['servers']:\n returnData[y] = items.get(\"id\", 0)\n y += 1\n returnData[y] = items.get(\"name\", 0)\n return returnData\n if all_data:\n y = 0\n returnData = dict()\n for items in data['servers']:\n returnData[y] = items.get(\"id\", 0)\n y += 1\n returnData[y] = items.get(\"name\", 0)\n y += 1\n returnData[y] = items.get(\"running\", 0)\n y = y + 1\n returnData[y] = items.get(\"auto_start\", 0)\n return returnData\n del returnData\n else:\n return data['servers']\n elif status == 500:\n self._check_errors(errors, messages)", "def get_by_name(name):\n return database.get_all(Domain, name, field=\"name\").all()", "def all_monoms(f):\n return dmp_all_monoms(f.rep, f.lev, f.dom)", "def get_warc(self):\n raise NotImplementedError()", "def get_all_windows(self):\n success, result = self.manager.c.eval(\n textwrap.dedent(\n \"\"\"\n [win.wid for win in self.core.mapped_windows]\n \"\"\"\n )\n )\n assert success\n return eval(result)", "def info_towers_get():\n session = info_map.Session()\n\n q = session.query(info_map.Tower)\n q_towers = q.all()\n towers = []\n for q_tower in q_towers:\n tower = TowerInfo(\n type=q_tower.type,\n fuel_bay=q_tower.fuel_bay,\n stront_bay=q_tower.stront_bay,\n name=q_tower.name,\n storage_mult=q_tower.storage_mult,\n cpu=q_tower.cpu,\n powergrid=q_tower.powergrid,\n fuel_usage=q_tower.fuel_usage,\n stront_usage=q_tower.stront_usage,\n fuel_type=q_tower.fuel_type)\n\n towers.append(tower)\n\n return towers, 200", "def get_wing_list():\n try:\n society_id = request.form['society_id']\n society_wing_list = queries['get_society_wing_list']\n query = society_wing_list.format(society_id)\n\n with dbm.dbManager() as manager:\n result = manager.getDataFrame(query)\n return jsonify(result.to_dict(orient='records'))\n except psycopg2.DatabaseError as error:\n errors = {'get_wing_list': False, 'error': 
(error)}\n return str(errors)", "def get_mos_from_localhost(self):\n rewards = dict() # saves the reward of each client from each ap\n _, data = self.command_ap('localhost', 8080, '', \"/get_mos_client\") # the interface (3rd param) does not matter\n self.log.debug(\"data for MOS @ {} => {}\".format('all', data))\n stations = {'gnu-nb3': ['cloud'],\n 'fenrir': ['storm'],\n }\n for ap in self.aps:\n d = []\n for sta in stations[ap.name]:\n entries = [x[:4] for x in data if x[4] == sta]\n d.extend(entries)\n rs = self.get_rs(d)\n rewards[ap.id] = rs\n return rewards", "def get_all_by_name():\n name = request.args['name']\n return jsonify(service.get_all_data_by_name(name))", "def waste_water(osm_path): \n return (retrieve(osm_path,'multipolygons',['man_made'],**{'man_made':[\"='wastewater_plant'\"]})).rename(columns={'man_made': 'asset'})", "def get_inspection_periods_all_sites(db_name, team: str) -> List[Any]:\n db_file = os.path.join(TWDFT_DATA_DIR, db_name)\n try:\n conn = sqlite3.connect(db_file)\n except FileNotFoundError:\n raise\n c = conn.cursor()\n if not team:\n c.execute(\"SELECT name, last_inspection, freq_target, county FROM site WHERE NOT site_type =\\\"PSA\\\"\")\n else:\n team = \" \".join([\"Maritime\", team])\n c.execute(\"SELECT name, last_inspection, freq_target, county FROM site WHERE team=? AND NOT site_type =\\\"PSA\\\" AND rdf=0\", (team,))\n result = c.fetchall()\n conn.close()\n return result", "def all_rooms(self):\n pass", "def get_wims(self, uuid_or_name=None, tenant=None, **kwargs):\n kwargs.update(wim=uuid_or_name, tenant=tenant)\n from_ = _WIM_JOIN if tenant else _WIM\n select_ = _WIM_SELECT[:] + (['wim_account.*'] if tenant else [])\n\n kwargs.setdefault('SELECT', select_)\n return self.query(from_, **kwargs)", "def query_nonstate_wars(req_war_id):\n\tthis_query = Query('nonstate', req_war_id)\n\tthis_query.send_query()\n\tresponse = this_query.pull_result()\n\treturn jsonify(response)\n\t#return render_template('response.html', response=response)", "def associate_war_battles(self):\n orphan_battles = self.get_players_battles().filter(war__isnull=True, mode__war_day=True, war_processing_status='pending')\n for battle in orphan_battles:\n battle.process_war(self)", "def list(self):\n return self.rpc.call(MsfRpcMethod.DbWorkspaces)['workspaces']", "def get_player_war_br(self, player, kind , year):\n if kind == 'batter':\n war = '$br.Batting Value.{}.WAR'.format(year)\n off = '$br.Batting Value.{}.oWAR'.format(year)\n def_ = '$br.Batting Value.{}.dWAR'.format(year)\n res = self._db.Players.aggregate([{'$match': {'Name' : player}},\n {'$project': {'_id' : 0,\n 'war' : war,\n 'off' : off,\n 'def' : def_}}])\n elif kind == 'pitcher':\n war = '$br.Pitching Value.{}.WAR'.format(year)\n war = '$fg.pit.{}.pit_WAR'.format(year)\n res = self._db.Players.aggregate([{'$match': {'Name' : player}},\n {'$project': {'_id' : 0,\n 'war' : war}}])\n return list(res)[0]", "def get_weather(html):\n\tcheck_page_type(html)\n\tget_temp(html)\n\tget_table(html)\n\treturn weather_dict", "def get(self, request, m_name):\n machines = Machine.objects.get(name=str(m_name))\n serializer = MachineSerializer(machines, many=False)\n return Response(serializer.data)", "def get_all_motors():\n return mc.get('motor_values')", "def get_cities(self, city_name: str = \"\"):", "def get_facwartopstats_ids(self):\n fwts = self.api.eve.FacWarTopStats()\n charids = set()\n corpids = set()\n for list_ in ['KillsYesterday', 'KillsLastWeek', 'KillsTotal',\n 'VictoryPointsYesterday', 
'VictoryPointsLastWeek',\n 'VictoryPointsTotal']:\n for row in getattr(fwts.characters, list_):\n charids.add(row.characterID)\n for row in getattr(fwts.corporations, list_):\n corpids.add(row.corporationID)\n for charid in charids:\n self.conn.ensure_character_exists(charid)\n self.conn.mark_chars_for_api(charids)\n for corpid in corpids:\n self.conn.ensure_corporation_exists(corpid)\n self.conn.mark_corps_for_api(corpids)\n self.conn.mark_corps_for_cache(corpids)", "def get_worms_for_screen_type(cls, screen_type):\n if screen_type not in {'ENH', 'SUP'}:\n raise Exception('screen_type must be ENH or SUP')\n\n worms = cls.objects\n\n if screen_type == 'ENH':\n return worms.exclude(permissive_temperature__isnull=True)\n else:\n return worms.exclude(restrictive_temperature__isnull=True)", "def get_wineries():\n wineries = session.query(Winery).all()\n return render_template(\"wineries.html\", wineries=wineries)", "def get_mike_stations(pool):\n\n mike_stations = {}\n\n connection = pool.connection()\n try:\n with connection.cursor() as cursor:\n sql_statement = \"SELECT * FROM `station` WHERE `id` like %s\"\n row_count = cursor.execute(sql_statement, \"18_____\")\n if row_count > 0:\n results = cursor.fetchall()\n for dict in results:\n mike_stations[dict.get(\"name\")] = [dict.get(\"id\"), dict.get(\"latitude\"), dict.get(\"longitude\")]\n return mike_stations\n else:\n return None\n except Exception as exception:\n error_message = \"Retrieving mike stations failed\"\n logger.error(error_message)\n traceback.print_exc()\n raise exception\n finally:\n if connection is not None:\n connection.close()", "def get_observation(world_state,weather,time_now):\n obs = np.zeros((2, OBS_SIZE, OBS_SIZE))\n object_list=[]\n while world_state.is_mission_running:\n time.sleep(0.1)\n world_state = agent_host.getWorldState()\n \n if len(world_state.errors) > 0:\n raise AssertionError('Could not load grid.')\n\n if world_state.number_of_observations_since_last_state > 0:\n # First we get the json from the observation API\n msg = world_state.observations[-1].text\n observations = json.loads(msg)\n # Get observation\n if weather != 'clear':\n object_list.append(weather)\n else:\n object_list.append('sunny')\n\n if time_now>13000:\n object_list.append(\"night\")\n else:\n object_list.append(\"morning\")\n grid = observations['floorAll']\n \n if grid.count('water')>3:\n object_list.append(\"river\")\n if grid.count('leaves')>2:\n object_list.append(\"tree\")\n if grid.count('lava')>1:\n object_list.append(\"lava\")\n \n animal=observations['NearbyEntities']\n target_ani=['Sheep','Cow','Pig']\n for i in animal:\n if i['name'] not in object_list and i['name'] in target_ani:\n object_list.append(i['name'])\n break\n \n \n return object_list", "def sentientPlanets():\n planet_list = []\n url = 'https://swapi-api.hbtn.io/api/species'\n while url is not None:\n data = requests.get(url).json()\n for species in data['results']:\n if ((species['designation'] == 'sentient'\n or species['designation'] == 'reptilian')):\n if species['homeworld'] is not None:\n hw = requests.get(species['homeworld']).json()\n planet_list.append(hw['name'])\n url = data['next']\n return planet_list", "def all_measurements_lookup(client):\n dbs_dict = db_lookup(client)\n m_list_dict = []\n for db in dbs_dict:\n m_list_dict.append({db['name']: measurements_lookup(client, db['name'])})\n # print(\"def all_measurements_lookup 1: \", m_list_dict[:10])\n return m_list_dict", "def get_solar(self, name_building):\n return 
self._solar.loc[name_building]", "def scrape_all_world_cup_goals():\n def scrape_goals_year(year):\n urls = scrape_world_cup_scoreboard(year)\n goals = []\n for url in urls:\n goals.extend(scrape_fifa_goals(url, 'FIFA World Cup'))\n return goals\n\n l = []\n for year in sorted(world_cup_mapping.keys()):\n l.extend(scrape_goals_year(year))\n return l", "def get_all_of_experiment(self, experiment_name: str):\n query = (\n f\"SELECT * FROM {self.table_name} WHERE experiment_name='{experiment_name}'\"\n )\n c = self.db.cursor()\n c.execute(query)\n queries = c.fetchall()\n return queries", "def list_workspaces(client):\n return client._creoson_post(\"windchill\", \"list_workspaces\", key_data=\"workspaces\")", "def get(self, name, fuzzy_matches=0, skip_cache=False):\n now = _time_ms(datetime.datetime.utcnow())\n if skip_cache or now - self._last_updated > CACHE_LIMIT:\n self._process_stations()\n\n name = name.strip().lower()\n station = self._stations_map.get(name, None)\n if station is None:\n if not fuzzy_matches:\n return []\n names = self._stations_map.keys()\n matches = difflib.get_close_matches(name, names, n=fuzzy_matches, \n cutoff=0)\n if matches:\n return [self._stations_map[x] for x in matches]\n else:\n return [station]", "def get_all_meals():", "def pharmacists(self) -> PharmasictsList:\n data = self.get(\"minhealth_pharmacists\")\n \n ls = [Pharmacists(**pharm) for pharm in data]\n return PharmasictsList(items=ls)", "async def search_law_by_name(self, name: str, connection=None) -> typing.Dict[str, None]:\n\n con = connection or self.bot.db\n\n query = \"\"\"SELECT law_id FROM legislature_laws AS l\n JOIN legislature_bills lb ON l.bill_id = lb.id\n WHERE lower(lb.bill_name) LIKE '%' || $1 || '%' \n ORDER BY similarity(lower(lb.bill_name), $1) DESC\n LIMIT 10;\"\"\"\n\n laws = await con.fetch(query, name.lower())\n\n found = dict()\n\n for law_id in laws:\n law = await Law.convert(MockContext(self.bot), law_id['law_id'])\n found[f\"Law #{law.id} - [{law.bill.name}]({law.bill.link})\"] = None\n\n return found", "def cities_weather(cities):\n result = pd.DataFrame()\n for city in cities:\n print(city)\n weather = ask_google_weather(city=city)\n weather = pd.DataFrame.from_dict(weather)\n result = pd.concat([result, weather], axis=0)\n result.reset_index(drop=True, inplace=True)\n return result", "def get_by_name(self, wk_name):\n work_role = WorkRole.query.filter_by(name=wk_name).first()\n\n return work_role", "def queryMBeans(domain='WebSphere', **attributes):\n queryString = '%s:*' % domain\n for (k, v) in attributes.items():\n queryString += ',%s=%s' % (k, v)\n result = []\n for name in AdminControl.queryNames(queryString).splitlines():\n if name.strip() != '':\n result.append(MBean(name))\n return result", "def get_weather(self, time_delta: int = 30) -> list:\n time = datetime(1980, 1, 1)\n weather_list: list = []\n index = 0\n\n for index in range(len(self.locations)):\n if time == datetime(1980, 1, 1):\n time = self.timestamps[index]\n location = (self.locations[index][0], self.locations[index][1])\n weather_response = self.__weather_api_call(\n time, location, index,\n )\n weather_list.append(weather_response)\n elif time + timedelta(minutes=time_delta) < self.timestamps[index]:\n time = self.timestamps[index]\n location = (self.locations[index][0], self.locations[index][1])\n weather_response = self.__weather_api_call(\n time, location, index,\n )\n weather_list.append(weather_response)\n\n if index == len(self.locations) - 1:\n time = self.timestamps[index] + 
timedelta(minutes=60)\n location = (self.locations[index][0], self.locations[index][1])\n weather_response = self.__weather_api_call(\n time, location, index,\n )\n weather_list.append(weather_response)\n\n return weather_list", "def get_weather(days, hours, db):\n days = format_list_for_db(days)\n hours = format_list_for_db(hours)\n sql = f\"SELECT * FROM weather WHERE day in {days} AND HOUR in {hours}\"\n cursor = db.cursor()\n cursor.execute(sql)\n data = cursor.fetchall()\n cursor.close()\n\n weathers = []\n if len(data) > 0:\n for weather in data:\n weather = {\"hour\": weather[1],\n \"day\": day_absolute_to_relative(weather[2]),\n \"temperature\": weather[3],\n \"apparenttemperature\": weather[4],\n \"precipitationintensity\": weather[5],\n \"precipitationprobability\": weather[6],\n \"humidity\": weather[7],\n \"dewpoint\": weather[8],\n \"windspeed\": weather[9],\n \"windbearing\": weather[10],\n \"windgust\": weather[11],\n \"pressure\": weather[12],\n \"cloudcover\": weather[13],\n \"uvindex\": weather[14],\n \"visibility\": weather[15]}\n weathers.append(weather)\n return weathers", "def cities(self):\n objs = models.storage.all()\n tmp = []\n for key, value in objs.items():\n name = key.split('.')\n if name[0] == \"City\":\n if value.state_id == str(self.id):\n tmp.append(objs[key])\n return tmp", "def dormitories(self) -> list[Dormitory]:\n return list(self._dormitories.values())", "def GetWorldCities():\n return GetDataFromCsvFile('world_cities.csv')", "def amenities(self):\n ats = storage.all(Amenity)\n ltats = []\n for objects in ats.values():\n if self.amenity_ids == objects.id:\n ltats.append(objects)\n return ltats", "def cities(self):\n from models import storage\n city_list = []\n cities_dict = storage.all(cls=\"City\")\n for k, v in cities_dict.items():\n if v.get(\"state_id\") == self.id:\n city_list.append(v)\n return city_list", "def select_all_mip_objects():\n\n for mip in Mip.objects.all():\n print mip", "def get_all_finished_missions(self):\n self.lock.acquire()\n result = self.__Session.query(Mission).filter_by(status=3).all()\n self.lock.release()\n return result", "def amenities_all():\n return jsonify(list(map(lambda x: x.to_dict(),\n list(storage.all(Amenity).values()))))", "def getRooms(building_data):\n rooms = []\n floors = building_data['containedSpaces']\n for floor in floors:\n if(floor['type'] == 'FLOOR'):\n r = requests.get(FenixSpacesAPI_URL + \"/\" + str(floor['id']))\n floor_data = r.json()\n for floor_rooms in floor_data['containedSpaces']:\n if(floor_rooms['type'] == 'ROOM'):\n rooms.append(format_floor(floor_rooms))\n return rooms", "def lom(self):\n return self.get_queryset().filter(award__name=MatchAward.LOM)", "def mine_all(self):\n\n # Query databse\n query_string = \"SELECT * from planets_in_range;\"\n self.conn_cur.execute(query_string)\n results = self.conn_cur.fetchall()\n\n # Check planets in range\n for ship in results:\n self.mine(str(ship[0]), str(ship[1]))", "def get_zakopane_hourly_weather():\n zakopane = TwelveHoursWeatherForecast(location.get(\"zakopane\", \"\"))\n zakopane_weather_detail = zakopane.get_hourly_weather_details()\n zakopane_hourly_weather_detail = []\n for data in zakopane_weather_detail:\n zakopane_hourly_weather_detail.append(data)\n return zakopane_hourly_weather_detail", "def pom(self):\n return self.get_queryset().filter(award__name=MatchAward.POM)", "def get(self, name):\n res = self.rpc.call(MsfRpcMethod.DbGetWorkspace, [name])\n if 'workspace' in res:\n return res['workspace']\n else:\n return", "def 
get_script ( self, name ):\n return list ( self.find_all_by_name ( name ) )", "def fetch_weather(y):\r\n # request parameter(s): Start with '?'\r\n # separate name and value with '='\r\n # multiple parameter name value pairs are separate with '&'\r\n query_string = \"?id={}&units=imperial&APIKEY={}\".format(y, API_KEY)\r\n request_url = WS_URL + query_string\r\n print(\"Request URL: \", request_url)\r\n response = requests.get(request_url)\r\n if response.status_code == 200:\r\n city_name = response.json()[\"city\"][\"name\"]\r\n lst = response.json()[\"list\"]\r\n tmp_list = []\r\n for i in range(len(lst) // 8):\r\n li = [x for x in range(len(lst)) if x // 8 == i]\r\n tmp_list.append(max([lst[j][\"main\"][\"temp_max\"] for j in li]))\r\n return City(city_name, tmp_list)\r\n else:\r\n print(\"How should I know?\")\r\n return None", "def get_thermostats(client, hod, building):\n\n query = \"\"\"SELECT ?uri ?zone FROM %s WHERE {\n ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat .\n ?tstat bf:uri ?uri .\n ?tstat bf:controls/bf:feeds ?zone .\n };\"\"\"\n\n # Start of FIX for missing Brick query\n query = \"\"\"SELECT ?zone ?uri FROM %s WHERE {\n ?tstat rdf:type brick:Thermostat .\n ?tstat bf:controls ?RTU .\n ?RTU rdf:type brick:RTU .\n ?RTU bf:feeds ?zone. \n ?zone rdf:type brick:HVAC_Zone .\n ?tstat bf:uri ?uri.\n };\"\"\"\n # End of FIX - delete when Brick is fixed\n building_query = query % building\n\n tstat_query_data = hod.do_query(building_query)['Rows']\n tstats = {tstat[\"?zone\"]: Thermostat(client, tstat[\"?uri\"]) for tstat in tstat_query_data}\n return tstats", "def get_all(self) -> List[LoadedLabware]:\n return list(self._state.labware_by_id.values())", "def get_immats_fromdwh(dwh_schema, table_name, connection, date_col=\"date_immat\"):\n \n query = f\"SELECT distinct date AS date_immat FROM {dwh_schema}.{table_name}\"\n df = pd.read_sql(query,con=connection) \n df[\"year_immat\"] = df[date_col].str[-4:]\n dc_years = dict()\n for year, nb_mois in df.year_immat.value_counts().iteritems():\n if nb_mois < 12:\n key_name = f\"immats/immats_{year}.csv\"\n dc_years[key_name] = df[df[\"year_immat\"]==year][date_col].tolist()\n return dc_years", "def get_player_war_fg(self, player, kind, year):\n if kind == 'batter':\n war = '$fg.bat.{}.bat_WAR'.format(year)\n off = '$fg.bat.{}.Off'.format(year)\n def_ = '$fg.bat.{}.Def'.format(year)\n res = self._db.Players.aggregate([{'$match': {'Name' : player}},\n {'$project': {'_id' : 0,\n 'war' : war,\n 'off' : off,\n 'def' : def_}}])\n elif kind == 'pitcher':\n war = '$fg.pit.{}.pit_WAR'.format(year)\n res = self._db.Players.aggregate([{'$match': {'Name' : player}},\n {'$project': {'_id' : 0,\n 'war' : war}}])\n return list(res)[0]", "def schedules(self, term, include_units=False):\n params = {'termCode': term.code}\n r = self.get(self.HOME_ENDPOINT, params=params)\n soup = BeautifulSoup(r.text, 'html.parser')\n schedules = dict()\n # Finding schedule names\n name_matches = list(re.finditer('Schedules\\[Schedules\\.length\\] = \\{\"Name\":\"(.+?)\"',\n r.text))\n course_re = re.compile('Schedules\\[Schedules\\.length \\- 1\\]\\.SelectedList\\.t'\n '([0-9A-Z]+) =.+?\"UNITS\":\"([0-9])\"', flags=re.DOTALL)\n start = 0\n\n for idx, name_match in enumerate(name_matches):\n name = name_match.group(1)\n schedules[name] = list()\n\n try:\n end = name_matches[idx + 1].start()\n except IndexError:\n end = len(r.text)\n course_match = None\n for course_match in course_re.finditer(r.text, name_match.start(), end):\n crn = course_match.group(1)\n if 
include_units:\n units = int(course_match.group(2))\n schedules[name].append((crn, units))\n else:\n schedules[name].append(crn)\n\n return schedules", "def get_all_labs():\n return Lab.query.all()", "def get_all_lod(namestr):\n meshes = []\n for me in bpy.data.meshes:\n if \"|q\" in me.name and namestr in me.name:\n meshes.append(me)\n return meshes", "async def hourly(self) -> list:\n data: dict = await self._request(\"get\", \"restrictions/hourly\")\n return data[\"hourlyRestrictions\"]", "def list_minerals():\n return _list_tindyb_unique_values(\"name\", dbpath=__dbpath__)", "def getweigths():\n ls = []\n for i_lay in range(1, len(layers)):\n ls.append(layers[i_lay][\"weigths\"])\n return ls", "def get(self, request):\n\t\tworkingHours = GymModels.WorkingHours.objects.all()\n\t\tserializer = PeopleSerializer.WorkingHourSerializer(workingHours, many=True)\n\t\treturn Response(serializer.data)", "def list_melons():\n melons = model.get_melons()\n return render_template(\"all_melons.html\",\n melon_list = melons)", "def get_by_name(self, name):\n ksat = Ksat.query.filter_by(name=name).first()\n\n return ksat", "def amenity_get_all():\n am_list = []\n am_obj = storage.all(\"Amenity\")\n for obj in am_obj.values():\n am_list.append(obj.to_json())\n\n return jsonify(am_list)" ]
[ "0.530085", "0.5216914", "0.52164996", "0.52158", "0.512819", "0.5046316", "0.49238253", "0.48991495", "0.48724052", "0.4854176", "0.48369068", "0.47993955", "0.4775364", "0.47421068", "0.47291753", "0.46694636", "0.4650255", "0.46441507", "0.46405992", "0.46394694", "0.46272328", "0.4622484", "0.4615833", "0.4612752", "0.4594808", "0.45941487", "0.45882702", "0.45876667", "0.4579765", "0.45696202", "0.4558967", "0.45522574", "0.45479417", "0.4545498", "0.45440927", "0.45325503", "0.45295632", "0.4510229", "0.4503892", "0.44882134", "0.44792107", "0.4475687", "0.44732606", "0.4469734", "0.44647437", "0.44643402", "0.44507724", "0.44507676", "0.44479173", "0.44391075", "0.44371167", "0.4428116", "0.44190544", "0.44130194", "0.44130045", "0.44123787", "0.4409263", "0.44064075", "0.43982023", "0.43924898", "0.43922102", "0.43874472", "0.43871295", "0.4370827", "0.43702576", "0.43642536", "0.43554232", "0.43549633", "0.43512505", "0.4346343", "0.43443865", "0.43384108", "0.43381214", "0.4338093", "0.43255585", "0.43248802", "0.43239522", "0.43214235", "0.43209225", "0.43193686", "0.43182254", "0.4316696", "0.4313482", "0.43122876", "0.4311494", "0.4310127", "0.43080154", "0.43071508", "0.4306382", "0.43061712", "0.42992136", "0.42963818", "0.4296057", "0.42929283", "0.42901975", "0.4288888", "0.42860627", "0.4278043", "0.42776194", "0.42720792" ]
0.69981116
0
Get list of all war eras.
def get_war_eras():
    return datasources_service.get_war_eras()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ewriters():\n return dict(_ewriters)", "def get_morbidities_for_war_era():\n war_era_name = request.args.get('warEra')\n if not war_era_name:\n raise BadRequestError(\"warEra parameter is missing\")\n return datasources_service.get_morbidities_for_war_era(war_era_name)", "def list_shelves(self):\n shelflist = []\n for i in self.get_shelves():\n shelflist.append(i)\n return shelflist", "def get_resources(self):\n return []", "def get_all(self):\n\n url = 'equipment/all'\n\n code, xml = self.submit(None, 'GET', url)\n\n return self.response(code, xml)", "def list_all(self):\n\n url = 'equipamento/list/'\n\n code, xml = self.submit(None, 'GET', url)\n\n return self.response(code, xml)", "def get_all_ribs_per_router(self):\n return self._get_all_ribs(lambda r: r.name)", "def list_router(self):\n _url = \"http://\" + self.host_ip + \":9696/v2.0/routers.json\"\n _headers = {'Content-type': 'application/json',\n 'x-auth-token': self.project_info[\"token_project\"]}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\"No response from Server while listing routers.\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"List router Failed with status %s \" %\n response.status)\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"Router List : %s \" % output)\n\n return output[\"routers\"]", "def get_all_environments():\n return ENVIRONMENTS", "def all(self) -> list[dict[str, Any]]:\n return self.client.get(self._url())", "def alerts_all_zones(self: SimpleNWS) -> List[Dict[str, Any]]:\n return self._alerts_all_zones", "def get_all_explorations():\n return [exp_domain.Exploration(e) for e in\n exp_models.ExplorationModel.get_all()]", "def get_etfs_list(self):\n return list(self.etfs.keys())", "def resources(self):\n return list(self.get_resources_for_type(gdef.ResType_All))", "def all_errata(self):\n return self._all_errata", "def get_all_reporters():\r\n for ep in iter_entry_points('attest.reporters'):\r\n yield ep.name", "def get_all_entities(self):\n return Artifact.get_all()", "def _get_all_resources(self):\n all_resources = []\n for resource in ResourceModel.scan():\n all_resources.append(resource)\n return all_resources", "def get_laser_echoes(self):\n return self._request_data(\"/lokarria/laser/echoes\")", "def etls(self):\r\n return self._etls", "def get_all_thermals(self):\n return self._thermal_list", "def get_possible_absentees(self) -> List[QualifiedAgent]:\n wum: WorklistUpdateManagerApi = self._service_provider.get_service(WorklistUpdateManagerApi)\n return self._rem_iter_handler.consume(\n wum.get_possible_absentees(),\n \"agents\",\n PossAbsRemoteIteratorApi,\n PossAbsRemoteIteratorApi.poss_abs_get_next,\n )", "def all_hosts(self):\n ...", "def list_runs(self):\n postresult = requests.get(\n f\"{self.proto}://{self.host}/ga4gh/wes/v1/runs\", headers=self.auth\n )\n return wes_reponse(postresult)", "def get_all_routers(self):\n import network\n sta_if = network.WLAN(network.STA_IF)\n sta_if.active(True)\n all_routers = sta_if.scan()\n\n routers = []\n for router_tuple in all_routers:\n router = Router(router_tuple[0], router_tuple[1], router_tuple[3])\n routers.append(router)\n\n return routers", "def list(self):\n path = \"authSettings/exemptedUrls\"\n return self._session.get(path)", "def rehearsal_list(request_dict):\n rehearsals = Rehearsal.query.all()\n rehearsals_list = list()\n for rehearsal in rehearsals:\n rehearsals_list.append(rehearsal)\n\n 
return JSONTools.rehearsal_list_reply(rehearsals_list)", "def find_all(client):\n return list(map(lambda s: Site(s), client.get_api_resource(\"self/sites\")))", "def episodes(self):\n episodes = []\n for series in self.series:\n episodes.extend(series.episodes)\n return episodes", "def get_all(self):\n\n servers = self._scoped_servers()\n servers = [{u'id': x.id, u'name': x.name} for x in servers]\n return self.format_collection(servers)", "def get_all_servers(self) -> List[Server]:\n pass", "async def running(self) -> list[dict[str, Any]]:\n data = await self.controller.request(\"get\", \"watering/program\")\n return cast(list[dict[str, Any]], data[\"programs\"])", "def episodes(self):\n episodes = []\n for season in self.seasons:\n episodes.extend(season.episodes)\n return episodes", "def get_admins(self):\n from Employee import Employee\n admins = list()\n cursorRoles = self.dbconnect.get_cursor()\n cursorRoles.execute('select * from employeeRoles where role=\\'admin\\'')\n for row in cursorRoles:\n admins.append(self.get_employee(row[0]))\n return admins", "def get_artefacts(self, leverable):\n\n if self.url == 'test':\n artefactlist = ['fk-' + leverable + '_wlsapp', 'fk-' + leverable + '_tuxapp']\n else:\n artefactlist = []\n try:\n response = urlopen(\n 'http://' + self.url + '/nexus/service/local/lucene/search?repositoryId=rpm-dev&g=fk.rpm.'\n + leverable)\n except (HTTPError, URLError) as e:\n logger.error(e)\n return ['Error getting artefacts!!!']\n\n metadata_root = elementTree.parse(response)\n for data in metadata_root.iter('artifact'):\n artefact = data.find('artifactId').text\n if artefact not in artefactlist:\n artefactlist.append(artefact)\n\n return artefactlist", "def getEntries(self):\n return self.entries", "def get_shelters():\n shelters = Shelter.get_shelters()\n\n if shelters:\n return jsonify(message=shelters), 200\n else:\n return jsonify(message='Failed to get shelters'), 500", "def getHosts(self):\n raise \"not implemented\"", "def list(self):\n url = self._resource_name\n return self._get(url)", "def getHeteroAtoms(self):\n\n\t\thetlist = []\n\t\tfor chain in self.chain:\n\t\t\tfor res in chain.residue:\n\t\t\t\tfor atm in res.atom:\n\t\t\t\t\tif atm.kind == \"HETATM\":\n\t\t\t\t\t\thetlist.append(res)\t\t\n\t\t\t\t\t\tbreak\n\n\t\treturn hetlist", "def all(self):\n return self.client.request_with_method(Methods.LIST % self.name)['items']", "async def get_events(self) -> list[Event]:\n log.debug(\"Discovering events in branding repository.\")\n\n try:\n event_directories = await self.fetch_directory(\"events\", types=(\"dir\",)) # Skip files.\n except Exception:\n log.exception(\"Failed to fetch 'events' directory.\")\n return []\n\n instances: list[Event] = []\n\n for event_directory in event_directories.values():\n log.trace(f\"Attempting to construct event from directory: '{event_directory.path}'.\")\n try:\n instance = await self.construct_event(event_directory)\n except Exception as exc:\n log.warning(f\"Could not construct event '{event_directory.path}'.\", exc_info=exc)\n else:\n instances.append(instance)\n\n return instances", "def watershedlist():\n opts = watersheds_db()\n return [(opts[opt]['name'] + ' (' + opts[opt]['delineation'] + ')', opt) for opt in opts]", "def get_all(self):\n\n layer_names = rs.LayerNames()\n\n layers = []\n\n for layer_name in layer_names:\n\n layer = GiraffeLayer(layer_name)\n \n layers.append(layer)\n\n return layers", "def read_wks(self):\n list = []\n with open(self.wks_file) as wks_file:\n fieldnames = ['name', 'ip', 
'port']\n routers = csv.DictReader(wks_file, fieldnames=fieldnames)\n for router in routers:\n list.append(router)\n return list", "def getRoutes(self):\n pass", "def get_all_offices():\n return [vars(office) for office in OFFICES]", "def resources(self):\r\n return self.page.object_list", "def get_all_apps(self):\n return list(self.apps.values())", "def getAllSheetNames(self):\n\n\t\t\treturn self.thing.get_sheet_names()", "def IEs(self):\n return self._ies", "def all_entries(cls):\n info = Diary.entries\n response = jsonify({\"data\": info})\n response.status_code = 200\n return response", "def read_all():\n\n # Create the list of environments from our data\n environment = Environment.query.order_by(Environment.key).all()\n app.logger.debug(pformat(environment))\n # Serialize the data for the response\n environment_schema = EnvironmentSchema(many=True)\n data = environment_schema.dump(environment)\n return data", "def get(self):\r\n return get_all()", "def list_events():\n return [\n snow,\n mosquito,\n sun_heat,\n orage,\n overflowing,\n gathering,\n trampling,\n pollution,\n southern_wind,\n northern_wind,\n fog,\n sun\n ]", "def get_instance_essentials(self):\n ret = []\n for instance in self.all_instances:\n ret.append(instance.get_essentials())\n return ret", "def get_router_availability_zones(self, router):\n return [self._get_router_az_obj(router).name]", "def list_hosts():\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n res = hosts.get_all(db)\n res = {'list': res}\n return jsonify(res)", "def getRaceList(self):\n\t\tl = []\n\t\tfor r in self.races:\n\t\t\tl.append(r.name)\n\t\treturn l", "def all_hosts(self):\n if not 'scan' in list(self._scan_result.keys()):\n return []\n listh = list(self._scan_result['scan'].keys())\n listh.sort()\n return listh", "def getRaritiesList():\n return Gw2Spidy._request('rarities')['results']", "def getEssentialList(self):\n return self.essentials", "def get_er_exceptions():\n express_route_exceptions_lst = []\n try:\n for i in get_data():\n if i['expressRoute'] is False:\n express_route_exceptions_lst.append(i)\n express_route_exceptions_dic = {'expressRoutesExceptions': express_route_exceptions_lst}\n return get_json(express_route_exceptions_dic)\n except ValueError as e:\n print(e)", "def getExceptions(self):\n return self.getOrDefault(\"exceptions\")", "def get_entries_all(self):\n if self.database is None:\n raise DatabaseNotOpened('No KeePass Database Opened.')\n else:\n return self.database.find_entries_by_title('.*', \n regex=True)", "def get_all(self):\n result_get = GetRest(function = self.function).performRequest()\n return result_get", "def get_event_list(self):\n pass", "def get_allhosts():\n connection, tablename = HomeNetwork.get_connection_info()\n query = 'SELECT hostname from {}'.format(tablename)\n output = pandas.read_sql_query(query, connection).to_json(orient='records')\n\n for host in json.loads(output):\n yield host[\"hostname\"]", "def get_list_of_sites(self) -> list:\n ah_write = self.get_iis_object()\n section = ah_write.GetAdminSection(\"system.applicationHost/sites\", \"MACHINE/WEBROOT/APPHOST\")\n collection = section.Collection\n result = []\n\n for i in range(collection.Count):\n\n site = collection[i]\n prop = site.Properties\n # site_id = prop[\"id\"].Value\n name = prop[\"name\"].Value\n default_app = self.get_default_app(site)\n bindings = self.get_site_bindings(site.ChildElements)\n applications = self.get_applications(site)\n if default_app and not 
os.path.exists(self.core.expandvars(default_app[\"physicalPath\"])):\n # не показывать сайты для которых нет физ. директории для иис экспреса\n continue\n site = Site(name, bindings, default_app, applications)\n if hasattr(site, 'port') and site.port != 0:\n result.append(site)\n\n return result", "def all_events(cls) -> \"IFilterPattern\":\n return jsii.sinvoke(cls, \"allEvents\", [])", "def getListOfEvents(self):\n return self.model.getListOfEvents()", "def calendars(self):\n return self.calendar_home_set.calendars()", "def get_all_image_names(self):\n\n # for RD analysis ...\n\n result = []\n for sweep in self._sweeps:\n result.extend(sweep.get_all_image_names())\n return result", "def list_yara(self):\n return self.__make_api_call('list/yara')", "def GetResourceNames(self):\r\n return [x.name for x in self.resources]", "def getEntries(self):\n return self.__entries", "def list_all_ephemerides_files(self) -> Dict:\n ephs = self.list_result_ephemerides_files()\n while 'nextPageToken' in ephs:\n next_page_token = ephs['nextPageToken']\n _, e = self.list_result_ephemerides_files(page_token=next_page_token)\n ephs['ephemerisResourcePath'].extend(e['ephemerisResourcePath'])\n return ephs", "def get_urls():\r\n return []", "def getAllRoles(self):\n\n # Learn URL of AllRoles service\n url = self.config.get(\"Authorization\",\"allroles\") # http://erra.ccss.cz/g4i-portlet/service/list/roles/en\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] AllRoles url: %s\"% url)\n \n # Request all roles from LifeRay\n import httplib2\n h = httplib2.Http()\n header, content = h.request(url, \"GET\")\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] response header: %s\"% header)\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] response content: %s\"% content)\n\n # Parse the response\n try:\n allRolesJson = json.loads(content)\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] AllRoles reply succesfully parsed\")\n except ValueError,e:\n logging.error(\"[LaymanAuthLiferay][getAllRoles] Cannot parse AllRoles reply: '%s'\"% content)\n raise AuthError(500, \"Cannot parse GET All Roles response [%s] as JSON:%s\"% (content,e)) \n \n roles = allRolesJson[\"roles\"]\n\n # lower() and spaces\n for rr in roles:\n rr[\"roleName\"] = rr[\"roleName\"].lower()\n rr[\"roleName\"] = \"_\".join(rr[\"roleName\"].split(' '))\n\n # Return roles\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] Return roles: %s\"% str(roles))\n return roles", "def get_weathers():\n names = [\n name for name in dir(carla.WeatherParameters)\n if re.match('[A-Z].+', name)\n ]\n weathers = {x: getattr(carla.WeatherParameters, x) for x in names}\n return weathers", "def get_league_listing(self):\n url = self.__build_url(urls.GET_LEAGUE_LISTING)\n req = self.executor(url)\n if self.logger:\n self.logger.info('URL: {0}'.format(url))\n if not self.__check_http_err(req.status_code):\n return response.build(req, url, self.raw_mode)", "def iter_alpha_helicies(self):\n if self.default_model:\n return self.default_model.iter_alpha_helicies()\n return iter(list())", "def get_all_events(cls):\n try:\n events = list(events_coll.find())\n events_list = []\n if events is not None:\n for event in events:\n one_event = cls(**event)\n events_list.append(one_event)\n return events_list\n except Exception as e:\n print(e)", "def toolpaths_list(self) -> List[dict]:\n self.__logger.debug('Eva.toolpaths_list called')\n return self.__http_client.toolpaths_list()", "def _get_threats_lists(self):\n # response is googleapiclient.discovery.Resource object\n response = 
self.service.threatLists()\n \n # response is googleapiclient.http.HttpRequest object\n response = response.list()\n \n # response is a dict file\n response = response.execute()\n \n return response['threatLists']", "def getAllRooms(z, opts):\n params = {}\n dmerge(params, parse_param('@attrs=uid'))\n dmerge(params, parse_param('@types=resources'))\n #dmerge(params, parse_param('@limit=5'))\n response = z.request('SearchDirectoryRequest', params=params, opts=opts)\n names = [item['name'] for item in response['SearchDirectoryResponse']['calresource']]\n return names", "def get(self):\n return get_all_fuelmaster()", "def get_router_transports(self, details=None):\n self.log.debug(\"{}.get_router_transports\".format(self.__class__.__name__))\n\n res = []\n for transport in sorted(self.transports.values(), key=lambda c: c.created):\n res.append({\n 'id': transport.id,\n 'created': utcstr(transport.created),\n 'config': transport.config,\n })\n return res", "def get_all_games():\n games = brain.get_all_games()\n return games", "def get_resources(self, **extra_args):\n return [lrms for lrms in self.resources.itervalues()]", "def fetch_all(self):\n result = self._client.get(self._full_path())\n if 'list' in result:\n triggers = []\n for trigger in result['list']:\n triggers.append(Trigger(self._client, **trigger))\n return triggers\n else:\n raise ResponseStructureError(\"list doesn't exist in response\", result)", "def routes(self):\n return self._routes", "def get_events(self):\n return self.s.query(Event).all()", "def lights(self) -> List[dict]:\n return self.items_by_domain(\"light\")", "def get_all_restaurants():\n return list(Restaurant.objects.all().values())", "def get_all(self):\n return [self.get(name) for name in self.factories.iterkeys()]", "def getAll(self):\n result_get = GetRest(function = self.function).performRequest()\n return result_get", "def get_all_resources(self) -> list[Resource]:\n return self.get_session.query(self.resource_model).all()", "def restaurants_all() -> str:\n restaurant_objects = restaurants.load_restaurants()\n return jsonify(restaurant_objects)", "def resources(self):\n return [self]" ]
[ "0.5828334", "0.5808068", "0.57432085", "0.5644334", "0.56183976", "0.5573372", "0.5563877", "0.55565184", "0.55515826", "0.55351365", "0.55152845", "0.55032134", "0.54578054", "0.54362935", "0.54297596", "0.5429045", "0.5412836", "0.54041094", "0.5400069", "0.5393652", "0.5388164", "0.53848094", "0.5379429", "0.53649694", "0.5347273", "0.5325162", "0.53024733", "0.5293844", "0.5244459", "0.52426714", "0.52000606", "0.51860815", "0.51856595", "0.51758593", "0.5174012", "0.51685345", "0.5164706", "0.51603025", "0.51509136", "0.51475126", "0.5146914", "0.51378506", "0.51360846", "0.5113752", "0.5109127", "0.5108884", "0.51070464", "0.50982976", "0.50977975", "0.50975853", "0.5095144", "0.5090122", "0.50878274", "0.5084246", "0.5083689", "0.50776714", "0.5076506", "0.50754654", "0.50750816", "0.50747097", "0.50727975", "0.50715417", "0.50714606", "0.50685436", "0.50649524", "0.50580716", "0.5056206", "0.5054001", "0.50486076", "0.50420177", "0.5037291", "0.50353295", "0.50325894", "0.50290304", "0.5028224", "0.50231373", "0.5021931", "0.50208247", "0.5020705", "0.50185275", "0.5018446", "0.50164735", "0.50077355", "0.5006582", "0.50046563", "0.49976182", "0.49973252", "0.49914223", "0.4984571", "0.4984481", "0.4979128", "0.49779493", "0.49768367", "0.4971461", "0.49675405", "0.49621576", "0.49613982", "0.49594593", "0.49584293", "0.49582407" ]
0.7025474
0
Create the request client instance.
def __init__(self, **kwargs):
    self.__kwargs = kwargs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_client(self, context):\n return Client(self.settings['client_routing'], context=context)", "def create_client(self) -> None:\n pass", "def create_client(self) -> None:\n self._client = discovery.build('ml', 'v1')", "def client_setup(self):\n self.client = Client()", "def _client(self) -> httpx.Client:\n return httpx.Client(\n base_url=self._base_url,\n headers=self._authorization_headers,\n proxies=self._proxies,\n )", "def client(self):\r\n if self._client is None:\r\n self._client = self._client_cls(self._server, self._params, self)\r\n return self._client", "def create_client(self):\n client = iperf3.Client()\n client.duration = self._host[CONF_DURATION]\n client.server_hostname = self._host[CONF_HOST]\n client.port = self._host[CONF_PORT]\n client.num_streams = self._host[CONF_PARALLEL]\n client.protocol = self._host[CONF_PROTOCOL]\n client.verbose = False\n return client", "def __init__(self, **kwargs):\r\n super(Client, self).__init__()\r\n self.httpclient = client.HTTPClient(**kwargs)\r\n self.version = '2.0'\r\n self.format = 'json'\r\n self.action_prefix = \"/v%s\" % (self.version)\r\n self.retries = 0\r\n self.retry_interval = 1", "def create_client(self) -> None:\n self._client = gapic.JobServiceClient(\n client_options=dict(api_endpoint=self._region + _UCAIP_ENDPOINT_SUFFIX))", "def client():\n\n client = Client()\n return client", "def client():\n return Client(**common_data.AUTH_ARGS)", "def make_client(instance):\n network_client = utils.get_client_class(\n API_NAME,\n instance._api_version[API_NAME],\n API_VERSIONS)\n LOG.debug('Instantiating network client: %s', network_client)\n\n endpoint = instance.get_endpoint_for_service_type(\n API_NAME,\n region_name=instance._region_name,\n )\n\n return network_client(\n username=instance._username,\n tenant_name=instance._project_name,\n password=instance._password,\n region_name=instance._region_name,\n auth_url=instance._auth_url,\n endpoint_url=endpoint,\n token=instance.auth.get_token(instance.session),\n insecure=instance._insecure,\n ca_cert=instance._cacert,\n )", "def __init__(self, client):\n\n self.client = client", "def api_client(request):\n\n base_url = request.config.getoption(\"--url\")\n delay = request.config.getoption(\"--delay\")\n templates = CONF.ENDPOINT_TEMPLATES\n return client.APIClient(base_url=base_url, delay=delay, templates=templates)", "def __init__(self, client):\n self.client = client", "def __init__(self, **kwargs):\n self.config = kwargs[\"config\"]\n self.cli = client.DefaultClient(app_key=self.config[\"app_key\"], app_secret=self.config[\"app_secret\"])\n self.req = None", "def create(app, client_stream, client_addr, client_sock=None):\n # request line\n line = Request._safe_readline(client_stream).strip().decode()\n if not line:\n return None\n method, url, http_version = line.split()\n http_version = http_version.split('/', 1)[1]\n\n # headers\n headers = NoCaseDict()\n while True:\n line = Request._safe_readline(client_stream).strip().decode()\n if line == '':\n break\n header, value = line.split(':', 1)\n value = value.strip()\n headers[header] = value\n\n return Request(app, client_addr, method, url, http_version, headers,\n stream=client_stream, sock=client_sock)", "def make_request(self, environ, **kwargs):\n factory = self.get(abcs.ARequest)\n request = factory(environ, self, **kwargs)\n self._set_request_attributes(request)\n return request", "def __init__(self, client):\n self._client = client", "def __init__(self, client=None):\n self._client = client", "def 
request_factory(self) -> 'JSONRPCRequest':\n return JSONRPCRequest()", "def get_client():\n return Client(__address, authkey='strumamor')", "def create(cls, request):\n if isinstance(request, Request):\n request = request.prepare()\n\n # Method\n method = request.method.lower()\n\n # Cookies\n cookie = {}\n if request._cookies is not None:\n # cookies are stored in a cookiejar object\n cookie = request._cookies.get_dict()\n\n # Preparing a request formats the URL with params, strip them out again\n o = urlparse(request.url)\n params = parse_qs(o.query)\n # extract the URL without query parameters\n url = o._replace(query=None).geturl()\n\n # Order matters because all python requests issued from a session\n # include Accept */* which does not necessarily match the content type\n mimetype = request.headers.get(\"Content-Type\") or request.headers.get(\n \"Accept\"\n )\n\n # Headers - request.headers is not an instance of Headers\n # which is expected\n header = Headers(dict(request.headers))\n\n # Body\n # TODO: figure out if request._body_position is relevant\n body = request.body\n\n # Path gets deduced by path finder against spec\n parameters = RequestParameters(\n query=ImmutableMultiDict(params),\n header=header,\n cookie=cookie,\n )\n return OpenAPIRequest(\n full_url_pattern=url,\n method=method,\n parameters=parameters,\n body=body,\n mimetype=mimetype,\n )", "def _get_client(self):\n _client = KOPS(provider=self.provider, config=self.config)\n return _client", "def make_client(instance):\r\n neutron_client = utils.get_client_class(\r\n API_NAME,\r\n instance._api_version[API_NAME],\r\n API_VERSIONS,\r\n )\r\n instance.initialize()\r\n url = instance._url\r\n url = url.rstrip(\"/\")\r\n if '2.0' == instance._api_version[API_NAME]:\r\n client = neutron_client(username=instance._username,\r\n tenant_name=instance._tenant_name,\r\n password=instance._password,\r\n region_name=instance._region_name,\r\n auth_url=instance._auth_url,\r\n endpoint_url=url,\r\n token=instance._token,\r\n auth_strategy=instance._auth_strategy,\r\n insecure=instance._insecure,\r\n ca_cert=instance._ca_cert)\r\n return client\r\n else:\r\n raise exceptions.UnsupportedVersion(_(\"API version %s is not \"\r\n \"supported\") %\r\n instance._api_version[API_NAME])", "def __init__(self, client):\n super().__init__(client)", "def _init_http_client(service_id=None, opts=None):\n if service_id:\n opts = _get_trs_opts(service_id)\n\n http_client = RequestsClient()\n\n http_client.set_api_key(host=opts['host'],\n api_key=opts['auth'],\n param_in='header')\n return http_client", "def create_rbclient(self):\n return RBClient(url=self.TEST_SERVER_URL,\n transport_cls=URLMapTransport)", "def new_request(self, **kwargs):\n url = self.config[\"base_url\"]\n\n if kwargs.get(\"user_id\") is not None:\n url = url + kwargs[\"user_id\"]\n\n self.req = request.Request(host=self.config[\"host\"], protocol=constant.HTTP, url=url,\n method=kwargs[\"method\"], time_out=kwargs[\"timeout\"])\n\n return self", "def api_request_factory() -> APIRequestFactory:\n\n return APIRequestFactory()", "def __init__(self, **kwargs):\n\n builder_kwargs = {}\n\n if \"token\" in kwargs and str(kwargs[\"token\"]) != \"None\":\n\n # If there is a token use it along with the specified proxy details if specified\n config = ApiConfiguration(\n api_url=kwargs.get(\"api_url\", None),\n certificate_filename=kwargs.get(\"certificate_filename\", None),\n proxy_config=ProxyConfig(\n address=kwargs.get(\"proxy_url\", None),\n username=kwargs.get(\"proxy_username\", 
None),\n password=kwargs.get(\"proxy_password\", None),\n ) if kwargs.get(\"proxy_url\", None) is not None else None,\n app_name=kwargs.get(\"app_name\", None)\n )\n\n builder_kwargs[\"api_configuration\"] = config\n builder_kwargs[\"token\"] = kwargs[\"token\"]\n\n # Otherwise use a secrets file if it exists\n builder_kwargs[\"api_secrets_filename\"] = kwargs.get(\"api_secrets_filename\", None)\n\n # add the correlation id if specified\n builder_kwargs[\"correlation_id\"] = kwargs.get(\"correlation_id\", None)\n\n # add the id provider response handler if specified\n builder_kwargs[\"id_provider_response_handler\"] = kwargs.get(\"id_provider_response_handler\", None)\n\n builder_kwargs[\"tcp_keep_alive\"] = kwargs.get(\"tcp_keep_alive\", False)\n\n # Call the client builder, this will result in using either a token, secrets file or environment variables\n self.api_client = ApiClientBuilder.build(**builder_kwargs)", "def gen_neutron_client(self):\n\n print \"\\t* Generating neutron client\"\n self.neutronclient = neutronclient.Client(auth_url=self.auth_url,\n username=self.username,\n password=self.password,\n tenant_name=self.tenant_name,\n region_name=self.region_name)", "def _client(self) -> httpx.AsyncClient:\n return httpx.AsyncClient(\n base_url=self._base_url,\n headers=self._authorization_headers,\n proxies=self._proxies,\n )", "def client(self):\n\n if self._client is None:\n self._client = self._get_client()\n return self._client", "def _create_instance(cls, configuration, auth_type):\n auth = ClientAuthFactory.get(\n username=configuration.username,\n password=configuration.password,\n auth_type=auth_type\n )\n instance = HttpClient(configuration.url, auth)\n cls._INSTANCES[configuration] = instance\n return instance", "def client(self) -> 'BaseClient':\n return self", "def __init__(self, client):\n self.client = client\n self.call_params = {\n }", "def __init__(self, client_id: str):\n\n self._cs = aiohttp.ClientSession(\n loop=asyncio.get_event_loop(),\n raise_for_status=True,\n headers={\"Client-ID\": client_id},\n )", "def init_client(self, client):\n self.client = client", "def __init__(self, serializer=None):\r\n self.client = Client()\r\n self.serializer = serializer\r\n\r\n if not self.serializer:\r\n self.serializer = Serializer()", "def request_factory() -> RequestFactory:\n\n return RequestFactory()", "def __init__(self, config, **kwargs):\n validate_config(config, signer=kwargs.get('signer'))\n if 'signer' in kwargs:\n signer = kwargs['signer']\n else:\n signer = Signer(\n tenancy=config[\"tenancy\"],\n user=config[\"user\"],\n fingerprint=config[\"fingerprint\"],\n private_key_file_location=config.get(\"key_file\"),\n pass_phrase=get_config_value_or_default(config, \"pass_phrase\"),\n private_key_content=config.get(\"key_content\")\n )\n\n base_client_init_kwargs = {\n 'regional_client': True,\n 'service_endpoint': kwargs.get('service_endpoint'),\n 'timeout': kwargs.get('timeout'),\n 'base_path': '/20160918',\n 'skip_deserialization': kwargs.get('skip_deserialization', False)\n }\n self.base_client = BaseClient(\"identity\", config, signer, identity_type_mapping, **base_client_init_kwargs)\n self.retry_strategy = kwargs.get('retry_strategy')", "def __init__(self):\n self.__client = Client(verify_ssl_cert=True)\n self.__headers = {'Content-Type': 'application/json'}\n self.login()", "def test_create_namespaced_build_request_instantiate(self):\n pass", "def get_client(self):\n if self._client_state is not None:\n # Copy client state because `from_json_snapshot` 
modifies it...\n client = AxClient.from_json_snapshot(copy.deepcopy(self._client_state))\n else:\n client = AxClient(\n random_seed=self.seed,\n enforce_sequential_optimization=False,\n verbose_logging=False,\n )\n\n client.create_experiment(\n parameters=orion_space_to_axoptimizer_space(self.space),\n choose_generation_strategy_kwargs={\n \"num_initialization_trials\": self.n_initial_trials,\n \"max_parallelism_override\": self.max_trials,\n },\n objectives={\n \"objective\": ObjectiveProperties(minimize=True),\n **{\n o: ObjectiveProperties(minimize=True)\n for o in self.extra_objectives\n },\n },\n outcome_constraints=self.constraints,\n )\n\n yield client\n\n self._client_state = client.to_json_snapshot()", "def request_factory(environ):\n request = Request(environ)\n if request.is_xhr:\n request.response = Response()\n request.response.headerlist = []\n request.response.headerlist.extend(\n (\n ('Access-Control-Allow-Origin', '*'),\n ('Access-Control-Allow-Credentials', 'true'),\n ('Access-Control-Max-Age', 86400),\n ('Content-Type', 'application/json')\n )\n )\n return request", "def _create_es_client(self):\n from elasticsearch._async.client import AsyncElasticsearch\n\n use_basic_auth = self._username is not None and self._password is not None\n\n serializer = get_serializer()\n\n if use_basic_auth:\n auth = (self._username, self._password)\n return AsyncElasticsearch(\n [self._url],\n http_auth=auth,\n serializer=serializer,\n verify_certs=self._verify_certs,\n ssl_show_warn=self._verify_certs,\n ca_certs=self._ca_certs,\n timeout=self._timeout,\n )\n\n return AsyncElasticsearch(\n [self._url],\n serializer=serializer,\n verify_certs=self._verify_certs,\n ssl_show_warn=self._verify_certs,\n ca_certs=self._ca_certs,\n timeout=self._timeout,\n )", "def __init__(self, client, **kwargs):\n self._ac = client\n self._wrapped = kwargs", "def req():\n return Request()", "def __init__(self, client, use_stubs=True):\n super().__init__(client, use_stubs)", "def __init__(self, client, use_stubs=True):\n super().__init__(client, use_stubs)", "def __init__(self, client, use_stubs=True):\n super().__init__(client, use_stubs)", "def __init__(self, client, use_stubs=True):\n super().__init__(client, use_stubs)", "def __init__(self, client, use_stubs=True):\n super().__init__(client, use_stubs)", "def setUp(self):\n self.client = api.Client(config.get_config(), api.json_handler)", "def _client(self):\n\n if self._suds_client is None:\n self._suds_client = suds.client.Client(SERVICE_WSDL_URL)\n # Add SOAP Security tokens\n self.set_security_token()\n\n return self._suds_client", "def create_api_client(base_path, access_token):\n api_client = ApiClient()\n api_client.host = base_path\n api_client.set_default_header(header_name=\"Authorization\",\n header_value=f\"Bearer {access_token}\")\n return api_client", "def client(self,\n name,\n method=None,\n url=None,\n status_callback_event=None,\n status_callback_method=None,\n status_callback=None,\n **kwargs):\n return self.append(Client(\n name,\n method=method,\n url=url,\n status_callback_event=status_callback_event,\n status_callback_method=status_callback_method,\n status_callback=status_callback,\n **kwargs\n ))", "def __init__(self, **kwargs):\n super(Request, self).__init__(**kwargs)", "def testclient():\n base_url = PARAMS.get(\"url\") + \"/v2\"\n client = Client(\n base_url=base_url,\n headers={\n \"Authorization\": f\"GenieKey {PARAMS.get('token')}\",\n }\n )\n return client", "def CreateClient():\n client = 
gdata.docs.client.DocsClient(source=SampleConfig.APP_NAME)\n client.http_client.debug = SampleConfig.DEBUG\n # Authenticate the user with CLientLogin, OAuth, or AuthSub.\n try:\n gdata.sample_util.authorize_client(\n client,\n service=client.auth_service,\n source=client.source,\n scopes=client.auth_scopes\n )\n except gdata.client.BadAuthentication:\n exit('Invalid user credentials given.')\n except gdata.client.Error:\n exit('Login Error')\n return client", "def get_httpx_client() -> httpx.Client:\n return httpx.Client(**CLIENT_PARAMETERS) # type: ignore", "def request_factory(environ):\n request = Request(environ)\n _LOG.debug('trunctated request body: {b}'.format(b=request.body[:1000]))\n return request", "def __init__(self, client):\n super().__init__(client)\n\n loop = asyncio.get_running_loop() # pylint: disable=no-member\n\n task_factory = loop.get_task_factory()\n if task_factory is None or not task_factory.__trace_task_factory__:\n new_task_factory = create_task_factory(task_factory)\n loop.set_task_factory(new_task_factory)", "def newRequest(self):\n return Request( )", "def _create_client_input(train_batch_size, test_batch_size, context=None):\n client_input = collections.OrderedDict()\n client_input['train_data'] = _create_dataset(train_batch_size)\n client_input['test_data'] = _create_dataset(test_batch_size)\n if context is not None:\n client_input['context'] = context\n return client_input", "def __init__(self, kubeconfig_path=None):\n config.load_kube_config(config_file=kubeconfig_path)\n self.api_client = client.ApiClient()\n self.core_client = client.CoreV1Api()\n self.batch_client = client.BatchV1Api()\n self.crd_client = client.CustomObjectsApi()", "def get_client(self):\n token = self.get_access_token()\n if self.client is None:\n credentials = AccessTokenCredentials(token, 'vetware/1.0')\n # credentials = SignedJwtAssertionCredentials(self.email, self.private_key,\n # \"https://www.googleapis.com/auth/calendar\")\n http = credentials.authorize(Http())\n self.client = build('calendar', 'v3', http=http)\n return self.client", "def create_client(\n body: ClientmodelClientCreateRequest,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n request = CreateClient.create(\n body=body,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def test_create_client(self):\n pass", "def client(self):\n\t\t# pylint: disable=invalid-name\n\t\treturn self._client", "def _get_client(self):\n if self._client is None:\n self._client = self.boto.client(service_name='elb', region_name=self.boto.cli_region)\n\n return self._client", "def __init__(self, transport):\n serializer = auth_context.RpcContextSerializer(\n auth_context.JsonPayloadSerializer())\n target = messaging.Target(topic=cfg.CONF.engine.topic)\n self._client = messaging.RPCClient(transport, target,\n serializer=serializer)", "def __init__(self, app_key=None, app_sid=None, base_url=None,\n api_version=None, debug=False, proxy=None):\n configuration = Configuration(app_key=app_key,\n app_sid=app_sid,\n base_url=base_url,\n api_version=api_version,\n debug=debug,\n\t\t\t\t\t\t\t\t\t proxy=proxy)\n self.api_client = ApiClient(configuration)", "def client(self):\n app.testing = True\n client = app.test_client()\n\n with app.app_context():\n yield client", "def __init__(self, requestor, client_id, redirect_uri=None):\n self._requestor = requestor\n self.client_id = client_id\n self.redirect_uri = redirect_uri", "def _get_auth_client(self, request):\n if self._auth_client is 
None:\n # Use PyFxa defaults if not specified\n server_url = fxa_conf(request, 'oauth_uri')\n auth_cache = self._get_cache(request)\n self._auth_client = OAuthClient(server_url=server_url, cache=auth_cache)\n\n return self._auth_client", "def __init__(self, *args, **kwargs):\n super(Client, self).__init__(role='c', *args, **kwargs)\n\n # Internal variables\n self._bulksize = None\n self._server_hostname = None\n self._port = None\n self._num_streams = None\n self._zerocopy = False", "async def create_client_async(\n body: ClientmodelClientCreateRequest,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n request = CreateClient.create(\n body=body,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def __init__(__self__, *,\n client_id: Optional[pulumi.Input[str]] = None,\n client_secret: Optional[pulumi.Input[str]] = None,\n metadata_url: Optional[pulumi.Input[str]] = None,\n scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):\n if client_id is not None:\n pulumi.set(__self__, \"client_id\", client_id)\n if client_secret is not None:\n pulumi.set(__self__, \"client_secret\", client_secret)\n if metadata_url is not None:\n pulumi.set(__self__, \"metadata_url\", metadata_url)\n if scopes is not None:\n pulumi.set(__self__, \"scopes\", scopes)", "def __init__(self, hostname, port, protocol, auth, tenant_id, entry):\n self.auth = auth\n self.hostname = hostname\n self.port = port\n self.protocol = protocol\n self.tenant_id = tenant_id\n self._api = ContainerClient(hostname=hostname, auth=self.auth, protocol=protocol,\n port=port, entry=entry)", "def __init__(self, client):\n self._client = client\n self._argument_converter = ArgumentConverter()", "def create_client():\n host_api_id = Config.api_id\n host_api_hash = Config.api_hash\n host_user_id = Config.user_id\n host_phone = Config.phone\n\n client = TelegramClient(host_user_id, host_api_id, host_api_hash)\n client.connect()\n if not client.is_user_authorized():\n client.send_code_request(host_phone)\n client.sign_in(host_phone, input('Enter code sent to your telegram: '))\n return client", "def req(self, method, *args, **kwargs):\n req = getattr(RequestFactory(), method)(*args, **kwargs)\n req.user = Mock()\n return req", "def __init__(self, service, acces_key, secret_key):\n \n self.client = boto3.client(\n service,\n aws_access_key_id=acces_key,\n aws_secret_access_key=secret_key,\n )", "def __init__(\n self,\n message_serializer: 'Serializer',\n timeout: int = 60,\n headers_callback: Optional[Callable[[], Dict[str, str]]] = None):\n self._client = DaprHttpClient(message_serializer, timeout, headers_callback)", "def client():\n\n app.testing = True\n app.config[\"WTF_CSRF_ENABLED\"] = False\n client = app.test_client()\n\n\n return client", "def client():\n\n app.testing = True\n app.config[\"WTF_CSRF_ENABLED\"] = False\n client = app.test_client()\n\n\n return client", "def __init__(self, client, name):\n if not isinstance(client, couch.Client):\n raise Exception(\"'client' arg must be instance of couch.Client\")\n\n self.client = client\n self.name = name", "def __init__(self, name, client):\n self.name = name\n self.client = client", "def __init__(self):\n self.client.ssl = True\n self.client.http_client_debug = False\n self.createBaseFolder()", "def create_http_client(http_client_name=None, metrics=None, cache=None, config=None):\n client_config = config and config.requests.get_section(http_client_name or 'default')\n cache_ttl = None\n 
max_retries = 0\n client_cache = None\n api_name = http_client_name\n\n if client_config:\n if cache and client_config.cache.get('enabled'):\n client_cache = cache.http\n cache_ttl = client_config.cache.get('ttl')\n max_retries = client_config.get('max_retries')\n api_name = client_config.get('api_name', api_name)\n\n return HttpClient(api_name=api_name,\n metrics=metrics,\n cache=client_cache,\n cache_ttl=cache_ttl,\n max_retries=max_retries)", "def _init_raw_client(self) -> None:\n if self.credentials:\n auth = HTTPBasicAuth(self.credentials['username'], self.credentials['password'])\n else:\n auth = None\n base_url = \"http://\" if self.untrusted else \"https://\"\n base_url += self.url\n self.raw_client = client.DockerRegistryClient(base_url=base_url, auth=auth)", "def setUp(self):\r\n super(SSLClientTest, self).setUp()\r\n self.client = Client()\r\n self.factory = RequestFactory()\r\n self.mock = Mock()", "def build_client(module):\n return drac.DRACClient(module.params['address'],\n module.params['username'],\n module.params['password'])", "def client(self):\n raise NotImplementedError()", "def __new__(cls, host=None, user=None, client=None):\n cls.__check_parameters(host=host, user=user)\n if client is None:\n raise InvalidClientException(\"Integrated Client during connection creation can't be None\")\n return super(Connection, cls).__new__(cls, host=host, user=user, client=client)", "def client(self) -> WebClient:\n return WebClient(**self._get_conn_params())", "def __init__(self):\n super(NovaClientWrapper, self).__init__(\n retry_exceptions=(nova_exc.ConnectionRefused,\n nova_exc.Conflict),\n auth_exceptions=(nova_exc.Unauthorized),\n name=\"Nova\")", "def __init__(self, username, password, tenant_id, auth_url, api_protocol, api_host, api_port, api_resource):\n\n __logger__.info(\"Init CLOTO Client\")\n __logger__.debug(\"Client parameters: Username: %s, Password: %s, TenantId: %s, API protocol: %s, API host: %s, \"\n \"API port: %s, Base resource: %s\", username, password, tenant_id, api_protocol, api_host,\n api_port, api_resource)\n\n self.headers = dict()\n self.api_protocol = api_protocol\n self.api_host = api_host\n self.api_port = api_port\n self.api_resource = api_resource\n\n set_representation_headers(self.headers, content_type=HEADER_REPRESENTATION_JSON,\n accept=HEADER_REPRESENTATION_JSON)\n\n self._init_keystone_client(username, password, tenant_id, auth_url)\n self.token = self._get_auth_token()\n __logger__.debug(\"Token: %s\", self.token)\n\n self.headers.update({X_AUTH_TOKEN: self.token})\n self.headers.update({TENANT_ID: tenant_id})\n __logger__.debug(\"Headers with OpenStack credentials: %s\", self.headers)", "def __init__(self):\n self.service = Client(key=GEO_LOCATION_API_KEY)" ]
[ "0.7219166", "0.716613", "0.70292443", "0.6970843", "0.65893507", "0.65696806", "0.6560892", "0.65602523", "0.65598226", "0.65531236", "0.65434104", "0.65080535", "0.6499727", "0.64700776", "0.6468287", "0.64645094", "0.64377755", "0.6397173", "0.636765", "0.63654023", "0.6353936", "0.6349146", "0.6338433", "0.6322594", "0.6268442", "0.61638737", "0.6156048", "0.6127321", "0.6125538", "0.61032337", "0.6101006", "0.60979813", "0.60947895", "0.6081879", "0.6078203", "0.60661805", "0.6059759", "0.60536104", "0.60009795", "0.5995173", "0.59865975", "0.598593", "0.59809047", "0.59766746", "0.59656805", "0.59633434", "0.59616804", "0.5958848", "0.5955523", "0.59415704", "0.59415704", "0.59415704", "0.59415704", "0.59415704", "0.59373754", "0.59304166", "0.59169966", "0.5909513", "0.58875036", "0.58787096", "0.5873937", "0.5857086", "0.58569676", "0.58444595", "0.5841383", "0.5837446", "0.5832117", "0.5828729", "0.58237255", "0.5822167", "0.5810112", "0.5808925", "0.5808692", "0.58068717", "0.5804615", "0.5801272", "0.57868487", "0.5775052", "0.5772404", "0.57681435", "0.5766357", "0.57662964", "0.5764928", "0.5754921", "0.5751608", "0.574618", "0.57345605", "0.57345605", "0.5733134", "0.57303226", "0.57223254", "0.5721338", "0.57164747", "0.5715231", "0.5713784", "0.5704526", "0.5693233", "0.56902045", "0.56822056", "0.56803066", "0.56784785" ]
0.0
-1
Get the withdraw records of an account.
def get_deposit_withdraw(self, op_type: 'str', currency: 'str' = None, from_id: 'int' = None,
                         size: 'int' = None, direct: 'str' = None) -> list:
    check_should_not_none(op_type, "operate type")
    params = {
        "currency": currency,
        "type": op_type,
        "from": from_id,
        "direct": direct,
        "size": size
    }

    from huobi.service.wallet.get_deposit_withdraw import GetDepositWithdrawService
    return GetDepositWithdrawService(params).request(**self.__kwargs)
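A minimal usage sketch for the method above (an annotation, not part of the dataset row): it assumes the surrounding huobi Python SDK, where this method lives on a WalletClient; the credential strings are placeholders, and no specific attribute names are assumed on the returned records.

# Hedged illustration: fetch the ten most recent withdraw records for USDT.
# WalletClient and its api_key/secret_key kwargs come from the huobi SDK;
# the credential values below are placeholders, not real keys.
from huobi.client.wallet import WalletClient

wallet_client = WalletClient(api_key="YOUR_API_KEY", secret_key="YOUR_SECRET_KEY")
records = wallet_client.get_deposit_withdraw(op_type="withdraw", currency="usdt", size=10)
for record in records:
    # Each record is a deposit/withdraw model object; printing it raw avoids
    # assuming specific field names.
    print(record)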
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def withdraws(self, asset=None, timestamp=None):\n\t\tif self._session:\n\t\t\tdata = {}\n\t\t\tif asset:\n\t\t\t\tdata['asset'] = asset\n\t\t\tif timestamp:\n\t\t\t\tdata['startTime'] = int(timestamp*1000)\n\n\t\t\tresult = self._session.get_withdraw_history(**data)\n\n\t\t\tif result and result.get('success'):\n\t\t\t\treturn result.get('withdrawList', [])\n\n\t\treturn []", "def export_records() -> List[Dict[str, Any]]:\n return_val = []\n with session_scope() as session:\n filter_after = datetime.today() - timedelta(12 * 30)\n\n records = (\n session.query(Users, func.sum(Orders.cost).label(\"total_account_value\"))\n .join(Orders)\n .filter(\n extract(\"year\", Orders.date) >= filter_after.year,\n extract(\"month\", Orders.date) >= filter_after.month,\n extract(\"day\", Orders.date) >= filter_after.day,\n )\n .group_by(Users.account)\n .all()\n )\n\n for user_account, total_account_value in records:\n user_account = {\n \"account\": user_account.account,\n \"active\": user_account.active,\n \"is_demo\": user_account.is_demo,\n \"total_account_value\": total_account_value,\n }\n return_val.append(user_account)\n return return_val", "def get_drawings(self):\n _url = f\"{self.connector.base_url}/projects/{self.project_id}/drawings\"\n\n _response = self.connector.http_call(\"get\", _url)\n self.drawings = _response.json()", "def get_holdings(self,account=None, verbose=False):\n \n # Imply account\n if account == None:\n account = self.params['account']\n account = int(account)\n \n # Assemble URL\n url = self.endpoints['base'] +\\\n 'accounts/' +\\\n str(account) +\\\n '/holdings.json'\n \n # Create auth\n session = requests.Session()\n auth = self.create_auth()\n req = requests.Request('GET',url,auth=auth).prepare()\n \n # Send Request\n self.holdings = session.send(req).json()\\\n ['response']['accountholdings']\n \n # Get accounts (necessary?)\n if self.accounts == []:\n self.get_accounts()\n \n return self.holdings", "def query_accounts(self):\n return self._call_txtrader_api('query_accounts', {})", "def returnBalances(self, account=None):\n return self.dpay.get_balances(account)", "def withdraw(account, amount):\n pass", "def get_accounts(self):\n\n data = {\n 'customerId': self.personal_identity_number,\n 'responseControl': {\n 'filter': {\n 'includes': ['ALL']\n }\n }\n }\n\n headers = {'Content-type': 'application/json',\n 'Accept': 'application/json',\n 'CSRFToken': self.json_token}\n path = '/im/json/overview/getaccounts'\n req = self.session.post(\n self.BASE_URL + path,\n data=json.dumps(data),\n headers=headers)\n\n for account in req.json()['response']['accounts']:\n self.accounts[account['number']] = account\n del(self.accounts[account['number']]['number'])\n\n return self.accounts", "def get_accounts(self):\n return self.accounts.all()", "def transactions(self):\r\n return tx.AccountTransactions(self)", "def get_accounts(self):\n me = objects.AdUser(fbid=\"me\")\n my_accounts = list(me.get_ad_accounts(fields=[\n 'id',\n 'name',\n 'timezone_name',\n 'amount_spent',\n 'currency']))\n return my_accounts", "def get_account_trades(self, symbol: Symbol, trade_id: Optional[int],\n limit: int = 100, receive_window: Optional[int] = None):\n api_params = {\n \"symbol\": symbol.value,\n \"limit\": limit,\n \"timestamp\": get_current_time_milliseconds()\n }\n\n if trade_id is not None:\n api_params['tradeId'] = trade_id\n\n if receive_window is not None:\n api_params['receiveWindow'] = receive_window\n\n return self.request.get(path='/trade/account', params=api_params)", "def 
get_bank_accounts(self):\n spec = {'owner': DBRef(self.collection_name, self._id)}\n return BankAccount.collection.find(spec)", "def list(ctx):\n if ctx.obj.get('NAMESPACE') != 'accounts':\n click.echo(\n click.style('Only account data is available for listing.', fg='red')\n )\n return\n\n swag = create_swag_from_ctx(ctx)\n accounts = swag.get_all()\n _table = [[result['name'], result.get('id')] for result in accounts]\n click.echo(\n tabulate(_table, headers=[\"Account Name\", \"Account Number\"])\n )", "def accounts(self):\r\n return acc.Accounts(self)", "def get_withdraw_history(self, asset: Asset, start_time: Optional[int] = None,\n end_time: Optional[int] = None, receive_window: Optional[int] = None):\n api_params = {\n \"asset\": asset.value,\n \"timestamp\": get_current_time_milliseconds()\n }\n\n if start_time is not None:\n api_params['startTime'] = start_time\n\n if end_time is not None:\n api_params['endTime'] = end_time\n\n if receive_window is not None:\n api_params['receiveWindow'] = receive_window\n\n return self.request.get(path='/withdraw/history', params=api_params)", "def get_withdrawal_history(self, currency=None):\n if not currency:\n currency = \"\"\n return self.__call__('balance', \"getwithdrawalhistory\", \n {\"currencyname\": currency})", "def withdraw(self, currency, amount, address):\n return self.api_query('withdraw', {\"currency\": currency, \"amount\": amount, \"address\": address})", "def _get_accounts_data(self, accounts, display_account,tables,where_clause,where_params):\n\n account_result = {}\n # Prepare sql query base on selected parameters from wizard\n tables, where_clause, where_params = tables,where_clause,where_params\n\n # print tables, where_clause, where_params\n # print \"tables data\",tables\n # print \"Table Type\",type(tables)\n # print \"where clause data\",where_clause\n # print \"where clause\",type(where_clause)\n # print \"where params data\",where_params\n # print \"where params\",type(where_params)\n\n tables = tables.replace('\"','')\n if not tables:\n tables = 'account_move_line'\n wheres = [\"\"]\n if where_clause.strip():\n wheres.append(where_clause.strip())\n filters = \" AND \".join(wheres)\n # compute the balance, debit and credit for the provided accounts\n request = (\"SELECT account_id AS id, SUM(debit) AS debit, SUM(credit) AS credit, (SUM(debit) - SUM(credit)) AS balance\" +\\\n \" FROM \" + tables + \" WHERE account_id IN %s \" + filters + \" GROUP BY account_id\")\n params = (tuple(accounts.ids),) + tuple(where_params)\n self.env.cr.execute(request, params)\n for row in self.env.cr.dictfetchall():\n account_result[row.pop('id')] = row\n account_res = []\n for account in accounts:\n res = dict((fn, 0.0) for fn in ['credit', 'debit', 'balance'])\n currency = account.currency_id and account.currency_id or account.company_id.currency_id\n res['code'] = account.code\n res['name'] = account.name\n if account.id in account_result.keys():\n res['debit'] = account_result[account.id].get('debit')\n res['credit'] = account_result[account.id].get('credit')\n res['balance'] = account_result[account.id].get('balance')\n if display_account == 'all':\n account_res.append(res)\n if display_account == 'not_zero' and not currency.is_zero(res['balance']):\n account_res.append(res)\n if display_account == 'movement' and (not currency.is_zero(res['debit']) or not currency.is_zero(res['credit'])):\n account_res.append(res)\n print \"data from core report model\",account_res\n return account_res", "def accounts(self):\n if self._accounts is 
None:\n url = f'{self._ynab.api_url}/budgets/{self.id}/accounts'\n response = self._ynab.session.get(url)\n if not response.ok:\n self._logger.error('Error retrieving accounts, response was : %s with status code : %s',\n response.text,\n response.status_code)\n return []\n self._accounts = [Account(self, account)\n for account in response.json().get('data', {}).get('accounts', [])]\n return self._accounts", "def get_withdraws_keys(self) -> list:\n return list(self.withdraws.keys())", "def get_spot_withdraws(self, asset: Optional[str] = None, start_time: Optional[int] = None,\n end_time: Optional[int] = None):\n conditions_list = []\n table = tables.SPOT_WITHDRAW_TABLE\n if asset is not None:\n conditions_list.append((table.asset,\n SQLConditionEnum.equal,\n asset))\n if start_time is not None:\n conditions_list.append((table.applyTime,\n SQLConditionEnum.greater_equal,\n start_time))\n if end_time is not None:\n conditions_list.append((table.applyTime,\n SQLConditionEnum.lower,\n end_time))\n return self.get_conditions_rows(table, conditions_list=conditions_list)", "def get_all_accounts():\n accounts = Account.query.all()\n print(accounts)\n return \"\"", "def accounts_info(self):\r\n param = {}\r\n param['appid'] = self.apiKey\r\n param['nonce'] = int(time.time() * 1000)\r\n param['timestamp'] = int(time.time())\r\n return self.__signed_GET('/api/v1/account/all', param, self.timeout)", "def GetAccountList(self):\n\t\treturn self.accounts.keys()", "def account_balances(self):\n return self.get('balances', auth=True)", "def get_all_user_record(deleted=False):\n session = get_session()\n query = session.query(models.UserAccountRecord).\\\n filter_by(deleted=deleted).\\\n all()\n\n return query", "def get_account_balances(self):\n params = clean_locals(locals())\n date_time_sent = datetime.datetime.utcnow()\n response = self.request('GetAccountBalances', params, secure=True)\n data = self.process_response(response, date_time_sent, None)\n return parse_account_balance(data.get('data', {})) if data.get('data') else {}", "def withdraw(holder):\n account = Account.query.filter_by(holder=holder).first()\n amount = request.json.get(\"amount\")\n if not account:\n return jsonify({\"error\": \"Account does not exist\"})\n if account.balance >= amount:\n account.balance -= amount\n db.session.commit()\n return jsonify(\n {\n \"holder\": account.holder,\n \"balance\": account.balance,\n \"message\": \"The withdraw has been processed\",\n }\n )\n return jsonify({\"error\": \"The account balance is insufficient\"})", "def transactions(self):\n return self._call_account_method(\n 'transactions'\n )", "def accounts(self):\n return self._accounts.values()", "def fetch_bank_transactions(self):\n return self.fetch('/bank_transactions')", "def account_df_trades(self, improve=False):\n return(self.account_df('trades', improve))", "def display_accounts_details():\n return Records.display_records()", "def get(self):\n accounts = database.get_all(Accounts)\n all_accounts = []\n for account in accounts:\n all_transactions = []\n for transaction in account.transactions:\n all_transactions.append(transaction.id)\n new_account = {\n \"id\": account.id,\n \"name\": account.name,\n \"iban\": account.iban,\n \"balance\": float(account.balance),\n \"currency\": account.currency,\n \"transactions ids\": all_transactions\n }\n\n all_accounts.append(new_account)\n return json.dumps(all_accounts), 200", "def get_withdrawal(self, withdrawal):\r\n method = self.wallet_endpoints['get_withdrawal']['method']\r\n url = 
self.base_url + self.wallet_endpoints['get_withdrawal']['url'].format(withdrawalId=withdrawal)\r\n req = requests.request(method, url, headers=self.get_auth_headers())\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return res[\"result\"]\r\n else:\r\n return res", "def getCustomerAccount(self):\n self.logger.debug(\"\")\n for cust in self.getCustomerAccountData():\n accounts = len(cust['accounts'])\n self.logger.debug(\"%d accounts in %s\", accounts, cust['CustomerId'])\n ii = 1\n for acct in cust['accounts']:\n self.logger.debug(\"yield %s, %s\", cust['CustomerId'], acct['Id'])\n yield cust['CustomerId'], acct['Id'], ii, accounts\n ii += 1", "def find_by_account_id(auth_account_id: str, page: int, limit: int):\n current_app.logger.debug(f'<search_purchase_history {auth_account_id}')\n statements, total = StatementModel.find_all_statements_for_account(auth_account_id, page, limit)\n\n statements_schema = StatementModelSchema()\n data = {\n 'total': total,\n 'page': page,\n 'limit': limit,\n 'items': statements_schema.dump(statements, many=True)\n }\n current_app.logger.debug('>statements_find_by_account_id')\n return data", "def other_entries(self):\r\n\r\n l = []\r\n t = self._e.transaction\r\n for ae in t.entries.all():\r\n if ae != self._e:\r\n amount = ae.amount * ae.account._DEBIT_IN_DB()\r\n l.append( (amount, ae.account) )\r\n\r\n return l", "def balances(self):\n\t\tif self._session:\n\t\t\treturn self._session.get_account().get('balances', [])\n\n\t\treturn []", "def chart_of_accounts(qbo_session, attrs = \"strict\"):\n\n #query all the accounts\n accounts = qbo_session.get_objects(\"Account\")\n\n #by strict, I mean the order the docs say to use when udpating:\n #https://developer.intuit.com/docs/0025_quickbooksapi/\n #0050_data_services/030_entity_services_reference/account\n\n if attrs == \"strict\":\n attrs = [\n \"Id\", \"SyncToken\", \"MetaData\", \"Name\", \"SubAccount\",\n \"ParentRef\", \"Description\", \"FullyQualifiedName\", \"Active\",\n \"Classification\", \"AccountType\", \"AccountSubType\", \"AcctNum\",\n \"OpeningBalance\", \"OpeningBalanceDate\", \"CurrentBalance\",\n \"CurentBalanceWithSubAccounts\", \"CurrencyRef\"\n ]\n\n else:\n #TODO: validate the attrs against the 'strict' list above\n pass\n\n #As a first cut, we'll sort them by AccountType in trial balance order\n\n tb_type_order = [\n \"Bank\", \"Accounts Receivable\", \"Other Current Asset\",\n \"Fixed Asset\", \"Other Asset\",\n \"Accounts Payable\", \"Credit Card\",\n \"Other Current Liability\", \"Other Liability\",\n \"Equity\",\n \"Income\", \"Other Income\",\n \"Expense\", \"Other Expense\", \"Cost of Goods Sold\"\n ]\n\n accounts_by_type = {} #{Accounts_Payable:[row_list]\n\n for a_id in accounts:\n a = accounts[a_id]\n at = a[\"AccountType\"]\n if at not in tb_type_order:\n raise Exception(\"Unexpected AccountType: %s\" % at)\n\n if at not in accounts_by_type:\n accounts_by_type[at]=[]\n\n this_row = []\n for field in attrs:\n if field not in a:\n this_row.append(\"\")\n else:\n value = a[field]\n if isinstance(value,(list,tuple,dict)):\n this_row.append(\"<complex>\")\n else:\n this_row.append(a[field])\n\n accounts_by_type[at].append(this_row)\n\n rows = [attrs] #headers are the first row\n for at in tb_type_order:\n if at in accounts_by_type:\n for row in accounts_by_type[at]:\n rows.append(row)\n\n return rows", "def balance(self) -> Decimal:\n return sum_queryset(AccountEntry.objects.filter(account=self.account, timestamp__lte=self.timestamp).exclude(timestamp=self.timestamp, 
id__gt=self.id))", "def get_file_a_accounts(self, federal_accounts):\n filters, annotations = self.get_common_query_objects(federal_accounts, \"treasury_account_identifier\")\n\n return (\n AppropriationAccountBalances.objects.filter(*filters)\n .annotate(**annotations)\n .values(\"name\", \"account_code\")\n .annotate(\n total_budgetary_resources=Sum(\"total_budgetary_resources_amount_cpe\"),\n )\n .values(\"name\", \"account_code\", \"total_budgetary_resources\")\n )", "def returnDepositsWithdrawals(self,\n start=datetime.now() - timedelta(days=1),\n end=datetime.now()):\n pass", "def get_acc_tx_history(account_id, total):\n query = iroha.query(\n \"GetAccountTransactions\", account_id=account_id, page_size=total\n )\n ic.sign_query(query, user_private_key)\n response = net.send_query(query)\n data = MessageToDict(response)\n pprint(data, indent=2)", "def full_table():\n #oen the the file\n list_of_current_account_objects = []\n opened_file = open('customers.txt')\n opened_file.readline()\n for line in opened_file: #get a list of all the customers accounts as objects\n line_array = line.split(\",\")\n customer = Account((line_array[0]+\" \"+line_array[1]),line_array[2],line_array[4])\n list_of_current_account_objects.append(customer)\n #update the savings & current variables for all accounts.\n for i in list_of_current_account_objects:\n i.set_sav_bal(account_bal(i,\"savings\"))\n i.set_cur_bal(account_bal(i,\"current\"))\n\n #print the answer\n print(\"customer customer account number-avings balance-current balance\")\n for i in list_of_current_account_objects:\n print(i.get_name()+\"---\"+i.get_acc_num()+\"---\"+str(i.get_sav_bal())+\"---\"+str(i.get_cur_bal()))\n print()", "def get_accounts(self):\r\n return self._accounts", "def get_withdraws(self, index: Tuple[int, Route]) -> list:\n if isinstance(index, Route):\n index = hash(index.addr)\n return self.withdraws[index]", "def get_accounts(self):\n return self.accounts", "def withdraw(self, request, *args, **kwargs):\n account = self.get_object()\n account_serializer = self.get_serializer()\n value = request.data.get(\"valor\", None)\n\n try:\n withdraw_result = account_serializer.withdraw(value, account)\n except ValueError as ve:\n return Response({\"detail\": \"Could not withdraw: {0}.\".format(ve),\n \"status_code\": status.HTTP_400_BAD_REQUEST}, status=status.HTTP_400_BAD_REQUEST)\n\n return Response(withdraw_result)", "def get_balance(self, t: datetime):\n return sum_queryset(self.accountentry_set.all().filter(timestamp__lt=t))", "def accounts():", "def get_account_withdraw_quota(self, currency: 'str') -> list:\n check_should_not_none(currency, \"currency\")\n\n params = {\n \"currency\": currency,\n }\n\n from huobi.service.wallet.get_account_withdraw_quota import GetAccountWithdrawQuotaService\n return GetAccountWithdrawQuotaService(params).request(**self.__kwargs)", "def accounts(self):\r\n return accounts.Accounts(self)", "def balances(self, account_name, begin_date=None, end_date=None,\n min_accounts=None):\n real_account = self._real_account(account_name, self.entries,\n begin_date, end_date, min_accounts)\n\n return self._table_tree(real_account)", "def get_wim_accounts(self, **kwargs):\n kwargs.setdefault('postprocess', _postprocess_wim_account)\n kwargs.setdefault('WHERE', {\"sdn\": \"false\"})\n return self.query(FROM=_WIM_ACCOUNT_JOIN, **kwargs)", "def get_all_project_record(deleted=False):\n session = get_session()\n query = session.query(models.ProjectAccountRecord).\\\n filter_by(deleted=deleted).\\\n all()\n\n 
return query", "def get_account_balances(self, include_admin=False, user=None):\n trans_status = WalletTransStatus()\n # returns the wallet balances of all users.\n\n balances = self.model.objects.filter(\n Q(status=trans_status.complete()) | Q(status=trans_status.success()) | Q(status=trans_status.pending())\n )\n\n if user:\n balances = balances.filter(user=user)\n\n if not include_admin:\n balances = balances.filter(user__is_admin=False)\n\n balances = balances.values('user', 'user__username', 'user__first_name', 'user__last_name', 'user__id').annotate(\n balance=Coalesce(Sum(F('amount')+F('charge')), V(0.00))\n ).order_by('-balance')\n \n return balances", "def get_balances(self):\r\n balances = self.api.find(\"tokens\", \"balances\", query={\"account\": self.account})\r\n return balances", "def execute_withdraws(self):\n withdraws = [v for v in self.action_register if v['action'] == 'withdraw']\n for withdraw in withdraws:\n self.model.schedule.agents_by_type['Customer'][withdraw['unique_id']].euro_wallet -= withdraw['value']\n self.model.schedule.agents_by_type['Customer'][withdraw['unique_id']].withdrawn_euros += withdraw['value']\n self.model.schedule.agents_by_type['Customer'][withdraw['unique_id']].last_withdraw_tick = self.model.schedule.steps", "def get(self):\n held_accounts = User.get_held_accounts(\n get_jwt_identity(), initialize_models=True)\n\n schema = AccountsListSchema(many=True)\n response = schema.dumps(held_accounts)\n\n return jsonify_response(json.loads(response.data), 200)", "def get_entries(self, acc: Account, cls: Type[AccountEntry] = AccountEntry) -> QuerySet:\n return cls.objects.filter(Q(account=acc) & (Q(source_invoice=self) | Q(settled_invoice=self))) if acc else cls.objects.none()", "def display_accounts(cls):\n return cls.account_list", "def new_get_buys_transaction_history(self, cb_account):\n date: datetime = now()\n if cb_account == \"wallet_id_btc\":\n return MockAPIObject(\n data=[{\n \"created_at\": str(date + timedelta(days=-1)),\n \"resource\": \"buy\",\n \"status\": \"completed\",\n \"amount\": {\n \"amount\": 10,\n \"currency\": \"BTC\"\n },\n \"total\": {\n \"amount\": 10,\n \"currency\": \"BTC\"\n },\n \"fees\": [{\n \"amount\": {\n \"amount\": 1,\n \"currency\": \"EUR\"\n }\n }]\n }, {\n \"created_at\": str(date + timedelta(days=1)),\n \"resource\": \"buy\",\n \"status\": \"completed\",\n \"amount\": {\n \"amount\": 5,\n \"currency\": \"BTC\"\n },\n \"total\": {\n \"amount\": 5,\n \"currency\": \"BTC\"\n },\n \"fees\": [{\n \"amount\": {\n \"amount\": 0.5,\n \"currency\": \"EUR\"\n }\n }]\n }])\n else:\n return MockAPIObject()", "def invoices(self):\r\n return inv.AccountInvoices(self)", "def get_accounts(self):\n\n\t\treturn self.__accounts", "def getCustomerAccountData(self):\n self.logger.debug(\"\")\n #Process each entry returned by getCustomersInfo through getAccountsInfo.\n customersInfoResponse = self.getCustomersInfo()\n if customersInfoResponse is None:\n self.logger.debug(\"did not get data from self.getCustomersInfo()\")\n raise RuntimeError()\n first = True\n cInfos = self.parseCustomerInfo(customersInfoResponse)\n self.logger.debug(\"%d cInfos\", len(cInfos))\n data = {}\n for cInfo in cInfos:\n if first:\n first = False\n else: # Adds a newline separator for text output.\n self.output.write(self.format({}))\n data['CustomerId'] = cInfo['Id']\n accountsInfoResponse = self.getAccountsInfo(cInfo['Id'], \"true\")\n if accountsInfoResponse is not None:\n data['accounts'] = self.parseAccountInfo(accountsInfoResponse)\n else:\n 
data['accounts'] = []\n self.logger.debug(\"yield %r\", data)\n yield data", "def show_accounts(conn, userid):\n print('\\n\\nAccount statment for user', (userid))\n with conn.cursor() as curs:\n curs.execute('SELECT id, type, balance FROM accounts WHERE owner_id=%s', (userid,))\n rows = curs.fetchall()\n print('Number of results:', curs.rowcount)\n for row in rows:\n print(row)", "def record_get_for_user(project_id, user_id, deleted=False):\n session = get_session()\n query = session.query(models.UserAccountRecord).\\\n filter_by(project_id=project_id).\\\n filter_by(user_id=user_id).\\\n filter_by(deleted=deleted)\n\n return query.all()", "def get_accounts(self, count: int = 100, account_type: str = None) -> list:\n all_accounts = list(\n itertools.islice(self.client.accounts.get_all_generator(), count)\n )\n if account_type is None:\n return all_accounts\n return [a for a in all_accounts if a[\"acctType\"] == account_type]", "def list(self, **params):\n\n _, _, account_charts = self.http_client.get(\"/accountcharts\", params=params)\n return account_charts", "def list_accounts(self):\n pass", "def get_spend_by_account_custom_daterange(self, account_id, start_date, end_date):\n try:\n account = Client.objects.get(id=account_id)\n except Client.DoesNotExist:\n return\n\n spend_sum = 0\n adwords_accounts = account.adwords.all()\n for adwords_account in adwords_accounts:\n client = get_client()\n client.client_customer_id = adwords_account.dependent_account_id\n\n report_downloader = client.GetReportDownloader(version=settings.API_VERSION)\n\n campaign_report_selector = {\n 'fields': ['Cost', 'CampaignId', 'CampaignStatus', 'CampaignName', 'Labels', 'Impressions'],\n 'predicates': [\n {\n 'field': 'Cost',\n 'operator': 'GREATER_THAN',\n 'values': '0'\n },\n ],\n 'dateRange': {\n 'min': start_date.strftime('%Y%m%d'),\n 'max': end_date.strftime('%Y%m%d')\n }\n }\n\n try:\n campaign_exclusion = CampaignExclusions.objects.get(account=account)\n excluded_campaign_ids = [campaign.campaign_id for campaign in campaign_exclusion.aw_campaigns.all()]\n if len(excluded_campaign_ids) > 0:\n campaign_report_selector['predicates'].append({\n 'field': 'CampaignId',\n 'operator': 'NOT_IN',\n 'values': excluded_campaign_ids\n })\n except CampaignExclusions.DoesNotExist:\n pass\n\n campaign_report_query = {\n 'reportName': 'CAMPAIGN_PERFORMANCE_REPORT',\n 'dateRangeType': 'CUSTOM_DATE',\n 'reportType': 'CAMPAIGN_PERFORMANCE_REPORT',\n 'downloadFormat': 'CSV',\n 'selector': campaign_report_selector\n }\n\n campaign_report = Reporting.parse_report_csv_new(\n report_downloader.DownloadReportAsString(campaign_report_query))\n for campaign_row in campaign_report:\n # This is the cost for this timerange\n cost = int(campaign_row['cost']) / 1000000\n spend_sum += cost\n\n return spend_sum", "def get_adusers(self, account_id, batch=False):\n path = 'act_%s/users' % account_id\n return self.make_request(path, 'GET', batch=batch)", "def withdraws(account):\r\n limit = 500\r\n print(\"Your account balance is $\", format(account, \"0.2f\"), sep='')\r\n print(\"Your withdraw limit is $\", format(limit, \"0.2f\"), sep='')\r\n while True:\r\n try:\r\n withdraw_amount = int(input(\"Enter withdraw amount. $\"))\r\n break\r\n except ValueError:\r\n print(\"Error. 
Must be a whole number.\")\r\n # Checking if the customer has sufficient funds/over daily limit\r\n while withdraw_amount > account or withdraw_amount > limit:\r\n print(\"Insufficient funds or daily limit exceeded.\")\r\n while True:\r\n try:\r\n withdraw_amount = int(\r\n input(\"Enter withdraw amount. $\"))\r\n break\r\n except ValueError:\r\n print(\"Error. Must be a whole number.\")\r\n account -= withdraw_amount\r\n limit -= withdraw_amount\r\n print(\"Your new balance is $\", format(account, \"0.2f\"), sep='')\r\n print(\"Your new limit is $\", format(limit, \"0.2f\"), sep='')", "def get_records():\n with RECORD_LOCK: # since flask 1.0 multi-threaded is enabled by default\n return jsonify(RECORDS)", "def get_balance(self, acc: Account) -> Decimal:\n return sum_queryset(self.get_entries(acc))", "def accounts(self):\n # get the summary data\n options = { 'PayLoadText' : self.request_xml() }\n\n print(self.url)\n print(options)\n\n response = requests.get(self.url, params=options) \\\n .content\n print(response)\n xml_tree = xml.etree.cElementTree.fromstring(response)\n\n status = xml_tree.find('ServiceResponse/Status').text\n\n if status != 'success':\n raise requests.exceptions.RequestException()\n\n self.security_token = xml_tree.find('ClientSecurityToken').text\n\n accounts = [ \n self.create_account(account)\n for account in xml_tree.iter('CardAccounts')\n ]\n\n return accounts", "def list_accounts():\n\n try:\n accounts = Account.query.all()\n except NoResultFound:\n print(f\"No account configured yet.\")\n return\n n_len = max([len(a.nickname) for a in accounts if a.nickname != 'no.name'])\n fmt = \"{nickname:\" + str(n_len) + \"s}: {email:s}\"\n #import pdb; pdb.set_trace()\n for acct in [acct for acct in accounts if acct.nickname != 'no.name']:\n print(fmt.format(nickname=acct.nickname, email=acct.email))\n return", "def listreceivedbyaccount(self, minconf=1, includeempty=False):\n return [AccountInfo(**x) for x in\n self.proxy.listreceivedbyaccount(minconf, includeempty)]", "def balances():\n loop.run_until_complete(app.exchanges.fetch_balances())\n print(app.exchanges.balances_str)", "def test_list_grading_periods_accounts(self):\r\n account_id = None # Change me!!\r\n\r\n r = self.client.list_grading_periods_accounts(account_id)", "def list_account_request(request):\n account_list = Account.objects.all()\n context = {'account_list': account_list}\n return render(request, \"accounts/account_list.html\", context)", "def accounts(self):\r\n return resources.Accounts(self)", "def billing_history(cls, user=None):\n invoices = Invoice.query.filter(Invoice.user_id == user.id) \\\n .order_by(Invoice.created_on.desc()).limit(12)\n\n return invoices", "def get_records(module):\n if True:\n classes = Attendance.objects.filter(module_id=module).order_by('-time')\n\n data = []\n for c in classes:\n attend = {\"time_id\": c.time, \"lt\": c.lecture_or_tutorial, \"owner\": c.owner,\n \"students\": [p.person_id for p in Attend_Recodes.objects.filter(attendance=c)],\n \"images\": [{\"url\": settings.MEDIA_URL + IMG_FOLDER_NAME + img.path.name,\n \"data\": json.loads(img.data)} for img in Images.objects.filter(attendance=c)]}\n data.append(attend)\n\n return data\n else:\n return None", "def list_records(self, zone):\n return self._zones[zone.id][\"records\"].values()", "def get_account_transactions(self, StartTime, EndTime):\n params = clean_locals(locals())\n date_time_sent = datetime.datetime.utcnow()\n response = self.request('ListAccountPostings', params, secure=True)\n data = 
self.process_response(response, date_time_sent, None)\n return parse_account_postings(data.get('data', {})) if data.get('data') else {}", "def test_retrieve_all_by_bank(self):\n swa = frontend.SupplyWinApi()\n query_dict = dict(\n dev=\"rrenaud\",\n targets=\"\",\n interaction=\"Bank\",\n unconditional=\"true\",\n )\n\n card_stats = swa.retrieve_data(query_dict)\n\n # Gets 288 entries back, because one for each of the 144\n # cards, plus the unconditioned version of each\n self.assertEquals(len(card_stats), 288)\n\n self.assertEquals(card_stats[0]['card_name'], 'Adventurer')\n\n json = swa.readable_json_card_stats(card_stats)\n self.assertEquals(json[0:14], '[{\"card_name\":')", "def get_transaction_list2(self, account_id, aid):\n endpoint = 'accounts/{0}/transactions/sinceid'.format(account_id)\n\n params = {}\n params[\"id\"] = aid\n\n return self._api.request(endpoint, params=params)", "def get_balances(self, account_id=None) -> Balances:\n if account_id is None:\n account_id = self.default_account_id\n url = f\"/v1/accounts/{account_id}/balances\"\n data = self.get(url, {})\n res = AccountsAPIResponse(**data)\n return res.balances", "def get_records(self) -> List[DBModelInstance]:\n return self._get_all_records()", "def get_transactions(self, account_id, from_date=None, to_date=None,\n page_size=None, type_list=None):\n endpoint = 'accounts/{0}/transactions'.format(account_id)\n\n params = {}\n\n if from_date:\n params[\"from\"] = from_date\n\n if to_date:\n params[\"to\"] = to_date\n\n if page_size:\n params[\"pageSize\"] = page_size\n\n if type_list:\n type_list = \"%2C\".join(type_list)\n params[\"type\"] = type_list\n\n return self._api.request(endpoint, params=params)", "async def fetch_deposits_withdrawals(self, code: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):\n await self.load_markets()\n request = {\n 'currency': 'all',\n # 'start': 123,\n }\n #\n # if since is not None:\n # # date-based pagination not supported\n # }\n #\n currency = None\n if code is not None:\n currency = self.currency(code)\n request['currency'] = currency['id']\n if limit is not None:\n request['count'] = limit\n response = await self.privateGetUserWalletHistory(self.extend(request, params))\n transactions = self.filter_by_array(response, 'transactType', ['Withdrawal', 'Deposit'], False)\n return self.parse_transactions(transactions, currency, since, limit)", "def get_trades(self):\n return trade.Trade.all_from_account_id(self.id)", "def add_withdraw(self, withdraw_id: str, tx_id: str, apply_time: int, asset: str, amount: float, fee: float,\n auto_commit: bool = True):\n row = (withdraw_id, tx_id, apply_time, asset, amount, fee)\n self.add_row(tables.SPOT_WITHDRAW_TABLE, row, auto_commit=auto_commit)", "def all(cls):\n with sqlite3.connect(cls.dbpath) as connection:\n connection.row_factory = sqlite3.Row\n cursor = connection.cursor()\n SELECTSQL = \"SELECT * FROM accounts;\"\n cursor.execute(SELECTSQL)\n result = []\n for dictrow in cursor.fetchall():\n result.append(cls(**dictrow))\n return result", "def accounts():\n if not session.get('authed', False):\n flash(\"Please log in.\")\n return redirect(my_url('index'))\n account_ids = redis_client.smembers('%s-accounts' % session['phone'])\n accounts = [kloudless.Account.retrieve(i) for i in account_ids]\n callback_url = quote_plus(my_url('auth_callback'))\n return render_template('accounts.html', accounts=accounts, app_number=APP_NUMBER,\n callback_url=callback_url, app_id=KLOUDLESS_APP_ID)", "def 
get_all(self, id_):\n\n joined_query = self._db.User.join(\n self._db.Profile, self._db.User.c.id_ == self._db.Profile.c.userId)\n joined_query = joined_query.join(\n self._db.Prefecture, self._db.Profile.c.prefectureId == self._db.Prefecture.c.id_)\n joined_query = joined_query.select(\n self._db.User.c.id_ == id_).with_only_columns(self.account_columns.values())\n\n record = joined_query.execute().fetchone()\n\n return {str(key): str(value) for key, value in zip(self.account_columns.keys(), record)}", "def transactions(self):\n url = f'{self._ynab.api_url}/budgets/{self.budget.id}/accounts/{self.id}/transactions'\n response = self._ynab.session.get(url)\n if not response.ok:\n self._logger.error('Error retrieving transactions, response was : %s with status code : %s',\n response.text,\n response.status_code)\n return []\n return [Transaction(self._ynab, transaction)\n for transaction in response.json().get('data', {}).get('transactions', [])]", "def get_adaccount(self, account_id, fields=None, batch=False):\n path = 'act_%s' % account_id\n args = {'fields': fields} if fields else {}\n return self.make_request(path, 'GET', args, batch=batch)" ]
[ "0.6707817", "0.62090015", "0.61132365", "0.59777224", "0.586913", "0.5829058", "0.58250797", "0.5786732", "0.572243", "0.56471694", "0.56391835", "0.558515", "0.5583775", "0.55779237", "0.5575182", "0.5569372", "0.5547758", "0.55422384", "0.55409026", "0.550295", "0.5485568", "0.5476998", "0.54741544", "0.544315", "0.5407965", "0.53755426", "0.5371875", "0.5335015", "0.5324369", "0.5309471", "0.53041476", "0.53040165", "0.53015244", "0.5274396", "0.52666086", "0.5266179", "0.5237146", "0.5223076", "0.52193636", "0.521428", "0.5211222", "0.52089584", "0.5201909", "0.51954603", "0.5195246", "0.51951605", "0.5190822", "0.519036", "0.51823014", "0.5177809", "0.51538175", "0.51218075", "0.5116172", "0.5110556", "0.50873756", "0.5071474", "0.5069756", "0.5067495", "0.5064951", "0.505264", "0.50506413", "0.5050641", "0.5048416", "0.5043689", "0.50419766", "0.5036607", "0.50256467", "0.5016719", "0.50167024", "0.50165725", "0.5002131", "0.49997258", "0.49864954", "0.49859464", "0.49749717", "0.49695134", "0.49668708", "0.4966414", "0.49622786", "0.49580595", "0.49569738", "0.49397615", "0.49363086", "0.49352548", "0.49188256", "0.49104357", "0.4893391", "0.48923016", "0.4888822", "0.48884022", "0.48864916", "0.48856053", "0.4883531", "0.48826373", "0.48817766", "0.487898", "0.48774046", "0.48723054", "0.4866278", "0.48655346", "0.48506325" ]
0.0
-1
Submit a request to withdraw some asset from an account.
def post_create_withdraw(self, address: 'str', amount: 'float', currency: 'str', fee: 'float',
                         chain: 'str' = None, address_tag: 'str' = None) -> int:
    check_symbol(currency)
    check_should_not_none(address, "address")
    check_should_not_none(amount, "amount")
    check_should_not_none(fee, "fee")

    params = {
        "currency": currency,
        "address": address,
        "amount": amount,
        "fee": fee,
        "chain": chain,
        "addr-tag": address_tag
    }

    from huobi.service.wallet.post_create_withdraw import PostCreateWithdrawService
    return PostCreateWithdrawService(params).request(**self.__kwargs)
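A hedged usage sketch for the method above (again an annotation, not dataset content): WalletClient is assumed from the same SDK, and every concrete value — address, amount, fee, chain — is a placeholder that must match the exchange's published withdraw settings for the chosen currency.

# Hedged illustration: submit a USDT withdraw request and print the returned id.
# All concrete values here are placeholders, not real withdrawal parameters.
from huobi.client.wallet import WalletClient

wallet_client = WalletClient(api_key="YOUR_API_KEY", secret_key="YOUR_SECRET_KEY")
withdraw_id = wallet_client.post_create_withdraw(
    address="DESTINATION_ADDRESS_PLACEHOLDER",
    amount=100.0,
    currency="usdt",
    fee=1.0,
    chain="trc20usdt")
print("withdraw id:", withdraw_id)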
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def withdraw(self, asset: Asset, address: str, amount: float,\n receive_window: Optional[int] = None):\n api_params = {\n 'asset': asset.value,\n 'address': address,\n 'amount': amount,\n 'timestamp': get_current_time_milliseconds()\n }\n\n if receive_window is not None:\n api_params['receiveWindow'] = receive_window\n\n return self.request.post(path='/withdraw', json_data=api_params)", "def withdraw(account, amount):\n pass", "def withdraw(self, request, *args, **kwargs):\n account = self.get_object()\n account_serializer = self.get_serializer()\n value = request.data.get(\"valor\", None)\n\n try:\n withdraw_result = account_serializer.withdraw(value, account)\n except ValueError as ve:\n return Response({\"detail\": \"Could not withdraw: {0}.\".format(ve),\n \"status_code\": status.HTTP_400_BAD_REQUEST}, status=status.HTTP_400_BAD_REQUEST)\n\n return Response(withdraw_result)", "def withdraw(self, currency, amount, address):\n pass", "def withdraw(self, currency, amount, address):\n return self.api_query('withdraw', {\"currency\": currency, \"amount\": amount, \"address\": address})", "def withdraw(holder):\n account = Account.query.filter_by(holder=holder).first()\n amount = request.json.get(\"amount\")\n if not account:\n return jsonify({\"error\": \"Account does not exist\"})\n if account.balance >= amount:\n account.balance -= amount\n db.session.commit()\n return jsonify(\n {\n \"holder\": account.holder,\n \"balance\": account.balance,\n \"message\": \"The withdraw has been processed\",\n }\n )\n return jsonify({\"error\": \"The account balance is insufficient\"})", "def withdraw_money():\n print(\"\\n\")\n print(messages.account_credentials)\n u_id = pyip.inputInt(\"Your Id: \", greaterThan=0)\n password = pyip.inputPassword(\"Your Password: \")\n\n credentials = {\"id\":u_id, \"password\":password}\n result = BankOperationsBackend.withdraw_money(credentials)\n start_again() if result else BankOperationsUi.withdraw_money()", "def add_withdraw(self, withdraw_id: str, tx_id: str, apply_time: int, asset: str, amount: float, fee: float,\n auto_commit: bool = True):\n row = (withdraw_id, tx_id, apply_time, asset, amount, fee)\n self.add_row(tables.SPOT_WITHDRAW_TABLE, row, auto_commit=auto_commit)", "def withdraw(self, currency, amount, address):\n return self.__call__('balance', 'withdrawcurrency',\n {\"currencyname\": currency, \n \"quantity\": amount, \n \"address\": address})", "def api_asset_add(char_code: str, name: str, capital: str, interest: str):\n capital, interest = float(capital), float(interest)\n asset = Asset(char_code=char_code, name=name, capital=capital, interest=interest)\n\n if app.bank.contains(asset):\n return f\"Asset '{name}' already exists\", 403\n\n app.bank.add(asset)\n return f\"Asset '{name}' was successfully added\", 200", "def register_withdraw(self, withdraw_intent): \n if withdraw_intent > 0:\n self.teo.register_withdraw(self, withdraw_intent)", "def execute_withdraws(self):\n withdraws = [v for v in self.action_register if v['action'] == 'withdraw']\n for withdraw in withdraws:\n self.model.schedule.agents_by_type['Customer'][withdraw['unique_id']].euro_wallet -= withdraw['value']\n self.model.schedule.agents_by_type['Customer'][withdraw['unique_id']].withdrawn_euros += withdraw['value']\n self.model.schedule.agents_by_type['Customer'][withdraw['unique_id']].last_withdraw_tick = self.model.schedule.steps", "def test_withdraw_no_asset(client):\n response = client.post(WITHDRAW_PATH, follow=True)\n content = response.json()\n assert 
response.status_code == 400\n assert content == {\"error\": \"'asset_code' is required\"}", "def withdraw(self, amount, trigger_transaction, trans=None):\n\n #\n # validates the amount is positive\n self.validate_amount(amount)\n\n #\n # Validate the user has the amount for the withdraw\n if not self.check_sufficient_funds(amount):\n raise OverdraftException(self.user.username)\n\n #\n # creates the transaction\n category = TransactionType.objects.get(pk=TransactionTypeConstants.BonusCashWithdraw.value)\n\n #\n # makes the amount negative because it is a withdrawal\n self.create(category, -amount, trans)\n self.transaction_detail.trigger_transaction = trigger_transaction\n self.transaction_detail.save()\n\n Logger.log(ErrorCodes.INFO,\"Bonus Cash Withdraw\", self.user.username+\" withdrew \"+str(amount)+\" \"+self.accountName+\" from their account.\")", "def cryptocurrency_withdrawal_request(self, walletId, currency, amount, address):\n return", "def test_withdraw_success(client):\n usd = Asset.objects.create(\n code=\"USD\",\n issuer=Keypair.random().public_key,\n sep24_enabled=True,\n withdrawal_enabled=True,\n distribution_seed=Keypair.random().secret,\n )\n response = client.post(\n WITHDRAW_PATH, {\"asset_code\": usd.code, \"amount\": \"100\"}, follow=True\n )\n content = response.json()\n assert content[\"type\"] == \"interactive_customer_info_needed\"\n assert \"100\" in content[\"url\"]\n assert content.get(\"id\")\n\n t = Transaction.objects.filter(id=content.get(\"id\")).first()\n assert t\n assert t.stellar_account == \"test source address\"\n assert t.account_memo is None\n assert t.muxed_account is None\n assert t.asset.code == usd.code\n assert t.protocol == Transaction.PROTOCOL.sep24\n assert t.kind == Transaction.KIND.withdrawal\n assert t.status == Transaction.STATUS.incomplete\n assert t.receiving_anchor_account is None\n assert t.memo is None\n assert t.memo_type == Transaction.MEMO_TYPES.hash\n assert t.from_address is None", "def make_transaction():\n account_id = request.json['account_id']\n aux_account = [account for account in accounts if account['id'] == account_id]\n if len(aux_account) == 0:\n abort(404)\n account_balance = Decimal(aux_account[0].get('balance')).quantize(Decimal('0.00'))\n transaction = request.json['transaction']\n transaction_amount = Decimal(abs(request.json['amount'])).quantize(Decimal('0.00'))\n\n if not request.json:\n abort(400)\n if transaction not in ['withdrawal', 'deposit']:\n abort(400, f'Invalid transaction name: {transaction}')\n if transaction == 'withdrawal':\n transaction_amount = transaction_amount*-1\n\n # the user can't withdraw more than the account has\n validation_sum = (account_balance + transaction_amount).quantize(Decimal('.01'), rounding=ROUND_DOWN)\n if validation_sum >= 0:\n for real_account in accounts:\n if real_account.get('id') == account_id:\n real_account['balance'] = round(float(validation_sum),2)\n else:\n abort(400, {'error':'Not enough funds for this transaction'})\n\n return json.dumps({f'{transaction.capitalize()} Done. 
New balance': str(validation_sum)}, ensure_ascii=False), 200", "def bank_withdraw_money(stub, request):\n # print(\"In method bank_withdraw_money:\")\n\n try:\n result = stub.Withdraw(request)\n except DatabaseOptFailure:\n return \"IO_Failure\"\n return result", "async def transfer_asset(request):\n \n required_fields = ['label', 'source', 'target' , 'amount' ,'resource' ]\n common.validate_fields(required_fields, request.json)\n\n transfer = _create_transfer_dict(request)\n sender = _create_transfer_participant(request.json, transfer)\n signer = await common.get_signer(request)\n\n # print(\"transfer =======> \", transfer)\n # print(\"sender =========> \", sender)\n\n batches, batch_id = transaction_creation.transfer_asset(\n txn_key = signer,\n batch_key = request.app.config.SIGNER,\n identifier = transfer['id'],\n label = transfer.get('label'),\n sender = sender,\n amount = transfer['amount'])\n\n # print(\"batches =========> \", batches)\n\n await messaging.send(\n request.app.config.VAL_CONN,\n request.app.config.TIMEOUT,\n batches)\n\n await messaging.check_batch_status(request.app.config.VAL_CONN, batch_id)\n\n return response.json({\"transfer\" : \"asad\"})", "def do_withdrawal_requests(self,args):\n try:\n ppdict(bitstamp.withdrawal_requests())\n except Exception as e:\n print \"Unexpected Error: %s\" % e\n self.onecmd('help withdrawal_requests')", "def post(self):\n\n action = self.request.get('action')\n if not action:\n raise ErrorMessage(404, 'missing action (requested_action) params')\n\n self.require_action_permitted('grant')\n\n account = model.Account.get(self.request.get('key'))\n if not account:\n raise ErrorMessage(404, 'bad key given')\n\n #TODO(eyalf): define account.display_name() or something\n name = account.email\n if not action in account.requested_actions:\n #i18n: Error message\n raise ErrorMessage(404, _('No pending request for '\n '%(account_action)s by %(user)s')\n % (action, name))\n account.requested_actions.remove(action)\n grant = self.request.get('grant', 'deny')\n if grant == 'approve':\n account.actions.append(action)\n account.put()\n logging.info('%s request for %s was %s' % (account.email,\n action,\n grant))\n\n if self.params.embed:\n if grant == 'approve':\n self.write(\n #i18n: Application for the given permission action approved\n _('Request for becoming %(action)s was approved.') % action)\n else:\n self.write(\n #i18n: Application for the given permission action denied\n _('Request for becoming %(action)s was denied.') % action)\n else:\n raise Redirect(self.get_url('/grant_access'))", "def buy_asset(self, asset):\n from Game.models import Asset, Transaction\n asset_comms = ACommunication(settings.API_URL)\n asset = asset_comms.get_asset_quote(asset)\n total = (asset.buy * asset.quantity)\n buy = asset.buy\n sell = asset.sell\n quantity = asset.quantity\n name = asset.name\n type = asset.type\n\n if quantity <= 0:\n return {\"error\": True,\n \"message\": \"You need to buy at least one asset\"}\n\n if self.liquid_with_loans >= total:\n asset = Asset.safe_get(name=asset.name)\n # if not asset then create one\n if not asset:\n asset = Asset(name=name,\n type=type)\n asset.save()\n\n asset.quantity = quantity\n asset.buy = buy\n asset.sell = sell\n\n self.create_or_update_ownership(asset, quantity)\n\n Transaction(wallet=self, asset=asset, asset_price_buy=asset.buy,\n asset_price_sell=asset.sell,\n date=datetime.datetime.now(), quantity=quantity,\n is_purchase=True, visibility=False).save()\n\n self.liquid -= total\n self.liquid = 
round(self.liquid, 3)\n self.save()\n return {\"error\": False, \"message\": \"Purchase has been successful\"}\n else:\n return {\"error\": True, \"message\": \"Not enough cash\"}", "def test_route_existing_account():\n\n account_repository = AccountRepositoryMock()\n get_account = GetAccountMock(account_repository)\n withdraw_account = WithdrawAccount(account_repository, get_account)\n withdraw_account_controller = WithdrawAccountController(withdraw_account)\n\n attributes = {\n \"type\": \"withdraw\",\n \"origin\": \"100\",\n \"amount\": faker.random_number(digits=3),\n }\n\n response = withdraw_account_controller.route(HttpRequest(body=attributes))\n\n assert get_account.account_id_param[\"account_id\"] == int(attributes[\"origin\"])\n assert account_repository.update_account_params[\"account_id\"] == int(\n attributes[\"origin\"]\n )\n\n assert (\n account_repository.update_account_params[\"balance\"]\n == get_account.account_balance_param[\"balance\"] - attributes[\"amount\"]\n )\n\n assert response.status_code == 201\n assert response.body", "def withdraw(self, amount, budget):\r\n if budget != \"Total Balance\":\r\n assert budget in self.budgets, \"Specified budget doesn't exist\"\r\n self.budgets[budget] -= float(amount)\r\n self.balance -= float(amount)", "def send_asset(receiver):\n\n client = _algod_client()\n params = client.suggested_params()\n note = TRANSACTION_NOTE\n\n decimals = _algod_client().asset_info(ASSET_ID).get(\"params\").get(\"decimals\")\n amount = GIVEAWAY_AMOUNT * (10 ** decimals)\n\n unsigned_txn = AssetTransferTxn(\n SENDER_ADDRESS,\n params,\n receiver,\n amount,\n index=ASSET_ID,\n note=note.encode(),\n )\n try:\n signed_txn = unsigned_txn.sign(mnemonic.to_private_key(SENDER_PASSPHRASE))\n except WrongChecksumError:\n return \"Checksum failed to validate\"\n except ValueError:\n return \"Unknown word in passphrase\"\n\n try:\n transaction_id = client.send_transaction(signed_txn)\n _wait_for_confirmation(client, transaction_id, 4)\n except Exception as err:\n return str(err)\n\n print(f\"Amount of {GIVEAWAY_AMOUNT} sent to {receiver}\")\n return \"\"", "def deposit_swth_before_withdrawal(request, url):\n\n def tear_down():\n # clean up here\n pass\n\n request.addfinalizer(tear_down)\n\n # encrypted_key = b\"6PYKozqwKwRYi77GN3AwTwXEssJZWbfneYKEYbiSeuNtVTGPFT2Q7EJpjY\"\n # pub_key = \"ANVLCD3xqGXKhDnrivpVFvDLcvkpgPbbMt\"\n client = SwitcheoApi(base_url=\"https://test-api.switcheo.network\")\n\n priv_key_wif = 'L4FSnRosoUv22cCu5z7VEEGd2uQWTK7Me83vZxgQQEsJZ2MReHbu'\n deposit_response = client.deposit(priv_key_wif=priv_key_wif, asset_id=\"SWTH\", amount=50,\n contract_hash=\"a195c1549e7da61b8da315765a790ac7e7633b82\",\n blockchain=\"neo\")\n print(deposit_response.text)", "def deposit(holder):\n account = Account.query.filter_by(holder=holder).first()\n if not account:\n return jsonify({\"error\": \"Account does not exist\"})\n amount = request.json.get(\"amount\")\n account.balance += amount\n db.session.commit()\n return jsonify(\n {\n \"holder\": account.holder,\n \"balance\": account.balance,\n \"message\": \"The deposit has been processed\",\n }\n )", "def withdrawn(self, withdrawn):\n\n self._withdrawn = withdrawn", "def do_withdraw(self,args):\n try:\n address = raw_input(\"Enter the address you want to withdraw to: \")\n totalbalance = prompt(\"Do you want to withdraw your ENTIRE balance?\",False)\n if totalbalance == False:\n amount = D(raw_input(\"Enter the amount of BTC to withdraw: \"))\n else:\n amount,_ = bal()\n \n result = 
bitstamp.bitcoin_withdraw(address,amount)\n if result:\n print \"%s BTC successfully sent to %s\" % (amount,address)\n else:\n print \"There was an error withdrawing.\"\n except Exception as e:\n traceback.print_exc()\n print \"Unexpected Error: %s\" % e\n self.onecmd('help withdraw')", "def submit_invoices(self, **kwargs) -> ApiResponse:\n \n return self._request(kwargs.pop('path'), data=kwargs)", "def withdraw(self, amount):\n\n print(\"\\nWithdrawal - {self.name}\".format(self=self))\n\n # retrieves the available balance in the account\n availableBalance = self.getAvailableBalance()\n \n # checks for negative amount value \n if amount < 0:\n print(\"Cannot withdraw £{0:.2f}\".format(amount))\n print(\"Deposit amount cannot be a negative value.\")\n\n # checks whether amount requested is greater than the available balance\n elif amount > availableBalance:\n print(\"Cannot withdraw £{0:.2f}\".format(amount))\n print(\"Insufficient funds.\")\n\n # subtracts amount from account balance\n else:\n self.balance -= amount\n print(\"{0} has withdrew £{1:.2f}. New balance is £{2:.2f}\".format(self.name, amount, self.balance))", "def withdraw_by_username(self,amount,username):\r\n pass", "def req_qry_trading_account(self):\n pass", "def deposit(account, amount):\n pass", "def post(self):\n ctx = _request_ctx_stack.top\n current_user = ctx.user\n request_body = request.get_json()\n name = request_body.get('name')\n account_type = request_body.get('type')\n initial_balance = request_body.get('ini_bal')\n if name:\n try:\n acc_factory = AccountFactory()\n if account_type == 'credit':\n limit = request_body.get('limit')\n if limit is None:\n return response('failed', 'Please specify a credit limit for a credit account', 400)\n new_account = acc_factory.create_account(\n name=name,\n account_type=account_type,\n user_id=current_user.id,\n initial_balance=initial_balance,\n limit=limit\n )\n else:\n new_account = acc_factory.create_account(\n name=name,\n account_type=account_type,\n user_id=current_user.id,\n initial_balance=initial_balance\n )\n new_account.save()\n except IntegrityError:\n return response('failed', 'Duplicate account name', 400)\n else:\n return response_created_account(new_account, 200)\n return response('failed', 'Missing account name attribute', 400)", "def send_to_bank(self, transaction_id, narration, bank_code, bank_account, amount: float, sender_name):\n response = send_request(\n data=self.get_bank_parameters(\n vendor_id=self.vendor_id,\n transaction_id=transaction_id,\n narration=narration,\n bank_code=bank_code,\n bank_account=bank_account,\n amount=amount,\n sender_name=sender_name\n ),\n url=self.get_bank_url()\n )\n return response", "def do_withdraw(self, args):\n \n amount = float(input(\"How much? \"))\n \n balance = self.cur.execute(\"SELECT * FROM balance ORDER BY date DESC\").fetchone()[2]\n if amount > balance:\n print(\"Insufficient funds! Withdrawl canceled.\")\n print(\"Use the `balance` command to check your account balance\")\n return\n \n balance -= amount\n now = time()\n self.cur.execute(\"INSERT INTO withdrawls VALUES (?,?)\", (now, amount))\n self.cur.execute(\"INSERT INTO balance VALUES (?,?,?)\", (now, 0.0, balance))\n self.db.commit()\n print(\"Withdrawl complete. 
Your new balance is $%.2f\" % balance)", "def test_withdraw_invalid_operation(client):\n eth = Asset.objects.create(\n code=\"ETH\",\n issuer=Keypair.random().public_key,\n sep24_enabled=True,\n withdrawal_enabled=False,\n distribution_seed=Keypair.random().secret,\n )\n response = client.post(WITHDRAW_PATH, {\"asset_code\": eth.code}, follow=True)\n content = response.json()\n assert response.status_code == 400\n assert content == {\"error\": \"invalid operation for asset ETH\"}", "def manage_access_approval_withdraw(\n request: AuthenticatedHttpRequest,\n *,\n access_request_pk: int,\n entity: Literal[\"importer\", \"exporter\"],\n approval_request_pk: int,\n) -> HttpResponse:\n\n with transaction.atomic():\n model_cls = ImporterAccessRequest if entity == \"importer\" else ExporterAccessRequest\n access_request = get_object_or_404(model_cls, pk=access_request_pk)\n\n case_progress.access_request_in_processing(access_request)\n\n approval_request = get_object_or_404(\n access_request.approval_requests.filter(is_active=True).select_for_update(),\n pk=approval_request_pk,\n )\n\n approval_request.is_active = False\n approval_request.status = ApprovalRequest.Statuses.CANCELLED\n approval_request.save()\n\n return redirect(\n reverse(\n \"access:case-management-access-approval\",\n kwargs={\"access_request_pk\": access_request.pk, \"entity\": entity},\n )\n )", "def test_withdraw_interactive_no_asset(client):\n Asset.objects.create(\n code=\"USD\",\n issuer=Keypair.random().public_key,\n sep24_enabled=True,\n withdrawal_enabled=True,\n distribution_seed=Keypair.random().secret,\n )\n response = client.get(f\"{WEBAPP_PATH}?transaction_id=2\", follow=True)\n assert response.status_code == 400\n assert \"asset_code\" in response.content.decode()", "def withdraw_funds(self, dt, amount):\n # Check that amount is positive and that there is\n # enough in the portfolio to withdraw the funds\n if dt < self.current_dt:\n raise ValueError(\n 'Withdrawal datetime (%s) is earlier than '\n 'current portfolio datetime (%s). Cannot '\n 'withdraw funds.' % (dt, self.current_dt)\n )\n self.current_dt = dt\n\n if amount < 0:\n raise ValueError(\n 'Cannot debit negative amount: '\n '%0.2f from the portfolio.' % amount\n )\n\n if amount > self.cash:\n raise ValueError(\n 'Not enough cash in the portfolio to '\n 'withdraw. %s withdrawal request exceeds '\n 'current portfolio cash balance of %s.' 
% (\n amount, self.cash\n )\n )\n\n self.cash -= amount\n\n self.history.append(\n PortfolioEvent.create_withdrawal(self.current_dt, amount, self.cash)\n )\n\n self.logger.info(\n '(%s) Funds withdrawn from portfolio \"%s\" '\n '- Debit: %0.2f, Balance: %0.2f' % (\n self.current_dt.strftime(settings.LOGGING[\"DATE_FORMAT\"]),\n self.portfolio_id, round(amount, 2),\n round(self.cash, 2)\n )\n )", "def post_cancel_withdraw(self, withdraw_id: 'int') -> int:\n params = {\n \"withdraw-id\": withdraw_id\n }\n\n from huobi.service.wallet.post_cancel_withdraw import PostCancelWithdrawService\n return PostCancelWithdrawService(params).request(**self.__kwargs)", "def withdraw(self, amount):\n message = self.account.withdraw(float(amount))\n if message:\n return message\n else:\n self.myView.displayAccount()\n return \"success\"", "def test_withdraw_interactive_no_txid(client):\n usd = Asset.objects.create(\n code=\"USD\",\n issuer=Keypair.random().public_key,\n sep24_enabled=True,\n withdrawal_enabled=True,\n distribution_seed=Keypair.random().secret,\n )\n response = client.get(f\"{WEBAPP_PATH}?asset_code={usd.code}\", follow=True)\n assert response.status_code == 400\n assert \"transaction_id\" in response.content.decode()", "def deposit(self, amount=None):\n if amount is None:\n amount = random() * 1000\n acct_info = {\"account_num\": choice(TRANSACTION_ACCT_LIST),\n \"routing_num\":\"111111111\"}\n transaction = {\"account\": json.dumps(acct_info),\n \"amount\": amount,\n \"uuid\": generate_username()}\n with self.client.post(\"/deposit\",\n data=transaction,\n catch_response=True) as response:\n if response.url is None or \"failed\" in response.url:\n response.failure(\"deposit failed\")", "def withdraw(self, amount):\n self.deposit(-amount)", "def request_to_burst(self, t: TransicationRecord):\n conf = t.burst_config\n id = t.id_int\n requests = max(\n conf.max_burst_output,\n conf.max_burst_output / t.supply_rate.mean()\n )\n requests = round(requests)\n if requests > self.can_trans:\n return\n self.requests[id] = requests\n self.expect_supply[id] = conf.max_burst_output\n self.inventory += conf.max_burst_output / t.src_type.unit_cost\n self.burst_count[id] -= 1\n self.buy_next_time[id] = self.current_week + \\\n conf.burst_dura // conf.burst_supply_count\n self.can_trans -= self.requests[id]\n if self.burst_count[id] <= 0:\n self.burst_count[id] = conf.burst_supply_count\n self.buy_next_time[id] = self.current_week + conf.cooling_dura", "def asset_transfer(sender, private_key, receiver, amount, index):\n params = ALGODCLIENT.suggested_params()\n txn = AssetTransferTxn(sender, params, receiver, amount, index)\n signed_tx = txn.sign(private_key)\n ALGODCLIENT.send_transaction(signed_tx)\n return True", "def withdraw(self,withdrawal_money):\r\n if self.balance < withdrawal_money:\r\n print(\"Funds are insufficient\")\r\n \r\n else:\r\n self.balance -= withdrawal_money\r\n print(\"Withdrawal Accepted\")", "def _request(self, account, method, params, key):\n params_bytes = py23_bytes(json.dumps(params), self.ENCODING)\n params_enc = base64.b64encode(params_bytes).decode(self.ENCODING)\n timestamp = datetime.utcnow().strftime(self.TIMEFORMAT)[:-3] + \"Z\"\n nonce_int = random.getrandbits(64)\n nonce_bytes = struct.pack('>Q', nonce_int) # 64bit ULL, big endian\n nonce_str = \"%016x\" % (nonce_int)\n\n message = self.prehash_message(timestamp, account, method,\n params_enc, nonce_bytes)\n signature = sign_message(message, key)\n signature_hex = hexlify(signature).decode(self.ENCODING)\n\n request = 
{\n \"jsonrpc\": \"2.0\",\n \"id\": self.id,\n \"method\": method,\n \"params\": {\n \"__signed\": {\n \"account\": account,\n \"nonce\": nonce_str,\n \"params\": params_enc,\n \"signatures\": [signature_hex],\n \"timestamp\": timestamp\n }\n }\n }\n r = requests.post(self.url, data=json.dumps(request))\n self.id += 1\n return r.json()", "def add_assets(char_code, name, capital, interest):\n try:\n capital = float(capital)\n interest = float(interest)\n except:\n redirect(url_for(\"page_not_found\"))\n if name in app.bank:\n abort(403)\n app.bank[name] = Asset(name, char_code, capital, interest)\n return f\"Asset '{name}' was successfully added\", 200", "async def update_account_balance():\n\n try:\n balance = App.client.get_asset_balance(asset=App.config[\"base_asset\"])\n except Exception as e:\n log.error(f\"Binance exception in 'get_asset_balance' {e}\")\n return\n\n App.base_quantity = Decimal(balance.get(\"free\", \"0.00000000\")) # BTC\n\n try:\n balance = App.client.get_asset_balance(asset=App.config[\"quote_asset\"])\n except Exception as e:\n log.error(f\"Binance exception in 'get_asset_balance' {e}\")\n return\n\n App.quote_quantity = Decimal(balance.get(\"free\", \"0.00000000\")) # USD\n\n pass", "def buy():\n # User reached route via POST (as by submitting a form via POST)\n if request.method == \"POST\":\n\n # Ensure symbol was submitted\n if not request.form.get(\"symbol\"):\n return apology(\"must provide symbol\", 403)\n \n # Creates dict\n symbol_info = lookup(request.form.get(\"symbol\"))\n \n # Checks that symbol exists\n if symbol_info == None:\n return apology(\"Invalid Symbol\", 403)\n \n # Ensure number of shares was submitted\n if not request.form.get(\"shares\"):\n return apology(\"must provide number of shares\", 403)\n \n # Ensure shares is valid\n try:\n if not int(request.form.get(\"shares\")) > 0:\n return apology(\"invalid value\", 403)\n except ValueError:\n return apology(\"invalid value\", 403)\n \n # Ensure there's enough money to buy share\n user_money = db.execute(\"SELECT cash FROM users WHERE id=:userid\", userid=session[\"user_id\"])\n cash = float(user_money[0][\"cash\"])\n if cash < float(symbol_info[\"price\"]) * float(request.form.get(\"shares\")):\n return apology(\"Not enough money\", 403)\n \n # Update user\n updated_money = cash - (float(symbol_info[\"price\"]) * float(request.form.get(\"shares\")))\n db.execute(\"UPDATE users SET cash = :updated WHERE id=:usid\", updated=updated_money, usid=session[\"user_id\"])\n \n # Update shares table\n symbol_dicts = db.execute(\"SELECT share FROM shares WHERE user_id = :usid\", usid=session[\"user_id\"])\n exist = 0\n for i in range(len(symbol_dicts)):\n if symbol_dicts[i][\"share\"].upper() == request.form.get(\"symbol\").upper():\n exist = 1\n break\n \n if exist == 0:\n db.execute(\"INSERT INTO shares (user_id, share, share_count) VALUES (:usid, :symbol, :count)\", usid=session[\"user_id\"], symbol=request.form.get(\"symbol\").upper(), count=int(request.form.get(\"shares\")))\n else:\n db.execute(\"UPDATE shares SET share_count = share_count + :count WHERE share = :symbol AND user_id = :usid\", count=int(request.form.get(\"shares\")), symbol=request.form.get(\"symbol\").upper(), usid=session[\"user_id\"])\n \n # Record transaction\n db.execute(\"INSERT INTO history (user_id, symbol, shares, time, price) VALUES (:usid, :symbol, :shares, :time, :price)\", usid=session[\"user_id\"], symbol=symbol_info[\"symbol\"], shares=request.form.get(\"shares\"), time=str(db.execute(\"SELECT 
CURRENT_TIMESTAMP\")[0][\"CURRENT_TIMESTAMP\"]), price=str(symbol_info[\"price\"]))\n \n return redirect(\"/\")\n\n # User reached route via GET (as by clicking a link or via redirect)\n else:\n return render_template(\"buy.html\")", "def save(self, *args, **kwargs):\n wallet = self.wallet.withdraw(self.value)\n super(Payment, self).save(*args, **kwargs)", "def ConcludeTransaction(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def bank_save_money(stub, request):\n # print(\"In method bank_withdraw_money:\")\n\n try:\n result = stub.Save(request)\n except DatabaseOptFailure:\n return \"IO_Failure\"\n return result", "def withdrawMoney(self, withdraw_amount):\r\n self.balance_amt = self.balance_amt - withdraw_amount", "def withdraw_funds(self, cause_id):\n # Verify if the cause_id exists\n sp.verify(self.data.causes.contains(cause_id))\n # Verify if the sender/reciever is the owner of the cause\n sp.verify(self.data.causes[cause_id].owner == sp.sender)\n # Transfer the collected funds\n sp.send(self.data.causes[cause_id].owner, self.data.causes[cause_id].balance)\n # Reset the amount as it's withdrawn now.\n self.data.causes[cause_id].balance = sp.mutez(0)", "def test_withdraw_success_with_memo(client):\n usd = Asset.objects.create(\n code=\"USD\",\n issuer=Keypair.random().public_key,\n sep24_enabled=True,\n withdrawal_enabled=True,\n distribution_seed=Keypair.random().secret,\n )\n response = client.post(\n WITHDRAW_PATH, {\"asset_code\": usd.code, \"amount\": \"100\"}, follow=True\n )\n content = response.json()\n assert content[\"type\"] == \"interactive_customer_info_needed\"\n assert \"100\" in content[\"url\"]\n assert content.get(\"id\")\n\n t = Transaction.objects.filter(id=content.get(\"id\")).first()\n assert t\n assert t.stellar_account == \"test source address\"\n assert t.account_memo is TEST_ACCOUNT_MEMO\n assert t.muxed_account is None\n assert t.asset.code == usd.code\n assert t.protocol == Transaction.PROTOCOL.sep24\n assert t.kind == Transaction.KIND.withdrawal\n assert t.status == Transaction.STATUS.incomplete\n assert t.memo_type == Transaction.MEMO_TYPES.hash\n assert t.from_address is None", "def withdraw(self, amount):\n self.balance -= amount", "def awaiting_payment(self):", "def request_action(self, request, data):\n\n response = self.oauth.post(url=f'{self.base_url}/json/{request}', data=data)\n return response.json()", "def withdrawMoney(self, withdraw_amount):\r\n if (self.balance_amt - withdraw_amount) > 0:\r\n self.balance_amt = self.balance_amt - withdraw_amount\r\n else:\r\n raise WithdrawError #Exception('Overdraft withdrawal Error. 
Cannot withdraw more than amount in account balance: {}'.format(self.balance_amt))\r", "def submit_transfer_request(self):\n submit_button = BaseElement(self.driver, locators.SCHEDULE_TRANSFER_BUTTON)\n submit_button.click()\n BaseElement(self.driver, locators.RECEIPT_STATUS_TEXT).wait_until_displayed()", "def deposit_money():\n print(\"\\n\")\n print(messages.account_credentials)\n u_id = pyip.inputInt(\"Your Id: \", greaterThan=0)\n password = pyip.inputPassword(\"Your Password: \")\n\n credentials = {\"id\":u_id, \"password\":password}\n result = BankOperationsBackend.deposit_money(credentials)\n start_again() if result else BankOperationsUi.deposit_money()", "def account(request: Request) -> Dict:\n # Get account\n account_id: int = request.matchdict.get(\"account_id\")\n account_obj: Optional[Account] = get_account_by_id(\n session=request.dbsession,\n account_id=account_id,\n )\n # TODO: Check access\n\n\n return {\n \"account\": account_obj,\n }", "def post_order(access_token,json_request):\r\n orders_url = 'https://api.tdameritrade.com/v1/accounts/{}/orders'.format(TDAuth_Info.account_num)\r\n\r\n #The header for placing in order needs to define the input type (json)\r\n headers = {'Authorization':'Bearer {}'.format(access_token),\r\n 'Content-Type':'application/json'}\r\n\r\n #Post the order on TD Ameritrade and check the response\r\n post_order_response=requests.post(url=orders_url,headers=headers,json=json_request)\r\n\r\n return post_order_response", "def make_deposit(conn, userid, acctype, amount):\n print('\\n\\nUpdating account user:{}, type:{}, amount:{}'.format(userid, acctype, amount))\n with conn.cursor() as curs:\n res = curs.execute(\"\"\"UPDATE accounts\n SET balance=%s\n WHERE owner_id=%s AND type=%s\"\"\", (amount, userid, acctype))\n if res is not None:\n print(res)", "def test_make_withdrawal(client):\n request_test = client.put('/checks_and_balances/api/v1.0/transaction',json=\n {\n \"account_id\": 1,\n \"transaction\": \"withdrawal\",\n \"amount\": 10.03\n })\n\n json_data = request_test.get_json(force=True)\n\n wanted_result = {\"Withdrawal Done. 
New balance\": \"90.47\"}\n\n assert json_data == wanted_result", "def withdraw_money(c_id, amount):\n return ar.withdraw_money(c_id, amount)", "def put(self, account=None, user=None, account_id=None):\n return super().put()", "def post(self, request, organization):\n if not self.has_feature(request, organization):\n return Response(status=404)\n\n project = self.get_project(request, organization)\n\n base_filter = {\"organization\": organization, \"owner\": request.user}\n\n with transaction.atomic():\n serializer = KeyTransactionSerializer(data=request.data, context=base_filter)\n if serializer.is_valid():\n data = serializer.validated_data\n base_filter[\"transaction\"] = data[\"transaction\"]\n base_filter[\"project\"] = project\n\n if KeyTransaction.objects.filter(**base_filter).exists():\n return Response(status=204)\n\n try:\n KeyTransaction.objects.create(**base_filter)\n return Response(status=201)\n # Even though we tried to avoid it, this KeyTransaction was created already\n except IntegrityError:\n return Response(status=204)\n return Response(serializer.errors, status=400)", "def create_buyback_account(name, asset, buyback_markets):\n if inst.wallet.locked():\n inst.wallet.unlock(TEST_WALLET_PWD)\n try:\n ## private key of faucet, which is the referrer\n inst.wallet.addPrivateKey(private)\n except Exception as e:\n pass\n \n account = cybex.Account(inst.const['master_account'])\n\n kwargs = {\n 'fee':{\"amount\": 100, \"asset_id\": \"1.3.0\"},\n 'registrar':account[\"id\"],\n 'referrer':account[\"id\"],\n 'referrer_percent':1,\n 'name':name,\n 'owner': {'account_auths': [['1.2.3', 1]],\n 'key_auths': [],\n \"address_auths\": [],\n 'weight_threshold': 1},\n 'active': {'account_auths': [['1.2.3', 1]],\n 'key_auths': [],\n \"address_auths\": [],\n 'weight_threshold': 1},\n \"options\": {\"memo_key\": 'CYB8fEEQ19N4LTVpg4wqX6B97ouDaNDDFK5fWbuaoNy4HPF9qnq4K',\n \"voting_account\": account[\"id\"],\n \"num_witness\": 0,\n \"num_committee\": 0,\n \"votes\": [],\n \"extensions\": []\n },\n \"extensions\": {'buyback_options': {'asset_to_buy': asset_id, 'asset_to_buy_issuer': account['id'], 'markets': buyback_markets}},\n \"prefix\": \"CYB\"\n }\n\n op = operations.Account_create(**kwargs)\n ops=[]\n ops.append(op)\n inst.finalizeOp(ops, account, \"active\")\n return True", "def rule_withdraw(self, st_acct, st_amount, st_idx):\n if self.active_token_ids.get(st_acct):\n # choose from the caller's valid NFT token IDs, if there are any\n idx = int(st_idx * len(self.active_token_ids[st_acct]))\n token_id = self.active_token_ids[st_acct][idx]\n else:\n # if the caller does not own any NFTs, choose from any token ID\n token_ids = self._all_token_ids()\n idx = int(st_idx * len(token_ids))\n token_id = token_ids[idx]\n\n amount = int(st_amount * 10 ** 18)\n if self.active_token_ids.get(st_acct):\n # when the action is possible, don't exceed the max underlying balance\n balance = self.swap.token_info(token_id)[\"underlying_balance\"]\n amount = min(amount, balance)\n\n if self.active_token_ids.get(st_acct):\n self.swap.withdraw(token_id, amount, {\"from\": st_acct})\n if balance == amount:\n self.active_token_ids[st_acct].remove(token_id)\n self.used_token_ids.append(token_id)\n else:\n with brownie.reverts():\n self.swap.withdraw(token_id, amount, {\"from\": st_acct})", "def put_account(self, account):\n \n pass", "def update_account(row, account):\n if row['LAST_UPDATED_FROM_PAYGOV']:\n updated_at = datetime_from(row['LAST_UPDATED_FROM_PAYGOV'])\n 
account.donations.filter(time__lte=updated_at).delete()\n if account.category == Account.PROJECT:\n set_balances(row, account)\n account.save()", "def transfer_money(request):\n source = Account.objects.get(pk=int(request.POST.get('source-id', False)))\n destination = Account.objects.get(pk=int(request.POST.get('destination-id', False)))\n amount = float(request.POST.get('amount', False))\n enough_cash = source.available_cash >= amount\n if enough_cash:\n source.available_cash -= amount\n source.save()\n destination.available_cash += amount\n destination.save()\n messages.success(request, 'OK 200: Transfer successfully executed.')\n else:\n messages.error(request, f'Error 400: Tried to transfer {amount} from {source.name}, but only had {source.available_cash} available.')\n \n transaction = Transaction(description=f\"Transfer from {source.name} to {destination.name}.\", success=enough_cash, cash_amount=amount, source_account=source, \n destination_account=destination)\n transaction.save()\n\n return redirect('overview')", "def buy(self, key):\n\t\turl = \"https://habitica.com/api/v3/user/buy/\" + str(key)\n\t\treturn(postUrl(url, self.credentials))", "def suspend_acct(request):\r\n params = request.params\r\n user = request.user\r\n\r\n # we need to get the user from the email\r\n email = params.get('email', None)\r\n\r\n if email is None and hasattr(request, 'json_body'):\r\n # try the json body\r\n email = request.json_body.get('email', None)\r\n\r\n if user is None and email is None:\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'error': \"Please submit an email address\",\r\n })\r\n\r\n if user is None and email is not None:\r\n user = UserMgr.get(email=email)\r\n\r\n if user is None:\r\n request.response.status_int = 404\r\n return _api_response(request, {\r\n 'error': \"Please submit a valid address\",\r\n 'email': email\r\n })\r\n\r\n # check if we've already gotten an activation for this user\r\n if user.activation is not None:\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'error': \"\"\"You've already marked your account for reactivation.\r\nPlease check your email for the reactivation link. Make sure to\r\ncheck your spam folder.\"\"\",\r\n 'username': user.username,\r\n })\r\n\r\n # mark them for reactivation\r\n user.reactivate(u\"FORGOTTEN\")\r\n\r\n # log it\r\n AuthLog.reactivate(user.username)\r\n\r\n # and then send an email notification\r\n # @todo the email side of things\r\n settings = request.registry.settings\r\n msg = ReactivateMsg(user.email,\r\n \"Activate your Bookie account\",\r\n settings)\r\n\r\n msg.send({\r\n 'url': request.route_url(\r\n 'reset',\r\n username=user.username,\r\n reset_key=user.activation.code),\r\n 'username': user.username\r\n })\r\n\r\n return _api_response(request, {\r\n 'message': \"\"\"Your account has been marked for reactivation. 
Please\r\n check your email for instructions to reset your\r\n password\"\"\",\r\n })", "def test_submit_asset_to_submission_service(self):\n pass", "def test_withdraw_amount_view(self):\n self.account.current_balance = 100000\n self.account.save()\n\n amount = random.randint(10, 100000)\n client.force_authenticate(user=self.account.user, token=self.token)\n url = reverse('customer_withdraw')\n request = client.post(url, {'amount': amount}, format='json')\n self.account.refresh_from_db()\n self.assertEqual(100000-amount, self.account.current_balance)", "def Commit(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def cash_withdrawal(amt):\r\n global withdraw_money\r\n global balance_money\r\n withdraw_money = amt\r\n print(\"Amout enetered : \", withdraw_money)\r\n balance_money = balance_money - withdraw_money\r\n print(\"Withdraw success\")", "def deposit(self, amount, budget):\r\n if budget != \"Total Balance\":\r\n assert budget in self.budgets, \"Specified budget doesn't exist\"\r\n self.budgets[budget] += float(amount)\r\n self.balance += float(amount)", "def release_ownership_access_approval(\n request: AuthenticatedHttpRequest,\n *,\n approval_request_pk: int,\n entity: Literal[\"importer\", \"exporter\"],\n) -> HttpResponse:\n\n with transaction.atomic():\n if entity == \"importer\":\n approval_request = get_object_or_404(\n ImporterApprovalRequest.objects.select_for_update(), pk=approval_request_pk\n )\n else:\n approval_request = get_object_or_404(\n ExporterApprovalRequest.objects.select_for_update(), pk=approval_request_pk\n )\n\n case_progress.approval_request_in_processing(approval_request)\n\n if approval_request.requested_from != request.user:\n raise PermissionDenied\n\n org = approval_request.access_request.get_specific_model().link\n if not can_user_manage_org_contacts(request.user, org):\n raise PermissionDenied\n\n approval_request.requested_from = None\n approval_request.save()\n\n return redirect(reverse(\"workbasket\"))", "def account_withdraw(self, currency_symbol, quantity, crypto_address, paymentid):\n return self.post('withdrawals', {\n 'currencySymbol': currency_symbol,\n 'quantity': quantity,\n 'cryptoAddress': crypto_address,\n 'cryptoAddressTag': paymentid\n }, auth=True)", "def buy():\n if request.method == \"POST\":\n\n # Ensure symbol was submitted\n if not request.form.get(\"symbol\"):\n return apology(\"must provide symbol\", 400)\n\n # Ensure shares was submitted\n elif not request.form.get(\"shares\"):\n return apology(\"must provide shares\", 400)\n\n if not request.form.get(\"shares\").isdigit():\n return apology(\"must be integer\",400)\n\n elif int(request.form.get(\"shares\"))<1 :\n return apology(\"must be positive integer\", 400)\n\n elif lookup(request.form.get(\"symbol\"))==None:\n return apology(\"Must be a valid symbol\",400)\n\n #ensure money>price\n quote=lookup(request.form.get(\"symbol\"))\n shares=request.form.get(\"shares\")\n cash=db.execute(\"SELECT cash FROM users WHERE id=?\",session[\"user_id\"])\n if cash[0][\"cash\"]<int(quote[\"price\"])*int(shares):\n return apology(\"You can't affort this/these\",400)\n\n #BUY, STORE DATA IN REPOSITORY AND RECORD\n\n #record this transaction\n db.execute(\"INSERT INTO record(userID,transactions,symbol,price,t1) VALUES(?,?,?,?,strftime('%Y-%m-%d %H:%M:%S','now'))\",session[\"user_id\"],int(shares),quote[\"symbol\"],float(quote[\"price\"]))\n\n #deduct the cash\n total=int(quote[\"price\"])*int(shares)\n db.execute(\"UPDATE users SET cash=cash- (?) 
WHERE id=?\",total,session[\"user_id\"])\n\n return redirect(\"/\")\n\n else:\n return render_template(\"buy.html\")", "def submit(request, session, **kwargs):\n\n from ..models import (\n FacilityTransaction,\n Allocation,\n FollowupRequest,\n Instrument,\n )\n\n instrument = (\n Instrument.query_records_accessible_by(request.requester)\n .join(Allocation)\n .join(FollowupRequest)\n .filter(FollowupRequest.id == request.id)\n .first()\n )\n\n name = request.obj.tns_name\n if name is None:\n request.status = 'No TNS name'\n else:\n try:\n lc = Table.read(\n f\"{lightcurve_url}/lc_{name}_cleaned\",\n format='ascii',\n header_start=1,\n )\n\n if 'BTJD' not in list(lc.columns):\n request.status = f\"TESS alert {name} could not be ingested: {lightcurve_url}/lc_{name}_cleaned\"\n else:\n IOLoop.current().run_in_executor(\n None,\n lambda: commit_photometry(\n lc, request.id, instrument.id, request.requester.id\n ),\n )\n\n except FileNotFoundError:\n request.status = f\"TESS alert {name} not found.\"\n except Exception:\n request.status = f\"TESS alert {name} could not be ingested: {lightcurve_url}/lc_{name}_cleaned\"\n\n transaction = FacilityTransaction(\n request=None,\n response=None,\n followup_request=request,\n initiator_id=request.last_modified_by_id,\n )\n\n session.add(transaction)", "def submitRequest(self, json):\n uID = json.get('uID')\n request = True\n approval = \"Wait\"\n if uID:\n\n RequestsDAO().insertRequest(uID, request, approval)\n mapped_result = self.buildRequestToDict(uID, request, approval)\n return jsonify(TURN=mapped_result), 201\n\n else:\n return jsonify(Error=\"Unexpected attributes in post request\"), 400", "def withdraw(self, amount):\n self.withdrw = amount\n \n if (self.balance-self.withdrw) < 0:\n self.balance = self.balance - 5 - self.withdrw\n self.fee += 5\n else:\n self.balance -= self.withdrw", "def register_withdraw(self, agent, value):\n if value <= agent.euro_wallet - agent.staged_euro and self.model.schedule.steps - agent.last_withdraw_tick >= 2:\n action = {\n 'unique_id': agent.unique_id,\n 'action': 'withdraw',\n 'value': value\n }\n self.action_register.append(action)\n agent.staged_euro += value", "def commit_purchase(self, purchaseid, transferid):\n return self.request(\n 'put',\n safeformat('purchases/{:int}', purchaseid),\n json.dumps({'transferid': transferid})\n )", "def _setup_account_general(insid, start_date, rate_dict, counterparty,\n prf_name, account_name, reinvest,\n funding_instype, external_id=None):\n calendar = acm.FCalendar['ZAR Johannesburg']\n next_bus_day = calendar.AdjustBankingDays(acm.Time.DateToday(), 1)\n day_after_start_date = calendar.AdjustBankingDays(start_date, 1)\n # Make sure that two conditions are met:\n # 1. End date doesn't lie in the past.\n # 2. Start date predates end date.\n end_date = max(next_bus_day, day_after_start_date)\n\n deposit = acm.FInstrument[insid]\n if deposit:\n LOGGER.info(\"The instrument {} already exists\".format(insid))\n if deposit.ExternalId1():\n LOGGER.info(\"Updating the external id from {} to {}\".format(\n deposit.ExternalId1(), external_id))\n deposit.ExternalId1(external_id)\n deposit.Commit()\n return None\n\n LOGGER.info('Creating %s...', insid)\n acm.BeginTransaction()\n try:\n # Instrument\n deposit = acm.FDeposit()\n deposit.Currency(CURRENCY)\n deposit.Name(insid)\n deposit.DayCountMethod(DAY_COUNT_METHOD)\n deposit.SpotBankingDaysOffset(0)\n # this sets the exp_time, which has a higher priority over exp_day,\n # which is set when calling re_rate(...) from ael. 
If the exp_time\n # is not set, acm (trading manager) uses the exp_day.\n # deposit.ExpiryDate(end_date)\n deposit.ContractSize(1)\n deposit.Quotation('Clean')\n deposit.QuoteType('Clean')\n deposit.OpenEnd('Open End')\n deposit.MinimumPiece(MINIMUM_PIECE)\n deposit.PayOffsetMethod('Business Days')\n if external_id:\n deposit.ExternalId1(external_id)\n\n # Leg\n leg = deposit.CreateLeg(1)\n leg.LegType('Call Fixed Adjustable')\n leg.Decimals(11)\n leg.StartDate(start_date)\n leg.EndDate(end_date)\n leg.EndPeriodUnit('Days')\n leg.DayCountMethod(DAY_COUNT_METHOD)\n if rate_dict['type'] == 'fixed':\n leg.FixedRate(rate_dict['rate'])\n leg.ResetDayOffset(0)\n leg.ResetType('Weighted')\n leg.ResetPeriod('1d')\n leg.ResetDayMethod('Following')\n leg.Currency(CURRENCY)\n leg.NominalFactor(1)\n leg.Rounding('Normal')\n leg.RollingPeriod('1m')\n leg.RollingPeriodBase(acm.Time.FirstDayOfMonth(acm.Time.DateAddDelta(\n start_date, 0, 1, 0)))\n leg.PayDayMethod('Following')\n leg.PayCalendar(calendar)\n leg.FixedCoupon(True)\n leg.NominalAtEnd(True)\n leg.FloatRateFactor(1)\n leg.FixedCoupon(True)\n leg.StartPeriod('-1d')\n leg.Reinvest(reinvest)\n if rate_dict['type'] == 'float':\n deposit.AddInfoValue('CallFloatRef', rate_dict['ref'])\n deposit.AddInfoValue('CallFloatSpread', rate_dict['spread'])\n deposit.Commit() # Commits both the instrument and the leg.\n\n # Trade\n trade = acm.FTrade()\n trade.Instrument(deposit)\n trade.Counterparty(counterparty)\n trade.Acquirer('PRIME SERVICES DESK')\n trade.AcquireDay(start_date)\n trade.ValueDay(start_date)\n trade.Quantity(1)\n trade.TradeTime(start_date)\n trade.Currency(CURRENCY)\n trade.Price(0)\n trade.Portfolio(acm.FPhysicalPortfolio[prf_name])\n trade.Type('Normal')\n trade.TradeTime(start_date)\n trade.Status('Simulated') # To allow for delete in case of rollback.\n trade.AddInfoValue('Funding Instype', funding_instype)\n trade.AddInfoValue('Call_Region', 'BB SANDTON')\n trade.AddInfoValue('Account_Name', account_name)\n trade.Commit()\n \n acm.CommitTransaction()\n except Exception as e:\n acm.AbortTransaction()\n LOGGER.exception(\"Could not create call/loan account {}\".format(insid))\n raise e\n\n deposit = acm.FInstrument[insid]\n if deposit:\n trades = deposit.Trades()\n if trades:\n LOGGER.info('The following trade has been created:{}\\n'.format(trades[0].Oid()))\n else:\n raise RuntimeError('Could not create trade!')\n else:\n raise RuntimeError('Could not create deposit!')", "def req_qry_trading_account(self):\n self.invoke_log('on_invoke_req_qry_trading_account')\n if self._xapi:\n func = self._xapi.X_ReqQryTradingAccount\n func.restype = None\n func.argtypes = [c_void_p, c_void_p]\n func(self.p_fun, self.p_api)", "def bankerInvest(account, ongAmount):\n # RequireWitness(account)\n if CheckWitness(account) == False:\n # \"Check witness failed!\",\n Notify([\"BankerInvestErr\", 101])\n return False\n\n currentRound = getCurrentRound()\n\n # Require(getRoundGameStatus(currentRound) == STATUS_ON)\n if getRoundGameStatus(currentRound) != STATUS_ON:\n # Please wait for the admin to set initial investment!\n Notify([\"BankerInvestErr\", 102])\n return False\n\n # Require(_transferONG(account, ContractAddress, ongAmount))\n res = _transferONG(account, ContractAddress, ongAmount)\n if res == False:\n # Transfer ONG failed!\n Notify([\"BankerInvestErr\", 103])\n return False\n # try to update banker list\n bankersListKey = concatKey(concatKey(ROUND_PREFIX, currentRound), BANKERS_LIST_KEY)\n bankersListInfo = Get(GetContext(), bankersListKey)\n 
bankersList = []\n if bankersListInfo:\n bankersList = Deserialize(bankersListInfo)\n if checkInBankerList(account, bankersList):\n bankersList.append(account)\n bankersListInfo = Serialize(bankersList)\n Put(GetContext(), bankersListKey, bankersListInfo)\n else:\n bankersList.append(account)\n bankersListInfo = Serialize(bankersList)\n Put(GetContext(), bankersListKey, bankersListInfo)\n\n dividendForBankersPercentage = getDividendForBankersPercentage()\n runningVaultPercentage = getRunningVaultPercentage()\n\n # add dividend to all the bankers, 48%\n dividend = Div(Mul(ongAmount, dividendForBankersPercentage), 100)\n\n # update profit per investment for bankers\n bankersInvestment = getBankersInvestment(currentRound)\n if bankersInvestment != 0:\n profitPerInvestmentForBankersToBeAdd = Div(Mul(dividend, MagnitudeForProfitPerSth), bankersInvestment)\n Put(GetContext(), concatKey(concatKey(ROUND_PREFIX, currentRound), PROFIT_PER_INVESTMENT_FOR_BANKERS_KEY), Add(profitPerInvestmentForBankersToBeAdd, getProfitPerInvestmentForBankers(currentRound)))\n else:\n # there will be no dividend\n dividend = 0\n # add running vault, 50%\n runningVaultToBeAdd = Div(Mul(ongAmount, runningVaultPercentage), 100)\n Put(GetContext(), concatKey(concatKey(ROUND_PREFIX, currentRound), RUNNING_VAULT_KEY), Add(getRunningVault(currentRound), runningVaultToBeAdd))\n\n # add running vault balance\n Put(GetContext(), concatKey(concatKey(ROUND_PREFIX, currentRound), concatKey(BANKER_RUNING_VAULT_BALANCE_PREFIX, account)), Add(getBankerBalanceInRunVault(currentRound, account), runningVaultToBeAdd))\n # update real time running vault\n Put(GetContext(), concatKey(concatKey(ROUND_PREFIX, currentRound), REAL_TIME_RUNNING_VAULT), Add(getRealTimeRunningVault(currentRound), runningVaultToBeAdd))\n\n # treat the rest as the commission fee to admin, 2%\n restOngAmount = Sub(Sub(ongAmount, dividend), runningVaultToBeAdd)\n # update the commission fee\n Put(GetContext(), COMMISSION_KEY, Add(getCommission(), restOngAmount))\n\n # update the account (or the banker) 's dividend\n updateBankerDividend(account)\n # update account's investment\n bankerKey = concatKey(concatKey(ROUND_PREFIX, currentRound), concatKey(BANKER_INVEST_BALANCE_PREFIX, account))\n Put(GetContext(), bankerKey, Add(getBankerInvestment(currentRound, account), ongAmount))\n\n # update total bankers' investment\n Put(GetContext(), concatKey(concatKey(ROUND_PREFIX, currentRound), BANKERS_INVESTMENT_KEY), Add(bankersInvestment, ongAmount))\n\n # update total ong amount\n Put(GetContext(), TOTAL_ONG_KEY, Add(getTotalONG(), ongAmount))\n\n Notify([\"bankerInvest\", currentRound, account, ongAmount])\n\n return True", "def pay():\n data = request.get_json()\n print(data)\n intent = stripe.PaymentIntent.create(amount=data['amnt'], currency='usd', metadata={'integration_check': 'accept_a_payment'})\n return jsonify(client_secret=intent.client_secret)", "def withdraws(account):\r\n limit = 500\r\n print(\"Your account balance is $\", format(account, \"0.2f\"), sep='')\r\n print(\"Your withdraw limit is $\", format(limit, \"0.2f\"), sep='')\r\n while True:\r\n try:\r\n withdraw_amount = int(input(\"Enter withdraw amount. $\"))\r\n break\r\n except ValueError:\r\n print(\"Error. 
Must be a whole number.\")\r\n # Checking if the customer has sufficient funds/over daily limit\r\n while withdraw_amount > account or withdraw_amount > limit:\r\n print(\"Insufficient funds or daily limit exceeded.\")\r\n while True:\r\n try:\r\n withdraw_amount = int(\r\n input(\"Enter withdraw amount. $\"))\r\n break\r\n except ValueError:\r\n print(\"Error. Must be a whole number.\")\r\n account -= withdraw_amount\r\n limit -= withdraw_amount\r\n print(\"Your new balance is $\", format(account, \"0.2f\"), sep='')\r\n print(\"Your new limit is $\", format(limit, \"0.2f\"), sep='')", "def test_withdraw_muxed_account_success(client):\n usd = Asset.objects.create(\n code=\"USD\",\n issuer=Keypair.random().public_key,\n sep24_enabled=True,\n withdrawal_enabled=True,\n distribution_seed=Keypair.random().secret,\n )\n response = client.post(\n WITHDRAW_PATH,\n {\"asset_code\": usd.code, \"amount\": \"100\", \"account\": TEST_MUXED_ACCOUNT},\n follow=True,\n )\n content = response.json()\n assert content[\"type\"] == \"interactive_customer_info_needed\"\n assert \"100\" in content[\"url\"]\n assert content.get(\"id\")\n\n t = Transaction.objects.filter(id=content.get(\"id\")).first()\n assert t\n assert t.stellar_account == MuxedAccount.from_account(TEST_MUXED_ACCOUNT).account_id\n assert t.muxed_account == TEST_MUXED_ACCOUNT\n assert t.account_memo is None\n assert t.asset.code == usd.code\n assert t.protocol == Transaction.PROTOCOL.sep24\n assert t.kind == Transaction.KIND.withdrawal\n assert t.status == Transaction.STATUS.incomplete\n assert t.memo_type == Transaction.MEMO_TYPES.hash\n assert t.from_address == TEST_MUXED_ACCOUNT", "def cryptocurrency_deposit_request(self, walletId, currency):\n return", "def withdraw(self, amount):\n self.transactions += [('withdraw', amount)]\n if amount > self.balance:\n return 'Insufficient funds'\n self.balance = self.balance - amount\n return self.balance", "def buy():\n\n if request.method == \"GET\":\n return render_template(\"buy.html\")\n\n # User reached route via POST (as by submitting a form via POST)\n shares = int(request.form.get(\"shares\"))\n symbol = request.form.get(\"symbol\")\n quote = lookup(symbol)\n\n if not quote:\n return apology(\"invalid symbol\", 404)\n\n price = quote['price']\n value = round(shares * price, 2)\n user = Users.query.get(session.get(\"user_id\"))\n\n if value > user.cash:\n return apology(\"You don't have enough cash\", 406)\n\n record = Records(symbol=quote['symbol'], company_name=quote['name'],\n transact_type=\"buy\", shares=shares, price=price, user_id=user.id)\n user.cash -= value\n db.session.add(record)\n db.session.commit()\n\n flash(\"Bought\")\n return redirect(url_for('index'))" ]
[ "0.6766613", "0.6515961", "0.6493949", "0.6168783", "0.61496735", "0.60446924", "0.58922", "0.5787488", "0.5744847", "0.56431615", "0.5603435", "0.54944474", "0.5494445", "0.548808", "0.547827", "0.5470316", "0.54234326", "0.5407679", "0.53537494", "0.5329677", "0.5327134", "0.5307443", "0.5273772", "0.5258725", "0.52437454", "0.5239289", "0.5239228", "0.52338266", "0.52070624", "0.5205387", "0.51722544", "0.5161111", "0.5158823", "0.5155472", "0.515211", "0.51431507", "0.5122938", "0.51226246", "0.5112168", "0.5112023", "0.50933254", "0.5079848", "0.506424", "0.5061925", "0.50467193", "0.50181323", "0.5015127", "0.5008592", "0.50040156", "0.49919522", "0.49772903", "0.49652952", "0.4965143", "0.49604338", "0.49533707", "0.4944121", "0.49434227", "0.49427244", "0.49380815", "0.49360183", "0.49341697", "0.49306288", "0.4929335", "0.49259964", "0.49238896", "0.4896245", "0.4896164", "0.4881585", "0.4880004", "0.48648843", "0.48421308", "0.4839136", "0.48364848", "0.48352164", "0.4833677", "0.48309493", "0.4830897", "0.48273855", "0.48223093", "0.4815518", "0.48135245", "0.48132718", "0.48095316", "0.48068076", "0.48022908", "0.48011073", "0.479773", "0.4791851", "0.47876912", "0.47875684", "0.47735286", "0.477068", "0.47691625", "0.47631034", "0.47625747", "0.47553843", "0.47518507", "0.47416878", "0.4739538", "0.47389078", "0.4737075" ]
0.0
-1
Cancel a withdrawal request.
def post_cancel_withdraw(self, withdraw_id: 'int') -> int:
    params = {
        "withdraw-id": withdraw_id
    }

    from huobi.service.wallet.post_cancel_withdraw import PostCancelWithdrawService
    return PostCancelWithdrawService(params).request(**self.__kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = 
self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Cancel(self, request, global_params=None):\n config = self.GetMethodConfig('Cancel')\n return self._RunMethod(\n config, request, global_params=global_params)", "def cancel(self):\r\n self.require_item()\r\n\r\n url = '{0}/cancel'.format(self.get_url())\r\n request = http.Request('PUT', url)\r\n request.use_xml = False\r\n\r\n return request, parsers.parse_empty", "def cancelRequest(self, json):\n uID = json.get('uID')\n print(RequestsDAO().getRequestByuID(uID))\n if not RequestsDAO().getRequestByuID(uID):\n return jsonify(Error=\"No request found\"), 404\n else:\n\n if uID:\n RequestsDAO().deleteRequest(uID)\n return jsonify(User=\"User deleted\"), 200\n else:\n return jsonify(Error=\"Unexpected attributes in update request\"), 400", "def on_cancel(self):\n self.state = 
CANCELED\n self._reject()", "def cancel_game(self, req):\n game = models.BattleShip.getByUrlKey(req.url_key)\n game.cancelled = True\n game.put()\n return msgs.StringMessage(msg=\"Game has been cancelled.\")", "def cancel(request, pk=None):\n # Check request is still valid or not\n friend_request = get_or_none(FriendRequest, pk=pk)\n # if request is not valid\n if friend_request is None:\n return Response({'status': '400', 'code': 'E_REQUEST_NOT_FOUND',\n 'detail': code['E_REQUEST_NOT_FOUND']}, status=400)\n # Delete request\n friend_request.delete()\n return Response({'status': '200', 'code': 'OK_CANCEL_FRIEND_REQUEST',\n 'detail': code['OK_CANCEL_FRIEND_REQUEST']}, status=200)", "def cancel(self):\n self.stop()\n self.make_callback('canceled')", "async def cancel(self, ctx):\n\n return", "def requestCancelled(builder, request):", "def cancel():", "def cancel(self):\n self.__canceled = True", "def cancel_request(request, id):\n user = get_object_or_404(User, id=id)\n f_request = FriendRequest.objects.filter(\n from_user=request.user,\n to_user=user\n )\n f_request.delete()\n messages.success(\n request,\n f'Your friend request to {user} has cancelled.'\n )\n return redirect('profiles:profile')", "def on_cancel(self, *args):\n self.response(Gtk.ResponseType.CANCEL)", "def do_cancel(self):\r\n self.write({'cancelled': True})", "def hook_cancel_assistance(self, data):\n request_id = data[\"request_id\"]\n assignee_chat_id = data[\"volunteer\"]\n log.info(\"CANCEL req:%s\", request_id)\n self.send_message(assignee_chat_id, c.MSG_REQUEST_CANCELED)\n\n self.updater.dispatcher.user_data[assignee_chat_id].update(\n {\"current_request\": None, \"reviewed_request\": None, \"state\": c.State.AVAILABLE}\n )\n del self.updater.dispatcher.bot_data[request_id]\n self.updater.dispatcher.update_persistence()", "def do_cancel(order):\r\n self.gox.cancel(order.oid)", "def cancel(self, membership, callback=None):", "def cancel(self):", "def cancel(self):", "def cancel(self):", "def AbortRequest(self, request_id):\n request_path = self._GetRequestPathname(request_id, self._ABORTING)\n open(request_path, 'w').close()", "def DismissApprovalRequest(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details(\"Method not implemented!\")\n raise NotImplementedError(\"Method not implemented!\")", "def cancel(self, update, context):\n\n output = \"\"\n user = self.User(update)\n message = update.message.text.lower().split(\" \")\n print(message[1], message[2])\n if user.id not in self.__users.keys():\n output = \"looks like you don't have any requests at all.\"\n elif message[1].isnumeric() and message[2].isnumeric():\n user = self.__users[user.id]\n line_num = int(message[1])\n station_num = int(message[2])\n found_match = False\n for station in user.stations:\n if station.line_number == line_num and station.station_number == station_num:\n user.remove_station(station)\n self.bus_controller.remove_person_from_the_station(station)\n output = \"Canceled the request\"\n found_match = True\n break\n if not found_match:\n output = \"this doesn't match with any of your active requests, so you can't cancel it.\\n\" \\\n \"make sure that you don't have any typing mistakes\"\n else:\n output = \"the values you entered seem wrong, the values must be number.\"\n self.data_base.log(user, update.message.text, output)\n user.send_message(output)", "def cancel(self, request):\n self.clear(request)", "def cancel_stripe(self):\n TransactionLog = Pool().get('payment_gateway.transaction.log')\n\n 
if self.state != 'authorized':\n self.raise_user_error('cancel_only_authorized')\n\n stripe.api_key = self.gateway.stripe_api_key\n\n try:\n charge = stripe.Charge.retrieve(\n self.provider_reference\n ).refund(idempotency_key=('refund_%s' % self.uuid))\n except (\n stripe.error.InvalidRequestError,\n stripe.error.AuthenticationError, stripe.error.APIConnectionError,\n stripe.error.StripeError\n ), exc:\n TransactionLog.serialize_and_create(self, exc.json_body)\n else:\n self.state = 'cancel'\n self.save()\n TransactionLog.create([{\n 'transaction': self,\n 'log': unicode(charge),\n }])", "def cancel(self):\n self.cancelled = True", "def cancel(self):\n self.cancelled = True", "def cancel(self):\n pass", "def cancel(bot, update):\n bot.sendMessage(chat_id=update.message.chat_id, text=\"As you wish, the operation has been cancelled! 😊\")\n return ConversationHandler.END", "def canceled(self):\n self.reject()", "def CancelOperation(\n self,\n request: google.longrunning.operations_pb2.CancelOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.protobuf.empty_pb2.Empty:", "def action_cancel(self):\n self.state = 'canceled'", "def cancel(self, comment=None):\n payload = {\n \"Comment\": comment\n }\n qry = ServiceOperationQuery(self, \"cancel\", None, payload)\n self.context.add_query(qry)\n return self", "def cancel_order(self, **kwargs):\n return self.client.execute(\"order/refund\", \"POST\", kwargs)", "def cancel_run(self, run_id):\n raise NotImplementedError()", "def cancel(self, uuid):\n return self.__call__('market', 'tradecancel',\n {'orderId': uuid})", "def cancel(self):\n self.on_cancel()", "def cancel_proposal(self, id: bytes, proposer: 'Address', current_block_height: int) -> None:\n if not self._check_registered_proposal(id):\n revert(\"No registered proposal\")\n\n proposal_info = ProposalInfo.from_bytes(self._proposal_list[id])\n\n if proposal_info.end_block_height < current_block_height:\n revert(\"This proposal has already expired\")\n\n if proposer != proposal_info.proposer:\n revert(\"No permission - only for proposer\")\n\n if proposal_info.status != NetworkProposalStatus.VOTING:\n revert(\"Can not be canceled - only voting proposal\")\n\n proposal_info.status = NetworkProposalStatus.CANCELED\n self._proposal_list[id] = proposal_info.to_bytes()", "def cancel_order(self, walletId, orderId):\n return", "def cancel(self):\n raise NotImplementedError(\n u\"%s: Method not implemented\", self.__class__.__name__)", "def cancel_game(self, request):\n game = get_by_urlsafe(request.urlsafe_game_key,Game)\n if not game:\n raise endpoints.NotFoundException('A Game with that key does not exist!')\n if game.game_over:\n raise endpoints.ForbiddenException('Game has ended.')\n else:\n game.key.delete()\n return StringMessage(message = 'Game Cancelled!')", "def cancel(self):\n GameLoop.getInstance()._cancelation_token = True", "def cancel(self):\n return self.RES_OK", "def on_cancel_order(self, data, request):\n self.update_rate_limit(request)", "def cancel(self):\n self.cancelled.set()", "def cancel(self):\n # type: () -> None\n if self.query_id is None or self.is_finished():\n return\n\n self._cancelled = True\n url = self._request.get_url(\"/v1/query/{}\".format(self.query_id))\n logger.debug(\"cancelling query: %s\", self.query_id)\n response = self._request.delete(url)\n logger.info(response)\n if response.status_code == requests.codes.no_content:\n logger.debug(\"query cancelled: %s\", self.query_id)\n return\n self._request.raise_response_error(response)", "def 
OnCancel(self, event):\n pass", "def OnCancel(self, event):\n pass", "def action_cancel(self):\n ids = isinstance(self.ids, (int)) and [self.ids] or self.ids\n context = self._context or {}\n self.cancel_move()\n self.clear_wh_lines()\n return True", "def onCancelButtonClick(self, event):\n self.EndModal(wx.ID_CANCEL)\n event.Skip()", "def cancel_game(self, request):\n game = get_by_urlsafe(request.urlsafe_game_key, Game)\n if game:\n if game.game_over:\n return game.to_form('Game is over. Cannot cancel game.')\n else:\n game.history.append('Game canceled!')\n game.end_game(False)\n return game.to_form('Game canceled!')\n else:\n raise endpoints.NotFoundException('Game not found!')", "def cancel_operation(self):\n # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>\n self.proceed = False\n self.entry_view.destroy()", "def cancel(self):\n\n self.end()\n super().cancel()", "def cancel(self, order_id):\n Library.functions.cancel(self._book, order_id)", "def cancel_or_refund_request(payload):\n response = requests.post(url, data=payload)\n return response.json()", "def cancel(self, uuid):\n\n result = self.api_query('CancelOrder', {'order_id': uuid})\n return result", "def cancel(self): #$NON-NLS-1$\r", "def cancelbooking():\n # check of user is loggedin\n if 'loggedin' in session:\n if request.method == 'POST':\n bookingid = request.form['bookingid']\n\n response = requests.delete(\n \"http://localhost:8080/api/bookings/\"+str(bookingid))\n acc = json.loads(response.text)\n return redirect(url_for('site.bookings'))", "def global_cancel(self):\n return self._call_txtrader_api('global_cancel', {})", "def cancel(self):\n self.session.rollback()", "async def cancel_reservation_endpoint(request):\n reservation_id = request.args[\"reservation_id\"][0]\n model.cancel_reservation(reservation_id)\n return json({\"success\": True})", "def cancel(self, run_id):\n postresult = requests.post(\n f\"{self.proto}://{self.host}/ga4gh/wes/v1/runs/{run_id}/cancel\",\n headers=self.auth,\n )\n return wes_reponse(postresult)" ]
[ "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.66425055", "0.6436429", "0.6304208", "0.6227452", "0.6217388", "0.6199144", "0.6197805", "0.61780316", "0.6167289", "0.6161669", "0.61528957", "0.615034", "0.6131002", "0.6127299", "0.61230314", "0.61200106", "0.6112569", "0.6087993", "0.6087993", "0.6087993", "0.60730046", "0.6055191", "0.6034027", "0.60160244", "0.60060847", "0.5975002", "0.5975002", "0.5972475", "0.5967799", "0.5949387", "0.59386855", "0.59195477", "0.59162307", "0.5901403", "0.58726716", "0.58724236", "0.5861415", "0.5856832", "0.58521044", "0.5834264", "0.58288735", "0.5821411", "0.5808253", "0.5807099", "0.5782923", "0.577772", "0.57753694", "0.57753694", "0.5763937", "0.5755028", "0.5742122", "0.5738026", "0.5735437", "0.5724316", "0.5719909", "0.56971854", "0.56921196", "0.5688149", "0.56871283", "0.5680792", "0.56776893", "0.56701815" ]
0.6858336
0
Get the deposit address of the corresponding chain, for a specific cryptocurrency (except IOTA)
def get_account_deposit_address(self, currency: 'str') -> list: check_should_not_none(currency, "currency") params = { "currency": currency } from huobi.service.wallet.get_account_deposit_address import GetAccountDepositAddressService return GetAccountDepositAddressService(params).request(**self.__kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_deposit_address(self, currency):\n return self.__call__('balance', \"getdepositaddress\", \n {\"currencyname\": currency})", "def generate_deposit_address(self, currency):\n return self.__call__('balance', \"generatedepositaddress\", \n {\"currencyname\": currency})", "def account_deposit_address(self, currency_symbol):\n return self.get(f'addresses/{currency_symbol}', auth=True)", "async def fetch_deposit_address(self, code: str, params={}):\n await self.load_markets()\n networkCode = None\n networkCode, params = self.handle_network_code_and_params(params)\n if networkCode is None:\n raise ArgumentsRequired(self.id + ' fetchDepositAddress requires params[\"network\"]')\n currency = self.currency(code)\n currencyId = currency['id']\n idLength = len(currencyId)\n currencyId = currencyId[0:idLength - 1] + currencyId[idLength - 1:idLength].lower() # make the last letter lowercase\n params = self.omit(params, 'network')\n request = {\n 'currency': currencyId,\n 'network': self.network_code_to_id(networkCode, currency['code']),\n }\n response = await self.privateGetUserDepositAddress(self.extend(request, params))\n #\n # '\"bc1qmex3puyrzn2gduqcnlu70c2uscpyaa9nm2l2j9le2lt2wkgmw33sy7ndjg\"'\n #\n return {\n 'currency': code,\n 'address': response.replace('\"', '').replace('\"', ''), # Done twice because some languages only replace the first instance\n 'tag': None,\n 'network': networkCode,\n 'info': response,\n }", "def fetch_deposit_address(self, code: str, params={}):\n self.load_markets()\n currency = self.currency(code)\n if self.is_fiat(code):\n raise NotSupported(self.id + ' fetchDepositAddress() does not support fiat currencies')\n request = {\n 'Coin': currency['id'],\n }\n response = self.privatePostFundsAddCoinFundsRequest(self.extend(request, params))\n #\n # {\n # 'address': '0xf14b94518d74aff2b1a6d3429471bcfcd3881d42',\n # 'hasTx': False\n # }\n #\n return self.parse_deposit_address(response, currency)", "def deposit_address(self):\n response = self.query('deposit_address')\n return response", "async def fetch_deposit_address(self, code: str, params={}):\n await self.load_markets()\n currency = self.currency(code)\n request = {\n 'asset': currency['id'],\n }\n response = await self.privateGetUserWithdrawalAccount(self.extend(request, params))\n data = self.safe_value(response, 'data', {})\n # Not sure about self if there could be more than one account...\n accounts = self.safe_value(data, 'accounts', [])\n firstAccount = self.safe_value(accounts, 0, {})\n address = self.safe_string(firstAccount, 'address')\n return {\n 'currency': currency,\n 'address': address,\n 'tag': None,\n 'network': None,\n 'info': response,\n }", "def do_getaddress(self,args):\n ppdict(bitstamp.get_depositaddress())", "async def fetch_deposit_address(self, code: str, params={}):\n await self.load_markets()\n currency = self.currency(code)\n # if not provided explicitly we will try to match using the currency name\n network = self.safe_string(params, 'network', code)\n currencyNetworks = self.safe_value(currency, 'networks', {})\n currencyNetwork = self.safe_value(currencyNetworks, network)\n networkId = self.safe_string(currencyNetwork, 'id')\n if networkId is None:\n raise ArgumentsRequired(self.id + \" fetchDepositAddress() could not find a network for '\" + code + \"'. 
You can specify it by providing the 'network' value inside params\")\n wallet = self.safe_string(params, 'wallet', 'exchange') # 'exchange', 'margin', 'funding' and also old labels 'exchange', 'trading', 'deposit', respectively\n params = self.omit(params, 'network', 'wallet')\n request = {\n 'method': networkId,\n 'wallet': wallet,\n 'op_renew': 0, # a value of 1 will generate a new address\n }\n response = await self.privatePostAuthWDepositAddress(self.extend(request, params))\n #\n # [\n # 1582269616687, # MTS Millisecond Time Stamp of the update\n # 'acc_dep', # TYPE Purpose of notification 'acc_dep' for account deposit\n # null, # MESSAGE_ID unique ID of the message\n # null, # not documented\n # [\n # null, # PLACEHOLDER\n # 'BITCOIN', # METHOD Method of deposit\n # 'BTC', # CURRENCY_CODE Currency code of new address\n # null, # PLACEHOLDER\n # '1BC9PZqpUmjyEB54uggn8TFKj49zSDYzqG', # ADDRESS\n # null, # POOL_ADDRESS\n # ],\n # null, # CODE null or integer work in progress\n # 'SUCCESS', # STATUS Status of the notification, SUCCESS, ERROR, FAILURE\n # 'success', # TEXT Text of the notification\n # ]\n #\n result = self.safe_value(response, 4, [])\n poolAddress = self.safe_string(result, 5)\n address = self.safe_string(result, 4) if (poolAddress is None) else poolAddress\n tag = None if (poolAddress is None) else self.safe_string(result, 4)\n self.check_address(address)\n return {\n 'currency': code,\n 'address': address,\n 'tag': tag,\n 'network': None,\n 'info': response,\n }", "def getAddressOfgoods(account_address):\n\n addresses = tron.transaction_builder.trigger_smart_contract(contract_address = SMART_CONTRACT_ADDRESS,\n function_selector = 'getAddressOfGoodFromWallet(address)',\n fee_limit=1000000000,\n call_value=0,\n parameters=[{'type': 'address', 'value':account_address}],\n issuer_address=account_address\n )\n addresses = addresses['constant_result']\n decodeH = decode_hex(addresses[0])\n decodeA= decode_abi(('uint256[]',),decodeH)\n print(\"----------------------------------------------------\")\n print(decodeA)\n return decodeA[0]", "def q_addressbalance(abe, page, chain):\n addr = wsgiref.util.shift_path_info(page['env'])\n if chain is None or addr is None:\n return 'returns amount of money at the given address\\n' \\\n '/chain/CHAIN/q/addressbalance/ADDRESS\\n'\n\n if not util.possible_address(addr):\n return 'ERROR: address invalid'\n\n version, hash = util.decode_address(addr)\n total = abe.store.get_balance(chain.id, hash)\n\n return (\"ERROR: please try again\" if total is None else\n format_satoshis(total, chain))", "def generateNewAddress(self, currency):\n pass", "def get_coin_address(self, coin, idx=0):\n return self.coins[coin][idx]", "async def create_deposit_address(self, code: str, params={}):\n await self.load_markets()\n request = {\n 'op_renew': 1,\n }\n return await self.fetch_deposit_address(code, self.extend(request, params))", "def get(self, currency, address):\n check_inputs(address=address, currency=currency) # abort if fails\n addr = commonDAO.get_address(currency, address)\n if addr:\n addr['tags'] = commonDAO.list_address_tags(currency, address)\n return addr\n abort(404, \"Address {} not found in currency {}\".format(address,\n currency))", "def get_zerotier_address(marketplace):\n logger.info(\"You might need to enter your superuser password.\")\n address = zerotier.get_address(marketplace)\n if not address:\n join_cmd = click.style(\"21 join\", bold=True, reset=False)\n no_zt_network = click.style(\n \"You are not part of the {}. 
Use {} to join the market.\",\n fg=\"red\")\n raise UnloggedException(no_zt_network.format(marketplace, join_cmd))\n\n return address", "def fetch_address(cpr: str) -> str:\n\n return \"Åbogade 15, 8200 Aarhus N\"", "def get_address_from_cep(cep: Any) -> str:\n cep = clear_cep(cep)\n req = requests.get(f\"https://ws.apicep.com/cep/{cep}.json\")\n obj = req.json()\n adr = format_adress(obj)\n\n return adr", "def get_address(self):\n if self.get_entity: # needs an entity to work\n if self.building:\n address = self.get_entity.get_institutional_address()\n address.extend(self.building.get_postal_address())\n return address\n else:\n return self.get_entity.get_address()", "def paymentAddress(self):\n return self.selectedAccount.paymentAddress()", "def user_deposit_address(currency_code: str,\n exchange: str = CRYPTO_EXCHANGE,\n api_key: str = CRYPTO_API_KEY,\n api_secret: str = CRYPTO_API_SECRET,\n exchange_password: Any = CRYPTO_API_PASSWORD,\n exchange_uid: Any = CRYPTO_API_UID,\n test_mode: bool = False) -> Any:\n try:\n if test_mode == True:\n url = CRYPTO_URL_TEST\n else:\n url = CRYPTO_URL_LIVE\n payload = {'currency_code': currency_code}\n response = requests.post('{}/deposit_address/{}'.format(url, exchange),\n headers=crypto_get_headers(\n api_key, api_secret, exchange_password,\n exchange_uid),\n json=payload)\n if response:\n return response.json()\n if response.status_code == 400:\n logger.error('Oops! An error Occurred ⚠️')\n raise BadRequest(response.text)\n if response.status_code == 401:\n logger.error('Oops! An error Occurred ⚠️')\n raise InvalidCredentials(response.text)\n except Exception as exception:\n logger.error('Oops! An error Occurred ⚠️')\n raise exception", "def forge_address(value: str, tz_only=False) -> bytes:\n prefix = value[:3]\n address = base58.b58decode_check(value)[3:]\n\n if prefix == 'tz1':\n res = b'\\x00\\x00' + address\n elif prefix == 'tz2':\n res = b'\\x00\\x01' + address\n elif prefix == 'tz3':\n res = b'\\x00\\x02' + address\n elif prefix == 'KT1':\n res = b'\\x01' + address + b'\\x00'\n else:\n raise ValueError(value)\n\n return res[1:] if tz_only else res", "def create_deposit_address(currency_code: str,\n exchange: str = CRYPTO_EXCHANGE,\n api_key: str = CRYPTO_API_KEY,\n api_secret: str = CRYPTO_API_SECRET,\n exchange_password: Any = CRYPTO_API_PASSWORD,\n exchange_uid: Any = CRYPTO_API_UID,\n test_mode: bool = False) -> Any:\n try:\n if test_mode == True:\n url = CRYPTO_URL_TEST\n else:\n url = CRYPTO_URL_LIVE\n payload = {'currency_code': currency_code}\n response = requests.post(\n '{}/create_deposit_address/{}'.format(url, exchange),\n headers=crypto_get_headers(api_key, api_secret, exchange_password,\n exchange_uid),\n json=payload)\n if response:\n return response.json()\n if response.status_code == 400:\n logger.error('Oops! An error Occurred ⚠️')\n raise BadRequest(response.text)\n if response.status_code == 401:\n logger.error('Oops! An error Occurred ⚠️')\n raise InvalidCredentials(response.text)\n except Exception as exception:\n logger.error('Oops! An error Occurred ⚠️')\n raise exception", "def __get_account(self, address):\n\t\tfor acct in self.wallet:\n\t\t\tif acct[\"address\"] == address:\n\t\t\t\treturn acct\n\t\traise ValueError(\"The given address does not exist in the bunkr-wallet\")", "def get_balance(self, crypto, address, confirmations=1):\n raise NotImplementedError(\n \"This service does not support getting address balances. 
\"\n \"Or rather it has no defined 'get_balance' method.\"\n )", "def getReferencedAddress(program: ghidra.program.model.listing.Program, address: ghidra.program.model.address.Address) -> ghidra.program.model.address.Address:\n ...", "def cryptocurrency_deposit_request(self, walletId, currency):\n return", "def get_address(self):\n entity = self\n if entity.abstract_entity:\n entity = self.get_real_ancestor()\n if entity:\n address = entity.get_institutional_address()\n building = entity.get_building()\n if building:\n if entity.building_recapitulates_entity_name: \n address.extend(building.get_postal_address()[1:])\n else:\n address.extend(building.get_postal_address())\n return address", "def get_new_address(account):\n try:\n new_address = subprocess.check_output([\"litecoin-cli\", \"getnewaddress\", account])\n except:\n sys.exit(1)\n\n return new_address.decode().strip()", "def pad_contract(chain):\n contract, hash = chain.provider.deploy_contract('TestSolidityAddressHash')\n return contract", "def get_deposit(self, deposit):\r\n method = self.wallet_endpoints['get_deposit']['method']\r\n url = self.base_url + self.wallet_endpoints['get_deposit']['url'].format(depositId=deposit)\r\n req = requests.request(method, url, headers=self.get_auth_headers())\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return res[\"result\"]\r\n else:\r\n return res", "def get_chain_info(self, symbol: str): \n return self.trader.fetch_chain_info(symbol)", "def get_addr_value(tx):\n\t# initialize lists\n\tin_addr, in_value, out_addr, out_value = [], [], [], []\n\t# for each input transaction\n\tfor input in tx[\"inputs\"]:\n\t\t# if sending address exists\n\t\t# (otherwise coin base or empty value)\n\t\tif \"prev_out\" in input.keys() and \"addr\" in input[\"prev_out\"].keys():\n\t\t\t# record address and value\n\t\t\tin_addr.append(input[\"prev_out\"][\"addr\"])\n\t\t\tin_value.append(input[\"prev_out\"][\"value\"])\n\t# for each output transaction\n\tfor output in tx[\"out\"]:\n\t\t# if receiving address exists\n\t\t# (otherwise empty value)\n\t\tif \"addr\" in output.keys():\n\t\t\t# record address and value\n\t\t\tout_addr.append(output[\"addr\"])\n\t\t\tout_value.append(output[\"value\"])\n\t# handle duplicate addresses\n\tin_addr, in_value = handle_duplicates(in_addr, in_value)\n\tout_addr, out_value = handle_duplicates(out_addr, out_value)\n\treturn in_addr, in_value, out_addr, out_value", "def get_Dogecoin_info(wallet: str, info: dict):\r\n\tresp = requests.get('https://dogechain.info/api/v1/address/received/' + wallet)\r\n\tif resp.status_code == 200:\r\n\t\tresponse = json.loads(resp.text)\r\n\t\tif response['success'] == 1:\r\n\t\t\tinfo['received'] = float(response['received'])\r\n\t\t\tinfo['received_dollars'] = info['received'] * COIN_VALUES['Dogecoin']\r\n\t\t\tinfo['updated'] = datetime.datetime.now().isoformat()\r\n\treturn info", "def getaccountaddress(self, account):\n return self.proxy.getaccountaddress(account)", "def get_balances_chain(addresses):\r\n print(\"* Using chain.so to query {} addresses - one at a time\".format(len(addresses))) # NOQA\r\n\r\n CHAINS_BALANCE_ENDPOINT = \"https://chain.so/api/v2/get_address_balance/BTC/{address}\" # NOQA\r\n balances = {}\r\n i = 0\r\n for addr in addresses:\r\n i += 1\r\n response = requests.get(CHAINS_BALANCE_ENDPOINT.format(address=addr))\r\n if response.status_code != 200:\r\n print(\"** Chain sent bad server reponse: \", response.status_code)\r\n print(\"** Query number: \", i)\r\n return None\r\n\r\n data = 
response.json()\r\n if data[\"status\"] != \"success\":\r\n print(\"** Chain sent bad data reponse: \", data[\"status\"])\r\n return None\r\n\r\n balances[data[\"data\"][\"address\"]] = \\\r\n data[\"data\"][\"confirmed_balance\"] + \\\r\n data[\"data\"][\"unconfirmed_balance\"]\r\n\r\n # 0.2 is 5 reqs/second. Using 0.3 just in case.\r\n time.sleep(0.3)\r\n\r\n return balances", "def forge_contract(value) -> bytes:\n parts = value.split('%')\n address, entrypoint = (parts[0], parts[1]) if len(parts) == 2 else (parts[0], 'default')\n res = forge_address(address)\n if entrypoint != 'default':\n res += entrypoint.encode()\n return res", "def get_bank_address_by_name(bank_name: str) -> str:\n # Open a new connection\n db, cursor = db_connector.cursor()\n\n query = \"select address from bank where name = '{}';\".format(bank_name)\n cursor.execute(query)\n data = cursor.fetchall()\n db.disconnect()\n return data[0][0]", "def get_address(self):\n return self.address.line[0]+\", \"+self.address.city+\", \"+self.address.state+\", \"+self.address.country", "def generate_deposit_address(self, asset: Asset,\n receive_window: Optional[int] = None):\n api_params = {\n 'asset': asset.value,\n 'timestamp': get_current_time_milliseconds()\n }\n\n if receive_window is not None:\n api_params['receiveWindow'] = receive_window\n\n return self.request.post(path='/deposit/address', json_data=api_params)", "def get_Ethereum_info(wallet: str, info: dict):\r\n\tresp = requests.get('https://api.blockcypher.com/v1/eth/main/addrs/' + wallet)\r\n\tif resp.status_code == 200:\r\n\t\tresponse = json.loads(resp.text)\r\n\t\tinfo['received'] = response['total_received'] / 10**18\r\n\t\tinfo['received_dollars'] = info['received'] * COIN_VALUES['Ethereum']\r\n\t\tinfo['updated'] = datetime.datetime.now().isoformat()\r\n\treturn info", "def withdraw_crypt(self, amount, currency, address):\n response = self.query('withdraw_crypt', params=dict(amount=amount, currency=currency, address=address))\n return response", "def q_translate_address(abe, page, chain):\n addr = wsgiref.util.shift_path_info(page['env'])\n if chain is None or addr is None:\n return 'Translates ADDRESS for use in CHAIN.\\n' \\\n '/chain/CHAIN/q/translate_address/ADDRESS\\n'\n# MULTICHAIN START\n version, hash = util.decode_check_address_multichain(addr)\n# MULTICHAIN END\n if hash is None:\n return addr + \" (INVALID ADDRESS)\"\n return util.hash_to_address(chain.address_version, hash)", "def cryptocurrency_withdrawal_request(self, walletId, currency, amount, address):\n return", "def mk_contract_address(sender: str, nonce: int) -> str:\n sender_bytes = to_bytes(hexstr=sender)\n raw = rlp.encode([sender_bytes, nonce])\n h = keccak(raw)\n address_bytes = h[12:]\n return to_checksum_address(address_bytes)", "def address(self) -> Optional[str]:\n return pulumi.get(self, \"address\")", "def address(self) -> Optional[str]:\n return pulumi.get(self, \"address\")", "def q_getreceivedbyaddress(abe, page, chain):\n addr = wsgiref.util.shift_path_info(page['env'])\n if chain is None or addr is None:\n return 'returns amount of money received by given address (not balance, sends are not subtracted)\\n' \\\n '/chain/CHAIN/q/getreceivedbyaddress/ADDRESS\\n'\n\n if not util.possible_address(addr):\n return 'ERROR: address invalid'\n\n version, hash = util.decode_address(addr)\n return format_satoshis(abe.store.get_received(chain.id, hash), chain)", "def getBtcInWallet(address):\n btc = 'https://blockchain.info/q/addressbalance/' + address\n check = requests.get(btc)\n 
value = int((check.content)) / 100000000.0\n return value", "def get_address(self, ):\n return self.get_parameter('address')", "def signer_address(private_key):\n return get_ethereum_address_from_private_key(private_key)", "def getAbsoluteAddress(program: ghidra.program.model.listing.Program, address: ghidra.program.model.address.Address) -> ghidra.program.model.address.Address:\n ...", "def returnDepositAddresses(self):\n pass", "async def get_balance(sochain_url:str, network:str, address:str):\n try:\n balance = await sochain_api.get_balance(sochain_url, network, address)\n if balance == None:\n raise Exception(\"Invalid Address\")\n return balance\n except Exception as err:\n raise Exception(str(err))", "def _get_address(self):\n return utf82unicode(pn_terminus_get_address(self._impl))", "def get_active_market_street(market):\r\n return market[-1]", "def get_address(self, address=None):\n return self.__get_addr_grp('address', address)", "def get_wallet_balance(self, walletId, currency):\n return", "def get_address_abi(name, mode):\n dict_event = get_smart_contracts_dict(mode)\n address, abi = dict_event[name]\n\n return address, abi", "def getAddressAtIndex(self, index: int) -> ghidra.program.model.address.Address:\n ...", "def get_bank_address_by_id(bank_id: int) -> str:\n # Open a new connection\n db, cursor = db_connector.cursor()\n\n query = \"select address from bank where name = '{}';\".format(bank_id)\n cursor.execute(query)\n data = cursor.fetchall()\n db.disconnect()\n return data[0][0]", "def execute_get_balance(arg):\n blockchain = Blockchain()\n blockchain.read_blockchain()\n\n address = arg['address']\n\n if address is None:\n print('You have to give you account address!!!')\n\n elif not blockchain._wallet_pool.has_address(address):\n print(f'The address {address} does not exist!!!')\n\n else:\n balance = blockchain.get_balance(address)\n print(f'Address: {address}')\n print(f'Balance = {balance}')\n\n return", "def extract_cash_account(self, cash_accounts, currency):\n for cash_account in cash_accounts:\n if cash_account['currency'] == currency:\n return cash_account\n\n return {}", "def address(self, net: str, compressed: bool) -> str:\n # encode the public key into bytes and hash to get the payload\n pkb_hash = self.encode(compressed=compressed, hash160=True)\n # add version byte (0x00 for Main Network, or 0x6f for Test Network)\n version = {'main': b'\\x00', 'test': b'\\x6f'}\n ver_pkb_hash = version[net] + pkb_hash\n # calculate the checksum\n checksum = sha256(sha256(ver_pkb_hash))[:4]\n # append to form the full 25-byte binary Bitcoin Address\n byte_address = ver_pkb_hash + checksum\n # finally b58 encode the result\n b58check_address = b58encode(byte_address)\n return b58check_address", "def get_account_address(account_name):\n command = 'getaddressesbyaccount {0}'.format(account_name)\n result = do_command(command)\n if result == -1:\n log('Fatal error: get addresses by account faild!')\n return -1\n\n json_obj = json.loads(result)\n address_count = len(json_obj)\n if address_count == 0:\n log('no account address: {0}, to create new one!'.format(account_name))\n command = 'getaccountaddress {0}'.format(account_name)\n result = do_command(command)\n if result == -1:\n log('Fatal error, create new address faild: {0}'.format(account_name))\n return -1\n else:\n return result\n else:\n return json_obj[0]", "def getNewAddress(self):\n a = self.selectedAccount.getNextPaymentAddress()\n if self.blockchain:\n self.blockchain.subscribeAddresses(a)\n self.save()\n return a", 
"def address(self):\n out = {'zip_code': '',\n 'city': '',\n 'street': '',\n 'phone': ''}\n if self.user.contract_member.exists():\n last_contract = self.user.contract_member.last()\n out['zip_code'] = last_contract.zip_code\n out['city'] = last_contract.city\n out['street'] = last_contract.street\n out['phone'] = last_contract.phone\n\n return out", "def address(self) -> str:\n return pulumi.get(self, \"address\")", "def address(self) -> str:\n return pulumi.get(self, \"address\")", "def address(self) -> str:\n return pulumi.get(self, \"address\")", "def get_address_balance(litecoinaddress):\n total_balance = 0\n unspent = list_unspent(litecoinaddress)\n for block in unspent:\n total_balance += float(block[\"amount\"])\n\n return total_balance", "def address(self, compressed=True, testnet=False):\n h160 = self.hash160(compressed)\n if testnet:\n prefix = b'\\x6f'\n else:\n prefix = b'\\x00'\n return encode_base58_checksum(prefix + h160)", "async def b_chain() -> dict:\n authority_chain = await chain.consensus()\n return {\"chain\": authority_chain[\"chain\"]}", "def get_chain_info(chain_code: str) -> ChainInfo:\n if chain_code not in registry.chain_dict:\n raise exceptions.ChainNotFound(chain_code)\n\n return registry.chain_dict[chain_code]", "def balance_of_address(self, address):\n balance = 0\n for block in self.chain:\n for transaction in block.transactions:\n if transaction.walletoffrom == address:\n balance -= transaction.amount\n\n if transaction.walletofto == address:\n balance += transaction.amount\n return balance", "def derive_classic_address(public_key: str) -> str:\n account_id = get_account_id(bytes.fromhex(public_key))\n return addresscodec.encode_classic_address(account_id)", "def get_balances_blockchain(addresses):\r\n print(\"* blockchain.info not yet supported\")\r\n return None", "def get_party_address_by_id(party_id: int) -> str:\n # Open a new connection\n db, cursor = db_connector.cursor()\n\n query = \"select address from party where name = '{}';\".format(party_id)\n cursor.execute(query)\n data = cursor.fetchall()\n db.disconnect()\n return data[0][0]", "def q_getsentbyaddress(abe, page, chain):\n addr = wsgiref.util.shift_path_info(page['env'])\n if chain is None or addr is None:\n return 'returns amount of money sent from given address\\n' \\\n '/chain/CHAIN/q/getsentbyaddress/ADDRESS\\n'\n\n if not util.possible_address(addr):\n return 'ERROR: address invalid'\n\n version, hash = util.decode_address(addr)\n return format_satoshis(abe.store.get_sent(chain.id, hash), chain)", "def getAddress(user):", "def currency_account(self, currency):\r\n param = {}\r\n param['currency'] = currency\r\n param['appid'] = self.apiKey\r\n param['nonce'] = int(time.time() * 1000)\r\n param['timestamp'] = int(time.time())\r\n return self.__signed_GET('/api/v1/account', param, self.timeout)", "def get_address(self):\n if self.address:\n return self.address", "def toAddr(self, offset: long) -> ghidra.program.model.address.Address:\n ...", "def withdraw(self, currency, amount, address):\n return self.__call__('balance', 'withdrawcurrency',\n {\"currencyname\": currency, \n \"quantity\": amount, \n \"address\": address})", "def get_address():\r\n address = input(\"What is the customer's address?: \")\r\n\r\n return address", "def getnewaddress(self, account=None):\n if account is None:\n return self.proxy.getnewaddress()\n else:\n return self.proxy.getnewaddress(account)", "def coinbase(self):\n cb_hex = self.app.config.get('pow', {}).get('coinbase_hex')\n if cb_hex is None:\n if not 
self.accounts_with_address:\n return DEFAULT_COINBASE\n cb = self.accounts_with_address[0].address\n else:\n # [NOTE]: check it!\n # if not is_string(cb_hex):\n if not isinstance(cb_hex, str):\n raise ValueError('coinbase must be string')\n try:\n cb = decode_hex(remove_0x_head(cb_hex))\n except (ValueError, TypeError):\n raise ValueError('invalid coinbase')\n if len(cb) != 20:\n raise ValueError('wrong coinbase length')\n if self.config['accounts']['must_include_coinbase']:\n if cb not in (acct.address for acct in self.accounts):\n raise ValueError('no account for coinbase')\n return cb", "def get_tx_info(tx):\n\n input_addresses = []\n output_addresses = []\n payments = []\n\n try:\n response = json.loads(make_request('http://tbtc.blockr.io/api/v1/tx/info/' + tx))\n except Exception as e:\n status = json.loads(e.message).get('status')\n if status in ['error', 'fail']:\n return {'from': None, 'to': None, 'amount': None, 'confirmations': 0}\n\n vins = response.get('data').get('vins')\n vouts = response.get('data').get('vouts')\n confirmations = response.get('data').get('confirmations')\n\n for i in range(len(vins)):\n if vins[i].get('address') not in input_addresses:\n input_addresses.append(vins[i].get('address'))\n for i in range(len(vouts)):\n output_addresses.append(vouts[i].get('address'))\n payments.append(vouts[i].get('amount'))\n\n return {'from': input_addresses, 'to': output_addresses, 'amount': payments, 'confirmations': confirmations}", "def toAddr(self, offset: int) -> ghidra.program.model.address.Address:\n ...", "def get_address_output(name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[Optional[str]]] = None,\n region: Optional[pulumi.Input[Optional[str]]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAddressResult]:\n ...", "def get(self, currency, address):\n check_inputs(address=address, currency=currency) # abort if fails\n entity = addressesDAO.get_address_entity(currency, address)\n if entity:\n entity['tags'] = entitiesDAO.list_entity_tags(currency,\n entity['entity'])\n entity['tag_coherence'] = compute_tag_coherence(entity['tags'])\n return entity\n abort(404, \"Address {} not found in currency {}\".format(address,\n currency))", "def get_address(self) -> Optional[str]:\n raise NotImplementedError()", "def address(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"address\")", "def address2(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"address2\")", "def address(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"address\")", "def get_balance(address: str) -> int:\n return base.Balances(balance_of).get(address)", "def get_address(address_file):\n if not path.exists(address_file) :\n print(\"file not found :\", address_file)\n return None\n addr_file = open(address_file,'r')\n address = addr_file.readlines()\n return address[0]", "def withdraw(self, currency, amount, address):\n pass", "def get_apartment_address(self, soup, apartment_dict):\n\n info_class = soup.find_all('div', {'class': 'info'})\n if info_class and len(info_class) > 0:\n info_class = info_class[0]\n address = info_class.find('h2').text.strip()\n\n from parse import parse\n address = parse(\"Location: {}\", address)[0]\n apartment_dict['address'] = address\n else:\n logging.warning(\"Failed to parse apartment address\")\n return", "def show_address_balances(self):\n\t\tret = []\n\t\tfor acct in self.wallet:\n\t\t\tutxos = get_unspent(acct[\"address\"], self.testnet)\n\t\t\tif len(utxos) != 
0:\n\t\t\t\tbalance = sum(i['value'] for i in utxos)\n\t\t\t\tret.append(f\"Address {acct['address']} BTC: {str(balance/100000000.0)}\")\n\t\treturn ret" ]
[ "0.773913", "0.72328436", "0.7039697", "0.6727295", "0.6697329", "0.6667728", "0.65922564", "0.6322239", "0.6310505", "0.6198878", "0.57645744", "0.57568973", "0.5751982", "0.5747228", "0.5672455", "0.55680877", "0.5531586", "0.5513636", "0.550136", "0.54769146", "0.5449721", "0.5426616", "0.5420758", "0.5409998", "0.53504974", "0.53304", "0.53211516", "0.53204584", "0.53146386", "0.5313537", "0.528857", "0.5280197", "0.5278417", "0.52576387", "0.5226019", "0.52099866", "0.5189421", "0.51862377", "0.5171444", "0.5162263", "0.51416457", "0.5133412", "0.51137227", "0.5091346", "0.5088224", "0.5073176", "0.5073176", "0.50695276", "0.506951", "0.5065768", "0.5050275", "0.5049448", "0.5037329", "0.50321656", "0.5023113", "0.5015956", "0.50152737", "0.50103956", "0.5008296", "0.5007018", "0.49948767", "0.49877104", "0.4979822", "0.49629036", "0.49549", "0.4951207", "0.49494183", "0.49454397", "0.49454397", "0.49454397", "0.4943706", "0.49401253", "0.49372727", "0.49348277", "0.49335226", "0.49126875", "0.49111092", "0.49070796", "0.49004814", "0.48986536", "0.48940837", "0.48921788", "0.4883955", "0.48829675", "0.48817042", "0.487879", "0.487629", "0.4867124", "0.48661625", "0.4866075", "0.48497283", "0.4847858", "0.48463202", "0.4833433", "0.48321328", "0.48319402", "0.48263124", "0.48211786", "0.48187613", "0.48178124" ]
0.6650143
6
Get the withdraw quota for currencies
def get_account_withdraw_quota(self, currency: 'str') -> list: check_should_not_none(currency, "currency") params = { "currency": currency, } from huobi.service.wallet.get_account_withdraw_quota import GetAccountWithdrawQuotaService return GetAccountWithdrawQuotaService(params).request(**self.__kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def options_to_withdraw(self, amount):\n counter = PaperMoneyCounter() # aux class\n options = [] # options to withdraw\n remaining_cash = 0 # aux var\n\n if (amount % 20 == 0 or amount % 50 == 0) and (amount <= 1000): # is it allowed to withdraw?\n # prioritizing 100-dollar bills\n qtt_100s = counter.how_many_100s(amount)\n remaining_cash = counter.remaining_cash_without_100s(amount)\n\n qtt_50s = counter.how_many_50s(remaining_cash)\n remaining_cash = counter.remaining_cash_without_50s(remaining_cash)\n\n qtt_20s = counter.how_many_20s(remaining_cash)\n remaining_cash = counter.remaining_cash_without_20s(remaining_cash)\n\n if counter.cash(qtt_100s, qtt_50s, qtt_20s) == amount:\n options.append([int(qtt_100s), int(qtt_50s), int(qtt_20s)])\n\n # prioritizing 50-dollar bills\n qtt_100s = 0\n\n qtt_50s = counter.how_many_50s(amount)\n remaining_cash = counter.remaining_cash_without_50s(amount)\n\n qtt_20s = counter.how_many_20s(remaining_cash)\n remaining_cash = counter.remaining_cash_without_20s(remaining_cash)\n\n if counter.cash(qtt_100s, qtt_50s, qtt_20s) == amount:\n if not(options[0] == [qtt_100s, qtt_50s, qtt_20s]):\n options.append([int(qtt_100s), int(qtt_50s), int(qtt_20s)])\n\n # prioritizing 20-dollar bills\n qtt_100s = 0\n\n qtt_50s = 0\n\n qtt_20s = counter.how_many_20s(amount)\n\n if counter.cash(qtt_100s, qtt_50s, qtt_20s) == amount:\n if not(options[0] == [qtt_100s, qtt_50s, qtt_20s]):\n if not(options[1] == [qtt_100s, qtt_50s, qtt_20s]):\n options.append([int(qtt_100s), int(qtt_50s), int(qtt_20s)])\n\n return options\n\n return None # if it wasn't allowed to withdraw", "def quota(self) -> 'outputs.CommitmentQuotaResponse':\n return pulumi.get(self, \"quota\")", "def get_quota(self):\n raise NotImplementedError", "def withdraw(account, amount):\n pass", "def getActiveCurrencies():", "def get_send_quota(self):\r\n return self._make_request('GetSendQuota')", "def quota(self) -> int:\n return pulumi.get(self, \"quota\")", "def getCurrencies():", "def withdraw(self, currency, amount, address):\n return self.api_query('withdraw', {\"currency\": currency, \"amount\": amount, \"address\": address})", "def withdraw(self, amount):\r\n balance = self['get']('balance')\r\n if amount > balance:\r\n return 'Insufficient funds'\r\n self['set']('balance', balance - amount)\r\n return self['get']('balance')", "def getBaseCurrency():", "def online_quota(self):\r\n return self.max_contributions - self.num_tickets_total", "def withdraw_money(c_id, amount):\n return ar.withdraw_money(c_id, amount)", "def getActiveCurrency():", "def withdraw(self, currency, amount, address):\n return self.__call__('balance', 'withdrawcurrency',\n {\"currencyname\": currency, \n \"quantity\": amount, \n \"address\": address})", "def account_space(access_token):\n client = dropbox.client.DropboxClient(access_token)\n account_info = client.account_info()\n quota_info = account_info['quota_info']\n total = quota_info['quota']\n used = quota_info['normal'] + quota_info['shared']\n return total - used", "def withdraw(self, amount):\n self.transactions += [('withdraw', amount)]\n if amount > self.balance:\n return 'Insufficient funds'\n self.balance = self.balance - amount\n return self.balance", "def withdraw(holder):\n account = Account.query.filter_by(holder=holder).first()\n amount = request.json.get(\"amount\")\n if not account:\n return jsonify({\"error\": \"Account does not exist\"})\n if account.balance >= amount:\n account.balance -= amount\n db.session.commit()\n return jsonify(\n {\n \"holder\": 
account.holder,\n \"balance\": account.balance,\n \"message\": \"The withdraw has been processed\",\n }\n )\n return jsonify({\"error\": \"The account balance is insufficient\"})", "def withdraw(self, amount):\n if amount > self.balance:\n raise ValueError('insufficient funds to withdraw $%.2f' % amount)\n self.balance -= amount\n return self.balance", "def withdraw(self, amount):\n if amount > self.balance:\n return 'Insufficient funds'\n self.balance = self.balance - amount\n return self.balance", "def getUserCurrency():", "def withdraw(self, amount):\n if self.overdrawn:\n print('You have overdrawn, please add more money!')\n return self.balance\n self.balance = self.balance - amount\n return self.balance", "def deposits_limit(self):\n limits = self.user.limits\n value = 0\n if limits.exists():\n value = self.user.limits.get(type=Limit.DEPOSIT).value\n return value", "def get_fiat_balance():\n return get_balance(CONF.quote)", "def withdrawMoney(self, withdraw_amount):\r\n self.balance_amt = self.balance_amt - withdraw_amount", "def amount_to_pay_in_period(self):\n assert self.type == \"N\", _(\"Subscription must be normal to use this method\")\n period_start, period_end = self.get_current_period()\n price_per_day = (\n self.get_price_for_full_period() / (period_end - period_start).days\n )\n days_not_used = 30 * self.frequency - (date.today() - period_start).days\n return int(price_per_day * days_not_used)", "def get_quota(self):\n path = 'urlCategories/urlQuota'\n return self._session.get(path)", "def balance(self) -> Decimal:\n withdrawals = self.withdrawal_requests.filter(\n status=WithdrawalStatus.open,\n )\n if len(withdrawals) == 0:\n return self.internal_balance\n else:\n withdrawal_total = sum(map(lambda w: w.amount, withdrawals))\n return self.internal_balance - withdrawal_total", "def withdraw(self, currency, amount, address):\n pass", "def withdrawMoney(self, withdraw_amount):\r\n if (self.balance_amt - withdraw_amount) > 0:\r\n self.balance_amt = self.balance_amt - withdraw_amount\r\n else:\r\n raise WithdrawError #Exception('Overdraft withdrawal Error. 
Cannot withdraw more than amount in account balance: {}'.format(self.balance_amt))\r", "def get_margin_balance():\n try:\n if CONF.exchange == 'bitmex':\n bal = EXCHANGE.fetch_balance()[CONF.base]\n elif CONF.exchange == 'kraken':\n bal = EXCHANGE.private_post_tradebalance({'asset': CONF.base})['result']\n bal['free'] = float(bal['mf'])\n bal['total'] = float(bal['e'])\n bal['used'] = float(bal['m'])\n elif CONF.exchange == 'liquid':\n bal = get_crypto_balance()\n return bal\n\n except (ccxt.ExchangeError, ccxt.NetworkError) as error:\n LOG.error(RETRY_MESSAGE, type(error).__name__, str(error.args))\n sleep_for(4, 6)\n get_margin_balance()", "def initial_cash_balance(self) -> float:\n return self.buy_budget * len(self.stocks)", "def budget_used(self):\n return int(self.total_spent() / self.budget() * 100.0)", "def currencies():\n return _CURRENCIES", "def bank_withdraw_money(stub, request):\n # print(\"In method bank_withdraw_money:\")\n\n try:\n result = stub.Withdraw(request)\n except DatabaseOptFailure:\n return \"IO_Failure\"\n return result", "def get_balance(self, acc: Account) -> Decimal:\n return sum_queryset(self.get_entries(acc))", "def Withdrawal(self):\n self.amount = (int)(raw_input (\" Enter your withdrawal amount \"))\n return self.amount", "def get_usage(self):\n res = self.conn.get_send_quota()\n res = res['GetSendQuotaResponse']\n result = res['GetSendQuotaResult']\n quota = float(result['Max24HourSend'])\n sent = float(result['SentLast24Hours'])\n return sent, quota", "def api_quota():\n # Create the required data dictionary for Quota/Status\n api_data = {} # type: Dict[str, str]\n response = http_request(endpoint=API_QUOTA, data=api_data)\n\n if response.get('errorNo') != 0:\n return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))\n\n return response", "def get_request_limit(self, access_token):\n url = \"{0}/rate_limit?access_token={1}\"\n response = requests.get(url.format(self.ROOT_API_URL, access_token))\n data = response.json()\n return data['resources']['core'].get(\"remaining\")", "def withdraw(self, amount):\n if amount > self.balance:\n raise RuntimeError('Amount greater than available balance.')\n self.balance -= amount\n return self.balance", "def withdraw(self, amount):\n if amount > self.balance:\n raise RuntimeError('Amount greater than available balance.')\n self.balance -= amount\n return self.balance", "def get_balance(self, ticker):\n return self.trading_client.account_balance(ticker, 'usd')", "def get_used_balance():\n try:\n if CONF.exchange == 'bitmex':\n position = EXCHANGE.private_get_position()\n if not position:\n return None\n return position[0]['currentQty']\n if CONF.exchange == 'kraken':\n result = EXCHANGE.private_post_tradebalance()['result']\n return round(float(result['e']) - float(result['mf']))\n if CONF.exchange == 'liquid':\n return round(get_crypto_balance()['used'] * get_current_price())\n\n except (ccxt.ExchangeError, ccxt.NetworkError) as error:\n LOG.error(RETRY_MESSAGE, type(error).__name__, str(error.args))\n sleep_for(4, 6)\n get_used_balance()", "def total_clearance(self):\n total_clearances = 0\n debit = 0 #variable to track the remaining debit\n clearances = self.clearance_set.all() #grab all the previous clerances\n for clearance in clearances:\n total_clearances += clearance.paid_value\n return total_clearances", "def _get_next_limit(self):\n return self.__quota", "def get_quota(self, tenant_id):\n return self._get(_quota.Quota, tenant_id)", "def available_cash(self):\n return 
self._cash", "def weekly_benefit(self):\n total_purchase_price = 0\n total_selling_price = 0\n last_seven_day = timezone.now() - timedelta(days=7)\n items = self.item_set.filter(status=\"sold\", updated_at__gte=last_seven_day)\n for item in items:\n total_purchase_price += item.price\n total_selling_price += item.selling_price\n benefit = total_selling_price - total_purchase_price\n return benefit", "def getFactor(currency):", "def withdraw_cash(self, qtt_100s, qtt_50s, qtt_20s):\n amount = PaperMoneyCounter().cash(qtt_100s, qtt_50s, qtt_20s)\n if (self.__is_logged_in) and (amount <= self.__balance) and (amount <= 1000):\n self.__balance = float(Decimal(str(self.__balance - amount)))\n self.register_operation(self.ACTIONS['WITHDRAWING'], amount)\n return True\n\n return False", "def get_placed_assets_rate(cls, instrument: Instrument) -> float:\n bank_balance = InstrumentBalance.objects.filter(\n user__email__contains='bank',\n instrument=instrument).order_by('user__created_at_dt').last()\n if bank_balance:\n return float(bank_balance.amount)\n return 0", "def free_cookie_discount():\n return DiscountPeriod.objects.get(id=settings.COOKIE_CORNER_FREE_COOKIE_DISCOUNT_PERIOD_ID)", "def BuyingPrice(self):\n return self.buying_rice", "def currency_rate(self, init):\r\n\r\n curr = CurrencyRates()\r\n curr_rate = curr.get_rates(init)\r\n return curr_rate", "def budget(self):\n return self._budget", "def getDefaultCurrency():", "def budget(self):\n\n budget = (_House.closing_cost*self.vars['after_repair_value']) - self.vars['purchase_price'] - self.vars['profit'] - _House.broker_fee\n return float(round(budget, 2))", "def getBudgetBalance(self, budgetName):\r\n assert budgetName in self.budgets, \"Specified budget doesn't exist\"\r\n return \"%.2f\" % float(self.budgets[budgetName])", "def withdraws(account):\r\n limit = 500\r\n print(\"Your account balance is $\", format(account, \"0.2f\"), sep='')\r\n print(\"Your withdraw limit is $\", format(limit, \"0.2f\"), sep='')\r\n while True:\r\n try:\r\n withdraw_amount = int(input(\"Enter withdraw amount. $\"))\r\n break\r\n except ValueError:\r\n print(\"Error. Must be a whole number.\")\r\n # Checking if the customer has sufficient funds/over daily limit\r\n while withdraw_amount > account or withdraw_amount > limit:\r\n print(\"Insufficient funds or daily limit exceeded.\")\r\n while True:\r\n try:\r\n withdraw_amount = int(\r\n input(\"Enter withdraw amount. $\"))\r\n break\r\n except ValueError:\r\n print(\"Error. 
Must be a whole number.\")\r\n account -= withdraw_amount\r\n limit -= withdraw_amount\r\n print(\"Your new balance is $\", format(account, \"0.2f\"), sep='')\r\n print(\"Your new limit is $\", format(limit, \"0.2f\"), sep='')", "def balance(self) -> Decimal:\n return sum_queryset(AccountEntry.objects.filter(account=self.account, timestamp__lte=self.timestamp).exclude(timestamp=self.timestamp, id__gt=self.id))", "def get_billed_amount(self):\n return Leg.objects.filter(transaction__recurred_cost__recurring_cost=self, amount__gt=0).sum_to_balance()", "def budget_balance(self):\n budget_balance = round(self.budget() - self.total_spent(), 2)\n budget_balance_degree = round( (9000 * self.total_spent()) / (self.budget()), 4) #convert to degrees and round to four decimal places\n return (budget_balance, budget_balance_degree)", "def buy_cost(self):\n return self._manager.get_buy_price(self.name)", "def withdraw(self, amount):\r\n self.balance = self.balance - amount\r\n self.transactions.append(-amount)\r\n return amount", "def account_balance(self, currency_symbol):\n return self.get(f'balances/{currency_symbol}', auth=True)", "def get_btc_supply(normalize=False, at_block_index=None):\n block_count = config.CURRENT_BLOCK_INDEX if at_block_index is None else at_block_index\n blocks_remaining = block_count\n total_supply = 0 \n reward = 50.0\n while blocks_remaining > 0:\n if blocks_remaining >= 210000:\n blocks_remaining -= 210000\n total_supply += 210000 * reward\n reward /= 2\n else:\n total_supply += (blocks_remaining * reward)\n blocks_remaining = 0\n \n return total_supply if normalize else int(total_supply * config.UNIT)", "def getBalance(self, currency=''):\n\n if self.app.getExchange() == 'binance':\n if self.mode == 'live':\n model = BAuthAPI(self.app.getAPIKey(), self.app.getAPISecret())\n df = model.getAccount()\n if isinstance(df, pd.DataFrame):\n if currency == '':\n # retrieve all balances\n return df\n else:\n # retrieve balance of specified currency\n df_filtered = df[df['currency'] == currency]['available']\n if len(df_filtered) == 0:\n # return nil balance if no positive balance was found\n return 0.0\n else:\n # return balance of specified currency (if positive)\n if currency in ['EUR', 'GBP', 'USD']:\n return float(self.app.truncate(float(df[df['currency'] == currency]['available'].values[0]), 2))\n else:\n return float(self.app.truncate(float(df[df['currency'] == currency]['available'].values[0]), 4))\n else:\n return 0.0\n else:\n # return dummy balances\n if currency == '':\n # retrieve all balances\n return self.balance\n else:\n if self.app.getExchange() == 'binance':\n self.balance = self.balance.replace('QUOTE', currency)\n else: \n # replace QUOTE and BASE placeholders\n if currency in ['EUR','GBP','USD']:\n self.balance = self.balance.replace('QUOTE', currency)\n else:\n self.balance = self.balance.replace('BASE', currency)\n\n if self.balance.currency[self.balance.currency.isin([currency])].empty:\n self.balance.loc[len(self.balance)] = [currency, 0, 0, 0]\n\n # retrieve balance of specified currency\n df = self.balance\n df_filtered = df[df['currency'] == currency]['available']\n\n if len(df_filtered) == 0:\n # return nil balance if no positive balance was found\n return 0.0\n else:\n # return balance of specified currency (if positive)\n if currency in ['EUR', 'GBP', 'USD']:\n return float(self.app.truncate(float(df[df['currency'] == currency]['available'].values[0]), 2))\n else:\n return float(self.app.truncate(float(df[df['currency'] == 
currency]['available'].values[0]), 4))\n\n else:\n if self.mode == 'live':\n # if config is provided and live connect to Coinbase Pro account portfolio\n model = CBAuthAPI(self.app.getAPIKey(), self.app.getAPISecret(), self.app.getAPIPassphrase(), self.app.getAPIURL())\n if currency == '':\n # retrieve all balances\n return model.getAccounts()[['currency', 'balance', 'hold', 'available']]\n else:\n df = model.getAccounts()\n # retrieve balance of specified currency\n df_filtered = df[df['currency'] == currency]['available']\n if len(df_filtered) == 0:\n # return nil balance if no positive balance was found\n return 0.0\n else:\n # return balance of specified currency (if positive)\n if currency in ['EUR','GBP','USD']:\n return float(self.app.truncate(float(df[df['currency'] == currency]['available'].values[0]), 2))\n else:\n return float(self.app.truncate(float(df[df['currency'] == currency]['available'].values[0]), 4))\n \n else:\n # return dummy balances\n\n if currency == '':\n # retrieve all balances\n return self.balance\n else:\n # replace QUOTE and BASE placeholders\n if currency in ['EUR','GBP','USD']:\n self.balance = self.balance.replace('QUOTE', currency)\n elif currency in ['BCH','BTC','ETH','LTC','XLM']:\n self.balance = self.balance.replace('BASE', currency)\n\n if self.balance.currency[self.balance.currency.isin([currency])].empty == True:\n self.balance.loc[len(self.balance)] = [currency,0,0,0]\n\n # retrieve balance of specified currency\n df = self.balance\n df_filtered = df[df['currency'] == currency]['available']\n\n if len(df_filtered) == 0:\n # return nil balance if no positive balance was found\n return 0.0\n else:\n # return balance of specified currency (if positive)\n if currency in ['EUR','GBP','USD']:\n return float(self.app.truncate(float(df[df['currency'] == currency]['available'].values[0]), 2))\n else:\n return float(self.app.truncate(float(df[df['currency'] == currency]['available'].values[0]), 4))", "def getBalance(self):\n connection = sqlite3.connect('/home/BorneAgain/Desktop/flasktest/accounts.db')\n\n cursor = connection.cursor()\n\n sql_command = \"\"\"select amount from accounts where name=?;\"\"\"\n\n cursor.execute(sql_command, (self.name, ))\n\n return round(float(re.sub(r'[\\(\\),]', '', str(cursor.fetchone()))), 2)", "def get_rate_limit(client):\n query = '''query {\n rateLimit {\n limit\n remaining\n resetAt\n }\n }'''\n response = client.execute(query)\n json_response = json.loads(response)\n return json_response['data']['rateLimit']", "def flyer_rate(self, obj):\n return currency(calculate_current_price(1, obj,\n obj.get_or_set_consumer_count()))", "def cash(self, qtt_100s, qtt_50s, qtt_20s):\n return (qtt_100s * 100) + (qtt_50s * 50) + (qtt_20s * 20)", "def cash_withdrawal(amt):\r\n global withdraw_money\r\n global balance_money\r\n withdraw_money = amt\r\n print(\"Amout enetered : \", withdraw_money)\r\n balance_money = balance_money - withdraw_money\r\n print(\"Withdraw success\")", "def used_daily_balance(self):\n current_date = timezone.now()\n if current_date > self.date_billing_start:\n days_used = (current_date - self.date_billing_start).days\n return round(days_used * self.plan_cost.daily_cost, 2)\n return 0", "def get_currency():\n return _currency", "def getCurrentBalance(self):\r\n return self.balance_amt", "def claim_choices(self):\r\n return money_range(self.subsession.min_amount, self.subsession.max_amount, 0.05)", "def withdraw(self, amount, budget):\r\n if budget != \"Total Balance\":\r\n assert budget in self.budgets, 
\"Specified budget doesn't exist\"\r\n self.budgets[budget] -= float(amount)\r\n self.balance -= float(amount)", "def withdraw(self, cr, uid, ids, amount, context=None):\n record = self.browse(cr, uid, ids, context=context)[0]\n current_amount = record.current_amount\n withdraw_amount = record.withdraw_amount\n if amount > current_amount:\n raise osv.except_osv(_('Constraint Error'), _(\"The the amount is greater than the Current Money!\"))\n\n record.write({'current_amount':current_amount - amount,\n 'withdraw_amount':withdraw_amount + amount })\n return True", "def get_rate_limit(self):\n resp = self._session.get(self.API_ROOT + \"/rate_limit\")\n log.info(resp.text)", "def api_call(cls, currency):\n headers = {\"x-accept-version\": \"2.0.0\", \"Accept\": \"application/json\"}\n r = requests.get(cls.API_URL + currency, headers=headers)\n r.raise_for_status()\n return r.json()[\"data\"][\"rate\"]", "def quota():\n try:\n fname = os.path.join(os.path.expanduser(\"~\"), \".planet.json\")\n contents = {}\n if os.path.exists(fname):\n with open(fname, \"r\") as fp:\n contents = json.loads(fp.read())\n else:\n raise IOError(\"Escape to End and Initialize\")\n if not len(contents) != 0:\n raise IOError(\"Escape to End and Initialize\")\n else:\n k = contents[\"key\"]\n main = requests.get(\n \"https://api.planet.com/auth/v1/\" + \"experimental/public/my/subscriptions\",\n auth=HTTPBasicAuth(k, \"\"),\n )\n if main.status_code == 200:\n content = main.json()\n for item_id in content:\n print(\" \")\n print(\"Allocation Name: %s\" % item_id[\"organization\"][\"name\"])\n print(\n \"Allocation active from: %s\" % item_id[\"active_from\"].split(\"T\")[0]\n )\n print(\"Quota Enabled: %s\" % item_id[\"quota_enabled\"])\n print(\"Total Quota in SqKm: %s\" % item_id[\"quota_sqkm\"])\n print(\"Total Quota used: %s\" % item_id[\"quota_used\"])\n if (item_id[\"quota_sqkm\"]) is not None:\n leftquota = float(\n item_id[\"quota_sqkm\"] - float(item_id[\"quota_used\"])\n )\n print(\"Remaining Quota in SqKm: %s\" % leftquota)\n else:\n print(\"No Quota Allocated\")\n print(\"\")\n else:\n print(\"Failed with exception code: \" + str(main.status_code))\n\n except IOError:\n print(\"Initialize client or provide API Key\")", "def withdraw_money():\n print(\"\\n\")\n print(messages.account_credentials)\n u_id = pyip.inputInt(\"Your Id: \", greaterThan=0)\n password = pyip.inputPassword(\"Your Password: \")\n\n credentials = {\"id\":u_id, \"password\":password}\n result = BankOperationsBackend.withdraw_money(credentials)\n start_again() if result else BankOperationsUi.withdraw_money()", "def returnCurrencies(self):\n pass", "def compute_quotation_price(self):\n result = decimal.Decimal('0')\n if self.vehiculePrice:\n result = self.vehiculePrice * 2 / 100\n if self.covWind:\n result += get_coverage_price_by_name(\"WIND\")\n if self.covPass:\n result += get_coverage_price_by_name(\"PASS\")\n if self.covFlood:\n result += get_coverage_price_by_name(\"FLOOD\")\n return result", "def sessionquota(self) :\n\t\ttry :\n\t\t\treturn self._sessionquota\n\t\texcept Exception as e:\n\t\t\traise e", "def getCurrency(self):\n return self.base.get(\"currency\", [])", "def execute_withdraws(self):\n withdraws = [v for v in self.action_register if v['action'] == 'withdraw']\n for withdraw in withdraws:\n self.model.schedule.agents_by_type['Customer'][withdraw['unique_id']].euro_wallet -= withdraw['value']\n self.model.schedule.agents_by_type['Customer'][withdraw['unique_id']].withdrawn_euros += withdraw['value']\n 
self.model.schedule.agents_by_type['Customer'][withdraw['unique_id']].last_withdraw_tick = self.model.schedule.steps", "def balance(self) -> float:\n\t\tbalance = 0\n\t\tfor transaction in self.transactions:\n\t\t\tsign = 1 if transaction.receiving_account == self.__number else -1\n\t\t\tbalance += sign*transaction.usd*transaction.completed\n\t\t# The bank has infinite money\n\t\tif self.name == Account.BANK:\n\t\t\tbalance = Decimal('Infinity')\n\t\treturn balance", "def buy_limit(self, market, quantity, rate):\n\n result = self.api_query('Trade', {'type':'buy', 'pair': market, 'amount': quantity, 'rate':'%.8f'%rate})\n return result", "def withdraw(self, request, *args, **kwargs):\n account = self.get_object()\n account_serializer = self.get_serializer()\n value = request.data.get(\"valor\", None)\n\n try:\n withdraw_result = account_serializer.withdraw(value, account)\n except ValueError as ve:\n return Response({\"detail\": \"Could not withdraw: {0}.\".format(ve),\n \"status_code\": status.HTTP_400_BAD_REQUEST}, status=status.HTTP_400_BAD_REQUEST)\n\n return Response(withdraw_result)", "def get_total_supply() -> int:\n return total_supply", "def deposit(account, amount):\n pass", "def getAvailableBalance(self):\n\n # calculates the available balance as the sum of the account balance and the overdraft limit\n availableBalance = self.balance + self.overdraftLimit\n return availableBalance", "def find_balanced_budget_tax(c):\n def steady_state_budget(t):\n e, u, w = compute_steady_state_quantities(c, t)\n return t - u * c\n\n tau = brentq(steady_state_budget, 0.0, 0.9 * c)\n return tau", "def PV_BenefitSurrender(t):\n if t > last_t:\n return 0\n else:\n return (-prj_bnft_Surrender(t) + PV_BenefitSurrender(t + 1)) / (1 + DiscRate(t))", "def usage_quota(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"usage_quota\")", "def _get_money_earned(tier):\n return int(((tier**2) * 10) + 10)", "def getcurrency(self):\n return self.__currency", "def get_wallet_balance():\n try:\n if CONF.exchange == 'bitmex':\n return EXCHANGE.fetch_balance()['info'][0]['walletBalance'] * CONF.satoshi_factor\n if CONF.exchange == 'kraken':\n asset = CONF.base if CONF.base != 'BTC' else 'XBt'\n return float(EXCHANGE.private_post_tradebalance({'asset': asset})['result']['tb'])\n if CONF.exchange == 'liquid':\n result = EXCHANGE.private_get_accounts_balance()\n if result is not None:\n for bal in result:\n if bal['currency'] == CONF.base:\n return float(bal['balance'])\n\n except (ccxt.ExchangeError, ccxt.NetworkError) as error:\n LOG.error(RETRY_MESSAGE, type(error).__name__, str(error.args))\n sleep_for(4, 6)\n get_wallet_balance()" ]
[ "0.63382167", "0.62305385", "0.6179834", "0.60655946", "0.5956213", "0.5947808", "0.59429044", "0.58825815", "0.58773303", "0.5864017", "0.58567953", "0.58432806", "0.5827308", "0.5814937", "0.57675916", "0.57590467", "0.5756222", "0.57123595", "0.5685044", "0.5678759", "0.5667374", "0.5618233", "0.56103253", "0.56043047", "0.55661595", "0.5557036", "0.5556752", "0.55559623", "0.5546103", "0.55141354", "0.5499965", "0.5492151", "0.5480178", "0.54655385", "0.5450963", "0.5444612", "0.54235864", "0.54186964", "0.5410967", "0.53972226", "0.5386472", "0.5386472", "0.5385596", "0.53824717", "0.5370717", "0.5361192", "0.53371173", "0.53365785", "0.5336445", "0.53257596", "0.5324732", "0.5320393", "0.5320018", "0.5289265", "0.5285755", "0.52833664", "0.5280168", "0.52795565", "0.5262855", "0.5258586", "0.52574563", "0.5254252", "0.52524304", "0.52459264", "0.52453274", "0.5240631", "0.5233333", "0.523327", "0.5228868", "0.52255696", "0.5224032", "0.52106893", "0.5200545", "0.51919484", "0.51808536", "0.51729846", "0.51723176", "0.5164376", "0.51612896", "0.5158935", "0.5156001", "0.51476127", "0.514506", "0.5142557", "0.51251215", "0.511202", "0.5110831", "0.5102577", "0.51009583", "0.5092797", "0.50860846", "0.5084835", "0.50740105", "0.50675666", "0.50672626", "0.5065906", "0.50592333", "0.5055575", "0.5055243", "0.50548404" ]
0.6826426
0
Parent get sub user deposit history.
def get_sub_user_deposit_history(self, sub_uid: 'int', currency: 'str' = None, start_time: 'int' = None, end_time: 'int' = None, sort: 'str' = None, limit: 'int' = None, from_id: 'int' = None) -> DepositHistory:
    check_should_not_none(sub_uid, "sub_uid")
    params = {
        "subUid": sub_uid,
        "currency": currency,
        "startTime": start_time,
        "endTime": end_time,
        "sort": sort,
        "limit": limit,
        "fromId": from_id
    }

    from huobi.service.wallet.get_sub_user_deposit_history import GetSubUserDepositHistoryService
    return GetSubUserDepositHistoryService(params).request(**self.__kwargs)
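A minimal usage sketch for the method above, assuming the Huobi Python SDK's WalletClient wrapper; the credentials, sub-user id, and limit are placeholders, not values from the source:

# Hypothetical usage sketch; api_key, secret_key, and sub_uid are placeholders.
from huobi.client.wallet import WalletClient

wallet_client = WalletClient(api_key="your-api-key", secret_key="your-secret-key")

# Fetch the ten most recent deposits recorded for one sub-user.
history = wallet_client.get_sub_user_deposit_history(sub_uid=12345678, limit=10)
print(history)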
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def last_history(self, user):\n return History.objects(user=user).order_by('-created_at').first()", "def user_history(self):\n self.query_1 = \"SELECT * FROM orders WHERE user_id=%s\"\n self.input_1 = (self.user_id,) \n self.event = \"user_history\"\n self.message = \"Order history fetched successfully.\"\n self.error = \"Unable to fetch order history.\"", "def getUsernameHistory(UserId):\n url = f\"https://users.roblox.com/v1/users/{UserId}/username-history\"\n r = requests.get(url)\n j = json.loads(r.text)\n data = j['data']\n return data", "def show_history(user_id):\n return History.where('user_id', user_id).get()", "def get_user_purchase_history_admin(user_name, other_user_name):\n\n user_name = auth.get_username_from_hash(user_name)\n # user_handler.is_permitted_to_do(user_name, None, 1 << Action.USER_PURCHASE_HISTORY.value)\n # check if admin\n return purchase_handler.get_user_purchase_history(other_user_name)", "def show_history(self, user: TelegramController.User):\n\n header = connect(self.__path)\n curs = header.cursor()\n encrypted_id = md5((str(user.id) + \"typicaluser\").encode()).hexdigest()\n curs.execute(\"SELECT * FROM users WHERE id = (?)\", (encrypted_id,))\n data = curs.fetchall()[0][1]\n return data", "def get_user_history (history_id=None):\n history_id = history_id or os.environ['HISTORY_ID']\n gi = get_galaxy_connection(history_id=history_id, obj=False)\n hc = HistoryClient(gi)\n history = hc.show_history(history_id, visible=True, contents=True)\n return history", "def history():\n user_history=db.execute(\"SELECT * FROM history WHERE user_id=:u_i\",u_i=session[\"user_id\"])\n return render_template(\"history.html\",s=user_history)", "def get_history(self):\n return self.history", "def history():", "def History(self):\n return self.historydict.get('history', [])", "def history():\n transactions = db.execute(\"SELECT * FROM history WHERE user_id = ?\", session[\"user_id\"])\n user_name = db.execute(\"SELECT username, cash FROM users WHERE id = ?\", session[\"user_id\"])\n \n return render_template(\"history.html\", transactions=transactions, user_name=user_name[0][\"username\"])", "def history(self):\n return self.info['history']", "def history():\n\n # Access user's id\n user_id = session[\"user_id\"]\n\n # Obtain history information for logged in user\n TRANSACTIONS = db.execute(\"SELECT * FROM history WHERE user_id = ? ORDER BY transacted DESC\", user_id)\n\n return render_template(\"history.html\", transactions=TRANSACTIONS)", "def QueryHistory(self):\n return []", "def account_df_history(self, improve=False):\n return(self.account_df('history', improve))", "def do_gethistory(self,args):\n #Very rough. 
pretty print it\n history=bitstamp.get_usertransactions()\n ppdict(history)", "def history():\n\n entry = db.execute(\"SELECT * FROM users WHERE id=:id\",\n id=session['user_id'])\n user = entry[0]['username']\n owned = db.execute(\"SELECT * FROM transactions WHERE user=:user ORDER BY date\",\n user=user)\n\n return render_template(\"history.html\", stocks = owned)", "def history():\n \n user_id = session[\"user_id\"]\n history_list = hist(user_id, db)\n return render_template('history.html', history=history_list)", "def history():\n query = Records.query.filter_by(user_id=session.get(\"user_id\")).all()\n return render_template(\"history.html\", rows=query)", "def history():\n \n value_dicts = db.execute(\"SELECT * FROM history WHERE user_id = :usid\", usid=session[\"user_id\"])\n return render_template(\"history.html\", value_dicts=value_dicts)", "def get_history(self):\n return self.__history[:]", "def get_user_purchases_history(user_name):\n user_name = auth.get_username_from_hash(user_name)\n return purchase_handler.get_user_purchase_history(user_name)\n # return user_handler.get_user_purchase_history(user_name)", "def history():\n\n # get all transactions for current user\n transactions = db.execute(\"SELECT * FROM transactions WHERE user_id = :user_id\", user_id=session[\"user_id\"])\n\n # render history.html with all user transactions\n return render_template(\"history.html\", transactions=transactions, usd=usd)", "def history():\n # name variable to show current users name in template\n name = db.execute(\"SELECT username FROM users WHERE id=:id\", id=session[\"user_id\"])\n\n # user's transaction history\n hist = db.execute(\"SELECT transactid, name, price, quantity, date FROM portfolio WHERE userid = :userid\", userid=session[\"user_id\"])\n\n # return the template with the relevant objects for jinja\n return render_template(\"history.html\", name=name, hist=hist)\n\n # if function fails\n return apology(\"Can't display history\", 400)", "def history():\n\n rows = db.execute(\"SELECT * FROM 'transaction' WHERE u_id = :user_id\", user_id = session[\"user_id\"])\n return render_template(\"history.html\", rows = rows)", "def history():\n\n user_id = session.get('user_id')\n table_name = f'stocks_user{user_id}'\n rows = db.execute(\"SELECT * FROM ?\", table_name)\n\n return render_template('history.html', rows=rows)", "def orders_history(self): \n return(self._d_orders['history'])", "def history(self):\n return _uhd_swig.usrp_sink_sptr_history(self)", "async def history(self, ctx, user_id: str):\n\n session = self.bot.helpers.get_db_session()\n try:\n self.bot.log.info(\n f\"CMD {ctx.command} called by {ctx.message.author} ({ctx.message.author.id})\"\n )\n guild = ctx.message.guild\n user = await self.bot.helpers.get_member_or_user(user_id, guild)\n if not user:\n return await ctx.send(\n f\"Unable to find the requested user. Please make sure the user ID or @ mention is valid.\"\n )\n\n (\n embed_result_entries,\n footer_text,\n ) = await self.bot.helpers.get_action_history(session, user, guild)\n\n p = FieldPages(ctx, per_page=8, entries=embed_result_entries,)\n p.embed.color = 0xFF8C00\n p.embed.set_author(\n name=f\"Member: {user} ({user.id})\", icon_url=user.avatar_url\n )\n p.embed.set_footer(text=footer_text)\n await p.paginate()\n except discord.HTTPException as err:\n self.bot.log.exception(\n f\"Discord HTTP Error responding to {ctx.command} request via Msg ID {ctx.message.id}. {sys.exc_info()[0].__name__}: {err}\"\n )\n await ctx.send(\n f\"Error processing {ctx.command}. 
Error has already been reported to my developers.\"\n )\n except DBAPIError as err:\n self.bot.log.exception(\n f\"Error logging note to database. {sys.exc_info()[0].__name__}: {err}\"\n )\n await ctx.send(\n f\"Error processing {ctx.command}. Error has already been reported to my developers.\"\n )\n session.rollback()\n except Exception as err:\n self.bot.log.exception(\n f\"Error responding to {ctx.command} via Msg ID {ctx.message.id}. {sys.exc_info()[0].__name__}: {err}\"\n )\n await ctx.send(\n f\"Error processing {ctx.command}. Error has already been reported to my developers.\"\n )\n finally:\n session.close()", "def history(self):\n return self._history", "def history(self):\n return self._history", "def list_history(request):\n history = History.objects\n\n if not is_admin(request.user):\n history = history.filter(submitter=request.user)\n history = history.order_by('-submission_date')\n\n return render('editor/list_history.mako', request, {\n 'history': history,\n })", "def history():\n\n transactions = db.execute(\"SELECT * FROM transactions WHERE user_id = ? ORDER BY date DESC, time DESC\", session[\"user_id\"])\n\n return render_template(\"history.html\", transactions=transactions)", "def history(self, update, context):\n\n message = update.message.text.lower().split(\" \")\n user = self.User(update)\n output = \"\"\n if message[1] == \"show\":\n if not self.data_base.has_history(user):\n output = \"you don't have any history\"\n self.data_base.log(user, update.message.text, output)\n else:\n output = self.data_base.show_history(user)\n if len(output) > 4096:\n output = output[-4096::]\n self.data_base.log(user, update.message.text, \"Successfully showed history\")\n\n elif message[1] == \"clear\":\n if not self.data_base.has_history(user):\n output = \"your history is already clean\"\n else:\n self.data_base.clear_history(user)\n output = \"Clean\"\n self.data_base.log(user, update.message.text, output)\n else:\n output = \"Looks like you have a little mistake\\n\" \\\n \"the correct way of using the /history command is:\\n\" \\\n \"/history show\\n\" \\\n \"/history clear\"\n self.data_base.log(user, update.message.text, output)\n user.send_message(output)", "def history():\n history = db.execute(\"SELECT * from history WHERE id=:id\", id=session[\"user_id\"])\n\n return render_template(\"history.html\", history = history)", "def history():\n history = db.execute(\"SELECT * from history WHERE id=:id\", id=session[\"user_id\"])\n \n return render_template(\"history.html\", history = history)", "def getJobHistory(self,jobname):\n\t\tpass", "def get_deposit_history(self, currency=None):\n if not currency:\n currency = \"\"\n return self.__call__('balance', \"getdeposithistory\", \n {\"currencyname\": currency})", "def get_history_since(self, start=0):\n hist = self.service.users().history()\n try:\n results = hist.list(userId='me', startHistoryId=start).execute()\n if 'history' in results:\n yield results['history']\n while 'nextPageToken' in results:\n results = hist.list(userId='me',\n pageToken=results['nextPageToken'],\n startHistoryId=start).execute()\n if 'history' in results:\n yield results['history']\n\n except googleapiclient.errors.HttpError as ex:\n if ex.resp.status == 404:\n raise Gmail.NoHistoryException\n elif ex.resp.status == 403:\n raise Gmail.UserRateException(ex)\n else:\n raise Gmail.GenericException(ex)", "def history():\n \n # only prints shifts from current user\n usernum = db.execute(\"SELECT * FROM users WHERE id=:id\", id = 
session[\"user_id\"])[0][\"id\"]\n \n # stores shift data into hours\n hours = db.execute(\"SELECT * FROM history WHERE User=:id\", id = usernum)\n \n # calculates total amount of cash ever paid to user\n cash = db.execute(\"SELECT sum(total) FROM history WHERE User=:id\", id = session[\"user_id\"])[0][\"sum(total)\"]\n \n return render_template(\"history.html\", hours = hours, Total = cash)", "def get_read_by(self):\n return {entry.username for entry in self.history if entry.action == MessageAction.read}", "def history():\n current_userid = session[\"user_id\"]\n userbalance = get_userbal(db, current_userid)\n userstocks = get_userstock(db, current_userid)\n stockhistory = get_history(db, current_userid)\n stocklist = get_stocklist(db, stocksid=True, prices=True)\n if request.method == \"GET\":\n return render_template(\"history.html\", userbalance=usd(userbalance),\n userstocks=userstocks, buystocks=stocklist,\n stockhistory=stockhistory)\n else:\n return apology(\"TODO\")", "def user_history(username):\n follow_form = FollowForm()\n unfollow_form = UnfollowForm()\n return render_template('user_history.html',\n username=username,\n follow_form=follow_form,\n unfollow_form=unfollow_form)", "def get_history(self):\r\n\r\n return self.board_history", "def Modifier_History(self):\n\t\tpass", "def parent_comments_in_reverse_order(self):\n return self.exclude(parent__isnull=False).order_by(\"-created_at\")\\\n .select_related(\"user\")", "def get_history(self, key=None):\n val = self.history.values.get(key, None)\n if val is None:\n return self.history.values\n else:\n return val", "def get_order_history(self):\n return self.__call__('orders', 'getorderhistory')", "def history():\n rows = db.execute(\"SELECT * FROM histories WHERE id=:id\", id=session[\"user_id\"])\n\n return render_template(\"history.html\", rows=rows)", "def history():\n #Get users history (no amount = 0)\n #Prepare table\n\n rows = db.execute(\"SELECT * from history WHERE user_id = :userid AND amount != 0\", userid = session[\"user_id\"])\n for row in rows:\n row['price'] = usd(row['price'])\n\n return render_template(\"history.html\", history=rows)", "def get_user_borrowing_history(user_id):\n borrowed_books = BorrowBook.get_all_borrowed_books()\n user_books = [\n book for book in borrowed_books if book.user_id == user_id]\n borrowing_history = []\n book_details = {}\n for book in user_books:\n try:\n singleBook = Book.get_book_by_id(book.book_id)\n book_details[\"id\"] = singleBook.id\n book_details[\"title\"] = singleBook.title\n book_details[\"author\"] = singleBook.author\n book_details[\"isbn\"] = singleBook.isbn\n book_details[\"borrowDate\"] = book.date_borrowed\n if book.returned:\n book_details[\"returnDate\"] = book.date_returned\n else:\n book_details[\"dueDate\"] = book.date_due\n except Exception as e:\n print(e)\n finally:\n borrowing_history.append(book_details)\n book_details = {}\n return borrowing_history", "def getInterestedUsers():", "def run_history(self, expanded, unexpanded) : \n\t\toptions, args = self.getopt([\"clear\", \"user+\"], unexpanded)\n\t\tif (options is None) and (args is None) :\n\t\t\treturn -1\t# message was already displayed in self.getopt()\n\t\tif args :\n\t\t\treturn self.errormessage(\"Doesn't need any other argument\")\n\t\thistory = self.getHistory()\n\t\tif history is not None :\n\t\t\tif options.has_key(\"clear\") :\n\t\t\t\t# we want to clear it, UpdateHistory will take care of permissions\n\t\t\t\tself.UpdateHistory(\"history --clear\", clear=1)\n\t\t\telse :\n\t\t\t\t# we 
just want to see it.\n\t\t\t\t# Someone who can modify the .zshell_history can see all commands\n\t\t\t\tnewhistory = history.document_src()\n\t\t\t\tif not self.HasPerms(history, \"Change DTML Documents\", verbose=0) :\n\t\t\t\t\tif options.has_key(\"user\") :\n\t\t\t\t\t\treturn self.errormessage(\"You're not allowed to use this option\")\n\t\t\t\t\t# a non-Manager user can only see its commands\n\t\t\t\t\t(username, dummy) = self.WhoAmI()\n\t\t\t\t\tlines = filter(lambda line, u=username: line and (string.split(line, ',')[1] == u), string.split(newhistory, '\\n'))\n\t\t\t\t\tnewhistory = string.join(map(lambda line: string.split(line, ',')[2], lines), \"\\n\")\n\t\t\t\telse :\n\t\t\t\t\t# The person has sufficient permissions\n\t\t\t\t\t# to list only some username's commands\n\t\t\t\t\tnewh = []\n\t\t\t\t\tfor line in filter(None, string.split(newhistory, '\\n')) :\n\t\t\t\t\t\tcmduser = string.split(line, ',')[1]\n\t\t\t\t\t\t# not optimal, but works:\n\t\t\t\t\t\tif self.match_anystring(\"user\", cmduser, options) :\n\t\t\t\t\t\t\tnewh.append(line)\n\t\t\t\t\tnewhistory = string.join(newh, '\\n')\n\t\t\t\tself.htmlmessage(string.replace(newhistory, '\\n', '<BR>\\n'), safe=1)\n\t\t\t\tself.printf(\"%s\\n\" % newhistory)\n\t\telse :\n\t\t\treturn self.errormessage(\"No history available\")", "def user(self, uid):", "def revive(self):\n\t\turl = \"https://habitica.com/api/v3/user/revive\"\n\t\treturn(postUrl(url, self.credentials))", "def history(self):\n raise NotImplementedError\n # from domonic.webapi.history import History\n # return History()", "def getAccessLogForUser(cls, user):\n return cls.objects.filter(user_id=user.pk).order_by('timestamp')", "def history():\n\n if request.method == 'POST':\n user_input_uuid = request.form['uuid']\n\n dm = DatabaseManager()\n genes, diseases, uuid, query, genpanel, date =\\\n dm.retreieve_zoekopdracht(user_input_uuid)\n\n make_session(\"uuid\", uuid, 2)\n\n return redirect(url_for('vis_results'))\n\n hislis = []\n\n if session.get('history'):\n hislis = reversed(session['history'])\n\n return render_template(\"history.html\", hislis=hislis)", "def get_submenu():\t\r\n\tsiblings = Page.get_all_siblings()\r\n\t#children = Page.objects.filter(parent=self.id)\r\n\treturn {'siblings': siblings}", "def get_withdrawal_history(self, currency=None):\n if not currency:\n currency = \"\"\n return self.__call__('balance', \"getwithdrawalhistory\", \n {\"currencyname\": currency})", "def complete_user_history(request):\n #unassign_microtask(request)\n list = []\n complete_user_history = BhagirathUserHistory.objects.all()\n for i in complete_user_history:\n dict = {} \n dict['username'] = i.user\n dict['original_sentence'] = i.original_sentence\n dict['translated_sentence'] = i.translated_sentence\n dict['assign_timestamp'] = i.assign_timestamp\n dict['submission_timestamp'] = i.submission_timestamp\n list.append(dict)\n\n overall_user_history = RevisedBhagirathUserHistory.objects.all()\n for i in overall_user_history:\n dict = {} \n dict['username'] = i.user\n dict['original_sentence'] = i.original_sentence\n dict['translated_sentence'] = i.translated_sentence\n dict['assign_timestamp'] = i.assign_timestamp\n dict['submission_timestamp'] = i.submission_timestamp\n list.append(dict)\n\n data = {'complete_user_history':list} \n return render_to_response('my_admin_tools/menu/complete_user_history.html',data,context_instance=RequestContext(request))", "async def history(self, ctx):\n if ctx.invoked_subcommand is None:\n await send_cmd_help(ctx)", "def 
get_exercise_history():\n user_id = session.get(\"email\")\n\n history = fm.full_attempt_history(user_id)\n\n msg = \"Attempt history found for user: {}. {} records.\"\\\n .format(user_id, len(history))\n app.logger.info(msg)\n return jsonify({\"history\": history})", "def svn_fs_node_history(*args):\r\n return _fs.svn_fs_node_history(*args)", "async def history(self, ctx, *, target: Union[discord.Member, FetchUserConverter]):\n\n query = {\"target_id\": target.id, \"guild_id\": ctx.guild.id}\n count = await self.bot.mongo.db.action.count_documents(query)\n\n async def get_actions():\n async for x in self.bot.mongo.db.action.find(query).sort(\"created_at\", -1):\n yield Action.build_from_mongo(self.bot, x)\n\n def format_item(i, x):\n name = f\"{x._id}. {x.emoji} {x.past_tense.title()} by {x.user}\"\n reason = x.reason or \"No reason provided\"\n lines = [\n f\"– **Reason:** {reason}\",\n f\"– at {discord.utils.format_dt(x.created_at)} ({discord.utils.format_dt(x.created_at, 'R')})\",\n ]\n if x.duration is not None:\n lines.insert(1, f\"– **Duration:** {time.human_timedelta(x.duration)}\")\n return {\"name\": name, \"value\": \"\\n\".join(lines), \"inline\": False}\n\n pages = ViewMenuPages(\n source=AsyncEmbedFieldsPageSource(\n get_actions(),\n title=f\"Punishment History • {target}\",\n format_item=format_item,\n count=count,\n )\n )\n\n try:\n await pages.start(ctx)\n except IndexError:\n await ctx.send(\"No punishment history found.\")", "def user_last_read_at(self, user):\n user_history = [history_entry for history_entry in self.history if history_entry.user_id == user.id]\n for history_entry in user_history[::-1]:\n if history_entry.action == MessageAction.read:\n return history_entry.timestamp\n else:\n return None", "def get_fhd_history(settings_file, return_user=False):\n with open(settings_file, \"r\") as f:\n settings_lines = f.readlines()\n main_loc = None\n command_loc = None\n obs_loc = None\n user_line = None\n for ind, line in enumerate(settings_lines):\n if line.startswith(\"##MAIN\"):\n main_loc = ind\n if line.startswith(\"##COMMAND_LINE\"):\n command_loc = ind\n if line.startswith(\"##OBS\"):\n obs_loc = ind\n if line.startswith(\"User\"):\n user_line = ind\n if (\n main_loc is not None\n and command_loc is not None\n and obs_loc is not None\n and user_line is not None\n ):\n break\n\n main_lines = settings_lines[main_loc + 1 : command_loc]\n command_lines = settings_lines[command_loc + 1 : obs_loc]\n history_lines = [\"FHD history\\n\"] + main_lines + command_lines\n for ind, line in enumerate(history_lines):\n history_lines[ind] = line.rstrip().replace(\"\\t\", \" \")\n history = \"\\n\".join(history_lines)\n user = settings_lines[user_line].split()[1]\n\n if return_user:\n return history, user\n else:\n return history", "def history(self):\n return self.board.history", "def user(self):", "def get_store_purchase_history(user_name, store_name):\n\n user_name = auth.get_username_from_hash(user_name)\n permission_handler.is_permmited_to(user_name, Action.STORE_PURCHASE_HISTORY.value, store_name)\n return purchase_handler.get_store_history_purchases(store_name)", "def getOrderHistory(self):\n return self.__orderhistory", "def get_history(user):\n if user in resteems and user in honours:\n return \"**\"+str(resteems[user])+\"** Resteems, **\"+str(honours[user])+\"** Honours\"\n elif user in resteems:\n return \"**\"+str(resteems[user])+\"** Resteems, **0** Honours\"\n elif user in honours:\n return \"**0** Resteems, **\"+str(honours[user])+\"** Honours\"\n else:\n return 
\"**0** Resteems, **0** Honours\"", "def historystorage(self):\n return self._historystorage", "def getOutageHistory(self):\n return self._OutageHistory", "def history(self):\n return _spacegrant_swig.DeNRZI_sptr_history(self)", "def clear_history(self, user: TelegramController.User):\n header = connect(self.__path)\n curs = header.cursor()\n encrypted_id = md5((str(user.id) + \"typicaluser\").encode()).hexdigest()\n curs.execute(\"UPDATE users SET history = (?) WHERE id = (?)\", (\"\", encrypted_id))\n header.commit()", "def history():\n transactions = db.execute(\"SELECT Symbol, Shares, Price, Date FROM history WHERE UserID = :userid\", userid=session.get(\"user_id\"))\n return render_template(\"history.html\", transactionList = transactions, currentUser=session.get(\"user_id\"))", "def history():\n\n userId = session[\"user_id\"]\n\n shares = db.execute(f\"SELECT symbol, shares, price, trans_time FROM transactions WHERE user_id={userId} ORDER BY trans_id DESC\")\n\n return render_template(\"history.html\", shares=shares)", "def history():\n \n # selection of name, symbol, shares and cash of user stocks\n hist = db.execute(\"SELECT * FROM history WHERE id=:id\", id = session[\"user_id\"])\n return render_template(\"history.html\", hist=hist)", "def retrieve_conversation_history(username: Text) -> Text:\n history = inmemory_storage[username]\n if history:\n return jsonify(history)\n else:\n return jsonify(history), 404", "def get_history(hdr):\n return hdr['HISTORY']", "def history():\n histories = db.execute(\"SELECT * from purchases WHERE user_id=:id\", id=session[\"user_id\"])\n \n return render_template(\"history.html\", histories=histories)", "def child_comments_in_order(self):\n return self.order_by(\"created_at\").select_related(\"user\")", "def history():\n # extract history of operation for a particular user\n historical_data = db.execute(\"SELECT Symbol, Company, Shares, Price, Total, Timestamp FROM portfolio WHERE id = :id\", id=session[\"user_id\"])\n\n return render_template(\"history.html\", historical=historical_data)", "def revision_history(self, uuid):\n return self.write.revision_history(rid=uuid)", "def get_history():\n return response_texts_to_entries(make_post_request(HISTORY_API, data={\"k\": config[\"api_key\"]}))", "def get_game_history(self, request):\n return games_ctrl.get_game_history(request.urlsafe_game_key)", "def user(self):\n pass", "def history(self):\n return _spacegrant_swig.udp_debug_sptr_history(self)", "def getParent():", "def history():\n # User reached route via GET (as by clicking a link or via redirect)\n if request.method == \"GET\":\n # Select to buy-sell table\n bs = db.execute(\"SELECT * FROM bs WHERE userID=:userID\", userID=session[\"user_id\"])\n\n # len of buy sell table\n bslen = len(bs)\n\n # Falsh massage\n flash('history')\n\n # Rander buy sell and total return value list\n return render_template(\"history.html\", bs=bs, bslen=bslen)", "def get_queryset(self):\n valid_child_ids = self.valid_responses().values_list(\"child\", flat=True)\n return (\n User.objects.filter(children__in=valid_child_ids)\n .prefetch_related(\n Prefetch(\n \"children\", queryset=Child.objects.filter(id__in=valid_child_ids)\n )\n )\n .distinct()\n )", "def task_history(self):\n return self._task_history", "def node_history(self, node_id, items, zhistory, since, until, items_search=None):\n return self.izx.get_history((node_id,), items, zhistory, since, until, items_search=items_search)", "def get_room_history(self, room):\n pass", "def history():\n username = 
session.get(\"username\")\n history=db.execute(\"SELECT stock_symbol, unit_price, time, quantity, stock_name, status FROM history WHERE username=:username\",\n username=username)\n return render_template(\"history.html\", history=history)", "def history():\n\n #Query transactions by user id\n trans = Transactions.query.filter_by(owner=session['user_id']).all()\n\n #Convert Price to US Dollars and format transaction time\n for t in trans:\n t.price = usd(t.price)\n t.transacted = t.transacted.strftime('%Y-%m-%d %H:%M:%S')\n\n #Return history.html\n return render_template('history.html', trans=trans)", "def list_history_record(request, record_id):\n history = History.objects\n\n if not is_admin(request.user):\n history.filter(submitter=request.user)\n history = history.get(id=record_id)\n\n return render('editor/list_history_record.mako', request, {\n 'record': history,\n })", "def get_history(self, name):\n return self._scalar_history.get_history(name)" ]
[ "0.61068934", "0.6100039", "0.60115993", "0.59361994", "0.59344065", "0.58696795", "0.5844484", "0.5742806", "0.571162", "0.5664033", "0.56583655", "0.56562835", "0.5651735", "0.56295484", "0.5605254", "0.5565101", "0.55631995", "0.55622804", "0.55606115", "0.54985034", "0.549455", "0.5494534", "0.5458319", "0.54354095", "0.54311186", "0.5425557", "0.54193866", "0.54190576", "0.5417586", "0.5395642", "0.53917634", "0.53917634", "0.53764117", "0.5373119", "0.5359403", "0.5351156", "0.53280205", "0.53219366", "0.5318833", "0.5312092", "0.53033304", "0.5285337", "0.52768993", "0.5246239", "0.52454764", "0.52427334", "0.52323526", "0.51864004", "0.5181498", "0.51760924", "0.5172644", "0.5165904", "0.51658165", "0.51610994", "0.5155759", "0.51415205", "0.5131167", "0.51085794", "0.5089462", "0.5082354", "0.50542617", "0.50437677", "0.50392467", "0.50363666", "0.5030859", "0.50253755", "0.50200146", "0.5018712", "0.5010821", "0.500828", "0.5008162", "0.5006516", "0.5004921", "0.50011647", "0.49892473", "0.49838486", "0.49803463", "0.49774426", "0.49706018", "0.49698442", "0.49644724", "0.4961292", "0.49482533", "0.49436373", "0.49290362", "0.4908948", "0.4905237", "0.49050766", "0.48882613", "0.48832774", "0.48798612", "0.48614022", "0.48610723", "0.4857782", "0.4849362", "0.48412", "0.48253018", "0.48242927", "0.48238328", "0.4808731" ]
0.61268777
0
Parent get sub user deposit address
def get_sub_user_deposit_address(self, sub_uid: 'int', currency: 'str') -> list:
    check_should_not_none(sub_uid, "subUid")
    check_should_not_none(currency, "currency")
    params = {
        "subUid": sub_uid,
        "currency": currency
    }

    from huobi.service.wallet.get_sub_user_deposit_address import GetSubUserDepositAddressService
    return GetSubUserDepositAddressService(params).request(**self.__kwargs)
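A matching sketch for the address call; as above, the client construction is an assumption and the currency code and sub-user id are placeholders:

# Hypothetical usage sketch; the identifiers below are placeholders.
from huobi.client.wallet import WalletClient

wallet_client = WalletClient(api_key="your-api-key", secret_key="your-secret-key")

# The method returns a list of deposit-address entries for the sub-user.
addresses = wallet_client.get_sub_user_deposit_address(sub_uid=12345678, currency="usdt")
for deposit_address in addresses:
    print(deposit_address)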
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getAddress(user):", "def address1(self, instance):\r\n return instance.user.profile.address1", "def deposit_address(self):\n response = self.query('deposit_address')\n return response", "def address2(self, instance):\r\n return instance.user.profile.address2", "def get_deposit_address(self, currency):\n return self.__call__('balance', \"getdepositaddress\", \n {\"currencyname\": currency})", "def do_getaddress(self,args):\n ppdict(bitstamp.get_depositaddress())", "def address1_current(self, instance):\r\n return instance.user.profile.address1_current", "def get_address(self):\n entity = self\n if entity.abstract_entity:\n entity = self.get_real_ancestor()\n if entity:\n address = entity.get_institutional_address()\n building = entity.get_building()\n if building:\n if entity.building_recapitulates_entity_name: \n address.extend(building.get_postal_address()[1:])\n else:\n address.extend(building.get_postal_address())\n return address", "def currentownerpe(self) :\n\t\ttry :\n\t\t\treturn self._currentownerpe\n\t\texcept Exception as e:\n\t\t\traise e", "def get_remit_to_address(self): \n return self.remit_to_address", "def returnDepositAddresses(self):\n pass", "def createUserAddress(self,username):\n addr = self.api.getnewaddress()\n dct = {'USER_NAME':username, 'ADDRESS':addr}\n print dct\n address = self.getAdminAddress()\n txd = self.api.publishfrom(address, 'users_addresses',username,self.bin2hex(json.dumps(dct)))\n print txd\n return addr", "def postal_code_current(self, instance):\r\n return instance.user.profile.postal_code_current", "def generate_deposit_address(self, currency):\n return self.__call__('balance', \"generatedepositaddress\", \n {\"currencyname\": currency})", "def address2_current(self, instance):\r\n return instance.user.profile.address2_current", "def rebased_addr(self):\n return self.addr + self.owner_obj.rebase_addr", "def get_one_user():", "def m_get_owner(node_name):\n\n # Initialize the contract classes. 
These classes only work when the smart contracts are already deployed\n# ens = ENS_class()\n# resolver = PublicResolver_class()\n\n # Get the owner of the node name\n owner = ens.owner(node_name)\n print(f\"Owner address: {owner}\")\n\n # Check if we received the zero address\n if int(owner, base=16) == 0:\n print(f\"No owner (or the record is the root record)\")\n return\n\n # Check in the database to see if we have it\n name, pubkey = wallet.account_from_address(owner)\n print(f\"Name from the database: {name}\")", "def address(self):\n out = {'zip_code': '',\n 'city': '',\n 'street': '',\n 'phone': ''}\n if self.user.contract_member.exists():\n last_contract = self.user.contract_member.last()\n out['zip_code'] = last_contract.zip_code\n out['city'] = last_contract.city\n out['street'] = last_contract.street\n out['phone'] = last_contract.phone\n\n return out", "def parent_address(self):\n address = self.address\n if address.startswith(\"/\"):\n address = address[1:]\n if address.endswith(\"/\"):\n address = address[:-1]\n\n if \"/\" in address:\n # Return everything before the last / sign\n return address.rsplit(\"/\", 1)[0]\n elif address:\n return \"\"\n else:\n return None", "def getInterestedUsers():", "def get_user(self):\n return self.get('users/self')", "def get_address(self, ):\n return self.get_parameter('address')", "def user(self):\n pass", "def user(self):", "def postal_code(self, instance):\r\n return instance.user.profile.postal_code", "def account_deposit_address(self, currency_symbol):\n return self.get(f'addresses/{currency_symbol}', auth=True)", "def parent_address(address):\n if len(address) >= 1:\n return address[0:-1]\n else:\n return ''", "def address(self):\n return self._ref_address", "def get_address(self):\n if self.get_entity: # needs an entity to work\n if self.building:\n address = self.get_entity.get_institutional_address()\n address.extend(self.building.get_postal_address())\n return address\n else:\n return self.get_entity.get_address()", "def address(self):\n ...", "def user(self):\n return self.owner.user", "def get_current_address(self):\n pass", "def nomad_address():\n\n print(nomad.get_address())", "def getaccount(self, vergeaddress):\n return self.proxy.getaccount(vergeaddress)", "def show_fresh_address(self):\n\t\treturn self.__fresh_account()[\"address\"]", "def get_owner(self, obj):\n return obj.user.username", "def get_zerotier_address(marketplace):\n logger.info(\"You might need to enter your superuser password.\")\n address = zerotier.get_address(marketplace)\n if not address:\n join_cmd = click.style(\"21 join\", bold=True, reset=False)\n no_zt_network = click.style(\n \"You are not part of the {}. 
Use {} to join the market.\",\n fg=\"red\")\n raise UnloggedException(no_zt_network.format(marketplace, join_cmd))\n\n return address", "def get_id(self): \n\t\treturn (self.user_id)", "def get_address(self):\n if self.address:\n return self.address", "def _get_address(self):\n return self.__address", "def getFromUser(self):\n return self.fromUser", "def get_person(self):\n return self.getParentNode()", "def get_tenants(self):", "def address(self):\n return self.data.get('address')", "def owner(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"owner\")", "def owner(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"owner\")", "def get_score_owner(self) -> Address:\n return self.owner", "def get_subscription_owner(request, profile):\n return profile.km_user.user", "def get_user(self):\n return None", "def rootuser_info(self, datadict):\n\n dict1 = OrderedDict()\n dict1 = datadict['entry_data']['ProfilePage'][0]['graphql']['user']\n\n userdict = OrderedDict()\n keylist = ['id', 'username', 'full_name', 'biography', 'edge_follow', 'edge_followed_by', 'is_private', 'external_url', 'profile_pic_url_hd']\n\n for key in keylist:\n if key is 'edge_follow':\n userdict['following'] = dict1[key]\n elif key is 'edge_followed_by':\n userdict['followers'] = dict1[key]\n else:\n userdict[key] = dict1[key]\n\n userdict['platform'] = datadict['platform']\n\n return (json.dumps(userdict, indent=4))", "def get_bookshare_user_info(patron):\n pass", "def paymentAddress(self):\n return self.selectedAccount.paymentAddress()", "def get_account_details(self):\n pass", "def getOwner(self):\r\n return self.owner", "def user_place(self):\n place = self.status.user['location']\n return place", "def getPublicUserInfo(self, username):\r\n pass", "def getAddress(self) -> int:\n ...", "def address(self) -> str:\n return pulumi.get(self, \"address\")", "def address(self) -> str:\n return pulumi.get(self, \"address\")", "def address(self) -> str:\n return pulumi.get(self, \"address\")", "def user(self, uid):", "def owner(self) -> None:\n return self.bot.get_user(self.bot.config.owner_ids[0])", "def address(self):\n return f'Address = {self._peer.address}/{self._peer.subnet.prefixlen}'", "def get_subscription_owner(request, list_entry):\n return list_entry.profile_item.topic.profile.km_user.user", "def Besucher(self):\n return self.getAnsprechpartner()", "def endpoint_sub_address(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"endpoint_sub_address\")", "def default_get(self, cr, uid, fields, context=None): \n \n \n res = super(granted_rights_order, self).default_get(cr, uid, fields, context=context)\n \n employee_obj = self.pool.get('hr.employee')\n department_obj = self.pool.get('hr.department')\n manager = False\n donor_emp_id = []\n \n if uid != 1 :\n\n donor_emp_id = employee_obj.search(cr ,uid, [('user_id' , '=' , uid )])\n deparment_id = employee_obj.browse(cr,uid,donor_emp_id[0]).department_id.id\n \n if donor_emp_id[0] == department_obj.browse(cr,uid,deparment_id).manager_id.id :\n manager = True\n \n \n \n \n \n \n \n \n \n if donor_emp_id :\n res.update({ 'employee_donor': donor_emp_id[0], \n 'department_id' : deparment_id,\n 'is_a_amanger' : manager,\n })\n return res", "def tribe(self, instance):\r\n return instance.user.profile.tribe", "def return_to_parent_value():\n print (\"\\nReturning to parent value...\\n\")", "def get_deposit(self, deposit):\r\n method = self.wallet_endpoints['get_deposit']['method']\r\n url = self.base_url + 
self.wallet_endpoints['get_deposit']['url'].format(depositId=deposit)\r\n req = requests.request(method, url, headers=self.get_auth_headers())\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return res[\"result\"]\r\n else:\r\n return res", "def get_accessible_user_id(self):\n ### DATABASE CODE GOES HERE\n return 1", "def owner(self):\n return Organization.objects.get(id=self.owner_id)", "def get_institutional_address(self): \n ancestors = []\n showparent = self.display_parent\n for entity in self.get_ancestors(ascending = True).exclude(abstract_entity = True):\n if showparent:\n ancestors.append(entity)\n showparent = entity.display_parent\n return ancestors", "def get_user_location(self, user_role_map_id):", "def get_user_info(self) -> str:\n return self._searcher.get_user_info()", "def user(self):\n return self.getattr('user')", "def get_user(self):\n raise NotImplementedError", "def owner_info(self) -> pulumi.Output['outputs.UserInfoResponse']:\n return pulumi.get(self, \"owner_info\")", "def __int__(self):\r\n return self.userid", "def get_building_by_user(self, user):\r\n\t\t\r\n\t\treturn self.transactions[user][1]", "def getToUser(self):\n return self.toUser", "def address(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"address\")", "def get_address(self):\n return self.address.line[0]+\", \"+self.address.city+\", \"+self.address.state+\", \"+self.address.country", "def owner(self):\n answer = self._call('owner')\n return answer.owner", "def originalownerpe(self) :\n\t\ttry :\n\t\t\treturn self._originalownerpe\n\t\texcept Exception as e:\n\t\t\traise e", "def user(self):\n return self.contact.user", "def address(self) -> Optional[str]:\n return pulumi.get(self, \"address\")", "def address(self) -> Optional[str]:\n return pulumi.get(self, \"address\")", "def get_user_id(self, details, response):\n return response.get(\"sub\")", "def get_submodule_has_out_user_under_public_parent(public_module: ModuleStruct, node_out_user: NodeStruct):\n for module_struct in public_module.module_structs:\n if node_out_user.onnx_name in module_struct.onnx_names:\n return module_struct\n return None", "def user(self):\n u = self.user_info\n return self.user_model.get_by_id(u['user_id']) if u else None", "def get_absolute_url(self):\n return reverse('accountInfo-detail', args=[str(self.uid)])", "def _get_address(self):\n return utf82unicode(pn_terminus_get_address(self._impl))", "def address(self):\n return self._address", "def address(self):\n return self._address", "def address(self):\n return self._address", "def address(self):\n return self._address", "def address(self):\n return self._address", "def get_user():\n\treturn '1', 200" ]
[ "0.6647167", "0.6165405", "0.61530155", "0.5871263", "0.5798319", "0.57832575", "0.5712461", "0.5691261", "0.5627003", "0.55536985", "0.5515165", "0.5496232", "0.54191124", "0.5402566", "0.5397947", "0.5349834", "0.53110534", "0.52480906", "0.5225322", "0.5217574", "0.5213821", "0.519956", "0.51834285", "0.5171264", "0.5166473", "0.5161543", "0.5158671", "0.5157844", "0.51510465", "0.5143243", "0.51415116", "0.513329", "0.51168835", "0.51129425", "0.51113087", "0.5087845", "0.5085818", "0.5074526", "0.50590885", "0.5056773", "0.50564015", "0.5053356", "0.5042821", "0.50204307", "0.5020108", "0.5008484", "0.5008484", "0.49930495", "0.49908388", "0.4980747", "0.49800777", "0.49787948", "0.49759555", "0.49486524", "0.49474213", "0.4945063", "0.49444503", "0.49399626", "0.49383646", "0.49383646", "0.49383646", "0.49343374", "0.49256054", "0.49210122", "0.49162036", "0.4898004", "0.48899564", "0.48891267", "0.48715326", "0.48699483", "0.48613647", "0.4860776", "0.485826", "0.4851568", "0.48500097", "0.48498452", "0.48449376", "0.48341984", "0.4826051", "0.4823412", "0.48166794", "0.48143852", "0.48098135", "0.48009592", "0.48005143", "0.47957763", "0.47929043", "0.4792414", "0.4792414", "0.47910705", "0.47893462", "0.47846037", "0.47803605", "0.4779273", "0.47735155", "0.47735155", "0.47735155", "0.47735155", "0.47735155", "0.47705206" ]
0.58459884
4
Add an obstacle to the map
def add_obstacle(self, obstacle_to_add):
    if self.obstacles.size != 0:
        self.obstacles = np.hstack((self.obstacles, obstacle_to_add))
    else:
        self.obstacles = np.array([obstacle_to_add])
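A self-contained sketch of the grow-by-hstack pattern the method uses; the Map class name and obstacle values here are illustrative, not from the source:

import numpy as np

class Map:
    def __init__(self):
        # Start with an empty obstacle array, the case the else-branch handles.
        self.obstacles = np.array([])

    def add_obstacle(self, obstacle_to_add):
        if self.obstacles.size != 0:
            # Append to the existing 1-D array of obstacles.
            self.obstacles = np.hstack((self.obstacles, obstacle_to_add))
        else:
            # First obstacle: create the array.
            self.obstacles = np.array([obstacle_to_add])

m = Map()
m.add_obstacle(3.0)
m.add_obstacle(7.5)
print(m.obstacles)  # [3.  7.5]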
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_obstacle(self, x, y):\n self.BOARD[y][x].traversable = False\n self.board_array[y][x] = 1", "def add_obstacle(self, x, y):\n self.BOARD[y][x].traversable = False\n self.board_array[y][x] = 1", "def add_obstacle(self, *points: Tuple[float, float]):\n self.obstacles.append(o.Obstacle(*points))", "def set_obstacle(self, pos: tuple):\n if self.within_map(pos):\n self.map[round(pos[0]), round(pos[1])] = OBSTACLE\n return True\n else:\n return False", "def update_obstacle_location(self):\n\n # find the previous location of the obstacle\n old_y = self.map_obstacle.y\n old_x = self.map_obstacle.x\n\n # remove it from the main graph\n self.main_graph[old_y][old_x].contents.remove(self.map_obstacle)\n\n # get the latest location\n self.map_obstacle.update_location()\n (new_y, new_x) = (self.map_obstacle.y, self.map_obstacle.x)\n\n # add it back into the main graph\n self.main_graph[new_y][new_x].contents.add(self.map_obstacle)\n\n # update the map obstacle (not necessary, but it doesn't hurt)\n self.map_obstacle.y = new_y\n self.map_obstacle.x = new_x", "def update_obstacles(self, new_obs):\n self.obstacles = new_obs", "def __init__(self, map_obstacle, main_graph):\n\n self.map_obstacle = map_obstacle\n self.main_graph = main_graph\n\n self.sight_range = self.calculate_sight_range()\n\n self.top_left_y = None\n self.top_left_x = None\n self.bottom_right_y = None\n self.bottom_right_x = None\n self.height = None\n self.width = None\n self.size = self.calculate_size()\n\n # nodes specific to this threat zone\n self.nodes = []", "def random_map(self, world):\n obstacles = []\n if self.cfg[\"obstacle\"][\"octagon\"][\"enabled\"]:\n obstacles += self.__generate_octagon_obstacles(world)\n if self.cfg[\"obstacle\"][\"rectangle\"][\"enabled\"]:\n obstacles += self.__generate_rectangle_obstacles(world)\n\n # update the current obstacles and goal\n self.current_obstacles = obstacles\n self.add_new_goal()\n\n # apply the new obstacles and goal to the world\n self.apply_to_world(world)", "def place_obstacles():\n #Randomly generate different sized rectangles\n #Soem may overlap, which gives more variety in shape of obstacles\n xvals = np.random.randint(0,self.map_dimensions[1],size=self.N_obstacles)\n yvals = np.random.randint(0,self.map_dimensions[0],size=self.N_obstacles)\n lower_left = zip(xvals,yvals)\n rects = []\n for LL in lower_left:\n x = LL[0]\n y = LL[1]\n wmax = self.map_dimensions[1] - x\n w = np.random.randint(0,wmax,size=1)[0]\n hmax = self.map_dimensions[0] - y\n h = np.random.randint(0,hmax,size=1)[0]\n rects += [(x,y,w,h)]\n self.coordinates__obstacles = rects", "def add_neighbors(self, pos, distance, obstacles):\n \n neighbor_list = [(pos[0]-1,pos[1]), (pos[0]+1,pos[1]), \\\n (pos[0],pos[1]-1), (pos[0], pos[1]+1)]\n # Processing each neighbor.\n for (x,y) in neighbor_list:\n if x>=0 and y>=0 and x<self.M and y<self.N: # Out from boundary?\n if (x,y) not in obstacles:\n if (x,y) not in self.footprint: # Already in done list?\n new_distance = distance + 1 + self.heuristic_map[x,y]\n if (x,y) not in self.frontier.keys(): # A new candidate to add to frontier set.\n self.frontier.update({(x,y):new_distance})\n self.distance_map[x,y] = distance + 1\n self.camefrom_map[(x,y)] = pos\n elif new_distance < self.frontier[(x,y)]: # A short path reached this neighbor.\n self.frontier[(x,y)] = new_distance\n self.distance_map[x,y] = distance + 1\n self.camefrom_map[(x,y)] = pos", "def _find_obstacle(self, obstacle_type='*traffic_light*'): \r\n obst = list()\r\n \r\n _actors = 
self._world.get_actors()\r\n _obstacles = _actors.filter(obstacle_type)\r\n\r\n\r\n for _obstacle in _obstacles:\r\n trigger = _obstacle.trigger_volume\r\n\r\n _obstacle.get_transform().transform(trigger.location)\r\n \r\n distance_to_car = trigger.location.distance(self._vehicle.get_location())\r\n\r\n a = np.sqrt(\r\n trigger.extent.x ** 2 +\r\n trigger.extent.y ** 2 +\r\n trigger.extent.z ** 2)\r\n b = np.sqrt(\r\n self._vehicle.bounding_box.extent.x ** 2 +\r\n self._vehicle.bounding_box.extent.y ** 2 +\r\n self._vehicle.bounding_box.extent.z ** 2)\r\n\r\n s = a + b + 10\r\n \r\n if distance_to_car <= s:\r\n # the actor is affected by this obstacle.\r\n obst.append(_obstacle)\r\n\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(_obstacle.get_transform().location, carla.Vector3D(0.5,0.5,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,255,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,10)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(trigger,\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n\r\n return obst", "def draw_obstacles():\n for obstacle in obstacles:\n plt.gca().add_patch(obstacle)", "def init_map(self, obstacle_rate=0.9):\n n = self.size()\n\n map_obstacles = [] # np.zeros((n, n)) # 1: obstacle, 0: non-obstacle\n \n for i in range(n):\n # We only need 2 bit to encode 1/0 for each element of NumberArray\n row = NumberArray(2, n)\n for j in range(n):\n if i == j:\n # map_obstacles[i][j] = 0\n row[j] = 0\n elif i > j:\n # map_obstacles[i][j] = map_obstacles[j][i]\n row[j] = map_obstacles[j][i]\n else:\n # map_obstacles[i][j] = 1 if random.random() > 0.9 else 0\n row[j] = 1 if random.random() > obstacle_rate else 0\n map_obstacles.append(row)\n\n self.map_obstacle = map_obstacles", "def updateHardObstacles(self):\r\n global_obs = self.calcGlobalObstaclePosition([[10, 20],[10, 0],[10, -20]])\r\n self.globalHardObstaclesList.extend(global_obs)", "def draw_obstacle(start, end, img):\n # start, end, top_right, top_left = generate_obstacle_point(start, (start[0] + _OBSTACLE_SIZE, start[1] ))\n cv2.fillPoly(img, np.array([[[start[0] - 25, start[1] - 25],\n [start[0] + 25, start[1] - 25],\n [start[0] + 25, start[1] + 25],\n [start[0] - 25, start[1] + 25]]]), _RED)\n # cv2.rectangle(img, (start[0] - 25, start[1] - 25), (start[0] + 25, start[1] + 25), (0, 255, 0), 3)\n return img", "def is_obstacle(self, pos: tuple):\n if self.within_map(pos):\n return self.map[round(pos[0]), round(pos[1])] == OBSTACLE\n else:\n return False", "def publishObstacles(self):\n mk = Marker()\n mk.header.stamp = rospy.get_rostime()\n mk.header.frame_id = '/base_link'\n\n mk.ns='basic_shapes'\n mk.id = 0\n mk.type = Marker.POINTS\n mk.scale.x = 0.3\n mk.scale.y = 0.3\n mk.scale.z = 0.3\n mk.color.r = 1.0\n mk.color.a = 1.0\n\n for value in self.obstacle_map.obstacles_in_memory:\n p = Point()\n p.x = value[0]\n p.y = value[1]\n mk.points.append(p)\n\n\n self.obs_pub.publish(mk)", "def __init__(self, map_config):\n self.current_obstacles = []\n self.current_goal = None\n self.cfg = map_config", "def updateObstacleMap(self):\n\n all_sensor_readings = self.laser_readings + 
self.sonar_readings\n\n #we remove all the sensor readings that occur inside the robot frame\n restricted_sensor_readings = []\n for pt in all_sensor_readings:\n if not self.obstacle_map.inRobot(pt):\n restricted_sensor_readings.append(pt)\n\n #add the obstacles to the obstacle map\n self.obstacle_map_lock.acquire()\n self.obstacle_map.addObstacles(restricted_sensor_readings)\n self.obstacle_map_lock.release()\n\n return", "def process_obstacle(color, cx, cy, box, x, y, obj_length, obj_height, obj_depth,\n\t\t\t\t\t equi_diameter, obstacle_list, obstacle_lifetime, obstacle_id, visualize, send_data):\n\tcoords = list(depth_to_point_cloud_pos(cx, cy, obj_depth)) # convert obstacle depth to XYZ coordinate\n\n\t#theta = CameraPosition['azimuth'] * math.pi / 180 # get robot pitch angle in radians\n\t#coords[0] = CameraPosition['x'] - coords[0] * math.cos(theta) # convert relative obstacle position to global\n\t#coords[2] = CameraPosition['y'] + coords[2] * math.sin(theta)\n\tmm_diameter = equi_diameter * (1.0 / CameraParams['fx']) * obj_depth # convert pixel diameter to mm\n\n\tif 100 < mm_diameter < 400:\n\t\tnew_obstacle = True\n\t\tcurrent_obstacle = None\n\t\tfor obstacle in obstacle_list:\n\t\t\tx_match = abs(obstacle.x - coords[0]) < 0.3\n\t\t\ty_match = abs(obstacle.y - coords[2]) < 0.3\n\t\t\tz_match = abs(obstacle.z - coords[1]) < 0.5\n\t\t\tdiameter_match = abs(obstacle.diameter - mm_diameter) / 1000. < 0.5\n\t\t\tif x_match and y_match:\n\t\t\t\tobstacle.x = coords[0]\n\t\t\t\tobstacle.y = coords[2]\n\t\t\t\tobstacle.z = coords[1]\n\t\t\t\tobstacle.diameter = mm_diameter / 1000.\n\t\t\t\tnew_obstacle = False\n\t\t\t\tobstacle.lifetime = obstacle_lifetime\n\t\t\t\tif send_data:\n\t\t\t\t\tsend_obstacle_data(obstacle)\n\t\t\t\tcurrent_obstacle = Obstacle(obstacle.id,\n\t\t\t\t\t\t\t\t\t\t\tobstacle.x,\n\t\t\t\t\t\t\t\t\t\t\tobstacle.y,\n\t\t\t\t\t\t\t\t\t\t\tobstacle.z,\n\t\t\t\t\t\t\t\t\t\t\tobstacle.diameter,\n\t\t\t\t\t\t\t\t\t\t\tobstacle_lifetime)\n\t\t\t\tif obstacle.lifetime == 0:\n\t\t\t\t\tobstacle_list.remove(obstacle)\n\t\t\t\tbreak\n\t\tif new_obstacle:\n\t\t\tcurrent_obstacle = Obstacle(obstacle_id,\n\t\t\t\t\t\t\t\t\t\tcoords[0],\n\t\t\t\t\t\t\t\t\t\tcoords[2],\n\t\t\t\t\t\t\t\t\t\tcoords[1],\n\t\t\t\t\t\t\t\t\t\tmm_diameter / 1000.,\n\t\t\t\t\t\t\t\t\t\tobstacle_lifetime)\n\t\t\tobstacle_id += 1\n\t\t\tif send_data:\n\t\t\t\tsend_obstacle_data(current_obstacle)\n\t\t\tobstacle_list.append(current_obstacle)\n\n\t\tif visualize:\n\t\t\t# begin visualization\n\t\t\tcv2.drawContours(color, [box], 0, (0, 0, 255), 1)\n\t\t\tcv2.rectangle(color, (x, y), (x + obj_length, y + obj_height), (0, 255, 0), 2)\n\t\t\tfont = cv2.FONT_HERSHEY_SIMPLEX\n\t\t\tcv2.putText(color, 'id = %d' % current_obstacle.id, (cx, cy + 15), font, 0.4, (255, 0, 255),\n\t\t\t\t\t\t1, cv2.LINE_AA)\n\t\t\tcv2.putText(color, \"x = %.2f\" % coords[0], (cx, cy + 30), font, 0.4, (0, 0, 255), 1,\n\t\t\t\t\t\tcv2.LINE_AA)\n\t\t\tcv2.putText(color, \"y = %.2f\" % coords[2], (cx, cy + 45), font, 0.4, (0, 255, 0), 1,\n\t\t\t\t\t\tcv2.LINE_AA)\n\t\t\tcv2.putText(color, \"z = %.2f\" % (obj_depth / 1000), (cx, cy + 60), font, 0.4, (255, 0, 127),\n\t\t\t\t\t\t1, cv2.LINE_AA)\n\t\t\tcv2.putText(color, \"diameter = %.2f\" % (mm_diameter / 1000), (cx, cy + 75), font, 0.4,\n\t\t\t\t\t\t(255, 127, 0), 1,\n\t\t\t\t\t\tcv2.LINE_AA)\n\treturn obstacle_id", "def spawn_obstacles(self):\n self.obstacle_sprites.empty()\n number_of_obstacles = random.randint(MIN_OBSTACLES, MAX_OBSTACLES)\n while len(self.obstacle_sprites) < 
number_of_obstacles:\n obstacle = Obstacle(random.randrange(0, WIDTH), random.randrange(HEIGHT - 500, HEIGHT))\n obstacle_collision = pygame.sprite.spritecollide(obstacle, self.obstacle_sprites, False)\n if not obstacle_collision:\n self.obstacle_sprites.add(obstacle)", "def __init__(self, costmap):\n # Copy the map metadata\n self.resolution = costmap.info.resolution\n self.min_x = costmap.info.origin.position.x\n self.min_y = costmap.info.origin.position.y\n self.y_width = costmap.info.height\n self.x_width = costmap.info.width\n self.max_x = self.min_x + self.x_width *self.resolution\n self.max_y = self.min_y + self.y_width *self.resolution\n print self.min_x, self.min_y\n print self.max_x, self.max_y\n print \"Resolution: \", self.resolution\n print self.x_width, self.y_width\n \n\n self.motion = self.get_motion_model()\n \n # Copy the actual map data from the map\n x = 0\n y = 0\n ox = list()\n oy = list()\n # obstacle map generation\n self.obstacle_map = [[False for _ in range(self.y_width)]\n for _ in range(self.x_width)]\n obstacles = 0\n for value in costmap.data:\n if value >95:\n obstacles += 1\n self.obstacle_map[x][y] = True\n ox.append(float(x)*self.resolution +self.min_x)\n oy.append(float(y)*self.resolution +self.min_y)\n # Update the iterators\n x += 1\n if x == self.x_width:\n x = 0\n y += 1\n print \"Loaded %d obstacles\"%(obstacles)\n if show_animation: # pragma: no cover\n plt.plot(ox, oy, \".k\")\n plt.grid(True)\n \n # plt.axis(\"equal\")", "def drawpath(self,obstacles):\n for i in obstacles:\n self.distance_map[i[0],i[1]]=44\n print(\"Distance map\")\n print(self.distance_map)\n for i in self.footprint:\n self.distance_map[i[0],i[1]]=88\n print(\"Evaluated path\")\n print(self.distance_map)", "def GroundExcelAddObstacleLevel(builder, ObstacleLevel):\n return AddObstacleLevel(builder, ObstacleLevel)", "def updateObstacles(self, obstacles):\r\n global_obs = self.calcGlobalObstaclePosition(obstacles)\r\n self.globalObstaclesList.extend(global_obs)", "def load_from_info(self, course_info):\n for item in course_info[\"obstacles\"]:\n klass = self.class_map[item[0].lower()]\n midbottom = item[1]\n obstacle = klass(midbottom, self.obstacles)\n if \"gate\" in item[0].lower():\n self.gates.add(obstacle)", "def add_new_goal(self):\n while True:\n goal = self.__generate_new_goal()\n intersects = self.__check_obstacle_intersections(goal)\n if not intersects:\n self.current_goal = goal\n break", "def check_for_obstacles(self):\n obs = False\n obs_p = []\n for point in self.obstacles:\n if -0.15 <= point[1] <= 0.15: # robot is 178mm wide\n # Obstacles should be less than or equal to 0.2 m away before being detected\n if 0 <= point[0] <= .2:\n obs_p.append(point)\n obs = True\n if obs:\n pos = self.determine_pos_of_obstacle(obs_p)\n data = Obstacle()\n data.x = pos[0]\n data.y = pos[1]\n data.obstacle = True\n self.obs_pub.publish(data)", "def request_move(self, map_object, x, y):\n self.moveRequests.append([map_object, x, y])", "def move(self, fruit, corner):\n\n if not check_if_inside(self.x, self.y, corner):\n self.state = 'dead'\n self.fitness = max(1, self.fitness - 5)\n\n if self.size > 4:\n for i in range(self.size - 1):\n if (self.x, self.y) == self.tail[-(i + 2)]:\n self.state = 'dead'\n self.fitness = max(1, self.fitness - 5)\n\n if self.state == 'alive':\n\n location = (self.x, self.y)\n self.tail.append(location)\n self.tail.pop(0)\n\n data = []\n\n distance = estimate_distance(self, fruit)\n angle = estimate_angle(self, fruit)\n\n x_direction_left = 
round(self.direction_x * math.cos(angle) - self.direction_y * math.sin(angle))\n y_direction_left = round(self.direction_x * math.sin(angle) + self.direction_y * math.cos(angle))\n\n x_direction_right = round(self.direction_x * math.cos(angle) + self.direction_y * math.sin(angle))\n y_direction_right = round(-self.direction_x * math.sin(angle) + self.direction_y * math.cos(angle))\n\n if not check_if_inside(self.x + x_direction_left, self.y + y_direction_left, corner):\n obstacle_to_left = 1\n else:\n obstacle_to_left = 0\n\n if not check_if_inside(self.x + x_direction_right, self.y + y_direction_right, corner):\n obstacle_to_right = 1\n else:\n obstacle_to_right = 0\n\n if not check_if_inside(self.x + self.direction_x, self.y + self.direction_y, corner):\n obstacle_ahead = 1\n else:\n obstacle_ahead = 0\n\n data.append(distance)\n data.append(angle)\n data.append(obstacle_ahead)\n data.append(obstacle_to_left)\n data.append(obstacle_to_right)\n\n self.output = self.predict(data)\n\n if np.argmax(self.output) == 0:\n self.direction_x = x_direction_left\n self.direction_y = y_direction_left\n elif np.argmax(self.output) == 1:\n self.direction_x = x_direction_right\n self.direction_y = y_direction_right\n\n self.x = self.x + self.direction_x\n self.y = self.y + self.direction_y\n\n distance_after = estimate_distance(self, fruit)\n\n # if distance_after < distance:\n # self.fitness += 6\n # else:\n # self.fitness = max(1, self.fitness - 7.5)", "def get_obstacles_map(obstacles, placed_pecies):\n \n #create a mask image to draw the obstacles on\n blocks = np.zeros(ARENA_SIZE[::-1], np.uint8)\n\n #get the grid points where the robot needs to placed\n grid = get_grid(ARENA_SIZE)\n\n #draw the obstacles and their safety region on the map\n for i in obstacles.keys():\n cv2.circle(blocks, i, int(CIRCULAR_SAFETY_FACTOR*BLOCK_SIZE[0]), 129, -1)\n cv2.rectangle(blocks, (i[0]-int(obstacles[i][0]/4), i[1]-int(obstacles[i][1]/4)), (i[0]+int(obstacles[i][0]/4), i[1]+int(obstacles[i][1]/4)), 255, -1)\n\n #draw the obstacles and their safety region on the map\n for i in placed_pecies.keys():\n try:\n if not i == grid[5]:\n cv2.circle(blocks, i, int(CIRCULAR_SAFETY_FACTOR*BLOCK_SIZE[0]), 129, -1)\n else:\n cv2.rectangle(blocks, (int(i[0]-7.4*placed_pecies[i][0]/4), int(i[1]-7.4*placed_pecies[i][1]/4)),\n (int(i[0]+7.4*placed_pecies[i][0]/4), int(i[1]+7.4*placed_pecies[i][1]/4)), 129, -1)\n cv2.rectangle(blocks, (i[0]-int(placed_pecies[i][0]/4), i[1]-int(placed_pecies[i][1]/4)), (i[0]+int(placed_pecies[i][0]/4), i[1]+int(placed_pecies[i][1]/4)), 255, -1)\n except Exception as e:\n print(e)\n\n return cv2.bitwise_not(blocks)", "def addNeighbor(self, neighbor):", "def on_enter(self):\n # Add self to list of obstacles\n self.parent._obstacles.add(self)\n super().on_enter()", "def add_neighbour(self, node):\n self.neighbours.add(node)", "def add_map(new_prot, new_target, map_path, map_type):\n hotspot_map = HotspotMap.objects.get_or_create(\n map_type=map_type, target_id=new_target, prot_id=new_prot\n )[0]\n hotspot_map.map_info.save(os.path.basename(map_path), File(open(map_path, encoding='utf-8')))\n return hotspot_map", "def is_map_obstacle_in_screen_range(self):\n raise NotImplementedError", "def add_tile(self, coordinate, tile):\n self._maze[coordinate] = tile", "def add_goal(self, x, y, score=0):\r\n #print(\"WALLS:\")\r\n #print(self.walls)\r\n #print(x,y)\r\n if (x,y) in self.walls:\r\n z=1\r\n else:\r\n self.goals.append((x, y, score))", "def generate_possible_paths(self, obstacle):\n if 
self.does_uav_intersect_obstacle_vertically(obstacle, self.drone.get_point(), self.drone.get_waypoint_holder().get_current_waypoint()):\n if self.does_path_intersect_obstacle_2d(obstacle, self.drone.get_point(), self.drone.get_waypoint_holder().get_current_waypoint()):\n new_attempt_pos_points = [\n [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1] + obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1] - obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1] - obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1] + obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0], obstacle.get_point()[1] + obstacle.get_radius(), obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],\n [obstacle.get_point()[0], obstacle.get_point()[1] - obstacle.get_radius(), obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],\n [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1], obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],\n [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1], obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)]\n ]\n\n new_paths = []\n for new_pos_point in new_attempt_pos_points:\n if not self.does_path_intersect_obstacle_3d(obstacle, self.drone.get_point(), new_pos_point) and self.flight_boundary.is_point_in_bounds(new_pos_point):\n for recursive_new_pos_point in new_attempt_pos_points:\n if self.flight_boundary.is_point_in_bounds(recursive_new_pos_point) and abs(recursive_new_pos_point[2] - new_pos_point[2]) < 5:\n if recursive_new_pos_point[0] != new_pos_point[0] or recursive_new_pos_point[1] != new_pos_point[1]:\n if not self.does_path_intersect_obstacle_3d(obstacle, new_pos_point, recursive_new_pos_point) and not self.does_path_intersect_obstacle_3d(obstacle, recursive_new_pos_point, self.drone.get_waypoint_holder().get_current_waypoint()):\n new_paths.append([new_pos_point, recursive_new_pos_point])\n\n # Uncomment for DEBUGGING ONLY\n for path in new_paths:\n print(\"Point:\", str(path))\n\n return new_paths\n\n return []", "def add_road(ccTremb):\n pass", "def try_put(self, object_to_put, pos_x, pos_y):\n self.validate_position(pos_x, pos_y)\n if self.map[pos_x][pos_y] is None:\n self.map[pos_x][pos_y] = object_to_put\n else:\n return self.map[pos_x][pos_y]", "def add_entity(self, ent):\n self.tiles[ent.position[x]][ent.position[y]].add_entity(ent)", "def add_pion(self, p):\n self.pions[p.x][p.y] = p\n self.pionsliste.append(p)", "def remove_obstacle(self, x, y):\n self.BOARD[y][x].traversable = True\n self.board_array[y][x] = 0", "def _detect_obstacles(self):\n def _distance(point, line_point1, line_point2):\n \"\"\"calcuate the distance between a point and a line\"\"\"\n vec1 = line_point1 - point\n vec2 = line_point2 - point\n distance = np.abs(np.cross(vec1,vec2)) / np.linalg.norm(line_point1-line_point2)\n return distance\n\n def _acute_angle(point, line_point1, line_point2):\n \"\"\"detetrmine if the point is whithin the boundary of the line through law of cosines\"\"\"\n base_line = np.linalg.norm(line_point1-line_point2)\n assert base_line > 0, \"check the library useage\"\n line1 = np.linalg.norm(point - line_point1)\n line2 = np.linalg.norm(point - line_point2)\n cos_angle_1 = 
(base_line**2 + line1**2 - line2**2)/(2*base_line*line1)\n cos_angle_2 = (base_line**2 + line2**2 - line1**2)/(2*base_line*line2)\n if cos_angle_1 * cos_angle_2 > 0:\n return True\n else:\n return False\n\n if self.obstacles != \"None\": # if user assigned some obstacles\n for line in self.env_config: \n line_point1, line_point2 = np.array(line[0]), np.array(line[1])\n point = np.array(self.state[:2])\n distance = _distance(point, line_point1, line_point2)\n acute_angle = _acute_angle(point, line_point1, line_point2)\n if distance <= 0.02 and acute_angle:\n self.adsorption = True\n break\n else:\n self.adsorption = False", "def add_to(self, geo_map):\n try:\n geo_map.add_marker(self)\n return True\n except:\n log_func.fatal(u'Error adding circular marker to map')\n return False", "def a_star_obs(obs_map):\n world_ndarray = np.copy(obs_map[0])\n\n start = tuple(np.argwhere(world_ndarray == -2)[0])\n goal = tuple(np.argwhere(world_ndarray == -3)[0])\n\n world_ndarray[world_ndarray == -2] = 0\n world_ndarray[world_ndarray == -3] = 0\n\n world_tuple = tuple(map(tuple, world_ndarray))\n\n def h_custom_i(cur, end, obstacle):\n ytop, ybot, minx = obstacle\n cur_y, cur_x = cur\n end_y, end_x = end\n obs_bot = np.where(world_ndarray[ybot] == -1)[0][0]\n mid_y = ybot + (ytop - ybot) // 2\n if cur_y in range(ybot, ytop) and cur_x in range(max(obs_bot, start[1]), end_x):\n return 5000 - abs(minx - cur_x) ** 2 - abs(cur_y - mid_y) ** 2\n return abs(cur_x - end_x) + abs(cur_y - end_y)\n\n pr_queue = [] # Use heapqueue as priority queue\n heappush(pr_queue, (0 + h_custom_i(start, goal, obs_map[1]), 0, \"\", start))\n visited = set() # Each element has to be unique in a set\n graph = get_neighbors(world_tuple)\n route_str = \"\"\n\n while pr_queue:\n _, cost, path, current = heappop(pr_queue)\n if current == goal:\n route_str = path\n break\n if current in visited:\n continue\n visited.add(current)\n for direction, neighbour in graph[current].iteritems():\n heappush(pr_queue, (cost + h_custom_i(neighbour, goal, obs_map[1]), cost + 1, path + direction, neighbour))\n world_ndarray[neighbour] = cost + 1\n\n # print \"Expanded nodes(A*+Custom H): \", len(visited), \" Path length: \", len(route_str)\n # Convert string directions to 2D(x,y) coordinates\n route_coord = [start]\n for p in route_str:\n route_coord.append(graph[route_coord[-1]][p])\n\n world_ndarray[start] = -2 # Mark the start and end coordinates again\n world_ndarray[goal] = -3\n\n return route_coord, world_ndarray, len(visited), len(route_str)", "def hit(self) -> None:\n self._obstacle.hit_points -= 1\n self._obstacle.state = HittedState(self._obstacle)", "def set_sensible_obstacles(self, obstacles):\n self.sensible_obstacles = obstacles", "def draw_obstacles(self):\n for obstacle in self.obstacles:\n obstacle.draw(self.window, Colors.BLACK.value)", "def addObject(self, item, row, column, gameGrid=None):\n if not gameGrid:\n gameGrid = self.gameGrid\n if row > self.rows-1 or row < 0 or column > self.columns-1 or column < 0:\n print \"addObject could not add %s: \\\n Location out of bounds\" % str(item)\n return None\n gameGrid.setItem(item, row, column)", "def obstacles(self):\r\n\r\n #Radious arround the head\r\n limit_sight = self.snake_sight\r\n head = self.body[0].position\r\n binary_map_complete = self.complete_mapping()\r\n map_matrix = np.matrix(binary_map_complete)\r\n obstacles = []\r\n\r\n #limits in all directions\r\n left_x = head[0] - limit_sight\r\n right_x = head[0] + limit_sight\r\n up_y = head[1] - limit_sight\r\n down_y = 
head[1] + limit_sight\r\n\r\n #submatrix with limits size\r\n snake_sight = map_matrix[up_y:down_y+1, left_x:right_x+1]\r\n\r\n #Special cases where the snake approximates to the borders\r\n ##Corners\r\n if left_x < 0 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[temporal, snake_sight] \r\n return snake_sight\r\n \r\n if left_x < 0 and down_y > self.limits[1] - 1:\r\n snake_sight = map_matrix[up_y:self.limits[1], 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[temporal, snake_sight]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n\r\n ##Middle\r\n if left_x < 0:\r\n snake_sight = map_matrix[up_y:down_y+1, 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n return snake_sight\r\n\r\n if right_x > self.limits[0]-1:\r\n snake_sight = map_matrix[up_y:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n return snake_sight\r\n\r\n if up_y < 0:\r\n snake_sight = 
map_matrix[0:down_y+1, left_x:right_x+1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[interval_y_matrix, snake_sight]\r\n return snake_sight\r\n \r\n if down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:right_x+1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[snake_sight, interval_y_matrix]\r\n return snake_sight\r\n\r\n return snake_sight", "def __init__(self, ox, oy, reso, rr, start, goal):\n self.reso = reso\n self.rr = rr\n self.calc_obstacle_map(ox, oy)\n self.motion = self.get_motion_model()\n self.start = start\n self.now_pose = self.start\n self.goal = goal\n\n self.Inf = 10000\n self.queue = []\n self.km = 0", "def is_collision_by_map_obstacle(self):\n for content in self.contents:\n if self.content.y == self.y and self.content.x == self.x:\n return True\n else:\n return False", "def __init__(self):\n self.board = {} # dict of (x,y) to PlacedTile\n self.board[(0,0)] = STARTING_PIECE", "def __generate_octagon_obstacles(self, world):\n obs_radius = self.cfg[\"obstacle\"][\"octagon\"][\"radius\"]\n obs_min_count = self.cfg[\"obstacle\"][\"octagon\"][\"min_count\"]\n obs_max_count = self.cfg[\"obstacle\"][\"octagon\"][\"max_count\"]\n obs_min_dist = self.cfg[\"obstacle\"][\"octagon\"][\"min_distance\"]\n obs_max_dist = self.cfg[\"obstacle\"][\"octagon\"][\"max_distance\"]\n\n # generate the obstacles\n obstacles = []\n obs_dist_range = obs_max_dist - obs_min_dist\n num_obstacles = randrange(obs_min_count, obs_max_count + 1)\n\n test_geometries = [r.global_geometry for r in world.robots]\n while len(obstacles) < num_obstacles:\n\n # generate position\n dist = obs_min_dist + (random() * obs_dist_range)\n phi = -pi + (random() * 2 * pi)\n x = dist * sin(phi)\n y = dist * cos(phi)\n\n # generate orientation\n theta = -pi + (random() * 2 * pi)\n\n # test if the obstacle overlaps the robots or the goal\n obstacle = OctagonObstacle(obs_radius, Pose(x, y, theta))\n intersects = False\n for test_geometry in test_geometries:\n intersects |= geometrics.convex_polygon_intersect_test(test_geometry, obstacle.global_geometry)\n if not intersects:\n obstacles.append(obstacle)\n return obstacles", "def draw_on(self, folium_map):", "def add_neighbor(self):\n self.fono += 1", "def add_neighbor(self, cell):\n self.__neighbors.append(cell)", "def position_ship(self, cell, fleet, hit_power):\n if not cell.occupied:\n ship = Ship(fleet, cell, hit_power)\n cell.occupied = True\n cell.mark = constants.ACTIVE_SHIP_MARK\n cell.ship = ship\n self.shipList.append(ship)\n else:\n # raise ex.CannotPlaceFleetError()\n print(\"XXXXXX\")", "def detect_object(world):\n # create the map with only the obstucale to non-zero\n world_hsv = cv2.cvtColor(world, cv2.COLOR_BGR2HSV)\n mask_red = cv2.inRange(world_hsv, low_red, up_red)\n occupancy_grid = np.array(mask_red)\n world_rows, world_cols, _ = world.shape\n\n # create the mask in order to find the goal\n world_hsv = cv2.cvtColor(world, cv2.COLOR_BGR2HSV)\n mask_goal = cv2.inRange(world_hsv, low_blue, up_blue)\n goal_x, goal_y = (15, 15) # goal by default\n\n # look for the obstacle and increase there size\n for i in range(world_rows):\n for j in range(world_cols):\n occupancy_grid[i][j] = int(occupancy_grid[i][j] / 255)\n if mask_goal[i][j] > 200:\n goal_x, goal_y = (i, j)\n object_grid = [[goal_x, goal_y]]\n 
return object_grid, occupancy_grid", "def addObjectMap(self,fromMod,toMod,objectMap):\n if self.objectMaps == None: self.loadObjectMaps()\n self.objectMaps[(fromMod,toMod)] = objectMap", "def add_neighbor(self, node):\n self.neighbors.append(node)", "def callback_map(self, sonar_data, pose_data):\n self.count += 1\n\n ## update robot pose\n robo_x = pose_data.pose.pose.position.x\n robo_y = pose_data.pose.pose.position.y\n robo_theta = tf.transformations.euler_from_quaternion(\n [0, 0,\n pose_data.pose.pose.orientation.z,\n pose_data.pose.pose.orientation.w])[2] # yaw\n print(\"{}: x,y: ({}, {}); theta: {}\".format(self.count, robo_x, robo_y, robo_theta))\n \n ## update map\n for i in range(self.sonar_num):\n # update each detected object pose\n obj_x = sonar_data.points[i].x\n obj_y = sonar_data.points[i].y\n obj_alpha = robo_theta + self.sonar_headings[i]\n\n obj_r = (obj_x**2+obj_y**2)**0.5\n obj_x_proj = obj_r* np.cos(obj_alpha)\n obj_y_proj = obj_r* np.sin(obj_alpha)\n\n obj_x_world = obj_x_proj + robo_x\n obj_y_world = obj_y_proj + robo_y\n\n obj_x_map = int(obj_x_world*self.grid_scale) + self.grid_add\n obj_y_map = int(obj_y_world*self.grid_scale) + self.grid_add\n\n # plot obstacle on the map\n if obj_r < self.sonar_range:\n self.world_map[obj_x_map, obj_y_map] += (self.update - self.l0)\n \n # plot free space on the map\n line_m = np.tan(obj_alpha) # build the line from robot to the object\n line_c = robo_y - line_m*robo_x\n\n x1 = robo_x\n y1 = line_m*x1 + line_c\n r1 = ((x1-robo_x)**2+(y1-robo_y)**2)**0.5 \n while r1 < min(self.sonar_range, obj_r):\n x1_map = int(x1*self.grid_scale) + self.grid_add\n y1_map = int(y1*self.grid_scale) + self.grid_add\n self.world_map[x1_map, y1_map] -= (self.update+self.l0)\n \n if robo_x < obj_x_world:\n x1 += self.grid_size # grow x for x positive plane\n else:\n x1 -= self.grid_size # grow x for x negative plane\n y1 = line_m*x1 + line_c\n r1 = ((x1-robo_x)**2+(y1-robo_y)**2)**0.5 \n \n # normalize the world map\n self.world_prob = np.exp(self.world_map)/(1+np.exp(self.world_map)) # (0,1)", "def increased_obstacles_map(occupancy_grid):\n\n nb_rows = len(occupancy_grid)\n nb_cols = len(occupancy_grid[0])\n increased_occupancy_grid = np.zeros([nb_rows + 6, nb_cols + 6])\n\n for i in range(nb_rows):\n for j in range(nb_cols):\n\n if occupancy_grid[i, j] == OCCUPIED:\n increased_occupancy_grid[i:i + 7, j:j + 7] = np.ones([7, 7])\n\n final_occupancy_grid = increased_occupancy_grid[3:(LENGTH_case + 3), 3:(WIDTH_case + 3)]\n return final_occupancy_grid", "def waypoint_add_global(self):\n pass", "def add_to_world(self, thing):\n\t\tthing.set_world_info(self.current_id, self)\n\t\tself.gameObjects.append(thing)\n\t\tself.current_id += 1", "def add_neighbour(self, star):\n self._neighbours.append(star)", "def test_add(self):\n gm = GridMap(threshold=2, bit_depth=8)\n gm.add(14, 7, \"hello world\")\n gm.add(0, 0, \"DEADBEEF\")\n gm.add(255, 255, \"meh\")\n for x in range(256):\n for y in range(256):\n gm.add(x, y, \"\")\n gm.add(0, 255, None)\n gm.add(15, 31, 3.14598)", "def obstacles_callback(self, data):\n obs_pos = [(obs.ObsPosition.x, obs.ObsPosition.y, obs.ObsPosition.z)\n for obs in data.obs]\n obs_yaw = np.array([obs.ObsTheta for obs in data.obs])\n if len(obs_pos)==0:\n self.obs_risk = 0.0\n self.min_obs_dist = self.detect_obstacle_range + 100.0\n else:\n disp_vec = np.array(obs_pos) - self.car_pos # displacement\n dist_obs = np.linalg.norm(disp_vec, axis=1) # obstacle distance\n # ego heading unit vector\n ego_hdg = (np.cos(self.car_euler[2]), 
np.sin(self.car_euler[2]), 0)\n # cosine of ego heading and obs displacment\n obs_cosine = np.dot(disp_vec, ego_hdg)/dist_obs\n # angle of obs displacement w.r.t ego heading\n obs_angle = np.arccos(obs_cosine)\n # raised cosine, 1.0 within a narrow angle ahead, quickly rolloff\n # to 0.0 as angle increases \n obs_rcos = self.raised_cosine(obs_angle, np.pi/24, np.pi/48)\n # distance risk is Laplacian normalized by detection rangei\n risk_dist = np.exp(-0.1*(dist_obs-self.detect_obstacle_range))\n # relative angle between headings of ego car and obs car\n # shifted by pi\n rel_angle = self.car_euler[2] - obs_yaw + np.pi\n rel_angle = (rel_angle + np.pi) % (2*np.pi) - np.pi\n collide_rcos = self.raised_cosine(rel_angle, np.pi/24, np.pi/48)\n # total directional obs risk is distance risk multiplied by\n # raised-cosied directional weight.\n self.obs_risk = np.sum(\n risk_dist * (obs_rcos+0.1) * (collide_rcos+0.1)\n )\n if np.isnan(self.obs_risk):\n self.obs_risk = 0.0\n # idx = np.argsort(dist_obs)[::]\n # minimum obs distance\n self.min_obs_dist = min(dist_obs)\n near_obs = True if self.min_obs_dist<self.detect_obstacle_range else False\n self.pub_obs_risk.publish(self.obs_risk)\n self.pub_nearest_obs.publish(near_obs)", "def obstacles_form(self,image):\r\n major_axis=60\r\n minor_axis=30\r\n c_y=246\r\n c_x=145\r\n c_y1=90\r\n c_x1=70\r\n radius=35\r\n for i in range(len(image)):\r\n for j in range(len(image[0])):\r\n\r\n #self.ellipse(image,major_axis,minor_axis,i,j,c_x,c_y)\r\n self.circle(image,100,i,j,200,200)\r\n self.circle(image,100,i,j,800,200)\r\n #self.slanted_rect(image,i,j)\r\n self.boundary(image,i,j)\r\n self.boundary1(image,i,j)\r\n self.boundary2(image,i,j)\r\n self.c_shape(image,i,j)\r\n #exploration.c_shape(image,i,j)\r", "def add_layer(self, layer):\n idx = len(self.dict_topo)\n idx += 1\n self.dict_topo[idx] = layer", "def __init__(self, obstacles, kin):\n self.obstacles = obstacles\n self.kin = kin", "def add_neighbor(self, neighbor):\r\n self.neighbors.append(neighbor)", "def through_obstacle(line, obstacles):\r\n noofpoints = 20\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def drawMap(mapObj, gameStateObj, goals, screen):\n \n # mapSurf will be the single Surface object that the tiles are drawn\n # on, so that it is easy to position the entire map on the DISPLAYSURF\n # Surface object. 
First, the width and height must be calculated.\n # mapWidth = len(mapObj) * TILEWIDTH\n # mapSurfHeight = (len(mapObj[0]) - 1) * TILEFLOORHEIGHT + TILEHEIGHT\n # mapSurf = pygame.Surface((mapSurfWidth, mapSurfHeight))\n # mapSurf.fill(BGCOLOR) # start with a blank color on the surface.\n \n for i in xrange(len(tiles)):\n tiles[i].hideturtle()\n \n debugprint(\"drawing map\")\n \n nxtiles = len(mapObj)\n nytiles = len(mapObj[0])\n \n xoffset = TILEWIDTH/2 + TILEWIDTH\n yoffset = WINHEIGHT - TILEHEIGHT/2 - TILEWIDTH\n \n tileCount = 0;\n \n def updateTile(screen, xpos, ypos, shape):\n global tiles\n \n if tileCount >= len(tiles):\n tiles.append(Tile(screen, xpos, ypos, shape))\n else:\n tiles[tileCount].goto(xpos, ypos)\n tiles[tileCount].shape(shape)\n tiles[tileCount].showturtle()\n\n return tileCount + 1\n \n # screen.tracer(1)\n # # Draw the tile sprites onto this surface.\n for x in range(nxtiles):\n for y in range(nytiles):\n xpos = x*TILEWIDTH + xoffset\n ypos = yoffset - y*40\n \n if mapObj[x][y] in TILEMAPPING:\n baseTile = TILEMAPPING[mapObj[x][y]]\n elif mapObj[x][y] in OUTSIDEDECOMAPPING:\n baseTile = TILEMAPPING[' ']\n\n # First draw the base ground/wall tile.\n tileCount = updateTile(screen, xpos, ypos, baseTile)\n # debugprint(xpos)\n # debugprint(ypos)\n if mapObj[x][y] in OUTSIDEDECOMAPPING:\n # Draw any tree/rock decorations that are on this tile.\n tileCount = updateTile(screen,xpos,ypos,OUTSIDEDECOMAPPING[mapObj[x][y]])\n elif (x, y) in gameStateObj['stars']:\n if (x, y) in goals:\n # A goal AND star are on this space, draw goal first.\n tileCount = updateTile(screen,xpos,ypos,IMAGESDICT['covered goal'])\n # Then draw the star sprite.\n tileCount = updateTile(screen,xpos,ypos,IMAGESDICT['star'])\n elif (x, y) in goals:\n # Draw a goal without a star on it.\n tileCount = updateTile(screen,xpos,ypos,IMAGESDICT['uncovered goal'])\n\n # Last draw the player on the board.\n if (x, y) == gameStateObj['player']:\n # Note: The value \"player_image\" refers\n # to a key in \"PLAYERIMAGES\" which has the\n # specific player image we want to show.\n tileCount = updateTile(screen,xpos,ypos,PLAYERIMAGES[game_state[\"player_image\"]])\n debugprint(PLAYERIMAGES[game_state[\"player_image\"]])", "def add_neighbor(self, node_name, cost):\n\t\tself.neighbors.append((node_name, cost))", "def show_obstacle(plot, points):\n for p1, p2 in zip(points, [points[-1]] + list(points)):\n plot.plot([p1[0], p2[0]], [p1[1], p2[1]], 'b')", "def add_boundary(self, boundary):\n self._bounds.append(boundary)", "def build_obstacle_list(self, vehicle_transform, prediction_msg):\n obstacle_list = []\n # look over all predictions\n for prediction in prediction_msg.predictions:\n # use all prediction times as potential obstacles\n previous_origin = None\n for transform in prediction.trajectory:\n global_obstacle = vehicle_transform * transform\n obstacle_origin = np.array(\n [global_obstacle.location.x, global_obstacle.location.y])\n # distance filtering\n if (previous_origin is None\n or np.linalg.norm(previous_origin - obstacle_origin) >\n self._flags.obstacle_filtering_distance):\n previous_origin = obstacle_origin\n dist_to_ego = np.linalg.norm([\n vehicle_transform.location.x - obstacle_origin[0],\n vehicle_transform.location.y - obstacle_origin[1]\n ])\n if dist_to_ego < self._flags.distance_threshold:\n # use 3d bounding boxes if available, otherwise use default\n if isinstance(prediction.bounding_box, BoundingBox3D):\n start_location = \\\n prediction.bounding_box.transform.location - \\\n 
prediction.bounding_box.extent\n end_location = \\\n prediction.bounding_box.transform.location + \\\n prediction.bounding_box.extent\n start_transform = global_obstacle.transform_locations(\n [start_location])\n end_transform = global_obstacle.transform_locations(\n [end_location])\n else:\n start_transform = [\n pylot.utils.Location(\n obstacle_origin[0] -\n self._flags.obstacle_radius,\n obstacle_origin[1] -\n self._flags.obstacle_radius, 0)\n ]\n end_transform = [\n pylot.utils.Location(\n obstacle_origin[0] +\n self._flags.obstacle_radius,\n obstacle_origin[1] +\n self._flags.obstacle_radius, 0)\n ]\n obstacle_list.append([\n min(start_transform[0].x, end_transform[0].x),\n min(start_transform[0].y, end_transform[0].y),\n max(start_transform[0].x, end_transform[0].x),\n max(start_transform[0].y, end_transform[0].y)\n ])\n if len(obstacle_list) == 0:\n return np.empty((0, 4))\n\n return np.array(obstacle_list)", "def add_neighbour(self, neighbour):\n if neighbour.name not in self.neighbours:\n self.neighbours.append(neighbour.name)\n self.neighbours.sort()", "def addPiece(self, piece):\r\n \r\n self.pieces[(piece.x, piece.y)] = piece", "def troop_place(self, troop, pos):\n x, y = pos\n self.tmap[y, x] = troop\n\n tile_id = AIV_SIZE * y + x\n print(10*troop)\n\n for slot in range(10 * troop, 10 * (troop+1)):\n if self.tarr[slot]== 0:\n self.tarr[slot] = tile_id\n return", "def addObject(self, name, object):\n self.map[name] = object", "def addHotspot( self, hotspot ):\n self._hotspots.append(hotspot)", "def _trace_route(self, debug=False, time=False):\n self.radius = 2\n self.threshold = 1\n\n obstacles = []\n for vehicle in self._world.get_actors().filter('vehicle.*'):\n #print(vehicle.bounding_box)\n # draw Box\n bb_points = TestAgent._create_bb_points(vehicle)\n global_points= TestAgent._vehicle_to_world(bb_points, vehicle)\n global_points /= global_points[3,:]\n\n my_bb_points = TestAgent._create_bb_points(self._vehicle)\n my_global_points = TestAgent._vehicle_to_world(my_bb_points, self._vehicle)\n\n my_global_points /= my_global_points[3,:]\n dist = np.sqrt((my_global_points[0,2]-global_points[0,2])**2 + (my_global_points[1,2]-global_points[1,2])**2 + (my_global_points[2,2]-global_points[2,2])**2)\n\n if 0<dist:\n vehicle_box = [global_points[0,0],global_points[1,0],global_points[0,1],global_points[1,1]]\n obstacles.append(vehicle_box)\n print(f'vehicle box: {vehicle_box}')\n\n print('number of near obstacles: ', len(obstacles))\n if len(obstacles) == 0:\n self.obstacles = np.array([[-1,-1,-1,-1]]).astype(np.float32)\n self.num_obs = self.num_obs = np.array([0]).astype(np.int32)\n else:\n self.obstacles = np.array(obstacles).astype(np.float32)\n self.num_obs = self.num_obs = np.array([self.obstacles.shape[0]]).astype(np.int32)\n\n iter_parameters = {'start':self.start, 'goal':self.goal, 'radius':self.radius, 'threshold':self.threshold, 'obstacles':self.obstacles, 'num_obs':self.num_obs}\n \n start_timer = timer()\n route = self.gmt_planner.run_step(iter_parameters, iter_limit=1000, debug=debug, time=time)\n end_timer = timer()\n print(\"elapsed time: \", end_timer-start_timer) \n\n if time:\n self.time_df = pd.DataFrame(self.gmt_planner.time_data)\n \n\n # trace_route = []\n # for r in route:\n # wp = carla.Transform(carla.Location(self.states[r][0].item(), self.states[r][1].item(), 1.2), carla.Rotation(roll=0,pitch=0, yaw=(self.states[r][2]*180/np.pi).item()))\n # trace_route.append(wp)\n # draw_route(self._vehicle.get_world(), trace_route)\n\n index = len(route)-1\n trace_route 
= []\n for i in range(len(route)-1):\n wp = self._map.get_waypoint(carla.Location(self.states[route[index]][0].item(), self.states[route[index]][1].item(), 1.2)) # , carla.Rotation(roll=0,pitch=0, yaw=(self.states[r][2]*180/np.pi).item()\n trace_route.append((wp,-1))\n index -= 1\n\n return trace_route", "def through_obstacle(line, obstacles):\r\n noofpoints = 100\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def obstacle_prone_area(self,image):\r\n\r\n start_x=int(self.start[0])\r\n start_y=int(self.start[1])\r\n goal_x=int(self.goal[0])\r\n goal_y=int(self.goal[1])\r\n print(goal_x,goal_y)\r\n if (image[int(self.maximum_size-goal_x),int(goal_y),0]==0) or ((image[int(self.maximum_size-start_x),int(start_y),0]==0)):\r\n #print(1)\r\n return False\r\n else:\r\n #print(2)\r\n return True", "def getObstacles(self):\r\n ausgabeObstacle = self.globalObstaclesList + self.globalHardObstaclesList\r\n self.globalObstaclesList = []\r\n return(ausgabeObstacle)", "def __init__(self, ox, oy, resolution, rr):\n\n self.min_x, self.min_y = None, None\n self.max_x, self.max_y = None, None\n self.x_width, self.y_width, self.obstacle_map = None, None, None\n self.resolution = resolution\n self.rr = rr\n self.calc_obstacle_map(ox, oy)\n self.motion = self.get_motion_model()", "def move(self):\n # neighbor offsets\n offset = [(-1, 1),(0, 1),(1, 1),(-1, 0),(1, 0),(-1, -1),(0, -1),(1, -1)]\n for i in range(len(offset)):\n x = self.x + offset[i][0] # neighboring coordinates\n y = self.y + offset[i][1]\n if self.island.animal(x, y) == 0: # neighboring spot is open\n self.island.remove(self) # remove from current spot\n self.x = x # new coordinates\n self.y = y\n self.island.register(self) # register new coordinates\n break # finished with move", "def move(self):\n # neighbor offsets\n offset = [(-1, 1),(0, 1),(1, 1),(-1, 0),(1, 0),(-1, -1),(0, -1),(1, -1)]\n for i in range(len(offset)):\n x = self.x + offset[i][0] # neighboring coordinates\n y = self.y + offset[i][1]\n if self.island.animal(x, y) == 0: # neighboring spot is open\n self.island.remove(self) # remove from current spot\n self.x = x # new coordinates\n self.y = y\n self.island.register(self) # register new coordinates\n break # finished with move", "def _draw_line(self, event):\n if not self.obstacle_creation_mode:\n return\n\n if self.previous_coordinates is None:\n self.previous_coordinates = event.x, event.y\n self.new_obstacle.append([event.x, event.y])\n return\n\n x1, y1 = event.x, event.y\n\n if self._is_closing_shape(x1, y1, self.new_obstacle):\n x1, y1 = self.new_obstacle[0]\n else:\n self.new_obstacle.append([x1, y1])\n\n x0, y0 = self.previous_coordinates\n self.canvas.create_line(x0, y0, x1, y1, **self.LINE_OPTIONS)\n self.previous_coordinates = x1, y1", "def add_object(world_id, object_to_be_added):\n try:\n new_mapping = Map(world_id,\n object_to_be_added.x,\n object_to_be_added.y,\n object_to_be_added.__class__.__name__)\n new_mapping.save()\n return new_mapping\n except (sqlalchemy.orm.exc.FlushError, sqlalchemy.exc.IntegrityError) as e:\n db.session.rollback()\n return None", "def recreate_obstacles(self):\n self.board_matrix = np.full(Dimension.board_size(), 1)\n self.obstacles = self.create_obstacles()", "def __init__(self, shape, ssize, pos=None):\n super(Obstacle, self).__init__()\n self.pos = pos or Vec2d(0, 0)\n self.shape = shape\n # image\n self.image = 
pygame.Surface(ssize).convert_alpha()\n self.color = pygame.Color(\"black\")\n self.ssize = ssize\n self.rect = pygame.Rect((0, 0), self.ssize)", "def is_in_obstacle(self, x: float, y: float) -> bool:\n for obstacle in self.obstacles:\n if obstacle.contains_point((x, y)):\n return True\n return False", "def move_obstacles(obstacles_poses, obstacles_goal_poses):\n # for pose in obstacles_poses:\n # dx = random.uniform(0, 0.03); dy = random.uniform(0,0.03);\n # pose[0] -= np.sign(pose[0])*dx; pose[1] -= np.sign(pose[1])*dy;\n\n \"\"\" Each obstacles tends to go to its selected goal point with random speed \"\"\"\n for p in range(len(obstacles_poses)):\n pose = obstacles_poses[p]; goal = obstacles_goal_poses[p]\n dx, dy = (goal - pose) / norm(goal-pose) * 0.05#random.uniform(0,0.05)\n pose[0] += dx; pose[1] += dy;\n\n return obstacles_poses", "def create_room(room):\n global map\n for x in range(room.x1+1, room.x2):\n for y in range(room.y1+1, room.y2):\n map[x][y].blocked = False\n map[x][y].block_sight = False", "def add_robot(self, robot_id):\n if robot_id not in self._locations.keys():\n self._locations[robot_id] = None\n self.predictors[robot_id] = KalmanPredictor()\n self._logger.info(\"Robot {} added\".format(str(robot_id)))" ]
[ "0.75989723", "0.75989723", "0.7295586", "0.7156997", "0.6637772", "0.65937614", "0.6447093", "0.6407341", "0.6341093", "0.6300385", "0.6247986", "0.6215138", "0.61860317", "0.60853684", "0.6058344", "0.6036493", "0.60249126", "0.60026395", "0.5975957", "0.59466404", "0.59013474", "0.5880715", "0.5863128", "0.58377594", "0.58337843", "0.56859237", "0.5683584", "0.5676506", "0.56450784", "0.5617775", "0.560552", "0.55893266", "0.5527257", "0.5520143", "0.5455024", "0.5454266", "0.5429575", "0.5426936", "0.5414009", "0.5383675", "0.5381041", "0.53648096", "0.53638834", "0.5349632", "0.5349435", "0.53467613", "0.53203124", "0.5319747", "0.5300921", "0.5298649", "0.5288406", "0.52802914", "0.52761513", "0.5266543", "0.52541494", "0.5240015", "0.5230664", "0.5227463", "0.52223045", "0.5215861", "0.5204275", "0.52038807", "0.52013564", "0.5194796", "0.51920587", "0.51693636", "0.51686025", "0.51637846", "0.51577324", "0.51554304", "0.5154529", "0.5148385", "0.5147697", "0.5147105", "0.5139104", "0.5129717", "0.5127835", "0.51254255", "0.51188517", "0.51177514", "0.5117192", "0.51134306", "0.5101989", "0.5093622", "0.5080564", "0.50597596", "0.50542176", "0.50518245", "0.50499433", "0.50463325", "0.5042894", "0.5042894", "0.50359863", "0.50333714", "0.5027603", "0.5022721", "0.50122595", "0.5012097", "0.5000953", "0.50000435" ]
document_score: 0.78030485
document_rank: 0
Add a waypoint to the drone
def add_waypoint(self, waypoint):
    # Delegate to the underlying drone's own waypoint list.
    self.drone.add_waypoint(waypoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def waypoint_add_rel(self):\n pass", "def waypoint_add_global(self):\n pass", "def create_waypoint(self, waypoint):\n connection = self.__create_connection()\n try:\n waypoint_list = list(waypoint)\n key = self.__compound_key(waypoint)\n waypoint_list.insert(0, key)\n\n keyed_waypoint = tuple(waypoint_list)\n\n sql = ''' INSERT INTO waypoints(waypoint_id, x, y, z, distance, heading, visit_count)\n VALUES(?,?,?,?,?,?,?) '''\n cur = connection.cursor()\n cur.execute(sql, keyed_waypoint)\n connection.commit()\n cur.close()\n return\n except sqlite3.Error as e:\n print(e)\n finally:\n connection.close()", "def setPath(self, request, context):\n \n cmds = self.vehicle.commands\n coordFrame, alt = None, None\n waypoints = []\n \n # The idea behind stripping off the first position is to determine what reference frame to\n # to use. Future proto changes will removed the coordinate frame boolean flag from the \n # request making the code unnecessary. For now, this is the way it is.\n firstPosition = nth(request, 0)\n lat = firstPosition.lat\n lon = firstPosition.lon\n \n useRelativeAltitude = firstPosition.useRelativeAltitude\n \n if useRelativeAltitude:\n alt = firstPosition.relativeAltitude\n coordFrame = mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT\n else:\n alt = firstPosition.gpsAltitude\n coordFrame = mavutil.mavlink.MAV_FRAME_GLOBAL\n\n print ('First position at ({0},{1}) -> {2}'.format(lat, lon, alt))\n waypoints.append([lat, lon, alt])\n nextIndex = self.vehicle.commands.next\n # Make sure the drone is not in AUTO mode. \n #self.vehicle.mode = VehicleMode(\"LOITER\")\n self.clear_mission(cmds, coordFrame)\n \n # Add first position\n cmds.add(Command( 0, 0, 0, coordFrame, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, 0, 0, 0, 0, 0, lat, lon, alt))\n \n # Add the remaining positions\n for position in request:\n lat = position.lat\n lon = position.lon\n if useRelativeAltitude:\n alt = position.relativeAltitude\n else:\n alt = position.gpsAltitude\n print ('Point at ({0},{1}) -> {2}'.format(lat, lon, alt))\n cmds.add(Command( 0, 0, 0, coordFrame, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, 0, 0, 0, 0, 0, lat, lon, alt))\n waypoints.append([lat, lon, alt])\n \n print (\"Uploading new commands to drone\")\n cmds.upload()\n \n # Reset mission set to first (0) waypoint\n #if self.vehicle.commands.next !=0:\n # print \"Continuing mission...\"\n #else:\n # print \"Starting mission\"\n # self.vehicle.commands.next = 0\n if len(self.vehicle.waypoints)==0:\n print \"Starting mission\"\n self.vehicle.commands.next = 0\n else:\n print \"Continuing mission...\"\n self.vehicle.commands.next = nextIndex\n \n self.vehicle.waypoints = waypoints \n self.vehicle.mode = VehicleMode(\"AUTO\")\n \n self.print_mission() \n \n return droneconnect_pb2.Null()", "def waypoint_callback(self, wp):\n if self.trajectory_constructed == False: \n NextwpPosition = np.array([wp.position.x, wp.position.y, wp.position.z])\n NextwpOrientation = np.array([wp.orientation.x, wp.orientation.y, wp.orientation.z, wp.orientation.w])\n self.pc_x, self.pc_y, self.pc_z, self.seg_times, self.traj_t0 = self.make_trajectory(NextwpPosition, NextwpOrientation) \n self.trajectory_constructed = True", "def addDrone(self, myDrone):\n self.drones.append(myDrone)", "def getNextWaypoint(self, request, context):\n\n waypointNumber = self.vehicle.commands.next -1\n missionlist = self.vehicle.waypoints\n if len(missionlist)==0:\n waypointNumber = -1\n dronePosition = droneconnect_pb2.Position(lat = float(0),\n lon = float(0),\n gpsAltitude = float(0))\n 
else:\n waypoint = missionlist[waypointNumber]\n dronePosition = droneconnect_pb2.Position(lat = float(waypoint[0]),\n lon = float(waypoint[1]),\n gpsAltitude = float(waypoint[2]))\n \n return droneconnect_pb2.IndexedPosition(position = dronePosition, index = waypointNumber)", "def onAddButtonPress(self, button):\n\t\twp_x = float(self.traj_to_x_entry.get_text())\n\t\twp_y = float(self.traj_to_y_entry.get_text())\n\t\twp_z = float(self.traj_to_z_entry.get_text())\n\t\twp_yaw = float(self.traj_to_yaw_entry.get_text())\n\n\t\t# add waypoint to list\n\t\twaypoints_gui.append([wp_x, wp_y, wp_z, wp_yaw])\n\n\t\t# reset entry fields\n\t\tself.traj_to_x_entry.set_text('')\n\t\tself.traj_to_y_entry.set_text('')\n\t\tself.traj_to_z_entry.set_text('')\n\t\tself.traj_to_yaw_entry.set_text('')", "def add_travel_direction(self, direction, node):\n self.travel_directions[direction] = node", "def add_route(self, distance, start, destination):\r\n self.edges[start].append(Edge(distance, start, destination))\r\n self.edges[destination].append(Edge(distance, destination, start))", "def move_to(self, waypoint):\n self.set_final_wp(waypoint)\n self.go()\n currPos = np.asarray(self.rexarm.get_positions())\n while(np.linalg.norm(np.asarray(waypoint) - currPos) > 0.15):\n time.sleep(0.01)", "def write_waypoint(self, latitude=None, longitude=None, description=None):\n\n if not description:\n description = ''\n\n latitude = self.format_latitude(latitude)\n longitude = self.format_longitude(longitude)\n\n self.write_config(\n 'ADDWP', '%s,%s,%s' % (latitude, longitude, description[0:50])\n )", "def add_destination(self):\n pass", "def publish_waypoints(self):\n\n # Make a lane message\n lane = Lane()\n\n # Get closest waypoint index\n closest_idx = self.get_closest_waypoint_idx()\n\n # Get farthest waypoint index\n farthest_idx = closest_idx + LOOKAHEAD_WPS\n\n # Slice to get the upcoming waypoints\n upcoming_waypoints = self.waypoints.waypoints[closest_idx:farthest_idx]\n\n # If no stopline detected or stopline is beyond farthest index...\n if (self.stopline_waypoint_idx == -1) or (self.stopline_waypoint_idx >= farthest_idx):\n\n # Follow the upcoming waypoints\n lane.waypoints = upcoming_waypoints\n\n else:\n\n # Create a list to hold modified upcoming waypoints\n temp = []\n\n # Find the relative stopline index within the upcoming waypoints\n # Back off by two waypoints so that front of car stays behind\n # stopline.\n stop_idx = max(self.stopline_waypoint_idx-closest_idx-2, 0)\n\n # Get the deceleration velocities at each upcoming waypoint\n velocities = self.deceleration_velocities(upcoming_waypoints, stop_idx)\n\n # For each upcoming waypoint...\n for i, wp in enumerate(upcoming_waypoints[:-1]):\n\n # Create a new waypoint\n p = Waypoint()\n\n # Dupicate the pose of the existing waypoint\n p.pose = wp.pose\n\n # Limit current velocities to decelration velocities\n p.twist.twist.linear.x = min(velocities[i], p.twist.twist.linear.x)\n\n # Add the modified waypoint to the list\n temp.append(p)\n\n # Follow the modified upcoming waypoints\n lane.waypoints = temp\n\n # Publish the lane message\n self.final_waypoints_pub.publish(lane)", "def process_waypoint(self, waypoint: Waypoint) -> Union[Trip, None]:\n ...", "def waypoint_callback(self,msg):\n self.waypoint_loc = msg.data", "def __init__(self):\n super().__init__()\n self.waypoint_vector = [-1, 10]", "def add_point(self, pt):\n self.points.append(pt)", "def sendWaypoints(self,waypoints):\n data = _RobotCommunicator.WAYPOINT_HEADER\n for waypoint in 
waypoints:\n x,y = waypoint\n data = data + pack(_RobotCommunicator.WAYPOINT_FORMAT,x,y)\n self.udpSock.sendto(data,self.addr)", "def _add_point(self):\r\n self.command_stack.do(model.structure.AddPoint(self._structure, self._point_index+1, 0, 0))", "def store_waypoint(self, msg: PoseStamped) -> None:\n rospy.logdebug(\"Received waypoint %s\" % str(msg.pose.position))\n self._waypoints.put(msg)", "def __init__(self, nav,\n waypoint=ll.LatLon(50.742810, 1.014469), # somewhere in the solent\n target_radius=2, waypoint_id=None,\n ):\n self.nav = nav\n self.waypoint = waypoint\n self.waypoint_id = waypoint_id\n x, y = self.nav.latlon_to_utm(waypoint.lat.decimal_degree, waypoint.lon.decimal_degree)\n self.waypoint_xy = Point(x, y)\n self.target_area = self.waypoint_xy.buffer(target_radius)", "def addPoint(self, point):\n self.points.append(point)", "def add(self, point):\n self.points.append(point)", "def to_waypoint_message(self):\n\n #**********************************************************************\n # Fill in frame and position\n #**********************************************************************\n wp = msg.Waypoint()\n wp.frame = msg.Waypoint.FRAME_GLOBAL\n wp.x = self.latitude\n wp.y = self.longitude\n wp.z = self.altitude\n\n #**********************************************************************\n # Set other attributes to safe defaults. Worst case, if this\n # waypoint was used unchanged to control drone, you'd expected to\n # wait at this waypoint forever (because its effectively unreachable\n # within 0 radius.\n #**********************************************************************\n wp.autocontinue = False\n wp.radius = 0.0\n wp.waitTime = rospy.Duration(secs=0.0)\n return wp", "def set_destination(self, start_waypoint, end_waypoint, time=False):\n\n self.create_samples(start_waypoint, end_waypoint)\n\n route_trace = self._trace_route(time=time)\n assert route_trace\n\n self._local_planner.set_global_plan(route_trace)", "def get_gpx_waypoint(self, route, line_location, start_time):\n\n lng, lat = self.get_coords()\n time = start_time + route.get_time_data(line_location, \"schedule\")\n altitude_on_route = route.get_distance_data(line_location, \"altitude\")\n\n return GPXWaypoint(\n name=self.name,\n longitude=lng,\n latitude=lat,\n elevation=altitude_on_route,\n type=self.place_type.name,\n time=time,\n )", "def addPoint(self, *args, **kwargs):\n ...", "def add_move(self, direction, priority=False, when_finished=None):\n if priority:\n self.path = [(direction, when_finished)] + self.path\n else:\n self.path.append((direction, when_finished))", "def append(self,d,p):\n if d.size != 3:\n raise Exception(\"New direction not 3-vector\")\n if p.size != 3:\n raise Exception(\"New point not 3-vector\")\n self.dirlist.append(d)\n self.pointlist.append(p)", "def append_step(path, neighbours_list):\n index = neighbours_list.index(8)\n directions = ['L', 'U', 'R', 'D']\n return path + directions[index]", "def __init__(self, waypoints: Tuple[Waypoint]):\n self._waypoints = waypoints", "def __init__(self, waypoints: Tuple[Waypoint]):\n self._waypoints = waypoints", "def do_add_route(self, line):\n items = line.split(' ')\n if len(items) < 3:\n log.error('route only takes at least 3 arguments: '\n 'network via_address metric')\n else:\n points = []\n i = 2\n while i < len(items):\n points.append((items[i-1], items[i]))\n i += 2\n log.critical('Add route request at %s',\n datetime.datetime.now().strftime('%H.%M.%S.%f'))\n self.fibbing.install_route(items[0], points, True)", "def 
nextWaypoint(self, pose):\n #DONE implement\n location = pose.position\n dist = 100000.\n dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)\n nwp = 0\n for i in range(len(self.waypoints)):\n d1 = dl(location, self.waypoints[i].pose.pose.position)\n if dist > d1:\n nwp = i\n dist = d1\n x = self.waypoints[nwp].pose.pose.position.x\n y = self.waypoints[nwp].pose.pose.position.y\n heading = np.arctan2((y-location.y), (x-location.x))\n angle = np.abs(self.theta-heading)\n if angle > np.pi/4.:\n nwp += 1\n if nwp >= len(self.waypoints):\n nwp = 0\n return nwp", "def add_route(g, origin, destination, distance, choice_dir):\n origin_code = g.convert[origin]\n destination_code = g.convert[destination]\n distance = int(distance)\n # Add route both ways\n if(choice_dir == \"y\"):\n g.city_dict[origin_code].add_flights_in((destination_code, distance))\n g.city_dict[origin_code].add_flights_out((destination_code, distance))\n \n g.city_dict[destination_code].add_flights_in((origin_code, distance))\n g.city_dict[destination_code].add_flights_out((origin_code, distance))\n # Add route one way \n if(choice_dir == \"n\"):\n g.city_dict[origin_code].add_flights_out((destination_code, distance))\n g.city_dict[destination_code].add_flights_in((origin_code, distance))\n \n \n \n return g", "def appendPoint(self, point):\n self.points.append(point)", "def addNeighbor(self, neighbor):", "def add_door(self, door):\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n\n self._update_doormath(x, y, z)", "def add_points(self, points):\n pass", "def __init__(self, name, waypoints, position2d_proxy, waypoint_distance_tolerance):\n\n self.name = name\n self.waypoints = waypoints\n self.pp = position2d_proxy\n self.waypoint_distance_tolerance = waypoint_distance_tolerance\n\n self.active_waypoint_index = 0\n self.active_waypoint = self.waypoints[self.active_waypoint_index]\n self.first_update = True\n self.finished = False\n self.last_read = None", "def InsertNextPoint(self, ):\n ...", "def publish(self, waypoints): \n lane = Lane()\n lane.header.frame_id = '/world'\n lane.header.stamp = rospy.Time(0)\n lane.waypoints = waypoints\n self.final_waypoints_pub.publish(lane)", "def traffic_waypoint_cb(self, msg):\n\n # Save waypoint index for detected traffic light\n self.stopline_waypoint_idx = msg.data", "def translate_waypoint(self, vector: Sequence[float], n_steps: int):\n for component in range(len(self.coordinates)):\n self.waypoint_vector[component] += vector[component] * n_steps", "def addPoints(self, points):\r\n self.points = points", "def sendWaypoints(self,waypoints):\n self.broadcaster.sendWaypoints(waypoints)", "def add(self, PlugLead):\n\n self.check_conflicts(PlugLead)\n self.plugleads.append(PlugLead)", "def add_landing(self, exit_waypoints=None, exit_times=None, add_on_der_fixed=dict(x=None,y=None,z=None,yaw=None)):\n\n exit_ID = self.exit_ID\n\n exit_time_unit = self.exit_time\n\n if exit_waypoints is None:\n # Default load\n if self.last_position is not None:\n # Set start to above drone\n exit_waypoints = dict()\n for key in self.last_position.keys():\n exit_waypoints[key] = self.last_position[key]\n\n exit_waypoints['z'] += 0.4\n\n q = np.array([self.last_orientation['w'],\n self.last_orientation['x'],\n self.last_orientation['y'],\n self.last_orientation['z']])\n\n exit_waypoints['yaw'] = transforms3d.euler.quat2euler(q,'rzyx')[0]\n\n # TODO (bmorrell@jpl.nasa.gov) make this an input\n exit_times = 
np.array([exit_time_unit])\n else:\n # set to a default 0 0 0\n exit_waypoints = dict(x=0.0,y=0.0,z=0.0,yaw=0.0)\n\n # TODO (bmorrell@jpl.nasa.gov) make this an input\n exit_times = np.array([exit_time_unit])\n elif exit_times is None:\n exit_times = np.array([exit_time_unit]*exit_waypoints['x'].shape[1])\n\n #Set last waypoints\n exit_waypoints = utils.form_waypoints_polytraj(exit_waypoints,self.qr_polytraj.n_der)\n exit_waypoints, exit_der_fixed = utils.form_entry_or_exit_waypoints(\"exit\",self.qr_polytraj, exit_waypoints, exit_ID, add_on_der_fixed=add_on_der_fixed)\n print(\"exit waypoints are: {}\".format(exit_waypoints))\n # Create new qr_polytraj\n self.qr_p_exit = traj_qr.traj_qr(exit_waypoints,\n costs=self.qr_polytraj.costs,order=self.qr_polytraj.order,\n seed_times=exit_times,\n curv_func=self.curv_func,\n der_fixed=exit_der_fixed,path_weight=None,yaw_to_traj=False)\n self.qr_p_exit.run_astro()\n\n # Create Controls and trajectory for display\n print(\"forming landing\")\n self.update_path_markers(qr_type=\"exit\")\n self.interactive_marker_worker_exit.make_controls(self.qr_p_exit.waypoints)\n acc_wp = self.get_accel_at_waypoints(\"exit\")\n self.interactive_marker_worker_exit.update_controls(self.qr_p_exit.waypoints,acc_wp=acc_wp)", "def addPointing(opnt=False):\n\n # Check optical pointing \n s.setScriptBool(odi.INDX_BOOL_LASTPOINT, False)\n if not opnt:\n s.setScriptBool(odi.INDX_BOOL_OPTRAD_VECTOR, False)", "def append_point(self, point):\n self._points.append(point)", "def __spur_on_if_needed(self):\n if len(self.waypoints) < 2:\n return\n next_speed = (get_waypoint_speed(self.waypoints[0]) +\n get_waypoint_speed(self.waypoints[1])) / 2.0\n set_waypoint_speed(self.waypoints[0], next_speed)", "def coord_to_waypoint(coordinate, alt):\n return Waypoint(coordinate.lat, coordinate.lon, alt)", "def add_take_off(self, entry_waypoints=None, entry_times=None):\n\n entry_ID = self.entry_ID\n\n entry_time_unit = self.entry_time\n\n if entry_waypoints is None:\n # Default load\n if self.last_position is not None:\n # Set start to above drone\n entry_waypoints = dict()\n for key in self.last_position.keys():\n entry_waypoints[key] = self.last_position[key]\n\n entry_waypoints['z'] += 0.05\n\n q = np.array([self.last_orientation['w'],\n self.last_orientation['x'],\n self.last_orientation['y'],\n self.last_orientation['z']])\n\n entry_waypoints['yaw'] = transforms3d.euler.quat2euler(q,'rzyx')[0]\n\n # TODO (bmorrell@jpl.nasa.gov) make this an input\n entry_times = np.array([entry_time_unit])\n else:\n # set to a default 0 0 0\n entry_waypoints = dict(x=0.0,y=0.0,z=0.0,yaw=0.0)\n entry_waypoints['z'] += 0.2\n\n # TODO (bmorrell@jpl.nasa.gov) make this an input\n entry_times = np.array([entry_time_unit])\n elif entry_times is None:\n entry_times = np.array([entry_time_unit]*entry_waypoints['x'].shape[1])\n\n #Set last waypoints\n entry_waypoints = utils.form_waypoints_polytraj(entry_waypoints,self.qr_polytraj.n_der)\n entry_waypoints, entry_der_fixed = utils.form_entry_or_exit_waypoints(\"entry\",self.qr_polytraj, entry_waypoints, entry_ID, add_on_der_fixed=dict(x=None,y=None,z=None,yaw=None))\n print(\"entry waypoints are: {}\".format(entry_waypoints))\n # Create new qr_polytraj\n self.qr_p_entry = traj_qr.traj_qr(entry_waypoints,\n costs=self.qr_polytraj.costs,order=self.qr_polytraj.order,\n seed_times=entry_times,\n curv_func=self.curv_func,\n der_fixed=entry_der_fixed,path_weight=None,yaw_to_traj=False)\n # self.qr_p_entry.initial_guess()\n self.qr_p_entry.run_astro()\n\n # Create 
Controls and trajectory for display\n self.update_path_markers(qr_type=\"entry\")\n self.interactive_marker_worker_entry.make_controls(self.qr_p_entry.waypoints)\n acc_wp = self.get_accel_at_waypoints(\"entry\")\n self.interactive_marker_worker_entry.update_controls(self.qr_p_entry.waypoints,acc_wp = acc_wp)", "def add_switch(self, dpid):\n\t\tself.switches.append(dpid)", "def main(argv) -> None:\n rospy.init_node('waypoint_node')\n # Register publishers first\n pub_reached = rospy.Publisher(\"~reached\", String,\n queue_size=1) # FIXME decide queue_size\n\n # Register subscribers\n ds = __DroneStates()\n # For middleware\n waypoint_topic_name = \"~waypoint\"\n _ = rospy.Subscriber(waypoint_topic_name, PoseStamped, ds.store_waypoint)\n\n # Register actionlib clients\n takeoff_topic = rospy.resolve_name(\"action/takeoff\")\n takeoff_client = SimpleActionClient(takeoff_topic, TakeoffAction)\n landing_topic = rospy.resolve_name(\"action/landing\")\n landing_client = SimpleActionClient(landing_topic, LandingAction)\n\n pose_topic = rospy.resolve_name(\"action/pose\")\n pose_client = SimpleActionClient(pose_topic, PoseAction)\n\n def action_pose_done_cb(goal_state, goal_result):\n rospy.logdebug(\"Reached\\n %s\" % str(ds.curr_waypoint.pose.position))\n ds.reset_curr_waypoint()\n\n def shutdown() -> None: # TODO Better place for this code\n \"\"\"Stop the drone when this ROS node shuts down\"\"\"\n # TODO Safe landing\n pass\n\n rospy.on_shutdown(shutdown)\n\n # TODO Wait for hector quadrotor controllers to spawn\n rospy.sleep(1)\n\n rate = rospy.Rate(100) # 100Hz TODO Pass sleep rate as a parameter?\n\n is_driving = False\n while not rospy.is_shutdown():\n rate.sleep()\n # Simple controller code for drones # TODO Need better controller\n if not is_driving: # IDLE\n if ds._waypoints.empty(): # FIXME accessing protected member\n pass # Keep idling\n else:\n ds.set_curr_waypoint()\n pose_client.wait_for_server()\n\n pose_goal = PoseGoal(target_pose=ds.target_pose())\n rospy.logdebug(\"Sending pose goal\\n %s\" % str(pose_goal))\n\n pose_client.send_goal(PoseGoal(target_pose=ds.target_pose()),\n done_cb=action_pose_done_cb)\n is_driving = True\n else: # DRIVING\n if ds.reached == ReachedEnum.NO:\n pass # Keep driving\n else:\n if ds.reached == ReachedEnum.YES_AND_REPORT:\n pub_reached.publish(ds.report_reached())\n is_driving = False", "def process_waypoint(self, waypoint: Waypoint) -> Union[Trip, None]:\n\n # ignore the first entry, just remember it for further compares\n if not self.prev_point:\n self.prev_point = waypoint\n return None\n\n if self.is_driving(self.prev_point, waypoint):\n if not self.start_point:\n # indicates trip start\n self.start_point = self.prev_point\n else:\n # indicates trip finish\n if self.start_point:\n d = self.calc_distance(self.start_point, self.prev_point)\n trip = Trip(d, self.start_point, self.prev_point)\n self.start_point = None\n return trip\n self.prev_point = waypoint\n return None", "def add_goal(self, x, y, score=0):\r\n #print(\"WALLS:\")\r\n #print(self.walls)\r\n #print(x,y)\r\n if (x,y) in self.walls:\r\n z=1\r\n else:\r\n self.goals.append((x, y, score))", "def next_gps(self):\n \n return Waypoint(0.0, 0.0)", "def addHotspot( self, hotspot ):\n self._hotspots.append(hotspot)", "def GroundExcelAddPlayerSightPointAdd(builder, PlayerSightPointAdd):\n return AddPlayerSightPointAdd(builder, PlayerSightPointAdd)", "def _add_route(self, connections):\n route = ArduinoSwitchControlRoute(connections)\n if route.input.label not in self.routes:\n 
self.routes[route.input.label] = {route.output.label: [route]}\n elif route.output.label not in self.routes[route.input.label]:\n self.routes[route.input.label][route.output.label] = [route]\n else:\n self.routes[route.input.label][route.output.label].append(route)", "def onTrajGoButtonPress(self, button):\n\t\tself.trajectory.start()\n\t\t# reset waypoints_gui\n\t\twaypoints_gui.clear()\n\t\twaypoints_gui.append([0, 0, 2, 0])", "def draw_waypoints(world, waypoints, z=0.01):\n for wpt in waypoints:\n wpt_t = wpt.transform\n begin = wpt_t.location + carla.Location(z=z)\n angle = math.radians(wpt_t.rotation.yaw)\n end = begin + carla.Location(x=math.cos(angle), y=math.sin(angle))\n world.debug.draw_arrow(begin, end, arrow_size=0.1, life_time=1.0)", "def add_walk(self, points):\r\n for i in range(0, len(self.walk), 2):\r\n if i + 1 != len(self.walk):\r\n if self.walk[i+1] is None:\r\n points.append(None)\r\n points.append(self.walk[i])", "def draw_waypoints(world, waypoints, z=0.5):\n for wpt in waypoints:\n wpt_t = wpt.transform\n begin = wpt_t.location + carla.Location(z=z)\n angle = math.radians(wpt_t.rotation.yaw)\n end = begin + carla.Location(x=math.cos(angle), y=math.sin(angle))\n world.debug.draw_arrow(begin, end, arrow_size=0.3, life_time=1.0)", "def add_path(self, adapter: str, wwpn: str):\n self.paths.append((adapter, wwpn))", "def advertise_route_to_neighbors(self, destination):\n distance_vector = self.hosts_to_ports[destination]\n self.handle_proper_packet(distance_vector.port, destination, distance_vector.latency, True)\n self.handle_poison_packet(distance_vector.port, destination)", "def _get_next_waypoint(self, tolerance_step):\n print('\\nGetting new nav plan.')\n\n for i in range(4):\n try:\n self.plan = self.swarmie.get_plan(\n self.goal,\n tolerance=self.tolerance,\n use_home_layer=self.avoid_home\n )\n break # plan received\n except rospy.ServiceException:\n print('ServiceException.')\n if i < 3:\n print('Expanding tolerance.')\n self.tolerance += tolerance_step\n else:\n raise # tried 3 times, we give up\n\n print('Received nav plan.')\n pose = self.plan.plan.poses[0]\n\n return Point(x=pose.pose.position.x, y=pose.pose.position.y)", "def add_point(self, point, fill_auto_fields=True, timestamp=None):\n self.points.append(point)\n if fill_auto_fields:\n self.fill_auto_fields(point)\n if timestamp:\n point.timestamp = timestamp", "def do_poortego_add(self, arg, opts=None):\n # Code moved to .command.add sub-module for easier reading/debugging \n poortego_add(self.my_interface, arg, opts)", "def add_neighbor(self, neighbor):\r\n self.neighbors.append(neighbor)", "def waypoint(self, state: Vector) -> Vector:\n return state", "def add_point(self, point: Point, interesting=False) -> Point:\n if point not in self.points:\n if not point.name:\n point.name = alphabet(len(self.points))\n self.points.add(point)\n if interesting:\n self.interesting_points.add(point)\n self.add_points_to_actions_update_queue({point})\n return point", "def add_route(self,path):\n path=self.prepare_path(path)\n \n debug.info(2,\"Adding route: {}\".format(str(path)))\n # If it is only a square, add an enclosure to the track\n if len(path)==1:\n self.add_single_enclosure(path[0][0])\n else:\n # convert the path back to absolute units from tracks\n # This assumes 1-track wide again\n abs_path = [self.convert_point_to_units(x[0]) for x in path]\n # Otherwise, add the route which includes enclosures\n if len(self.layers)>1:\n self.cell.add_route(layers=self.layers,\n coordinates=abs_path,\n 
layer_widths=self.layer_widths)\n else:\n self.cell.add_path(layer=self.layers[0],\n coordinates=abs_path,\n width=self.layer_widths[0])", "def append(self, point):\n self.points.append(point)\n return self", "def on_global_trajectory(self, msg):\n self._logger.debug('@{}: global trajectory has {} waypoints'.format(\n msg.timestamp, len(msg.data)))\n if len(msg.data) > 0:\n # The last waypoint is the goal location.\n self._goal_location = msg.data[-1][0].location\n else:\n # Trajectory does not contain any waypoints. We assume we have\n # arrived at destionation.\n self._goal_location = self._vehicle_transform.location\n assert self._goal_location, 'Planner does not have a goal'\n self._waypoints = deque()\n for waypoint_option in msg.data:\n self._waypoints.append(waypoint_option[0])\n self._prev_waypoints = self._waypoints", "def get_gpx_waypoint(self, route=None, start_time=datetime.utcnow()):\n route = route or self.route\n\n return self.place.get_gpx_waypoint(\n route=route,\n line_location=self.line_location,\n start_time=start_time,\n )", "def execute_waypoint_sequence(detail_of_trip):\n\n # rets (route_line, line_points)\n sliced_route_and_line_points = chunk_user_route(detail_of_trip)\n\n sliced_route = sliced_route_and_line_points[0]\n line_points = sliced_route_and_line_points[1]\n\n # Interpolate/Break into 1/10 segments\n segmented_points = interpolate_points(sliced_route, line_points)\n waypoints = find_crime_areas(segmented_points)\n\n # print \"segmented_points\", json.dumps(segmented_points, indent=2)\n print \"\\n\\n\\n\\n\" # compensating for the giant GET request\n return waypoints", "def add_point(self, x, fx, dx):\n\t\tself.v.append([x, fx, dx])\n\t\tself.v.append([x, fx, dx])\n\t\tself.poli = None", "def AddExtraPoint(self, point: str) -> None:\n self._valid = False\n self._extra_points.append(point)", "def addPoints(self, points):\n self.points.extend(points)", "def addStep(self, step):\n self.stepper.addStep(step)\n return self", "def add_point(self, xpos, ypos):\n self.arcpoints.append([xpos, ypos])", "def __init__(self):\n\n # Set a node name - something relevant\n rospy.init_node('waypoint_updater')\n\n # Most recent pose\n self.pose = None\n\n # Map waypoint list \n self.waypoints = None\n\n # Map waypoint list xy only \n self.waypoints_2d = None\n\n # Map waypoint list xy only as KDTree\n self.waypoint_tree = None\n\n # Index at which to stop the vehicle\n # Negative one is a sentinel meaning no stop is required\n self.stopline_waypoint_idx = -1\n\n # Add subscriptions and handlers for relevant messages\n rospy.Subscriber('/base_waypoints', Lane, self.base_waypoints_cb)\n rospy.Subscriber('/current_pose', PoseStamped, self.current_pose_cb)\n rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_waypoint_cb)\n\n # Create publisher for final waypoints\n self.final_waypoints_pub = rospy.Publisher('/final_waypoints', Lane, queue_size=1)\n\n # Start loop\n self.loop()", "def plot_route(world, trajectory):\n for item in trajectory:\n transform = item[0]\n scalar = 0.5\n yaw = np.deg2rad(transform.rotation.yaw)\n vector = scalar * np.array([np.cos(yaw), np.sin(yaw)])\n start = transform.location\n end = start + carla.Location(x=vector[0], y=vector[1], z=start.z)\n # plot the waypoint\n debug = world.debug\n debug.draw_arrow(start, end, thickness=0.25, arrow_size=0.25, color=Red, life_time=9999)\n debug.draw_point(start, size=0.05, color=Green, life_time=9999)\n world.tick()", "def passed_waypoint(self, waypoint_num):\n bools = 
self.ros_node.get_data('/diff_drive/waypoints_achieved', simple_data = False)\n # Waits for the data\n if bools is not None:\n if len(bools.bools) >= waypoint_num:\n return bools.bools[waypoint_num -1]\n \n rospy.logerr_throttle(15, \"Checking Waypoint Failed. Did not find a waypoint with the number '%s' in the path\" %(waypoint_num))\n return False\n else:\n return False", "def AddPoint(self,apoint):\n\t\tpoint=self.Space.AddPoint(apoint)\n\t\treturn point", "def add(self, node):\n self.steps += 1\n self.path.append(node)\n self.visited.add(node)\n if node in self.targets[0]:\n self.targets[0].remove(node)", "def base_waypoints_cb(self, waypoints):\n\n # Save the waypoint list\n self.waypoints = waypoints\n\n # If waypoints_2d hasn't been initialized...\n if not self.waypoints_2d:\n\n # Extract xy coordinates from the waypoint list\n self.waypoints_2d = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] for waypoint in waypoints.waypoints]\n\n # Construct a KDTree from the xy coordinate list to allow fast lookup \n self.waypoint_tree = KDTree(self.waypoints_2d)", "def base_waypoints_cb(self, waypoints):\n\n # Save the waypoint list\n self.waypoints = waypoints\n\n # If waypoints_2d hasn't been initialized...\n if not self.waypoints_2d:\n\n # Extract xy coordinates from the waypoint list\n self.waypoints_2d = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] for waypoint in waypoints.waypoints]\n\n # Construct a KDTree from the xy coordinate list to allow fast lookup \n self.waypoint_tree = KDTree(self.waypoints_2d)", "def add_route(self, item):\n self._routes[item.route] = item\n self.httpd.route(item.route, method=\"GET\", callback=item.get)\n self.httpd.route(item.route, method=\"POST\", callback=item.post)\n self.httpd.route(item.route, method=\"PUT\", callback=item.put)\n self.httpd.route(item.route, method=\"DELETE\", callback=item.delete)", "def __add_node(self):\n next_pos = self.__get_next_tail_pos(self.__snake[-1])\n next_d = self.__snake[-1].direct()\n node = Snake(self, direction=next_d, size=self.__cell_edge, position=next_pos)\n self.__snake.append(node)", "def generate_waypoint(lowest_crime_index, points_dict_data, segmented_points):\n\n # passes in something like waypoints_dict_data is [{dictn,}, ... ,{dictw}]\n # points is [(pointn, pointn), ... 
,(pointw, pointw)]\n print \"inside generate_waypoint\"\n print \"This is points_dict_data\", points_dict_data\n\n # do a for loop to see if we find the waypoint data that matches\n print \"this is points_dict_data\", points_dict_data\n for point_data in points_dict_data:\n print \"this is point_data\", point_data\n if lowest_crime_index in point_data.values():\n # store the waypoint coords\n segmented_points[0]['data']['waypoints'].append({\n 'location': {'lat': point_data['point'][0],\n 'lng': point_data['point'][1]},\n 'stopover': False # b/c not stop on the route, a recalc\n })\n # returns nothing, just appends stuff into segmented_points", "def add_obstacle(self, *points: Tuple[float, float]):\n self.obstacles.append(o.Obstacle(*points))", "def add_new_arrival(self):\n pass", "def insert_user_location(self, lat, lon):\n # Waypoint id for user's location\n user_waypoint_id = 0\n\n # Keep track of closest waypoint to user\n closest_waypoint = None\n min_distance = float(\"inf\")\n\n # Find closest waypoint to user's location\n for key, val in self.waypoints.items():\n # Check to see if waypoint_id higher than user's\n if key > user_waypoint_id:\n user_waypoint_id = key\n\n # Calculate distance\n user_coords = (lat, lon)\n waypoint_coords = (val[\"lat\"], val[\"lon\"])\n distance = geopy.distance.distance(user_coords, waypoint_coords).miles\n\n if distance < min_distance:\n min_distance = distance\n closest_waypoint = key\n\n # Increment highest waypoint id seen to get user's starting waypoint id\n user_waypoint_id += 1\n\n # Add to graph\n self.graph.add_edge(user_waypoint_id, closest_waypoint, min_distance)\n self.graph.add_edge(closest_waypoint, user_waypoint_id, min_distance)\n\n # Return starting waypoint id\n return user_waypoint_id", "def add_location(self, **kwargs):\n \n self.options.update(kwargs)\n self.options['action'] = 'locator.location.add'\n return self.call(self.options)", "def add_neighbors(self, pos, distance, obstacles):\n \n neighbor_list = [(pos[0]-1,pos[1]), (pos[0]+1,pos[1]), \\\n (pos[0],pos[1]-1), (pos[0], pos[1]+1)]\n # Processing each neighbor.\n for (x,y) in neighbor_list:\n if x>=0 and y>=0 and x<self.M and y<self.N: # Out from boundary?\n if (x,y) not in obstacles:\n if (x,y) not in self.footprint: # Already in done list?\n new_distance = distance + 1 + self.heuristic_map[x,y]\n if (x,y) not in self.frontier.keys(): # A new candidate to add to frontier set.\n self.frontier.update({(x,y):new_distance})\n self.distance_map[x,y] = distance + 1\n self.camefrom_map[(x,y)] = pos\n elif new_distance < self.frontier[(x,y)]: # A short path reached this neighbor.\n self.frontier[(x,y)] = new_distance\n self.distance_map[x,y] = distance + 1\n self.camefrom_map[(x,y)] = pos", "def addFramePoint(self,**kwargs):\n try:\n side = kwargs['side']\n except Exception,e:\n rospy.logerr(\"%s\",str(e))\n self.mm.neglect()\n return\n step = int(self.mm.modes[self.mm.cur_mode][6:7])\n rospy.loginfo(\"step %d\"%step)\n point = self.baxter.frame.addPoint(side,step) \n self.mm.confirm()\n #self.mm.loadMenu(self.mm.cur_page)" ]
[ "0.75902", "0.7147039", "0.6792248", "0.62503475", "0.6224604", "0.6080674", "0.60524154", "0.6044039", "0.603555", "0.60317796", "0.5964286", "0.5918597", "0.58790517", "0.5873767", "0.58673775", "0.5866497", "0.585557", "0.5817661", "0.58173054", "0.57790995", "0.5753916", "0.5748289", "0.57431036", "0.57396597", "0.57374954", "0.57053655", "0.5667055", "0.56641895", "0.5646554", "0.5628921", "0.56055176", "0.5602901", "0.5602901", "0.55300075", "0.552974", "0.5515209", "0.5503196", "0.54899776", "0.5476557", "0.5467714", "0.5466525", "0.5436627", "0.5417725", "0.54170555", "0.5371089", "0.536704", "0.5365567", "0.5338318", "0.53314424", "0.5327102", "0.5325899", "0.5311317", "0.5303798", "0.52829397", "0.528254", "0.52732414", "0.52712303", "0.5271182", "0.5259979", "0.5259375", "0.52499723", "0.5248066", "0.52458245", "0.52248573", "0.52183366", "0.5218321", "0.5207767", "0.520662", "0.5193927", "0.5178379", "0.5167203", "0.51663774", "0.51587975", "0.5153146", "0.5148502", "0.5144337", "0.51407707", "0.51351106", "0.513273", "0.5132451", "0.51237845", "0.5099007", "0.5087289", "0.5086561", "0.50773084", "0.50737107", "0.50636345", "0.505718", "0.50518465", "0.5050569", "0.5050569", "0.5050093", "0.50411445", "0.50335395", "0.50327", "0.5031484", "0.5025847", "0.50206757", "0.49990362", "0.49878523" ]
0.8948917
0
Set the drone's location in the map
def set_drone_position(self, new_point):
    self.drone.set_drone_position(new_point)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def m_location_set(self, x: int, y: int):\n pass", "def set_location(self, location_set):", "def set_location(self, lat, long):\n self._data['loc'] = [lat, long]", "def location(self, value: 'Point'):\n self.geometry.location = value", "def set_location(self, location):\n self.location = location", "def set_coordinate(self):\n airqual_dictionary = self.realtime_data['stations'][0] #get the very first(recent) data/result\n self.latitude = airqual_dictionary['lat']\n self.longitude = airqual_dictionary['lng']", "def set_location(self, x, y):\n self.scene.set_location(x, y)\n self.redraw()", "def set_new_location(self, xPos, yPos):", "def set_location(self, client, latitude, longitude):\r\n client.setLocation(latitude, longitude)\r\n return True", "def set_loc(self, loc):\n self.loc = loc", "def set_location(self, location):\r\n self.__location = location", "def set_location(self, location):\n self.location = location", "def setPosition(self):\n # determine posX, posY for battle\n (x1,y1) = globals.battlemapQuadrants[self.systemGrid]\n self.posX = x1+self.setX\n self.posY = y1+self.setY", "def set_coordinates(self):\n client = Client(api_key=settings.YANDEX_GEOCODER_KEY)\n address = f'Россия, {self.state}, {self.city}, {self.street}'\n self.longitude, self.latitude = client.coordinates(address)", "def set_location(self, location: tuple) -> None:\n self.__location = location", "def set_coord(self, longitude, latitude):\r\n self.longitude = longitude\r\n self.latitude = latitude\r\n self._grid_proj = _get_projection(self._grid_proj, self.longitude, self.latitude)", "def set_node(self, index, node):\r\n self.loc.coord[index] = node", "def set_coordinates(self, x, y):\n self.x = x\n self.y = y", "def configure_location(self):\n # Set floor correctly\n self.floor.set(\"pos\", array_to_string(self.bottom_pos))", "def _set_location(self):\n # Get selected text\n self.locnaam = self.location_combobox.currentText()\n\n self._set_data()", "def set_position(self, lat, lng, h):\n self.lat = lat\n self.lng = lng\n self.h = h", "def setPoint(self, point):\n self.position = point.position", "def set_home_position(self, lat, lon, alt):\n pass", "def setzePosition(self, x, y):\n self.zielX = x\n self.zielY = y", "def drawlocation(self, Type, llon, rlon, llat, rlat):\n bm.BaseMapSet(Type, llon, rlon, llat, rlat)\n \n plt.scatter(self.demandx, self.demandy, 100, self.color, marker = 'o', label = self.demandname) \n \n plt.scatter(self.tranx, self.trany, 200, self.color, marker = '*', label = self.tranname) \n \n plt.scatter(self.supplyx, self.supplyy, 400, self.color, marker = '+', label = self.supplyname) \n \n plt.legend(bbox_to_anchor=(1, 1), loc='upper left', ncol=1, fontsize = 25)", "def location(self, location):\n self._location = location", "def setPoint(self, point):\n self._point = point\n self._point = self.projectPoint(Point.origin(point.dimension))", "def setMap(self, map):\n maputils.detachMapStats(self.map)\n\n activeTool = self._activeTool\n self.__base.setMap(self, map)\n if activeTool:\n self.setTool(activeTool)\n\n self._updateLevelSlider()", "def set_location(self, x, y, z=0):\n self._rect.topleft = (x, y)\n self._z = z\n self._update()", "def setLocation(self, p):\n super(PolygonTool, self).setLocation(p.point)\n _x, _y = self.getLocation().getCoords()\n _count = self.__nsides\n _inc = self.__increment\n if self.__external:\n _offset = _inc/2.0\n else:\n _offset = 0.0\n _cx, _cy = self.__center.point.getCoords()\n _xsep = _x - _cx\n _ysep = _y - _cy\n _angle = math.atan2(_ysep, _xsep) + 
_offset\n _rad = math.hypot(_xsep, _ysep)/math.cos(_offset)\n _xp = self.__xpts\n _yp = self.__ypts\n for _i in range(_count):\n _xp[_i] = _cx + (_rad * math.cos(_angle))\n _yp[_i] = _cy + (_rad * math.sin(_angle))\n _angle = _angle + _inc", "def _set_folium_map(self):", "def _setOceanLocation(self):\r\n\t\t## If the fluids_hrc exists\r\n\t\tif cmds.objExists('fluids_hrc'):\r\n\t\t\tif cmds.objExists('ocean_srf'):\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateX', 'ocean_srf.translateX', f = True)\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateZ', 'ocean_srf.translateZ', f = True)\r\n\t\t\telse:\r\n\t\t\t\tcmds.warning('MISSING ocean_srf node from scene....')\r\n\r\n\t\t\tif cmds.objExists('oceanPreviewPlane_prv'):\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateX', 'oceanPreviewPlane_prv.translateX', f = True)\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateZ', 'oceanPreviewPlane_prv.translateZ', f = True)\r\n\t\t\telse:\r\n\t\t\t\tcmds.warning('MISSING oceanPreviewPlane_prv node from scene....')\r\n\t\telse:\r\n\t\t\tcmds.warning('NO fluids_hrc FOUND! Can not move the ocean into final position. PLEASE CHECK FX PUBLISH NOW!')", "def setDrone(self, newDrone):\n self._drone = newDrone", "def set_initial_position(self, gps_pos, attitude):\r\n self.position, self.attitude = projection(self.mapdata.gps_pos, self.mapdata.attitude, gps_pos, attitude)", "def set_coordinates(self, coordinates):\n self.coordinates = coordinates", "def Set(self, *args):\n return _XCAFDoc.XCAFDoc_Location_Set(self, *args)", "def locateRobot(self):\n logging.info(\"Display Carte : {}\".format(self.name))\n for r, row in enumerate(self.map):\n #print(row)\n for c, cell in enumerate(row):\n if (cell == \"X\"):\n logging.info(\"r={} / c={}\".format(r, c))\n self.robot.posX = c\n self.robot.posY = r", "def setreferencelocation(self, *args, **kwargs):\n return _coordsys.coordsys_setreferencelocation(self, *args, **kwargs)", "def setPosition(*args):", "def setPosition(*args):", "def setPosition(*args):", "def setPosition(*args):", "def setPosition(*args):", "def setPosition(*args):", "def setPosition(*args):", "def setPosition(*args):", "def setPosition(*args):", "def setPosition(*args):", "def setPosition(*args):", "def setCoordinate(self, coordinate):\n\n self.path[0].append(coordinate[0])\n self.path[1].append(coordinate[1])\n self.coordinate = coordinate", "def set_center_position(self, lon, lat):\n self.lon_center = lon\n self.lat_center = lat", "def set_loc(self, line):\n return Gumtree.gumtree.setLoc(line)", "def location(self, location):\n\n self._location = location", "def location(self, location):\n\n self._location = location", "def location(self, location):\n\n self._location = location", "def location(self, location):\n\n self._location = location", "def location(self, location):\n\n self._location = location", "def location(self, location):\n\n self._location = location", "def location(self, location):\n\n self._location = location", "def setPosition(position):", "def set_local_coordinates(self, entity_id, x, y):\n position = getattr(self.entities[entitiy_id],\n self.local_position_system)\n position.x = x \n position.y = y", "def setPoint(self, point):\n self.point = point", "def world(self, value):\n self.worlds[self.world_index] = value", "def setPosition(self, position, view) -> None:\n ...", "def updateMap(self) :\n\t\tself.dot.setPos( \\\n\t\t (self.avatarNP.getX()/(self.modelSizeX))*0.79+0.4, 0, \\\n\t\t (self.avatarNP.getY()/(self.modelSizeY))*0.79+0.21)\n\t\tfor id in 
self.remoteMap:\n\t\t\tself.remoteMap[id][3].setPos( \\\n\t\t\t\t(self.remoteMap[id][0].getX() / \\\n\t\t\t\t\tself.modelSizeX)*0.79+0.4, \\\n\t\t\t\t0, (self.remoteMap[id][0].getY() / \\\n\t\t\t\t\tself.modelSizeY)*0.79+0.21)", "def location(self, location):\n sql = \"\"\"UPDATE barcodes.sample\n SET sample_location = %s\n WHERE sample_id = %s\"\"\"\n with pm.sql.TRN:\n pm.sql.TRN.add(sql, [location, self.id])", "def setSetpoint(self, point):\n\n\t\tself._setpoint = point", "def set_location(self, c: str, location: tuple[float, float]) -> None:\n if len(location) != 2: # noqa: PLR2004\n raise ValueError\n self.set_x(c, location[0])\n self.set_y(c, location[1])", "def set_location(self, location):\n self._overridden_location = location", "def update_robot_location(self, x, y):\n location_id = self._team_name + \"-\" + self._robot_name\n request_name = \"set_robot_location\"\n request_type = self._request_types[request_name]\n\n arguments = dict()\n for key in request_type['schema_keys']:\n arguments[key] = None\n arguments[\"@id\"] = location_id\n arguments[\"@type\"] = request_type[\"schema_name\"]\n arguments[\"episode\"] = self._episode_name\n arguments[\"team\"] = self._team_name\n arguments[\"timestamp\"] = self._get_current_timestamp()\n arguments[\"x\"] = x\n arguments[\"y\"] = y\n arguments[\"z\"] = 0.0\n resp = self.make_request(request_name, url_id=location_id, arguments=arguments)", "def set_position(self, x, y):\n self.geometry('%s%s' % (x, y))", "def coord(self, coord):\n\n self._coord = coord", "def location(self, location: Object):\n\n self._location = location", "def setUp(self):\n self.location = [(0, 0), (0, 1)]\n self.hit = (0, 0)", "def fixed_location(self, fixed_location):\n\n self._fixed_location = fixed_location", "def set_geo(self, lon=None, lat=None):\n changed = False\n if not lon:\n lon = self.lon\n if not lat:\n lat = self.lat\n\n if not self.point and lon and lat:\n self.point = Point(lon, lat)\n changed = True\n\n if not self.mpoly and lon and lat:\n self.mpoly = MultiPolygon(Polygon(((lon, lat),\n (lon, lat),\n (lon, lat),\n (lon, lat))))\n changed = True\n\n if changed:\n self.save()", "def setDesiredPosition(self, x, y):\n (self.setX, self.setY) = (x , y)", "def set_location(self, event=None):\n if self.lhs and self.lhs.lower() != \"here\":\n try:\n try:\n room = ArxRoom.objects.get(db_key__iexact=self.lhs)\n except ArxRoom.DoesNotExist:\n room = ArxRoom.objects.get(db_key__icontains=self.lhs)\n except (ArxRoom.DoesNotExist, ArxRoom.MultipleObjectsReturned):\n raise self.CalCmdError(\n \"Could not find a unique match for %s.\" % self.lhs\n )\n else:\n if not self.caller.character:\n raise self.CalCmdError(\n \"You must be in a room to mark it as the event location.\"\n )\n room = self.caller.character.location\n if not room:\n raise self.CalCmdError(\"No room found.\")\n id_or_instance = room if event else room.id\n self.set_form_or_event_attribute(\"plotroom\", None, event)\n self.set_form_or_event_attribute(\"location\", id_or_instance, event)\n self.msg(\"Room set to %s.\" % room)", "def set(self, x, y):\n self.x = x\n self.y = y", "def set_lon(self, lon):\n self._set_sub_text('lon', text=str(lon))\n return self", "def location(self, location: object):\n\n self._location = location", "def set_location(self):\n if self.has_non_empty_attribute(\"localitate\"):\n loc_item = None\n if utils.count_wikilinks(self.localitate) > 0:\n loc_link = utils.get_wikilinks(self.localitate)[0]\n loc_item = utils.q_from_wikipedia(\"ro\", loc_link.title)\n adm_item = 
self.get_statement_values(\"located_adm\")\n if loc_item and loc_item != adm_item[0]:\n self.add_statement(\"location\", loc_item)\n\n if not loc_item:\n self.add_to_report(\"localitate\", self.localitate, \"location\")", "def set_point(self, x, y):\n self._x = x\n self._y = y", "def set_location(self, location, now):\n def work():\n member = db.get(self.key())\n member.location = location\n member.location_time = now\n member.put()\n db.run_in_transaction(work)", "def set_locations():\n STATUS['locations']['monster'][0] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['monster'][1] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['weapon'][0] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['weapon'][1] = generate_random_coord(STATUS['grid_size'])", "def setPosition(self):\n self.data['pos-x'] = \"%s\" % self.x()\n self.data['pos-y'] = \"%s\" % self.y()", "def setNear(self, near):\n self.light.node().getLens().setNear(near)", "def setCoords(self, coords):\n\n self.coords = coords", "def set_location(self, location):\n\n if type(location) is list:\n location_as_list = location\n else:\n location_as_list = JanggiGame.translate_to_grid(location)\n\n self._location = location_as_list", "def set_location(self, obj, cursor):\n if (hasattr(cursor, 'location') and cursor.location is not None and\n cursor.location.file is not None):\n obj.location = (cursor.location.file.name, cursor.location.line)\n return", "def set_point(self, lon=None, lat=None):\n changed = False\n if not self.point and lon and lat:\n self.point = Point(lon, lat)\n changed = True\n if changed:\n self.save()\n return changed", "def location(self, location: str):\n self._location = location", "def set(self, x, y, color):\n if x < 0 or x >= self.width or y < 0 or y >= self.height:\n return\n i = self.map[y][x]\n super().set(i, color)", "def setLonLat(self, longitude, latitude):\n self._lon = longitude\n self._lat = latitude\n scene = self.scene()\n if scene is not None:\n self.updatePosition(scene)", "def setLonLat(self, longitude, latitude):\n self._lon = longitude\n self._lat = latitude\n scene = self.scene()\n if scene is not None:\n self.updatePosition(scene)", "def set_robot_pos(self):\n\t\tx,y,z = self.geo2desiredENU(self.curr_lat, self.curr_lon, self.gpsAlt)\n\t\tself.robot_msg.point.x = x\n\t\tself.robot_msg.point.y = y\n\t\tself.robot_msg.point.z = z", "def set(self, x, y=0):\n self.d[x] = y", "def update_location(self, lat, lon):\n endpoint = '/passport/user/travel'\n params = {\n \"lat\": lat,\n \"lon\": lon\n }\n return self.post_request(endpoint, params)", "def set_cell(self, point, cell):\n self._grid[point.x][point.y] = cell", "def set_2d_location(self, x, y):\r\n self.unif[42:44] = [x, y]" ]
[ "0.6776562", "0.67337173", "0.6717304", "0.66437644", "0.66012245", "0.65962243", "0.65886664", "0.6457518", "0.643699", "0.63909936", "0.62847275", "0.6281692", "0.6220761", "0.6214897", "0.60538673", "0.60247016", "0.6005318", "0.5934853", "0.5927787", "0.5925752", "0.5910971", "0.59025985", "0.58874655", "0.587144", "0.58130527", "0.5809607", "0.58010453", "0.5780912", "0.57576734", "0.5751611", "0.57349235", "0.5726638", "0.5721757", "0.5720592", "0.5701157", "0.56740934", "0.5673336", "0.56705165", "0.5667966", "0.5667966", "0.5667966", "0.5667966", "0.5667966", "0.5667966", "0.5667966", "0.5667966", "0.5667966", "0.5667966", "0.5667966", "0.5660366", "0.5651088", "0.5644324", "0.56420493", "0.56420493", "0.56420493", "0.56420493", "0.56420493", "0.56420493", "0.56420493", "0.56359875", "0.5633394", "0.5613023", "0.56046194", "0.5576569", "0.55763334", "0.5576188", "0.55644023", "0.55587995", "0.55571866", "0.55560225", "0.55489475", "0.55477166", "0.5542014", "0.55375344", "0.55350995", "0.55307263", "0.5530122", "0.55226326", "0.5520633", "0.5511239", "0.54998845", "0.5481588", "0.5475413", "0.54671776", "0.54646784", "0.54623264", "0.5454464", "0.5445907", "0.5436475", "0.54186225", "0.54134727", "0.54122484", "0.541047", "0.5404146", "0.5404146", "0.53779787", "0.53654397", "0.53581744", "0.53504634", "0.5347986" ]
0.6822219
0
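As a worked illustration of the `set_drone_position` record above (an editorial addition, not one of the dataset rows): the document field only delegates to an underlying drone object. A minimal runnable sketch follows; the `Map`, `Drone`, and `Point` class names and the coordinate layout are assumptions made for the example, not taken from the original source.

import numpy as np

# Hypothetical scaffolding around the record's document snippet.
class Point:
    def __init__(self, x, y):
        self.coords = np.array([x, y])

class Drone:
    def __init__(self):
        self.position = None

    def set_drone_position(self, new_point):
        # Remember the drone's latest location.
        self.position = new_point

class Map:
    def __init__(self, drone):
        self.drone = drone

    def set_drone_position(self, new_point):
        # Delegation exactly as in the dataset's document field.
        self.drone.set_drone_position(new_point)

# Usage: place the drone at (3.0, 4.0) on the map.
drone_map = Map(Drone())
drone_map.set_drone_position(Point(3.0, 4.0))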
Reset the obstacles' positions within the map (should be called when map is refreshed to clean the array)
def reset_obstacles(self):
    self.obstacles = np.array([])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset(self):\n self.obstacles = []\n self._tick = 0", "def recreate_obstacles(self):\n self.board_matrix = np.full(Dimension.board_size(), 1)\n self.obstacles = self.create_obstacles()", "def reset(self) -> None:\n self.map = []\n for col in range(self.width):\n self.map.append([])\n for cell in range(self.height):\n if col > 1 and col < self.width - 2:\n if cell == 0:\n # World Barrier - Top Middle\n self.map[col].append(StaticTile('wall_3', self.graphicsLibrary.get('wall_3'), (self.scaleWidth,self.scaleHeight), barrier=True))\n elif cell == self.height - 1:\n # World Barrier - Bottom Middle\n self.map[col].append(StaticTile('wall_12', self.graphicsLibrary.get('wall_12'), (self.scaleWidth,self.scaleHeight), barrier=True))\n else:\n # Playable Map Area\n if (col % 2) != 0 and (cell % 2) == 0:\n # Hard-Barrier Generation\n self.map[col].append(StaticTile('solid', self.graphicsLibrary.get('solid'), (self.scaleWidth,self.scaleHeight), barrier=True))\n elif (col,cell) in self.spawn_buffers:\n # Preserve Potential Spawn Points\n self.map[col].append(StaticTile('terrain', self.graphicsLibrary.get('terrain'), (self.scaleWidth,self.scaleHeight), barrier=False))\n elif random.randint(0, 2) == 0:\n # Soft-Barrier Generation\n self.map[col].append(DynamicTile('destructable_new', self.graphicsLibrary.get('destructable_new'), (self.scaleWidth,self.scaleHeight), destructable=\"True\", barrier=True, death_animation=self.animations_library.get('destructable_death')))\n else:\n # Fill Remaining Terrain\n self.map[col].append(StaticTile('terrain', self.graphicsLibrary.get('terrain'), (self.scaleWidth,self.scaleHeight), barrier=False))\n else:\n # World Barrier - Side Sections\n if col == 0 or col == self.width - 1:\n # Roof\n right_most_columns = False\n if col == self.width - 1:\n right_most_columns = True\n\n if cell == self.height - 1:\n self.map[col].append(StaticTile('wall_10', self.graphicsLibrary.get('wall_10'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == self.height - 2:\n self.map[col].append(StaticTile('wall_1', self.graphicsLibrary.get('wall_1'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == 0:\n self.map[col].append(StaticTile('wall_1', self.graphicsLibrary.get('wall_1'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n else:\n self.map[col].append(StaticTile('wall_5', self.graphicsLibrary.get('wall_5'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif col == 1 or col == self.width - 2:\n # Floor \n right_most_columns = False\n if col == self.width - 2:\n right_most_columns = True\n\n if cell == self.height -1:\n self.map[col].append(StaticTile('wall_11', self.graphicsLibrary.get('wall_11'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == self.height - 2:\n self.map[col].append(StaticTile('wall_9', self.graphicsLibrary.get('wall_9'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == 0:\n self.map[col].append(StaticTile('wall_2', self.graphicsLibrary.get('wall_2'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == 1:\n self.map[col].append(StaticTile('wall_6', self.graphicsLibrary.get('wall_6'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n else:\n self.map[col].append(StaticTile('wall_7', self.graphicsLibrary.get('wall_7'), (self.scaleWidth,self.scaleHeight), 
flip_x=right_most_columns, barrier=True))\n self.map[col][cell].place_at(topleft=(self.scaleWidth * col, self.scaleHeight * cell))", "def _reset_map(self):\n if not self.scenario_name.startswith('random'):\n self._map = self._fixed_original_map.copy()\n else:\n from environment.scenarios import generate_random_map\n self._map = generate_random_map(self.scenario_name)\n\n # Precompute wall channel and positions since they are static\n self._walls_channel = (self._map == WALL).astype(int)\n xs, ys = np.where(self._walls_channel)\n self._wall_positions = list(zip(xs, ys))\n\n # Set avatar position bidirectional caches (first thieves then guardians)\n xs_t, ys_t = np.where(self._map == THIEF)\n xs_g, ys_g = np.where(self._map == GUARDIAN)\n xs = np.concatenate([xs_t, xs_g])\n ys = np.concatenate([ys_t, ys_g])\n for avatar_id, (x, y) in enumerate(zip(xs, ys)):\n self._id2pos[avatar_id] = x, y\n self._pos2id[(x, y)] = avatar_id\n\n self._chased_treasure_pos = _coords_where(self._map == TREASURE)\n self._chased_thief_id = 0", "def reset(self):\n \n #initiate all tiles' value to 0\n self._grid_2048 = [[0 for dummy_col in range(self._grid_width)] for dummy_row in range(self._grid_height)]\n \n # two new tiles\n self.new_tile()\n self.new_tile()", "def _clear_map(self, default=100):\r\n self.tiles = [\r\n [default\r\n for _ in range(self.height)]\r\n for _ in range(self.width)]\r\n\r\n for (x, y, score) in self.goals:\r\n self.tiles[x][y] = score\r\n\r\n for (x,y) in self.walls:\r\n self.tiles[x][y] = np.nan", "def reset_map(self):\n self.x = None\n self.X = None\n self.y = None\n self.Y = None\n self.data = None\n self.sampling = None\n self.size = None", "def reset(self):\n\n #Create a grid of zeros\n self._grid = [[0 for dummy_col in range(self._grid_width)] for dummy_row in range(self._grid_height)]\n # _available_new_tiles will be refilled every 10 moves\n self._available_new_tiles = TOTAL_AVAILABLE_MOVES[:]\n for dummy_i in range(2):\n self.new_tile()\n self._game_over = False", "def specific_reset(self) -> None:\n\n # first, set agent xy and adjust its height\n self.agent.specific_reset()\n agent_pos = np.zeros(3)\n agent_pos = np.concatenate((agent_pos[:2], [self.agent.init_xyz[2]]))\n self.agent.set_position(agent_pos)\n\n # second, reset obstacle positions\n if len(self.obstacles) > 0:\n obs_init_pos = env_utils.generate_obstacles_init_pos(\n num_obstacles=len(self.obstacles),\n agent_pos=self.agent.get_position(),\n goal_pos=np.array([]), # no goal in gather task\n world=self.world,\n min_allowed_distance=self.obstacle_obstacle_distance,\n agent_obstacle_distance=self.agent_obstacle_distance\n )\n for i, ob in enumerate(self.obstacles):\n ob.set_position(obs_init_pos[i])\n\n # finally, make all collected objects visible again\n [ob.update_visuals(make_visible=True) for ob in self.obstacles]", "def reset_objects(new_game_map):\r\n new_coins = []\r\n new_enemies = []\r\n y_val = 0\r\n for row in new_game_map:\r\n x_val = 0\r\n for tile in row:\r\n if tile == '3':\r\n new_coins.append(pygame.Rect((x_val * TILE_SIZE), (y_val * TILE_SIZE), TILE_SIZE, TILE_SIZE))\r\n if tile == '4':\r\n new_enemies.append([[0, 0], pygame.Rect((x_val * TILE_SIZE), (y_val * TILE_SIZE), TILE_SIZE, TILE_SIZE), 1, True, ['enemy_move', 0]])\r\n x_val += 1\r\n y_val += 1\r\n return new_coins, new_enemies, []", "def reset(self):\r\n # replace with your code\r\n for row in range(0, self._grid_height):\r\n for col in range(0, self._grid_width):\r\n self._grid_tile[row][col] = 0\r\n # at this step, all cells should be 
available\r\n self.new_tile()\r\n self.new_tile()", "def reset(self):\r\n # replace with your code\r\n self._cells = [[0 for dummy_col in range(self._grid_width)] for dummy_row in range(self._grid_height)]\r\n self.new_tile()\r\n self.new_tile()", "def reset(self, grid, disallowed, num_of_obstacles):\n # self.array.clear()\n random_array = []\n\n # If I want the obstacles in the same location every episode\n # random.seed(10)\n\n # Make a copy of the grid\n allowed = grid[:]\n\n [allowed.remove(pos) for pos in disallowed]\n\n for i in range(num_of_obstacles):\n new_pos = random.choice((allowed))\n self.array.append(new_pos)\n random_array.append(new_pos)\n allowed.remove(new_pos)\n\n self.array_length = self.array_length + num_of_obstacles\n\n return random_array", "def reset(self):\r\n self._cells = [ [0 for dummy_col in range(self._grid_width)] \r\n for dummy_row in range(self._grid_height) ]\r\n \r\n \r\n self.new_tile()\r\n self.new_tile()", "def reset_map(self):\n self.reset_world(self._filename)", "def reset(self):\r\n self.grid = [[0 for dummy_col in range(self.grid_width)] for dummy_row in range(self.grid_height)]\r\n self.new_tile()\r\n self.new_tile()", "def reset(self):\n # replace with your code\n self._grid = [[0] * self._width for _ in xrange(self._height)]\n self.new_tile()\n self.new_tile()", "def reset(self):\n # self.grid = [[0] * self.grid_width] * self.grid_height\n self.grid = []\n for dummy_row in range(self.grid_height):\n new_row = []\n for dummy_col in range(self.grid_width):\n new_row.append(0)\n self.grid.append(new_row)\n self.new_tile()\n self.new_tile()", "def reset(self):\n # replace with your code\n dummy_row = self._grid_height\n dummy_col = self._grid_width\n self._cells = [ [0 for dummy_col in range(self._grid_width)] \n for dummy_row in range(self._grid_height)]\n \n self.new_tile()\n self.new_tile()", "def reset(self):\n # replace with your code\n self._grid = [[0 for dummy_column in range(self._grid_width)] for dummy_row in range(self._grid_height)]\n for dummy_num in range(2):\n self.new_tile()", "def reset_position(self): \n self.rect.x = 400\n self.rect.y = 400\n \n # Specifies the Player's spawnpoint as maze_arrangement[8][8], representing\n # the tile in the center of the maze \n self.__minotaur_x = 8\n self.__minotaur_y = 8", "def reset(self):\n self.grid = [[0 for col in range(self.grid_width)] for row in range(self.grid_height)]\n # next, create a list of all tuples (row,col)\n self.tiles = [(row,col) for row in range(self.grid_height) for col in range(self.grid_width)] \n for dummy_idx in range(2):\n self.new_tile()\n self.dir_dic = {\n UP:[(0, col) for col in range(self.grid_width)],\n DOWN:[(self.grid_height-1, col) for col in range(self.grid_width)],\n LEFT:[(row,0) for row in range(self.grid_height)], \n RIGHT:[(row,self.grid_width-1) for row in range(self.grid_height)]}", "def reset(self):\r\n\r\n self._board = [[0 for x in range(self._grid_width)]\r\n for y in range(self._grid_height)]\r\n self.new_tile()", "def reset(self):\n self._cells = [[0 for dummy_col in range(self._grid_width)] for dummy_row in range(self._grid_height)]\n self.new_tile()\n self.new_tile()\n #return self._cells", "def reset(self):\n self._maps = {}", "def reset(self):\r\n self.board = [[0 for i in range(self.width)]\r\n for i in range(self.height)]\r\n self.new_tile()\r\n self.new_tile()", "def reset(self, roi_warped_points):\n self.__init__(roi_warped_points)", "def reset(self):\n # replace with your code\n self.board = [[0 for dummy_index in 
range(self.grid_width)] for dummy_inner_index in range(self.grid_height)]", "def _reset_game(self):\n\t\tself.is_play = True\n\t\tself.is_gameover = False\n\t\tself.bolan.rect.y = self.bolan.default_y\n\t\tself.scoreboard.score = 0\n\t\tself.obstacles._reset_positions()", "def reset(self):\n self._grid = [[0 for dummy_col in range(self._width)]\n for dummy_row in range(self._height)]\n self.new_tile()\n self.new_tile()", "def reset(self) :\n for i in range(len(self.playerCellList)) :\n for j in range(len(self.playerCellList[i])) :\n self.playerCellList[i][j].hasPawn = False", "def clear_map(self):\n self.rooms = []\n\n self.dungeon.clear_dungeon()", "def update_obstacles(self, new_obs):\n self.obstacles = new_obs", "def reset(self):\n self._grid = [[0] * self._width for _ in range(self._height)]\n self.new_tile()\n self.new_tile()", "def updateHardObstacles(self):\r\n global_obs = self.calcGlobalObstaclePosition([[10, 20],[10, 0],[10, -20]])\r\n self.globalHardObstaclesList.extend(global_obs)", "def reset(self):\n self.position = np.zeros(self.ndegres)\n self.velocity = np.zeros(self.ndegres)\n self.state = np.zeros(2*self.ndegres)\n self.flag = 0\n self.h_ref = np.array([self.ref for _ in range(self.horizon)])\n self.action = np.zeros(self.ACTION_DIM) \n self.h_action = np.zeros(self.ACTION_DIM*self.horizon)", "def reset(self):\n self.x_pos = -self.width\n self.y_pos = self.screenHeight / 2 - self.height\n self.isJump = False\n self.y_velocity = self.origin_y_velocity\n self.x_velocity = self.origin_x_velocity\n self.score = 0\n self.spriteCount = 0\n self.goForward = True", "def reset(self):\n super(PolygonTool, self).reset()\n # self.__nsides = None\n # self.__increment = None\n # self.__external = False # make this adjustable?\n self.__center = None\n for _i in range(self.__nsides):\n self.__xpts[_i] = 0.0\n self.__ypts[_i] = 0.0", "def reset(self):\r\n # creating the grid with the values all initialized to zero\r\n \r\n self._grid = [[ 0 for dummy_col in range(self._width)]\r\n for dummy_row in range(self._height)]\r\n # introducing the two initial tiles\r\n self.new_tile()\r\n self.new_tile()\r\n #for testing purposes\r\n #print self.grid\r\n #print self\r", "def clean(self):\n self.board_values = np.zeros((self.size, self.size))\n self.tiles_taken[:, :] = False", "def reset():\n global WORLD\n WORLD = np.array([\n [\"G\", \"_\", \"_\", \"_\", \"_\", \"X\", \"X\"],\n [\"G\", \"_\", \"_\", \"_\", \"_\", \"_\", \"_\"],\n [\"X\", \"X\", \"_\", \"_\", \"_\", \"_\", \"_\"],\n [\"X\", \"X\", \"X\", \"X\", \"X\", \"_\", \"_\"],\n [\"X\", \"X\", \"_\", \"_\", \"_\", \"_\", \"_\"],\n [\"_\", \"_\", \"_\", \"_\", \"_\", \"_\", \"_\"],\n [\"S\", \"_\", \"_\", \"_\", \"X\", \"X\", \"X\"]\n ])\n global V\n V = 0", "def reset(self, obstacle_pos=(0.0, 0.0), agent_pos=(2.0, -11.0)):\n\n obstacle_pos_x_double = ctypes.c_double(obstacle_pos[0])\n obstacle_pos_y_double = ctypes.c_double(obstacle_pos[1])\n agent_pos_x_double = ctypes.c_double(agent_pos[0])\n agent_pos_y_double = ctypes.c_double(agent_pos[1])\n\n self.wrapper.reset(self.instance, obstacle_pos_x_double, obstacle_pos_y_double,\n agent_pos_x_double, agent_pos_y_double)\n self.obstacle_pos = self.get_body_ini_pos(self.obstacle_body_index)\n self.agent_pos = self.get_body_ini_pos(self.agent_body_index)", "def recall(self):\n for t in self.placed_tiles:\n row = self.placed_tiles[t][1][0]\n col = self.placed_tiles[t][1][1]\n # remove tiles from board\n self.board.board[row][col].letter = None\n # put tiles back on rack\n self.rack[t] = 
self.placed_tiles[t][0]", "def resets_attributes(self):\n \n self.path_dict = None\n self.poss_dict = None\n self.check_dict = None\n self.long_dict = None\n self.rep_counter = 0\n self.cap_counter = 0\n \n self.board = []\n self.coords = []\n self.chess_coords = []\n self.empty = \" \"", "def specific_reset(self) -> None:\n\n # set agent and goal positions\n self.agent.specific_reset()\n agent_pos = self.world.generate_random_xyz_position()\n goal_pos = agent_pos\n while np.linalg.norm(agent_pos[:2]-goal_pos[:2]) < self.world.body_min_distance:\n goal_pos = self.world.generate_random_xyz_position()\n # adjust the height of agent\n agent_pos = np.concatenate((agent_pos[:2], [self.agent.init_xyz[2]]))\n self.agent.set_position(agent_pos)\n self.goal.set_position(goal_pos)\n self.old_dist = self.get_xy_distance()\n\n # apply random orientation to agent.\n random_yaw = np.random.uniform(-np.pi, np.pi)\n quaternion = self.bc.getQuaternionFromEuler([0, 0, random_yaw])\n self.agent.set_orientation(quaternion)\n\n # reset obstacle positions\n if len(self.obstacles) > 0:\n obs_init_pos = env_utils.generate_obstacles_init_pos(\n num_obstacles=len(self.obstacles),\n agent_pos=self.agent.get_position(),\n goal_pos=self.goal.get_position(),\n world=self.world,\n min_allowed_distance=self.world.body_min_distance,\n agent_obstacle_distance=self.agent_obstacle_distance\n )\n for i in range(len(self.obstacles)):\n self.obstacles[i].set_position(obs_init_pos[i])", "def reset(self):\n self.xview_moveto(0)\n self.yview_moveto(0)\n self.zoomMap(1, 0, 0)", "def reset(self):\n\n self.x = np.random.randint(3, self.grid_size-3, size=1)[0]\n self.y = np.random.randint(3, self.grid_size-3, size=1)[0]\n \n self.h_x = []\n self.h_y = []\n for i in range(self.hunters):\n x = np.random.randint(3, self.grid_size-3, size=1)[0]\n y = np.random.randint(3, self.grid_size-3, size=1)[0]\n self.h_x.append(x)\n self.h_y.append(y)\n\n self.trajectory = np.zeros((self.grid_size,self.grid_size))\n\n bonus = 0.5 * np.random.binomial(1,self.temperature,size=self.grid_size**2)\n bonus = bonus.reshape(self.grid_size,self.grid_size)\n\n malus = -1.0 * np.random.binomial(1,self.temperature,size=self.grid_size**2)\n malus = malus.reshape(self.grid_size, self.grid_size)\n\n self.to_draw = np.zeros((self.max_time+2, self.grid_size*self.scale, self.grid_size*self.scale, 3))\n\n\n malus[bonus>0]=0\n\n self.board = bonus + malus\n\n self.position = np.zeros((self.grid_size, self.grid_size))\n self.position[0:2,:]= -1\n self.position[:,0:2] = -1\n self.position[-2:, :] = -1\n self.position[:, -2:] = -1\n self.board[self.x,self.y] = 0\n self.t = 0\n \n self.board_with_hunters[:,:] = 0\n \n for i in range(self.hunters):\n self.board_with_hunters[self.h_x[i],self.h_y[i]] = -100\n \n\n global_state = np.concatenate((\n self.board.reshape(self.grid_size, self.grid_size,1),\n self.position.reshape(self.grid_size, self.grid_size,1),\n self.trajectory.reshape(self.grid_size, self.grid_size,1),\n self.board_with_hunters.reshape(self.grid_size, self.grid_size,1)),axis=2)\n\n state = global_state[self.x - 2:self.x + 3, self.y - 2:self.y + 3, :]\n return state", "def reset_path(self):\n for i in self.grid:\n for y in i:\n y.g = 0\n y.h = 0\n y.f = 0\n y.parent = None\n y.visited = False", "def reset(self):\n self.nodes = []\n self.start = self.start\n self.end = self.end\n\n for row in self.charMap:\n for c in row:\n if c == \"2\":\n c.c = \"0\"\n self.n_checked = 0", "def specific_reset(self) -> None:\n\n # set agent and goal positions\n 
self.agent.specific_reset()\n agent_pos = self.agent.init_xyz\n agent_pos[:2] = self.world.generate_random_xyz_position()[:2]\n goal_pos = agent_pos\n while np.linalg.norm(agent_pos[:2]-goal_pos[:2]) < self.world.body_min_distance:\n goal_pos = self.world.generate_random_xyz_position()\n # adjust the height of agent\n # agent_pos = np.concatenate((agent_pos[:2], [self.agent.init_xyz[2]]))\n self.agent.set_position(agent_pos)\n self.goal.set_position(goal_pos)\n self.old_dist = self.get_xy_distance()\n\n # set agent orientation towards goal\n yaw = angle2pos(self.agent.get_position(), self.goal.get_position())\n yaw = self.agent.init_rpy[2] + yaw\n # apply random orientation to agent.\n yaw += np.random.uniform(-np.pi, np.pi)\n quaternion = self.bc.getQuaternionFromEuler([0, 0, yaw])\n self.agent.set_orientation(quaternion)\n\n # reset obstacle positions\n if len(self.obstacles) > 0:\n obs_init_pos = env_utils.generate_obstacles_init_pos(\n num_obstacles=len(self.obstacles),\n agent_pos=self.agent.get_position(),\n goal_pos=self.goal.get_position(),\n world=self.world,\n min_allowed_distance=self.world.body_min_distance,\n agent_obstacle_distance=self.agent_obstacle_distance\n )\n for i in range(len(self.obstacles)):\n self.obstacles[i].set_position(obs_init_pos[i])", "def reset_route(self):\r\n\t\tself.shift = 0\r\n\t\tself.start_pos = None\r\n\t\tself.goal_pos = None", "def reset(self):\n self.resetPos()\n self.vx, self.vy = 0, 0\n self.accel, self.dangle = 0, 0\n self.crashed = False\n self.timeDriving, self.score, self.checkpoint, self.laps = 0, 0, 0, 0\n self.targetCheckpointPos = self.maze.checkpoints[0].getMidInt()\n self.inputColour = [sensor_colours[0] for i in range(self.dimensions[0])]\n self.scan = np.array([0 for i in range(self.dimensions[0])])\n self.cost = [0 for i in range(6)]\n #Extrapos for CTS LOS\n self.extrapos = []", "def reset_position(self, x, y):\n\t\tself.grid[x][y] = self.terminal", "def reset_position(self):\n self.set_position(copy.deepcopy(self.ab_pos))", "def reset_all(self):\n for i, stop in enumerate(self):\n stop._map = self\n stop.reset()", "def clear_for_new_board(self):\r\n self.game_board = []\r\n self.good_contours = []\r\n self.game_board_contours = []", "def reset(self):\n self.creature.reset()\n self.current_world = copy.copy(self.init_world_rewards)\n self.draw_board()\n self.last_move = None", "def clear(self):\n super().clear()\n self.world = None\n self.regions = {}\n self.loaded_regions = set()\n self.given_center = False", "def reset(self):\n self.board_cards = []\n self.position = -1\n self._reset_players()", "def reset(self):\n self.tile=\"\"", "def reset_board(self):\n cell_list = self.get_cells()\n for current_cell in cell_list:\n current_cell.set_cell_state(0) # remove player ownership of cell", "def resetForces(self):\n for atom in range(0, self.numAtoms):\n self.atoms[atom].fx = 0\n self.atoms[atom].fy = 0\n self.atoms[atom].fz = 0\n self.atoms[atom].pot = 0", "def place_obstacles():\n #Randomly generate different sized rectangles\n #Soem may overlap, which gives more variety in shape of obstacles\n xvals = np.random.randint(0,self.map_dimensions[1],size=self.N_obstacles)\n yvals = np.random.randint(0,self.map_dimensions[0],size=self.N_obstacles)\n lower_left = zip(xvals,yvals)\n rects = []\n for LL in lower_left:\n x = LL[0]\n y = LL[1]\n wmax = self.map_dimensions[1] - x\n w = np.random.randint(0,wmax,size=1)[0]\n hmax = self.map_dimensions[0] - y\n h = np.random.randint(0,hmax,size=1)[0]\n rects += [(x,y,w,h)]\n 
self.coordinates__obstacles = rects", "def reset(self):\n self._top = [self.middle, self.middle, self.middle]\n self._left = [self.middle, self.middle, self.middle]\n self._right = [self.middle, self.middle, self.middle]\n self._bottom = [self.middle, self.middle, self.middle]", "def replace(self):\n if self.removed:\n self.coordinates = [[(self.player * 15 - 15), 0], [(self.player * 15 - 15), 1],\n [(self.player * 15 - 15), 2], [(self.player * 15 - 15), 3]]\n for i in self.coordinates:\n self.collision_boxes.append(rect.Rect(i[0] * 64, i[1] * 64, 64, 64))\n self.removed=False", "def reset(self):\n self.x_pos1 = 0\n self.x_pos2 = self.x_pos1 + self.width\n self.y_pos = self.offset_y\n self.velocity = self.origin_velocity", "def reset(self):\r\n self.body = [[int(self.x_pos/2), int(self.y_pos/2)]] # initial snake starts at center of screen\r\n self.direction = \"UP\"\r\n self.length = 1\r\n self.alive = True\r\n self.speed = 10", "def reset(self):\n self.bbox = None\n self.true = None\n self.meta = None", "def resetBoard(self):\n\t\tself.board = np.zeros((self.boardSize,self.boardSize))", "def resetBoard(self):\n\t\tself.board = np.zeros((self.boardSize,self.boardSize))", "def init_map(self, obstacle_rate=0.9):\n n = self.size()\n\n map_obstacles = [] # np.zeros((n, n)) # 1: obstacle, 0: non-obstacle\n \n for i in range(n):\n # We only need 2 bit to encode 1/0 for each element of NumberArray\n row = NumberArray(2, n)\n for j in range(n):\n if i == j:\n # map_obstacles[i][j] = 0\n row[j] = 0\n elif i > j:\n # map_obstacles[i][j] = map_obstacles[j][i]\n row[j] = map_obstacles[j][i]\n else:\n # map_obstacles[i][j] = 1 if random.random() > 0.9 else 0\n row[j] = 1 if random.random() > obstacle_rate else 0\n map_obstacles.append(row)\n\n self.map_obstacle = map_obstacles", "def reset(self):\n self._x = 0\n self._y = 0", "def reset(self):\n self.position = self.initial_position\n self.velocity = [0, 0, 0]", "def reset(self):\n\n\t\tself.games_played = 0\n\t\tself.memory = {}\n\t\tself.movesThisGen = list()", "def reset(self):\n\n\t\tself.games_played = 0\n\t\tself.memory = {}\n\t\tself.movesThisGen = list()", "def reset(self) -> None:\n self.moves_taken = []", "def ResetPos(self):\n for idx in range(self.unFixJL):\n self._p.resetJointState(self.uid, idx,\n self.InitInfo[\"JPos\"][idx],\n self.InitInfo[\"JVel\"][idx])", "def updateObstacles(self, obstacles):\r\n global_obs = self.calcGlobalObstaclePosition(obstacles)\r\n self.globalObstaclesList.extend(global_obs)", "def reset(self):\n self.dynamic_predictions = {}\n self.position = 0\n self.references = []", "def reset(self):\n self.my_board = np.ones((self.board_size, self.board_size), dtype=int) * CLOSED\n self.board = place_mines(self.board_size, self.num_mines)\n self.num_actions = 0\n self.valid_actions = np.ones((self.board_size * self.board_size), dtype=bool)\n\n return self.my_board", "def reset(self):\n self.vrp = np.matrix([0.5, 0.5, 1])\n self.vpn = np.matrix([0, 0, -1])\n self.vup = np.matrix([0, 1, 0])\n self.u = np.matrix([-1, 0, 0])\n self.extent = [1., 1., 1.]\n self.screen = [400., 400.]\n self.offset = [20., 20.]", "def resetBoard(self):\n self.space1 = 0\n self.space2 = 0\n self.space3 = 0\n self.space4 = 0\n self.space5 = 0\n self.space6 = 0", "def reset(self):\n self.board = place_mines(self.board_size, self.num_mines)\n self.my_board = np.ones((self.board_size, self.board_size), dtype=int) * CLOSED\n self.valid_actions = np.ones((self.board_size, self.board_size), dtype=np.bool)\n return self.my_board", "def reset(self):\n 
self.placeables = []\n self.previous_placeable = None\n self.current_volume = 0\n self.reset_tip_tracking()", "def random_map(self, world):\n obstacles = []\n if self.cfg[\"obstacle\"][\"octagon\"][\"enabled\"]:\n obstacles += self.__generate_octagon_obstacles(world)\n if self.cfg[\"obstacle\"][\"rectangle\"][\"enabled\"]:\n obstacles += self.__generate_rectangle_obstacles(world)\n\n # update the current obstacles and goal\n self.current_obstacles = obstacles\n self.add_new_goal()\n\n # apply the new obstacles and goal to the world\n self.apply_to_world(world)", "def reset():\n if not game_state.is_solving:\n game_state.is_solving = False\n game_state.is_dirty = False\n\n del tower_a_disks[0:len(tower_a_disks)]\n del tower_b_disks[0:len(tower_b_disks)]\n del tower_c_disks[0:len(tower_c_disks)]\n\n disk_sprites.empty()\n\n [add_disk() for x in range(3)]\n\n return reset", "def reset(self):\n self.posXY = (0,0)\n self.magXY = (1.0,1.0)\n self.rot = 0.0\n self.trans = 255\n self.isDone = False\n self.isFirst = True\n\n self.kill()\n self.Group = pyglet.graphics.OrderedGroup(self.order)\n self.isReady = self.check()", "def reset(self):\n self._steps = 0\n self._empty_canvas()\n\n if self._player_start_pos is None:\n x = random.randint(0, (self._width // self.PLAYER_DIM) - 1)\n y = random.randint(0, (self._height // self.PLAYER_DIM) - 1)\n\n x *= self.PLAYER_DIM\n y *= self.PLAYER_DIM\n\n self._player = Pt(x, y)\n else:\n self._player = self._player_start_pos\n\n self._goal_generate()\n self._current_route_key = (tuple(self._player), tuple(self._goal))\n while self._current_route_key in self._routes.keys():\n self._goal_generate()\n self._current_route_key = (tuple(self._player), tuple(self._goal))\n\n self._routes[self._current_route_key]\n\n self._update()", "def reset(self):\n self.grid = np.array([0] * 9) # grid\n self.turn = 1 # whose turn it is\n self.done = False # whether game is done\n return self.grid", "def clear_tiles(self):\n for y in range(Settings.SIZE_Y):\n for x in range(Settings.SIZE_X):\n self.__tile_grid[y][x].configure(\n image=self.__marker_images[MarkerType.NONE])", "def reset(self):\n width = len(self.cell)\n height = len(self.cell[0])\n self.cell = [ [EMPTY for r in range(height)] for c in range(width) ]", "def specific_reset(self) -> None:\n self.agent.specific_reset() # reset joints\n new_pos = self.agent.init_xyz\n new_pos[:2] = np.random.uniform(-0.01, 0.01, 2)\n self.agent.set_position(new_pos)\n self.old_potential = self.calculate_task_potential()", "def reset_state(self):\n for row in range(len(self.state)):\n for column in range(len(self.state[row])):\n self.state[row][column] = None", "def reset(self):\n self.grid = np.array([0] * 9) # grid\n self.turn = 1 # whose turn it is\n self.done = False # whether game is doneT\n return self.grid", "def reset(self):\n\t\tself.offsets = self.start_off.copy()", "def reset(self, fullreset=True):\n self.controlpoints = []\n self.contour = []\n self.ext_energies = []\n self.update()\n if fullreset:\n self.optimized = False", "def reset(self) -> list:\n self.x1 = 0\n self.y1 = -1\n self.z = 3\n self.x2 = 0\n self.y2 = 0\n self.frame = 0\n return self.get_state()", "def remove_obstacle(self, x, y):\n self.BOARD[y][x].traversable = True\n self.board_array[y][x] = 0", "def reset_game():\n global x_pos, o_pos, frames, count\n\n count = 0\n x_pos = []\n o_pos = []\n result.set(\"Your Turn!\")\n for x in frames:\n for y in x.winfo_children():\n y.config(text=\" \", state='normal')", "def reset_board(self):\n\n self.board = 
np.array(self.initial_board)" ]
[ "0.80095875", "0.7382117", "0.72062683", "0.7101044", "0.7061013", "0.70536333", "0.7010583", "0.6989404", "0.69813454", "0.69743735", "0.6880206", "0.68602246", "0.6838129", "0.6808036", "0.67923963", "0.6767282", "0.67519385", "0.67181975", "0.67109746", "0.6702349", "0.6693133", "0.66791046", "0.6668063", "0.66637594", "0.6661358", "0.665283", "0.66279536", "0.66215134", "0.65996504", "0.65979785", "0.6596446", "0.6576431", "0.6573421", "0.6555398", "0.6541702", "0.65374255", "0.6508948", "0.6475114", "0.647084", "0.64532876", "0.64335746", "0.6410386", "0.64034456", "0.63862866", "0.6373345", "0.6362071", "0.6356949", "0.6342898", "0.63360393", "0.63302463", "0.63284695", "0.63256377", "0.63154596", "0.6310789", "0.6291712", "0.629153", "0.6227197", "0.6225278", "0.621896", "0.6204273", "0.6201195", "0.620029", "0.6196645", "0.61956286", "0.6194664", "0.61886805", "0.6177943", "0.6175757", "0.61659324", "0.61659324", "0.6138367", "0.6133233", "0.61200356", "0.6119317", "0.6119317", "0.61157286", "0.61132765", "0.6106129", "0.60843986", "0.6079422", "0.60727745", "0.6062905", "0.6047417", "0.6039181", "0.6036618", "0.6035286", "0.601243", "0.6011303", "0.601128", "0.60028046", "0.60012674", "0.6000816", "0.599663", "0.5994305", "0.59804934", "0.59769654", "0.5966026", "0.59633106", "0.5956247", "0.5955274" ]
0.84130687
0
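The `reset_obstacles` record above stores obstacles in a flat NumPy array and clears it when the map is refreshed. Below is a minimal sketch under that assumption (again an editorial addition, not a dataset row); the `add_obstacle` helper and the numeric stand-ins for obstacle objects are hypothetical, added only to make the example self-contained and runnable.

import numpy as np

class Map:
    def __init__(self):
        self.obstacles = np.array([])

    def add_obstacle(self, obstacle):
        # Assumed helper: append an obstacle to the flat array store.
        self.obstacles = np.append(self.obstacles, obstacle)

    def reset_obstacles(self):
        # Called on map refresh to clean the array, per the record's docstring.
        self.obstacles = np.array([])

m = Map()
m.add_obstacle(1.0)  # numeric stand-in for a real obstacle object
m.reset_obstacles()
assert m.obstacles.size == 0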
Return True if drone should avoid obstacle and False if not
def is_obstacle_in_path(self):
    for obstacle in self.obstacles.tolist():
        print("obstacle.get_point():", obstacle.get_point())
        dist_to_obstacle = VectorMath.get_vector_magnitude(np.subtract(obstacle.get_point(), self.drone.get_point()))
        if dist_to_obstacle < obstacle.get_radius() + Constants.DETECTION_THRESHOLD:
            if isinstance(obstacle, StationaryObstacle):
                paths = self.generate_possible_paths(obstacle)
                if len(paths) != 0:
                    return True, np.array(paths)
            elif isinstance(obstacle, MovingObstacle):
                pass
    return False, None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _detect_obstacles(self):\n def _distance(point, line_point1, line_point2):\n \"\"\"calcuate the distance between a point and a line\"\"\"\n vec1 = line_point1 - point\n vec2 = line_point2 - point\n distance = np.abs(np.cross(vec1,vec2)) / np.linalg.norm(line_point1-line_point2)\n return distance\n\n def _acute_angle(point, line_point1, line_point2):\n \"\"\"detetrmine if the point is whithin the boundary of the line through law of cosines\"\"\"\n base_line = np.linalg.norm(line_point1-line_point2)\n assert base_line > 0, \"check the library useage\"\n line1 = np.linalg.norm(point - line_point1)\n line2 = np.linalg.norm(point - line_point2)\n cos_angle_1 = (base_line**2 + line1**2 - line2**2)/(2*base_line*line1)\n cos_angle_2 = (base_line**2 + line2**2 - line1**2)/(2*base_line*line2)\n if cos_angle_1 * cos_angle_2 > 0:\n return True\n else:\n return False\n\n if self.obstacles != \"None\": # if user assigned some obstacles\n for line in self.env_config: \n line_point1, line_point2 = np.array(line[0]), np.array(line[1])\n point = np.array(self.state[:2])\n distance = _distance(point, line_point1, line_point2)\n acute_angle = _acute_angle(point, line_point1, line_point2)\n if distance <= 0.02 and acute_angle:\n self.adsorption = True\n break\n else:\n self.adsorption = False", "def passable(self, point):\n return point not in self.obstacles", "def obstacle_prone_area(self,image):\r\n\r\n start_x=int(self.start[0])\r\n start_y=int(self.start[1])\r\n goal_x=int(self.goal[0])\r\n goal_y=int(self.goal[1])\r\n print(goal_x,goal_y)\r\n if (image[int(self.maximum_size-goal_x),int(goal_y),0]==0) or ((image[int(self.maximum_size-start_x),int(start_y),0]==0)):\r\n #print(1)\r\n return False\r\n else:\r\n #print(2)\r\n return True", "def check_movement(self):\n is_clear = True # default return value if no obstacles\n # !!! 
IR_SENSORS DISABLED\n if self.move_state == MOV_FORWARD:\n if self.l.look_for_obstacle(OBST_FRONT) == True:\n is_clear = False\n return is_clear", "async def should_handle(self):\n local_controller = self.controller\n workers_total = len(local_controller.workers)\n geysers = local_controller.extractors\n drones_in_queue = local_controller.already_pending(DRONE)\n if (\n not local_controller.close_enemies_to_base\n and local_controller.can_train(DRONE)\n and not local_controller.counter_attack_vs_flying\n ):\n if workers_total == 12 and not drones_in_queue:\n return True\n if (\n workers_total in (13, 14, 15)\n and len(local_controller.overlords) + local_controller.already_pending(OVERLORD) > 1\n ):\n return True\n optimal_workers = min(\n sum(x.ideal_harvesters for x in local_controller.townhalls | geysers), 90 - len(geysers)\n )\n return (\n workers_total + drones_in_queue < optimal_workers\n and np.sum(\n np.array(\n [\n len(local_controller.zerglings),\n len(local_controller.hydras),\n len(local_controller.ultralisks),\n ]\n )\n * np.array([1, 2, 3])\n )\n > 15\n )\n return False", "def goal_occupied(self, view):\n for line in view.obstacles:\n if linesegdist2(line.p1, line.p2, self.goal) < self.radius ** 2:\n return True\n\n for p in view.pedestrians:\n if p.velocity.length2() == 0.0:\n if p.position.distance_to2(self.goal) < p.radius:\n return True\n\n return False", "def check_for_obstacles(self):\n obs = False\n obs_p = []\n for point in self.obstacles:\n if -0.15 <= point[1] <= 0.15: # robot is 178mm wide\n # Obstacles should be less than or equal to 0.2 m away before being detected\n if 0 <= point[0] <= .2:\n obs_p.append(point)\n obs = True\n if obs:\n pos = self.determine_pos_of_obstacle(obs_p)\n data = Obstacle()\n data.x = pos[0]\n data.y = pos[1]\n data.obstacle = True\n self.obs_pub.publish(data)", "def __hit_bricks(self, g_object):\n return type(g_object) == GRect and g_object != self.__paddle", "def check_obstructed(r1,r2): \n \n if r1==r2:\n return False\n \n #Densely sample line connecting r1 and r2.\n #If any of those sampled points is inside the rectangle, then the \n #line of sight intersects the rectangle and the tower's view is\n #obstructed.\n NP = 1000\n sampled_x = np.linspace(r1[0],r2[0],NP)\n sampled_y = np.linspace(r1[1],r2[1],NP)\n for x,y,w,h in self.coordinates__obstacles:\n for pt in xrange(NP):\n if (sampled_x[pt] > x) and (sampled_x[pt] < x+w) and \\\n (sampled_y[pt] > y) and (sampled_y[pt] < y+h):\n return True\n return False", "def is_obstacle_in_path_of_drone(self, obstacle_vector, waypoint_vector):\n obstacle_list = obstacle_vector.tolist()\n waypoint_list = waypoint_vector.tolist()\n\n for index in range(len(obstacle_list)):\n if all(item > 0 for item in [-1.0 * obstacle_list[index], waypoint_vector[index]]) or all(item < 0 for item in [-1.0 * obstacle_list[index], waypoint_vector[index]]):\n return False\n\n return True", "def can_move(self):\r\n for wall in self.app.walls:\r\n if vec(self.grid_pos+self.direction) == wall:\r\n return False\r\n return True", "def avoid_obstacles(self):\n _a = v2d(0, 0)\n _count = 0\n\n # Process all obstacles\n for obs in self.target._obstacles:\n # Vector from target to me\n diff = self._posn - obs._posn\n dist = abs(diff) # Distance\n if 0 < dist < self._sensing_range: # Is it in range?\n # Get force exherted by obstacle\n _f = self.obstacle_force(obs)\n if _f.magnitude() > 1: # Is the force significant?\n _a += _f\n _count += 1\n \n if _count > 0:\n _a /= _count\n _a *= self._speed_cap\n #limit(_a, self._max_f)\n 
\n return _a", "def should_drive(self):\n\t\tif not self.moving:\n\t\t\tnew_direction = self.find_direction()\n\t\t\tif self.orderQueue.has_order_in_floor_and_direction(self.direction, self.currentFloor) or self.orderQueue.has_order_in_floor_and_direction(ORDERDIR.IN, self.currentFloor):\n\t\t\t\tself.orderQueue.delete_order_in_floor(self.direction, self.currentFloor)\n\t\t\t\tself.open_door()\n\t\t\telif new_direction != self.direction and self.orderQueue.has_order_in_floor_and_direction(not self.direction, self.currentFloor):\n\t\t\t\tself.orderQueue.delete_order_in_floor(not self.direction, self.currentFloor)\n\t\t\t\tself.open_door()\n\t\t\telif self.orderQueue.has_orders() and not self.moving and self.doorTimer.is_finished:\n\t\t\t\tself.drive()\n\t\t\tself.update_and_send_elevator_info()", "async def should_handle(self):\n local_controller = self.controller\n cavern = local_controller.caverns\n if local_controller.hives and not cavern:\n return False\n if not local_controller.can_train(HYDRALISK, local_controller.hydradens.ready):\n return False\n if local_controller.pits.ready and not local_controller.hives and not await BuildHive.morphing_lairs(self):\n return False\n if cavern.ready:\n return len(local_controller.ultralisks) * 2.75 > len(local_controller.hydras)\n return not local_controller.floating_buildings_bm", "def goal_test(self, state):\n for x, y in state.alvos:\n if state.tabuleiro[x][y] is not BOX_ON_TARGET:\n return False\n return True", "def is_spare(self):\n if self.is_strike():\n return False\n\n return (self.first_ball + self.second_ball) == 10", "def _check_sonar_obstacles(self):\n # TODO: what's a good number?\n BLOCKED_THRESHOLD = 0.7\n\n rate = rospy.Rate(10) # 10 hz\n count = 10\n left = 0\n center = 0\n right = 0\n\n for i in range(count):\n obstacle = self.swarmie.get_obstacle_condition()\n\n if obstacle & Obstacle.SONAR_LEFT == Obstacle.SONAR_LEFT:\n left += 1\n if (obstacle & Obstacle.SONAR_CENTER ==\n Obstacle.SONAR_CENTER):\n center += 1\n if obstacle & Obstacle.SONAR_RIGHT == Obstacle.SONAR_RIGHT:\n right += 1\n\n rate.sleep()\n\n left_blocked = left / count > BLOCKED_THRESHOLD\n center_blocked = center / count > BLOCKED_THRESHOLD\n right_blocked = right / count > BLOCKED_THRESHOLD\n\n return left_blocked, center_blocked, right_blocked", "def iswalking(self):\r\n return self.model.coord!=self.model.targetcoord", "def is_unhappy(self):\n #checked!#\n ###your code here###\n same=0\n for i in self.home.neighbors:\n if i.occupant!=None:\n if i.occupant.group==self.group:\n same+=1\n happniess=float(same)/len(self.home.neighbors)\n if happniess<self.happiness_threshold:\n return True\n else:\n return False", "def check_falling(self, obstacles):\n self.rect.move_ip((0, 1))\n if not pygame.sprite.spritecollideany(self, obstacles):\n if not self.climb:\n\t self.fall = True\n\n self.rect.move_ip((0, -1))", "def still_going(ball_stats):\n if ball_stats[3] <= 0: # if vy = vx = 0 we should stop\n return False\n\n if ball_stats[0] > 41 * 2.54 or ball_stats[0] < 0: # checking if we are out of the lane\n return False\n pins_loc = ORIG_PINS_LOC.copy()\n for p in pins_loc:\n if dist((ball_stats[0], ball_stats[1]), p) < R_BALL + R_PIN: # checking if we hit one of the balls\n return False\n return True", "def allow_to_move(self, direction, row, column):\n if self.valid_coverage_cell(row, column):\n if self.collision(direction) is False and \\\n self.cov_grid[row][column] == NOT_VISITED:\n return True\n else:\n return False", "def noSol(self):\n noSol = False \n\n 
cost_min_bilet = 100000\n\n for a in self.info.autobuze:\n if a.price < cost_min_bilet:\n cost_min_bilet = a.price\n\n for o in self.info.oameni:\n if o.money < cost_min_bilet and o.remaining_dest != []: \n noSol = True\n break\n \n set_destinatii = set()\n\n for o in self.info.oameni:\n if o.current_loc in set_destinatii:\n noSol = True\n break\n else:\n set_destinatii.add(o.current_loc)\n\n return noSol", "def degenerate(self):\n return self.radius == 0.0", "def can_move(self, next_x, next_y):\n\t\tif self.battery == 0:\n\t\t\tif self.planet.tiles[next_y][next_x].is_shaded():\n\t\t\t\treturn False\n\t\tif self.planet.tiles[next_y][next_x].elevation(self) == \"+\":\n\t\t\treturn False\n\t\tif self.planet.tiles[next_y][next_x].elevation(self) == \"-\":\n\t\t\treturn False\n\t\treturn True", "def victory_checker() -> bool:\r\n conflict_check()\r\n for x in range(shape):\r\n for y in range(shape):\r\n if conflict_space[x, y] != 0:\r\n return False\r\n if separation_crawler(False):\r\n return False\r\n return True", "def safe_to_dance(self):\n #check for all fil/early-termination conditions\n for _ in range(4):\n if self.read_distance() < 300:\n print(\"not safe to dance!\")\n return False\n else:\n self.turn_by_deg(90)\n #after all checks have been done, we deduce its safe to dance\n print(\"Dance on!\")\n return True", "def fail(self):\n rows, cols, _ = self.bird.img.shape\n # find the top-left coordinates of bird image\n x_b, y_b = self.bird.x + self.env.pad - cols//2, max(self.bird.y + self.env.pad - rows//2, 0)\n \n # check if the bird square intersects with some environment obstacles\n isCollision = (self.env.occ[y_b:y_b + rows, x_b:x_b + cols]).any()\n \n return isCollision", "def perform(self, reevaluate=False):\n self.publish_debug_data(\"avoid_ball\", self.blackboard.pathfinding.avoid_ball)\n\n if self.blackboard.pathfinding.avoid_ball:\n return 'YES'\n return 'NO'", "def separation_crawler(mode: bool) -> bool:\r\n for x in range(shape):\r\n for y in range(shape):\r\n if mode:\r\n if conflict_space[x, y] != 0 and safeboard[x, y] == 1:\r\n if walled_in(x, y):\r\n safeboard[x, y] = 0\r\n print(\"Cell will create separation if marked. 
Marked safe:\", x, \",\", y)\r\n progress_handler(False, True)\r\n else:\r\n if example[x, y] == 0:\r\n if walled_in(x, y):\r\n print(\"Solution Rejected, separated areas\")\r\n return True", "def canMove(self, direction, robot, newPosX, newPosY):\n result = False\n if (newPosY < 0 or newPosY > len(self.map)):\n print (\"Déplacement impossible\")\n elif (newPosX < 0 or newPosX > len(self.map[newPosY])):\n print (\"Déplacement impossible\")\n else:\n if (self.isThereWallInDirection(direction, robot, \\\n newPosX, newPosY)):\n print(\"Déplacement impossible (mur sur le chemin)\")\n result = False\n else:\n car = self.map[newPosY][newPosX]\n logging.info(\"self.map[{}]={}\".format(newPosY, \\\n self.map[newPosY]))\n logging.info(\"new coord X={} : Y={} :: {}\".\\\n format(newPosX, newPosY, car))\n if (car == \"O\"):\n print(\"Déplacement impossible (mur)\")\n else:\n logging.info(\"Déplacement possible\")\n result = True\n return result", "def test(self, grid, flag):\n x = self.x+SPEED_X[flag]\n y = self.y+SPEED_Y[flag]\n return 0 <= x < self.n and 0 <= y < self.n and grid[y][x] == 1", "def check_map_obstacle_has_sight(self):\n return self.map_obstacle.sight_range > 0", "def has_guardian(self):\n return self.tiles.count(3) > 0", "def check_collision(self, x_3x1, avoidance_radius):\n if len(self.obstacles) > 1:\n for obs_point in self.obstacles[:]:\n dist = np.linalg.norm(obs_point - x_3x1)\n if dist < avoidance_radius:\n print \"dist: \" + str(dist)\n # a collision was found within the avoidance radius\n return True\n return False", "def can_move_direction(entity, neighbor, game_map):\n new_x, new_y = neighbor\n if not game_map.in_bounds(x=new_x, y=new_y, margin=1):\n return False\n elif game_map.in_bounds(x=new_x, y=new_y) \\\n and game_map.terrain[new_x][new_y].elevation > Elevation.SHALLOWS \\\n and not entity.wings:\n return False\n return True", "def isBlocked(mapObj, gameStateObj, x, y):\n\n if isWall(mapObj, x, y):\n return True\n\n elif x < 0 or x >= len(mapObj) or y < 0 or y >= len(mapObj[x]):\n return True # x and y aren't actually on the map.\n\n elif (x, y) in gameStateObj['stars']:\n return True # a star is blocking\n\n return False", "def violated(self) -> bool:\n ...", "def _nonforce_drop(self) -> bool:\n if self.closed:\n return True\n if self.zero_failures():\n return False\n return random.random() < self.failurerate", "def collide_other_tower(self, other_tower):\n x2 = other_tower.x\n y2 = other_tower.y\n\n dis = math.sqrt((x2 - self.x) ** 2 + (y2 - self.y) ** 2)\n if dis >= 100:\n return False\n else:\n return True", "def nail_in(self):\n if not self.in_wall:\n self.in_wall = True", "def is_donor(self):\n return True", "def drop_off_task(obs):\n gripper_obs = obs[0][0][2:5]\n object_obs = torch.cat((obs[0][0][5:7], torch.tensor([1.0])))\n if (sum(gripper_obs == object_obs) == 3).item():\n print(f'Dropping the object off now')\n return True\n else:\n print(f'Picking up the object!')\n return False", "def check_directionality_viable(self):\n\n direction_viable = True\n nose_cords, ear_left_cords, ear_right_cords = [], [], []\n for animal_name in self.animal_bp_dict.keys():\n for bp_cord in [\"X_bps\", \"Y_bps\"]:\n bp_list = self.animal_bp_dict[animal_name][bp_cord]\n for bp_name in bp_list:\n bp_name_components = bp_name.split(\"_\")\n bp_name_components = [x.lower() for x in bp_name_components]\n if \"nose\" in bp_name_components:\n nose_cords.append(bp_name)\n elif (\"ear\" in bp_name_components) and (\n \"left\" in bp_name_components\n ):\n 
ear_left_cords.append(bp_name)\n elif (\"ear\" in bp_name_components) and (\n \"right\" in bp_name_components\n ):\n ear_right_cords.append(bp_name)\n else:\n pass\n\n for cord in [nose_cords, ear_left_cords, ear_right_cords]:\n if len(cord) != len(self.animal_bp_dict.keys()) * 2:\n direction_viable = False\n\n if direction_viable:\n nose_cords = [\n nose_cords[i * 2 : (i + 1) * 2]\n for i in range((len(nose_cords) + 2 - 1) // 2)\n ]\n ear_left_cords = [\n ear_left_cords[i * 2 : (i + 1) * 2]\n for i in range((len(ear_left_cords) + 2 - 1) // 2)\n ]\n ear_right_cords = [\n ear_right_cords[i * 2 : (i + 1) * 2]\n for i in range((len(ear_right_cords) + 2 - 1) // 2)\n ]\n\n return direction_viable, nose_cords, ear_left_cords, ear_right_cords", "def can_flyover(self):\n return False", "def condition(o):\n\t\t\tv = o.pos() - self.pos()\n\t\t\treturn v.norm2() < dist2 and abs(angle_diff(v.angle(),self.angle())) < math.radians(45)", "def check_goal(self):\n hero = self.objects[0]\n others = self.objects[1:]\n\n for other in others:\n if other.x == hero.x and other.y == hero.y:\n self.objects.remove(other)\n if other.reward == 1:\n self.objects.append(GameObject(self.__new_position(), 1,\n 1, 1, 1, \"goal\"))\n elif other.reward == -1:\n self.objects.append(GameObject(self.__new_position(), 1,\n 1, 0, -1, \"fire\"))\n return other.reward, False\n return 0.0, False", "def won(self, vehicles):\n return vehicles[0].x == self.size - 2", "def goal_test(self):\n if -1 in self.state:\n return False\n else:\n return True", "def check_enemy_fleet(self):\n if len(self.enemyShips) > 0:\n response = False\n for ship in self.enemyShips:\n if ship.afloat == True:\n response = True\n return response", "def _is_action_legal(self, action):\n loading_position = self.end_of_lanes[self.current_Lane]\n length_of_vehicle = self.vehicle_data[4][action]\n\n # Check if the corresponding lane has sufficient capacity for cargo\n if loading_position + length_of_vehicle <= self.rows:\n # Check if still vehicle are due to be loaded or infinite vehicle are in harbour yard to load\n if self.number_of_vehicles_loaded[action] < self.vehicle_data[1][action] or \\\n self.vehicle_data[1][action] == -1:\n # Check if cargo type is a reefer that it can be placed in chosen position\n if self.vehicle_data[5][action] == 1:\n designated_loading_area = self.grid_reefer.T[self.current_Lane][\n loading_position:(loading_position + length_of_vehicle)]\n return np.all(designated_loading_area == 1)\n else:\n return True\n else:\n return False\n else:\n return False", "def is_building_eye(self):\r\n pass", "def attack_friendly(self):\n if self.friendly_pos == self.friendly_fight_pos and self.x_speed != -10:\n self.x_speed = 10\n self.y_speed = -10\n elif self.friendly_pos[0] >=150: #150 being when diriction switches\n self.x_speed = -10\n self.y_speed = 10\n elif self.friendly_pos == self.friendly_fight_pos and self.x_speed == -10:\n self.x_speed = 0\n self.y_speed = 0\n return True", "def at_goal(self):\n return self.distance_from_goal < self.robot.wheels.base_length/2", "def at_goal(self):\n return self.distance_from_goal < self.robot.wheels.base_length/2", "def run_step(self, debug=True):\n\n # is there an obstacle in front of us?\n hazard_detected = False\n\n # retrieve relevant elements for safe navigation, i.e.: traffic lights\n # and other vehicles\n\n actor_list = self._world.get_actors()\n vehicle_list = actor_list.filter(\"*vehicle*\")\n lights_list = actor_list.filter(\"*traffic_light*\")\n\n\n # check possible obstacles\n vehicle_state, 
vehicle = self._is_vehicle_hazard(vehicle_list)\n if vehicle_state:\n if debug:\n print('!!! VEHICLE BLOCKING AHEAD [{}])'.format(vehicle.id))\n\n self._state = AgentState.BLOCKED_BY_VEHICLE\n hazard_detected = True\n\n # check for the state of the traffic lights\n # light_state, traffic_light = False, None # \n light_state, traffic_light = self._is_light_red(lights_list)\n if light_state:\n if debug:\n print('=== RED LIGHT AHEAD [{}])'.format(traffic_light.id))\n\n self._state = AgentState.BLOCKED_RED_LIGHT\n hazard_detected = True\n\n if hazard_detected:\n control = self.emergency_stop()\n else:\n self._state = AgentState.NAVIGATING\n # standard local planner behavior\n control = self._local_planner.run_step(debug)\n\n return control", "def check_directions(next_door, current_node, goal_node, chip, crossroad, travelled_path, colide): \n if next_door[2] < 0 or next_door[2] > 7:\n return crossroad\n\n # Check if the node is off the grid\n if next_door[0] < 0 or next_door[0] > chip.width - 1 or next_door[1] < 0 or next_door[1] > chip.height - 1:\n return crossroad\n\n (x, y, z) = current_node.position\n\n # Check whether a connection is already being used\n if chip.coordinates[z][y][x].connections[next_door].used:\n return crossroad\n\n next_node = chip.coordinates[next_door[2]][next_door[1]][next_door[0]]\n\n neighbour = nd.Node(next_door, current_node, next_node.cost, next_node.cost + next_node.distance_to_goal)\n\n if neighbour != goal_node and chip.coordinates[next_door[2]][next_door[1]][next_door[0]].gate is not None:\n return crossroad\n\n # Check whether the coordinate is already in the current path.\n if neighbour in travelled_path:\n return crossroad\n\n # Check whether neighbor is in open list and if it has a lower cost value\n if add_to_crossroad(neighbour, crossroad, colide):\n crossroad.append(neighbour)\n\n return crossroad", "def checkMissionEnd(self) -> bool:\n if getTimestamp() - self.mission['timestamp'] < self.TAKE_OFF_DELAY:\n return False\n drone: Drone\n for drone in self.dronesSet.getDrones().values():\n if drone['state'] != 'onTheGround' and drone['state'] != 'crashed':\n return False\n\n self.endMission()\n return True", "def does_path_intersect_obstacle_3d(self, obstacle, drone_point, waypoint):\n waypoint_vector = np.subtract(waypoint, drone_point)\n obstacle_vector = np.subtract(obstacle.get_point(), drone_point)\n obstacle_vector_magnitude = VectorMath.get_vector_magnitude(obstacle_vector)\n rejection_vector = VectorMath.get_vector_rejection(obstacle_vector, waypoint_vector)\n rejection_vector_magnitude = VectorMath.get_vector_magnitude(rejection_vector)\n\n # Uncomment for DEBUGGING ONLY\n print(\"Waypoint Vector: \" + str(waypoint_vector))\n print(\"Obstacle Vector: \" + str(obstacle_vector))\n print(\"Rejection Vector: \" + str(rejection_vector))\n print(\"Rejection Vector Magnitude: \" + str(rejection_vector_magnitude))\n print(\"Obstacle Radius: \" + str(obstacle.get_radius()))\n print(\"Distance From Obstacle: \" + str(VectorMath.get_vector_magnitude(np.subtract(drone_point, obstacle.get_point()))))\n\n if self.is_obstacle_in_path_of_drone(obstacle_vector, waypoint_vector):\n return rejection_vector_magnitude < Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS\n\n return False", "def dominate(self, opponent: MOSwallow) -> bool:\n\n self_dominates = False\n\n for i in range(self.n_obj):\n if self.fitness[i] < opponent.fitness[i]:\n self_dominates = True\n elif opponent.fitness[i] < self.fitness[i]:\n return False\n\n return self_dominates", "def is_water(self):\n 
return False", "def is_water(self):\n return False", "def is_obstacle(self, pos: tuple):\n if self.within_map(pos):\n return self.map[round(pos[0]), round(pos[1])] == OBSTACLE\n else:\n return False", "def is_shooting(self):\n if self.gun_interface:\n return self.gun_interface.is_preparing()\n return False", "def is_smelling(self,conc_array):\n if conc_array[int(self.x)][int(self.y)]>self.threshold:\n self.smell_timer = self.Timer(self.T,self.lamda)\n #Nav mode three and four need to know whether the moth is smelling\n #at a specific moment, for that reason they use Tfirst.\n self.Tfirst = self.T\n self.odor = True #this datum will be useful in the graphical functions\n return True\n elif self.turned_on:\n self.odor = False\n if self.smell_timer.is_running(self.T):\n return True #note - even though the there is no detection, the navigator stay in nav mode.\n else:\n self.odor = False\n return False", "def is_collision_by_map_obstacle(self):\n for content in self.contents:\n if self.content.y == self.y and self.content.x == self.x:\n return True\n else:\n return False", "def cell_is_blocked(self, y, x, map_data):\n symbol = map_data[y][x]\n # collision: obstacle, bridge, mirror (all types), anti-tank (all types)\n if symbol == self.OBSTACLE_SYMBOL or symbol == self.BRIDGE_SYMBOL or symbol == self.BRICK_SYMBOL or \\\n symbol == self.MIRROR_UL_SYMBOL or symbol == self.MIRROR_UR_SYMBOL or \\\n symbol == self.MIRROR_DL_SYMBOL or symbol == self.MIRROR_DR_SYMBOL or \\\n symbol == self.ANTI_TANK_UP_SYMBOL or symbol == self.ANTI_TANK_DOWN_SYMBOL or \\\n symbol == self.ANTI_TANK_LEFT_SYMBOL or symbol == self.ANTI_TANK_RIGHT_SYMBOL or \\\n symbol == self.ANTI_TANK_DESTROYED_SYMBOL:\n return True\n return False", "def isInGoal(self):\n coordx= self.playerPos.x\n coordy= self.playerPos.y\n target = 0 if self.id_team == 1 else 1\n\n if((((target == 0)and (coordx<=5))|\n ((target == 1) and(coordx>145))) \n and (coordy<=50 and coordy>=40)):\n return True\n else:\n return False", "def __bool__(self):\n return not(self.outcome != 0 or self.filled)", "def is_boring(state):\n return state.boring_moves >= state.just_stop", "def is_permissible(self, cell, direction):\n \n if self.is_unknown(cell):\n return False\n \n try:\n return (self.grid[cell[0]][cell[1]] & globals._dir_int[direction] != 0)\n except:\n print cell, direction, ' is a wall!'", "def has_crossing_len2_ob(self) -> bool:\n fcell = self.first_cell\n scell = self.second_cell\n if self._fuse_row:\n possible_obs = [\n GriddedPerm((0, 1), (fcell, scell)),\n GriddedPerm((1, 0), (scell, fcell)),\n ]\n else:\n possible_obs = [\n GriddedPerm((0, 1), (fcell, scell)),\n GriddedPerm((1, 0), (fcell, scell)),\n ]\n return any(ob in possible_obs for ob in self._tiling.obstructions)", "def _check_collisions(self):\n\t\tif pygame.sprite.spritecollide(\n\t\t\tself.bolan, \n\t\t\tself.obstacles.obstacles,\n\t\t\tFalse, \n\t\t\tpygame.sprite.collide_mask):\n\t\t\t\tself.is_play = False\n\t\t\t\tself.is_gameover = True\n\t\t\t\tself.bolan.image = self.settings.bolan_dead_image", "def check_win(self):\n return UNEXPOSED not in self.get_game() and self.get_game().count(FLAG) == len(self.get_pokemon_location)", "def thinking(self):\n if self.motion.moveIsActive():\n # Maneuver occurring. 
Let's finish it\n # before taking any other measure.\n pass\n\n elif not self.sensors['proximity'][0].imminent_collision:\n # Goes back to moving state.\n self.behavior_ = self.BEHAVIORS.moving\n\n elif all(s.imminent_collision for s in self.sensors['proximity']):\n # There's nothing left to be done, only flag this is a dead-end.\n self.behavior_ = self.BEHAVIORS.stuck\n\n else:\n peripheral_sensors = self.sensors['proximity'][1:]\n for maneuver, sensor in zip(range(1, 4), peripheral_sensors):\n if not sensor.imminent_collision:\n # A sensor that indicates no obstacles were found.\n # Move in that direction.\n self.motion.post.moveTo(0, 0, np.pi / 2)\n break\n\n return self", "def can_throw(self):\n if self.round_points == 0:\n return False\n return True", "def is_solved(self):\n return (self.from_grid == self.to_grid)", "def doesArmTouchObstacles(armPos, obstacles):\n for i in range(len(armPos)):\n cur_arm = armPos[i]\n arm_x = [cur_arm[0][0],cur_arm[1][0]]\n arm_y = [cur_arm[0][1],cur_arm[1][1]]\n if (arm_x[0] != arm_x[1]):\n arm_a = (arm_y[1]-arm_y[0])/(arm_x[1]-arm_x[0])\n arm_b = arm_y[1]-arm_a*arm_x[1]\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n x_range = np.linspace(arm_x[0],arm_x[1],1000)\n y_range = arm_a * x_range + arm_b\n for j in range(1000):\n cur_x = x_range[j]\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n if (arm_x[0] == arm_x[1]):\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n y_range = np.linspace(arm_y[0],arm_y[1],1000)\n cur_x = arm_x[0]\n for j in range(1000):\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n\n\n #print(obstacles)\n\n return False", "def isGoal(self):\n for index in range(self.DIM):\n if not self.values('r',index).count(0) is 0:\n return False\n if not self.isValid():\n return False\n return True", "def is_at_wall(self):\n return self.distmin < self.distmax*0.8", "def isGoalState(self, state):\n diff = state.get_pos() - self._player_loc\n return diff.y == 0 and (diff.x == 0 or diff.z == 0) and \\\n abs(diff.x) + abs(diff.z) == 2 and \\\n state.get_block(self._player_loc + diff/2 + _Vec3(0, -1, 0)) not in \\\n (_AIR, _LAVA, _WATER)", "def check_loss(self):\n return POKEMON in self.get_game()", "def is_solved(self):\n return self.to_grid == self.from_grid", "def robotCanOccupy(self, (xIndex, yIndex)):\n for dx in range(0, self.growRadiusInCells + 1):\n for dy in range(0, self.growRadiusInCells + 1):\n xPlus = util.clip(xIndex+dx, 0, self.xN-1)\n xMinus = util.clip(xIndex-dx, 0, self.xN-1)\n yPlus = util.clip(yIndex+dy, 0, self.yN-1)\n yMinus = util.clip(yIndex-dy, 0, self.yN-1)\n if self.occupied((xPlus, yPlus)) or \\\n self.occupied((xPlus,yMinus)) or \\\n self.occupied((xMinus, yPlus)) or \\\n self.occupied((xMinus, yMinus)):\n return False\n return True", "def is_goal_unreachable(self, x, y, theta):\n self.current_x = x\n self.current_y = y\n self.wp_goal_unreachable = Point(self.current_x,self.current_y)\n self.dist_btw_follow_goal_unreachable = abs(self.wp_goal_unreachable.distance_to(self.wp_follow))\n #print self.is_left_line\n #print self.dist_btw_follow_goal_unreachable\n if self.dist_btw_follow_goal_unreachable < self.TOLERANCE and self.is_left_line == 1:\n print \"goal unreachable\"\n return True\n else:\n return False", "def __hit_paddle(self, g_object):\n return g_object == self.__paddle", "def is_movable(self, src_piece, dest_x, dest_y):\n if dest_x < 0 or dest_x >= self.width or dest_y < 0 
or dest_y >= self.height:\n return False\n elif self.coordinates[dest_x][dest_y].status == 0:\n return False\n elif self.coordinates[dest_x][dest_y].player == src_piece.player:\n return False\n else:\n return True", "def is_ringing(self) -> bool:", "def check_collisions(self, offset, index, obstacles):\n unaltered = True\n self.rect.move_ip(offset)\n while pygame.sprite.spritecollideany(self, obstacles):\n\n # First of all, check if it is a motile transparent block.\n # if so, do nothin\n col_spr = pygame.sprite.spritecollideany(self, obstacles)\n if hasattr(col_spr, \"inertia\"):\n if col_spr.inertia:\n break\n\n if self.climb:\n\t self.climb_mobility = False\n else:\n self.climb_mobility = True\n\n self.rect[index] += (1 if offset[index] < 0 else -1)\n unaltered = False\n #print(\"DEBUG: PLAYERCOL, {}\".format(index))\n\n # stop walking animation\n if index == 0:\n self.walk = False\n\n\n return unaltered", "def through_obstacle(line, obstacles):\r\n noofpoints = 20\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def safe_to_dance(self):\n # check for all fail/early-termination conditions\n for _ in range(4):\n if self.read_distance() < 300:\n print(\"NOT SAFE TO DANCE!\")\n return False\n else: \n self.turn_by_deg(90) \n\n #after all checks have been done. We deduce it's safe\n print(\"SAFE TO DANCE!\")\n return True\n\n for x in range(3): \n self.shake()", "def need_neighbor(self):\n return self._need_neighbor", "def __game_is_over(self):\n return not (self.__playing and self.__bricks_total > 0 and self.__num_lives > 0)", "def isDisturbance(self):\n return False", "def is_jumping(self):\n if(self.going_down or self.going_up or self.mid_air):\n return True\n else:\n return False", "def can_take_damage(self):\n result = True\n if self.side_effects[\"shield\"] > 0:\n result = False\n return result", "def is_won(self):\n for tile in self:\n if not tile.is_mine and tile.visibility != 1:\n return False\n return True", "def noyable(self):\n return False", "def should_decommission_fleet(self, fleet: Fleet) -> bool:\n fleet_forces = self.context.calculate_forces(fleet.resources)\n return (\n fleet_forces.space_forces < 2 * self.max_space_force\n or fleet_forces.ground_forces < 2 * self.max_ground_force\n )", "def isResistantTo(self, drug):\r\n # TODO\r\n try:return self.resistances[drug]\r\n except:return False" ]
[ "0.6633492", "0.64835197", "0.6468089", "0.64078575", "0.63262063", "0.63250136", "0.6243122", "0.62322724", "0.6221644", "0.61202383", "0.6082648", "0.6052843", "0.6038652", "0.6014081", "0.60019505", "0.599128", "0.5978446", "0.5961467", "0.59569794", "0.5955709", "0.5943549", "0.5933055", "0.5918267", "0.5910638", "0.590585", "0.5901536", "0.5881411", "0.5854032", "0.58507127", "0.5832415", "0.5812691", "0.58062243", "0.57817227", "0.57617927", "0.5761269", "0.5760103", "0.5747922", "0.5727515", "0.5717264", "0.57035124", "0.56670946", "0.56457317", "0.5643785", "0.5637699", "0.56373024", "0.56313866", "0.5628149", "0.5621304", "0.56093943", "0.56023157", "0.56010705", "0.55918765", "0.55908", "0.558507", "0.558507", "0.558115", "0.5571075", "0.55600923", "0.55580306", "0.55557156", "0.55506444", "0.55506444", "0.5547123", "0.55384225", "0.5536288", "0.5532559", "0.5532514", "0.5528949", "0.55281293", "0.5527356", "0.55257654", "0.55221456", "0.5515709", "0.5515528", "0.5512973", "0.5511078", "0.55033726", "0.5500314", "0.54992646", "0.54963577", "0.5494915", "0.5494662", "0.54917973", "0.5490957", "0.5488835", "0.5476446", "0.54719794", "0.5468143", "0.5467208", "0.54666674", "0.54651594", "0.5463498", "0.5463373", "0.5460085", "0.54565185", "0.54559326", "0.5451488", "0.54508305", "0.544738", "0.54420525" ]
0.5899829
26
Generate possible paths around the passed obstacle
def generate_possible_paths(self, obstacle):
    if self.does_uav_intersect_obstacle_vertically(obstacle, self.drone.get_point(), self.drone.get_waypoint_holder().get_current_waypoint()):
        if self.does_path_intersect_obstacle_2d(obstacle, self.drone.get_point(), self.drone.get_waypoint_holder().get_current_waypoint()):
            new_attempt_pos_points = [
                [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1] + obstacle.get_radius(), self.drone.get_point()[2]],
                [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1] - obstacle.get_radius(), self.drone.get_point()[2]],
                [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1] - obstacle.get_radius(), self.drone.get_point()[2]],
                [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1] + obstacle.get_radius(), self.drone.get_point()[2]],
                [obstacle.get_point()[0], obstacle.get_point()[1] + obstacle.get_radius(), obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],
                [obstacle.get_point()[0], obstacle.get_point()[1] - obstacle.get_radius(), obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],
                [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1], obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],
                [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1], obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)]
            ]

            new_paths = []
            for new_pos_point in new_attempt_pos_points:
                if not self.does_path_intersect_obstacle_3d(obstacle, self.drone.get_point(), new_pos_point) and self.flight_boundary.is_point_in_bounds(new_pos_point):
                    for recursive_new_pos_point in new_attempt_pos_points:
                        if self.flight_boundary.is_point_in_bounds(recursive_new_pos_point) and abs(recursive_new_pos_point[2] - new_pos_point[2]) < 5:
                            if recursive_new_pos_point[0] != new_pos_point[0] or recursive_new_pos_point[1] != new_pos_point[1]:
                                if not self.does_path_intersect_obstacle_3d(obstacle, new_pos_point, recursive_new_pos_point) and not self.does_path_intersect_obstacle_3d(obstacle, recursive_new_pos_point, self.drone.get_waypoint_holder().get_current_waypoint()):
                                    new_paths.append([new_pos_point, recursive_new_pos_point])

            # Uncomment for DEBUGGING ONLY
            for path in new_paths:
                print("Point:", str(path))

            return new_paths

    return []
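The document above builds its detours from eight fixed candidate points around a cylindrical obstacle: four lateral corners at the drone's current altitude and four rim points raised above the obstacle's height. Pairs of candidates at similar altitude are then vetted against the 3-D intersection tests before being accepted as two-leg detours. Below is a minimal sketch of just the candidate construction, assuming the obstacle is described by (center, radius, height) and using a hypothetical `SAFETY_RADIUS` margin standing in for `Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS`.

```python
import numpy as np

SAFETY_RADIUS = 5.0  # assumed stationary-obstacle safety margin

def candidate_points(obstacle_center, radius, height, drone_alt):
    cx, cy = obstacle_center[0], obstacle_center[1]
    over_top = height + 2 * SAFETY_RADIUS
    lateral = [  # four corners around the cylinder at the drone's altitude
        [cx + radius, cy + radius, drone_alt],
        [cx - radius, cy - radius, drone_alt],
        [cx + radius, cy - radius, drone_alt],
        [cx - radius, cy + radius, drone_alt],
    ]
    vertical = [  # four rim points raised above the obstacle's top
        [cx, cy + radius, over_top],
        [cx, cy - radius, over_top],
        [cx + radius, cy, over_top],
        [cx - radius, cy, over_top],
    ]
    return np.array(lateral + vertical)

# Eight detour candidates around a 5 m-radius, 20 m-tall obstacle.
print(candidate_points([50.0, 50.0], 5.0, 20.0, drone_alt=30.0).shape)  # (8, 3)
```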
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_path(self, start_point: Pos, end_point: Pos, obstacles: list) -> list:\n pass", "def drawpath(self,obstacles):\n for i in obstacles:\n self.distance_map[i[0],i[1]]=44\n print(\"Distance map\")\n print(self.distance_map)\n for i in self.footprint:\n self.distance_map[i[0],i[1]]=88\n print(\"Evaluated path\")\n print(self.distance_map)", "def uniquePathsWithObstacles(self, obstacleGrid: List[List[int]]) -> int:\n if obstacleGrid[0][0] == 1:\n return 0\n\n m, n = len(obstacleGrid), len(obstacleGrid[0])\n dp = [[0 for _ in range(n)] for _ in range(m)]\n dp[0][0] = 1\n\n for i in range(1, m):\n if obstacleGrid[i][0] == 1: break\n else: dp[i][0] = dp[i-1][0]\n\n for j in range(1, n):\n if obstacleGrid[0][j] == 1: break\n else: dp[0][j] = dp[0][j-1]\n\n for i in range(1, m):\n for j in range(1, n):\n if obstacleGrid[i][j] == 0:\n dp[i][j] = dp[i-1][j] + dp[i][j-1]\n\n return dp[-1][-1]", "def calculate_path(self):\n #Se repite el ciclo para el número especificado de veces\n for i in range(self.iterations):\n for ant in self.ants:\n ant.setup_ant()\n while not ant.final_node_reached:\n #Seleccion aleatoria del nodo a visitar\n node_to_vist = self.select_next_node(self.map.nodes_array[int(ant.actual_node[0])][int(ant.actual_node[1])])\n #Mover la hormiga al siguiente nodo seleccionado al azar\n ant.move_ant(node_to_visit)\n #Compruebe si se ha alcanzado la solución\n ant.is_final_node_reached()\n #Agregar la ruta resultante a la lista de rutas\n self.add_to_path_results(self.delete_loops(ant.get_visited_nodes()))\n # Habilitar a la hormiga para otra busqueda\n ant.enable_start_new_path()\n \n # Actualizar el nivel global de feromonas\n self.pheromone_update()\n self.best_result = self.paths[0]\n\n #Vaciar la lista de rutas\n self.empty_paths()\n print('Iteration: ', i, 'lenght of the path: ', len(self.best_result))\n return self.best_result", "def Find_Path(self):\n closed_nodes_map = [] # map of closed (tried-out) nodes\n open_nodes_map = [] # map of open (not-yet-tried) nodes\n dir_map = [] # map of directions\n row = [0] * self.n\n for i in range(self.m): # create 2d arrays\n closed_nodes_map.append(list(row))\n open_nodes_map.append(list(row))\n dir_map.append(list(row))\n \n pq = [[], []] # priority queues of open (not-yet-tried) nodes\n pqi = 0 # priority queue index\n # create the start node and push into list of open nodes\n n0 = node(self.xStart, self.yStart, 0.0, 0.0)\n n0.updatePriority(self.xFinish, self.yFinish)\n heappush(pq[pqi], n0)\n open_nodes_map[self.yStart][self.xStart] = n0.priority # mark it on the open nodes map\n \n # A* search\n while len(pq[pqi]) > 0:\n # get the current node w/ the highest priority\n # from the list of open nodes\n n1 = pq[pqi][0] # top node\n n0 = node(n1.xPos, n1.yPos, n1.distance, n1.priority)\n x = n0.xPos\n y = n0.yPos\n heappop(pq[pqi]) # remove the node from the open list\n open_nodes_map[y][x] = 0\n # mark it on the closed nodes map\n closed_nodes_map[y][x] = 1\n \n # quit searching when the goal state is reached\n if x == self.xFinish and y == self.yFinish:\n # Generate the path from finish to start by following the \n # directions.\n return self.Reconstruct_Path(dir_map)\n \n # generate moves (child nodes) in all possible directions\n for i in range(self.num_directions):\n new_x = x + self.dx[i]\n new_y = y + self.dy[i]\n Flag=True\n if not (new_x < 0 or new_x > self.n-1 or new_y < 0 or new_y > self.m - 1\n or self.MAP[new_y][new_x] == 1 or closed_nodes_map[new_y][new_x] == 1):\n # Check to see if the extended path runs through any 
obstacles\n if (abs(self.dx[i])>1 or abs(self.dy[i])>1):\n # Need to check that the path does not pass an object\n JumpCells=2*max(abs(self.dx[i]),abs(self.dy[i]))-1\n for K in range(1,JumpCells):\n YPOS=int(round(K*1.0*self.dy[i]/JumpCells))\n XPOS=int(round(K*1.0*self.dx[i]/JumpCells))\n if (self.MAP[y+YPOS][x+XPOS]==1):\n Flag=False\n if Flag: \n # generate a child node\n m0 = node(new_x, new_y, n0.distance, n0.priority)\n m0.calc_cost(self.dx[i], self.dy[i])\n m0.updatePriority(self.xFinish, self.yFinish)\n # if it is not in the open list then add into that\n if open_nodes_map[new_y][new_x] == 0:\n open_nodes_map[new_y][new_x] = m0.priority\n heappush(pq[pqi], m0)\n # mark its parent node direction\n dir_map[new_y][new_x] = (self.num_directions-i-1) % self.num_directions\n elif open_nodes_map[new_y][new_x] > m0.priority:\n # update the priority info\n open_nodes_map[new_y][new_x] = m0.priority\n # update the parent direction info\n dir_map[new_y][new_x] = (self.num_directions-i-1) % self.num_directions\n # replace the node\n # by emptying one pq to the other one\n # except the node to be replaced will be ignored\n # and the new node will be pushed in instead\n while not (pq[pqi][0].xPos == new_x and pq[pqi][0].yPos == new_y):\n heappush(pq[1 - pqi], pq[pqi][0])\n heappop(pq[pqi])\n heappop(pq[pqi]) # remove the wanted node\n # empty the larger size pq to the smaller one\n if len(pq[pqi]) > len(pq[1 - pqi]):\n pqi = 1 - pqi\n while len(pq[pqi]) > 0:\n heappush(pq[1-pqi], pq[pqi][0])\n heappop(pq[pqi]) \n pqi = 1 - pqi\n heappush(pq[pqi], m0) # add the better node instead\n return '','' # no route found", "def search_path(self):\n\n nodes = [self.start]\n final_node = None\n \n count = 0\n while True:\n count += 1\n\n if count % self.pick_target == 0:\n pick = self.goal.pos[:2]\n else:\n pick = self.car.random_pos()[:2]\n \n nearest = self.get_nearest_node(nodes, pick)\n\n if count % self.check_dubins == 0:\n solutions = self.dubins.find_tangents(nearest.pos, self.goal.pos)\n dubins_route, cost, valid = self.dubins.best_tangent(solutions)\n \n if valid:\n final_node = nearest\n break\n\n phi = self.get_steering_angle(nearest.pos, pick)\n pos = nearest.pos\n branch = [pos[:2]]\n \n for i in range(self.max_steps):\n pos = self.car.step(pos, phi)\n branch.append(pos[:2])\n \n # check safety of route-----------------------\n if phi == 0:\n safe = self.dubins.is_straight_route_safe(nearest.pos, pos)\n else:\n d, c, r = self.car.get_params(nearest.pos, phi)\n safe = self.dubins.is_turning_route_safe(nearest.pos, pos, d, c, r)\n # --------------------------------------------\n \n if not safe:\n continue\n \n new_node = Node(pos, phi, i+1)\n \n if new_node in nodes:\n continue\n \n new_node.branch = branch\n new_node.parent = nearest\n nodes.append(new_node)\n \n route = self.backtracking(final_node) + dubins_route\n path = self.car.get_path(self.car.start_pos, route)\n print('Total iteration:', count)\n \n return path, nodes", "def _build_path(self):\r\n\r\n path = []\r\n \r\n for i in range(len(self.path) - 1):\r\n current_node = self.path[i]\r\n next_node = self.path[i + 1]\r\n \r\n key_list = [i for i in range(len(current_node.leaving_roads)) if current_node.leaving_roads[i].end == next_node]\r\n \r\n if len(key_list) == 0:\r\n raise Exception('ERROR (in gps._build_path()) : there is no route.')\r\n \r\n path.append(key_list[0])\r\n \r\n return path", "def a_star_obs(obs_map):\n world_ndarray = np.copy(obs_map[0])\n\n start = tuple(np.argwhere(world_ndarray == -2)[0])\n goal = 
tuple(np.argwhere(world_ndarray == -3)[0])\n\n world_ndarray[world_ndarray == -2] = 0\n world_ndarray[world_ndarray == -3] = 0\n\n world_tuple = tuple(map(tuple, world_ndarray))\n\n def h_custom_i(cur, end, obstacle):\n ytop, ybot, minx = obstacle\n cur_y, cur_x = cur\n end_y, end_x = end\n obs_bot = np.where(world_ndarray[ybot] == -1)[0][0]\n mid_y = ybot + (ytop - ybot) // 2\n if cur_y in range(ybot, ytop) and cur_x in range(max(obs_bot, start[1]), end_x):\n return 5000 - abs(minx - cur_x) ** 2 - abs(cur_y - mid_y) ** 2\n return abs(cur_x - end_x) + abs(cur_y - end_y)\n\n pr_queue = [] # Use heapqueue as priority queue\n heappush(pr_queue, (0 + h_custom_i(start, goal, obs_map[1]), 0, \"\", start))\n visited = set() # Each element has to be unique in a set\n graph = get_neighbors(world_tuple)\n route_str = \"\"\n\n while pr_queue:\n _, cost, path, current = heappop(pr_queue)\n if current == goal:\n route_str = path\n break\n if current in visited:\n continue\n visited.add(current)\n for direction, neighbour in graph[current].iteritems():\n heappush(pr_queue, (cost + h_custom_i(neighbour, goal, obs_map[1]), cost + 1, path + direction, neighbour))\n world_ndarray[neighbour] = cost + 1\n\n # print \"Expanded nodes(A*+Custom H): \", len(visited), \" Path length: \", len(route_str)\n # Convert string directions to 2D(x,y) coordinates\n route_coord = [start]\n for p in route_str:\n route_coord.append(graph[route_coord[-1]][p])\n\n world_ndarray[start] = -2 # Mark the start and end coordinates again\n world_ndarray[goal] = -3\n\n return route_coord, world_ndarray, len(visited), len(route_str)", "def get_path(self):\r\n path = [self.city_map.get_tile_at_position(self.position)]\r\n destination_tile = self.city_map.get_tile_at_position(self.destination)\r\n\r\n neighbors = self.city_map.get_adjacent_intersections(self.position)\r\n closest_neighbor = None\r\n closest_neighbor_distance = math.inf\r\n for neighbor in neighbors:\r\n if distance(neighbor.position, self.position) < closest_neighbor_distance:\r\n closest_neighbor = neighbor\r\n closest_neighbor_distance = distance(neighbor.position, self.position)\r\n\r\n path.append(closest_neighbor)\r\n\r\n while destination_tile not in path:\r\n current_tile = path[-1]\r\n neighbors = self.city_map.get_adjacent_intersections(current_tile.position)\r\n closest_neighbor = None\r\n closest_neighbor_distance = math.inf\r\n for neighbor in neighbors:\r\n if current_tile.position['y'] == self.destination['y'] or current_tile.position['x'] == \\\r\n self.destination['x']:\r\n # If we're on the same row\r\n if current_tile.position['y'] == self.destination['y']:\r\n # And if the destination is between our current position and the next intersection over, go to it\r\n if (current_tile.position['x'] < self.destination['x'] <= neighbor.position['x'] or\r\n neighbor.position['x'] <= self.destination['x'] < current_tile.position['x']):\r\n path.append(destination_tile)\r\n break\r\n # If the neighbor gets us closer, go to it\r\n elif distance(neighbor.position, self.destination) < distance(current_tile.position,\r\n self.destination):\r\n path.append(neighbor)\r\n break\r\n if current_tile.position['x'] == self.destination['x']:\r\n if (current_tile.position['y'] < self.destination['y'] <= neighbor.position['y'] or\r\n neighbor.position['y'] <= self.destination['y'] < current_tile.position['y']):\r\n path.append(destination_tile)\r\n break\r\n # If the neighbor gets us closer, go to it\r\n elif distance(neighbor.position, self.destination) < 
distance(current_tile.position,\r\n self.destination):\r\n path.append(neighbor)\r\n break\r\n\r\n elif distance(neighbor.position, self.destination) < closest_neighbor_distance:\r\n closest_neighbor = neighbor\r\n closest_neighbor_distance = distance(neighbor.position, self.destination)\r\n\r\n # If the last iteration through the loop did reach the destination, don't append this\r\n if destination_tile not in path and closest_neighbor is not None:\r\n path.append(closest_neighbor)\r\n\r\n return path", "def is_obstacle_in_path(self):\n for obstacle in self.obstacles.tolist():\n print(\"obstacle.get_point():\", obstacle.get_point())\n dist_to_obstacle = VectorMath.get_vector_magnitude(np.subtract(obstacle.get_point(), self.drone.get_point()))\n if dist_to_obstacle < obstacle.get_radius() + Constants.DETECTION_THRESHOLD:\n if isinstance(obstacle, StationaryObstacle):\n paths = self.generate_possible_paths(obstacle)\n\n if len(paths) != 0:\n return True, np.array(paths)\n elif isinstance(obstacle, MovingObstacle):\n pass\n\n return False, None", "def plan_path(self, start_point, end_point, map_obj):\n # STUFF FOR TESTING \n if self.enable_vis:\n marker = Marker()\n marker.header.frame_id = \"/map\"\n marker.type = marker.POINTS\n marker.action = marker.ADD\n \n marker.scale.x = 0.1\n marker.scale.y = 0.1\n self.vis_pub.publish(marker)\n \n exploration_bias = 1.0 - self.goal_bias\n final_node = None\n num_existing_path_points_added = 0\n \n self.rrt_star = RRTStar(Node(start_point))\n self.max_iterations = self.rrt_star.max_size\n while self.rrt_star.size <= self.max_iterations:\n p = np.random.uniform()\n if p < exploration_bias:\n \n x_rand = self.map.sample_free_space()\n else:\n if final_node is None:\n x_rand = end_point\n else:\n x_rand = self.branched_from_existing_path(\n final_node,\n depth_underestimate=num_existing_path_points_added\n )\n num_existing_path_points_added += 1\n\n x_nearest = self.rrt_star.nearest(x_rand) # Find the nearest node to x_rand\n\n path = self.map.generate_line_path(x_nearest.value, x_rand, eta=self.eta)\n if path is not None: # no obstacles between x_nearest and x_rand\n x_new = path[-1]\n X_nearby_connectable = self.find_nearby_connectable(x_nearest, x_new)\n\n cost_min, node_min = self.find_best_parent(X_nearby_connectable, x_new)\n\n X_nearby_connectable.remove(node_min) # Remove x_new's parent node from the list of nearby nodes so it is not considered for rewiring\n \n # Create the new node at x_new!\n node_new = self.rrt_star.add_config(node_min, x_new)\n \n if self.enable_vis:\n # FOR TESTING ONLY #\n # Code to publish marker for new node\n ###########################################################################################\n TEMP = Point()\n TEMP.x = x_new[0]\n TEMP.y = x_new[1]\n TEMP.z = .05\n marker.points.append(TEMP)\n \n TEMP = ColorRGBA()\n TEMP.r = 1\n TEMP.g = 0\n TEMP.b = 0\n TEMP.a = 1\n \n marker.colors.append(TEMP)\n \n self.vis_pub.publish(marker)\n ###########################################################################################\n\n self.rewire(cost_min, node_new, X_nearby_connectable)\n \n if np.allclose(node_new.value, end_point, .05, 0) and (final_node is None):#np.array_equal(node_new.value, end_point):\n final_node = node_new\n # reduce exploration bias so that we reinforce the existing path\n exploration_bias = .5\n if VERBOSE:\n print(\"Path found!!!!\")\n print(final_node.cost)\n if rospy.get_time() - self.start_time > self.time_thresh:\n if VERBOSE:\n print(self.rrt_star.size)\n break\n\n \n if final_node is 
not None:\n if self.enable_vis:\n marker = Marker()\n marker.header.frame_id = \"/map\"\n marker.type = marker.POINTS\n marker.action = marker.ADD\n \n marker.scale.x = 0.1\n marker.scale.y = 0.1\n marker.points = []\n marker.colors = []\n def recur(node):\n if self.enable_vis:\n TEMP = Point()\n TEMP.x = node.value[0]\n TEMP.y = node.value[1]\n TEMP.z = .05\n marker.points.append(TEMP)\n \n TEMP = ColorRGBA()\n TEMP.r = 1\n TEMP.g = 0\n TEMP.b = 0\n TEMP.a = 1\n \n marker.colors.append(TEMP)\n \n \n self.trajectory.points.append([node.value[0], node.value[1]])\n parent = node.parent\n if parent is not None:\n recur(parent)\n recur(final_node)\n self.trajectory.points.reverse()\n if self.enable_vis:\n self.vis_pub.publish(marker)\n if VERBOSE:\n print (final_node.depth)\n else:\n if VERBOSE:\n print(\"No path found! Please try again.\")\n \n \n \n # publish trajectory\n self.traj_pub.publish(self.trajectory.toPoseArray())\n\n # visualize trajectory Markers\n self.trajectory.publish_viz()", "def obstacles(self):\r\n\r\n #Radious arround the head\r\n limit_sight = self.snake_sight\r\n head = self.body[0].position\r\n binary_map_complete = self.complete_mapping()\r\n map_matrix = np.matrix(binary_map_complete)\r\n obstacles = []\r\n\r\n #limits in all directions\r\n left_x = head[0] - limit_sight\r\n right_x = head[0] + limit_sight\r\n up_y = head[1] - limit_sight\r\n down_y = head[1] + limit_sight\r\n\r\n #submatrix with limits size\r\n snake_sight = map_matrix[up_y:down_y+1, left_x:right_x+1]\r\n\r\n #Special cases where the snake approximates to the borders\r\n ##Corners\r\n if left_x < 0 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[temporal, snake_sight] \r\n return snake_sight\r\n \r\n if left_x < 0 and down_y > self.limits[1] - 1:\r\n snake_sight = map_matrix[up_y:self.limits[1], 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[temporal, 
snake_sight]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n\r\n ##Middle\r\n if left_x < 0:\r\n snake_sight = map_matrix[up_y:down_y+1, 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n return snake_sight\r\n\r\n if right_x > self.limits[0]-1:\r\n snake_sight = map_matrix[up_y:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n return snake_sight\r\n\r\n if up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, left_x:right_x+1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[interval_y_matrix, snake_sight]\r\n return snake_sight\r\n \r\n if down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:right_x+1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[snake_sight, interval_y_matrix]\r\n return snake_sight\r\n\r\n return snake_sight", "def find_good_paths(self):\n return self.robot_step((0,0),[])", "def IteratePaths(self):\n self.w = self.setwage(self.K, self.N)\n self.r = self.setrate(self.K, self.N)\n self.b = self.benefit(self.N)\n\n a1, aT = [-1,], []\n\n for q in range(self.Nq):\n if q == 0:\n self.apath[-1] = 0.2\n elif q == 1:\n self.apath[-1] = 0.3\n else:\n self.apath[-1] = max(0,aT[-1]-(aT[-1]-aT[-2])*a1[-1]/(a1[-1]-a1[-2]))\n \n self.npath[-1] = 0\n self.cpath[-1] = self.apath[-1]*(1+self.r) + self.b\n\n for y in range(-2,-(self.T+1),-1): # y = -2, -3,..., -60\n self.apath[y], self.npath[y], self.cpath[y] = self.DirectSolve(y)\n\n aT.append(self.apath[-1])\n a1.append(self.apath[-self.T])\n if (fabs(self.apath[-self.T])<self.tol):\n break\n for y in range(-1,-(self.T+1),-1):\n self.upath[y] = self.util(self.cpath[y],self.npath[y])", "def _getPathPair(\n self, speed=2, smoothPoints=5, collisionShrink=0.05, pathDelay=1\n ):\n # break out these parameters later\n # smoothPoints = 5\n # collisionShrink = 0.05 # mm, this value shoudld probably be a function of stepsize\n # speed = 2 # rpm at output\n # pathDelay = 1 # seconds\n ###########\n\n self.smoothPaths(smoothPoints)\n self.simplifyPaths()\n self.shrinkCollisionBuffer(collisionShrink)\n self.verifySmoothed(self.nSteps)\n # print(\"got %i smoothed collisions in getPathPair\"%self.smoothCollisions)\n self.growCollisionBuffer(collisionShrink)\n\n toDestination = {}\n fromDestination = {}\n\n for r in self.robotDict.values():\n\n # if robot is offline, don't get a path for it\n if r.isOffline:\n 
continue\n\n ibp = np.array(r.simplifiedBetaPath)\n\n\n iap = np.array(r.simplifiedAlphaPath)\n\n\n alphaTimesR = iap[:, 0] * self.stepSize / (speed * 360 / 60.)\n alphaDegR = iap[:, 1]\n betaTimesR = ibp[:, 0] * self.stepSize / (speed * 360 / 60.)\n betaDegR = ibp[:, 1]\n\n\n # add time buffer for the reverse path, in case robot is\n # not exactly starting from the expected spot.\n # build path from initial state to destination\n armPathR = {}\n armPathR[\"alpha\"] = [(pos, time + pathDelay) for pos, time in zip(alphaDegR, alphaTimesR)]\n armPathR[\"beta\"] = [(pos, time + pathDelay) for pos, time in zip(betaDegR, betaTimesR)]\n\n toDestination[int(r.id)] = armPathR\n\n # build path from destination to initial state\n alphaTimesF = np.abs(alphaTimesR - alphaTimesR[-1])[::-1]\n alphaDegF = alphaDegR[::-1]\n betaTimesF = np.abs(betaTimesR - betaTimesR[-1])[::-1]\n betaDegF = betaDegR[::-1]\n\n armPathF = {}\n armPathF[\"alpha\"] = [(pos, time + pathDelay) for pos, time in zip(alphaDegF, alphaTimesF)]\n armPathF[\"beta\"] = [(pos, time + pathDelay) for pos, time in zip(betaDegF, betaTimesF)]\n\n\n fromDestination[int(r.id)] = armPathF\n\n return toDestination, fromDestination", "def _build_path(self):\n for point_3d in self.path_coordinates:\n self.connect_point_with_neighbors(point_3d)", "def paths(self, start):\n # This is probably a little slow\n tupadd = lambda p, v: (p[0] + v[0], p[1] + v[1])\n # First, we'll check adjacency moves.\n adj = [tupadd(start, v) for v in DIRECTIONS]\n yield from (p for p in adj if self.board(p) == 0)\n # Now we check repeated hops.\n # We do this by a breadth first search.\n\n #TODO: Consensus on legality of hopping back to start and \"skipping\"\n visited = set(adj)\n to_visit = [start]\n while len(to_visit):\n pt = to_visit.pop(0)\n if pt in visited:\n continue\n\n # We have to actually move a piece\n # But this stops us from considering \"start\" even if we can\n # make some hops and get back to start\n if pt is not start:\n yield pt\n \n visited.add(pt)\n # Compute the hop directions\n dirs = ((tupadd(pt, v), tupadd(pt, tupadd(v, v))) for v in DIRECTIONS)\n to_visit.extend(\n dest for over, dest in dirs\n if self.board(over) > 0\n and self.board(dest) == 0\n and dest not in visited\n and over != start\n )", "def prm_planning(start_x, start_y, goal_x, goal_y,\n obstacle_x_list, obstacle_y_list, robot_radius, *, rng=None):\n obstacle_kd_tree = KDTree(np.vstack((obstacle_x_list, obstacle_y_list)).T)\n\n sample_x, sample_y = sample_points(start_x, start_y, goal_x, goal_y,\n robot_radius,\n obstacle_x_list, obstacle_y_list,\n obstacle_kd_tree, rng)\n if show_animation:\n plt.plot(sample_x, sample_y, \".b\")\n\n road_map = generate_road_map(sample_x, sample_y,\n robot_radius, obstacle_kd_tree)\n\n rx, ry = dijkstra_planning(\n start_x, start_y, goal_x, goal_y, road_map, sample_x, sample_y)\n\n return rx, ry", "def carve_path(self):\n final = self.length # once we reach the last length, we set the goal and terminate\n w, l, h = 0, 0, 0 # start at 0,0,0\n last_move_name, last_move_tuple = \"forward\", (0, 1, 0) # we don't want to repeat the last movement\n moves = {\"back\": (0, -1, 0), \"left\": (-1, 0, 0), \"right\": (1, 0, 0), \"up\": (0, 0, 1),\n \"down\": (0, 0, -1)} # possible moves\n self.world_grid[w][l][h] = blocks[\"empty\"] # set the current block empty\n while l != final:\n move, (m_w, m_l, m_h) = random.choice(list(moves.iteritems())) # get a move\n w += m_w # apply move\n l += m_l\n h += m_h\n self.world_grid[w][l][h] = blocks[\"empty\"] # set that 
cell empty\n moves[last_move_name] = last_move_tuple # add back in the last move to movelist\n last_move_name, last_move_tuple = move, (m_w, m_l, m_h) # copy the current move to last move\n moves.pop(last_move_name) # remove the current\n self.goal = (w, l, h) # after terminating, set this as the goal", "def generate_path(goal_node, visited):\n goal_state = goal_node['state']\n path = [goal_state]\n while goal_node['parent']:\n path.append(goal_node['state'])\n goal_node = visited[goal_node['parent']]\n return path", "def solution(self):\n return [node.move for node in self.path()[1:]]", "def solution_path(self) -> list[State]:", "def path_and_costmap_from_config(params):\n # we assume right turn, we can always flip it\n turn_params = params.turn_params\n\n hh = turn_params.main_corridor_length / 2\n w = turn_params.turn_corridor_length / 2\n alpha = turn_params.turn_corridor_angle\n dd = turn_params.main_corridor_width\n z = turn_params.turn_corridor_width\n margin = turn_params.margin\n flip_arnd_oy = turn_params.flip_arnd_oy\n flip_arnd_ox = turn_params.flip_arnd_ox\n rot_theta = turn_params.rot_theta\n\n pts = _draw_pts_in_standard_coords(dd, hh, alpha, z, w)\n oriented_way_pts = _generate_path_in_standard_coords(dd, hh, alpha, z, w)\n\n # Maybe transform the points\n rot_mtx = _rotation_matrix(rot_theta)\n\n flipping_mtx = np.array(\n [[-1. if flip_arnd_oy else 1., 0.],\n [0., -1. if flip_arnd_ox else 1.]],\n )\n transform_mtx = np.dot(rot_mtx, flipping_mtx)\n\n new_pts = []\n\n for pt in pts:\n new_pt = np.dot(transform_mtx, pt)\n new_pts.append(new_pt)\n\n new_oriented_way_pts = []\n for pt in oriented_way_pts:\n x, y, t = pt\n nx, ny = np.dot(transform_mtx, np.array([x, y]))\n new_angle = t\n if flip_arnd_ox:\n new_angle = -new_angle\n if flip_arnd_oy:\n new_angle = np.pi - new_angle\n new_angle = np.mod(new_angle + rot_theta, 2 * np.pi)\n new_pt = np.array([nx, ny, new_angle])\n new_oriented_way_pts.append(new_pt)\n\n a, _, c, d, e, _, g, h, i, j = new_pts # pylint: disable=unbalanced-tuple-unpacking\n rb, rk, rl, rf = new_oriented_way_pts # pylint: disable=unbalanced-tuple-unpacking\n all_pts = np.array(list(new_pts))\n\n min_x = all_pts[:, 0].min()\n max_x = all_pts[:, 0].max()\n min_y = all_pts[:, 1].min()\n max_y = all_pts[:, 1].max()\n\n world_size = abs(max_x - min_x) + 2 * margin, abs(max_y - min_y) + 2 * margin\n world_origin = min_x - margin, min_y - margin\n\n obstacles = [\n Wall(from_pt=a, to_pt=i),\n Wall(from_pt=c, to_pt=d),\n Wall(from_pt=d, to_pt=e),\n Wall(from_pt=j, to_pt=g),\n Wall(from_pt=g, to_pt=h)\n ]\n\n static_path = np.array([rb, rk, rl, rf])\n\n static_map = CostMap2D.create_empty(\n world_size=world_size, # x width, y height\n resolution=params.env_params.resolution,\n world_origin=world_origin\n )\n\n for obs in obstacles:\n static_map = obs.render(static_map)\n\n return static_path, static_map", "def plan_path(self, msg):\n # Request the map\n # In case of error, return an empty path\n mapdata = PathPlanner.request_map()\n\n if mapdata is None:\n return Path()\n # Calculate the C-space and publish it\n cspacedata = self.calc_cspace(mapdata, 3)\n # Execute A*\n start = PathPlanner.world_to_grid(mapdata, msg.start.pose.position)\n goal = PathPlanner.world_to_grid(mapdata, msg.goal.pose.position)\n \n path = self.a_star(cspacedata, start, goal) #, self.c_space_array, self.frontier, self.expanded)\n \n # Optimize waypoints\n waypoints = PathPlanner.optimize_path(path)\n # print waypoints\n waypoints.remove(waypoints[0])\n # print waypoints\n\n 
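        # publish the optimized path before returning it\n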
self.path_pub.publish(self.path_to_message(cspacedata, waypoints))\n # Return a Path message\n return self.path_to_message(cspacedata, waypoints)", "def dijkstras(occupancy_map,x_spacing,y_spacing,start,goal):\n ROWS, COLS = occupancy_map.shape\n #convert physical location to index in the grid\n startNode = locToIndex(start, x_spacing, y_spacing)\n startingNodeLoc = indexToLoc(startNode, x_spacing, y_spacing)\n initialcost = math.sqrt((startingNodeLoc[0] - start[0])**2 + (startingNodeLoc[1] - start[1])**2)\n goalNode = locToIndex(goal, x_spacing, y_spacing)\n \n freelist = np.where(occupancy_map == 0)\n if occupancy_map[startNode[0], startNode[1]] != 0:\n #raise ValueError(\"start : ({}, {}) invalid, is an obstacle\".format(startNode[0], startNode[1]))\n startNode = findValidNode(startNode, start, occupancy_map, x_spacing, y_spacing)\n if occupancy_map[goalNode[0], goalNode[1]] != 0:\n #raise ValueError(\"goal: ({}, {}) invalid, is an obstacle\".format(goalNode[0], goalNode[1]))\n goalNode = findValidNode(goalNode, goal, occupancy_map, x_spacing, y_spacing)\n candidate = [ [sys.float_info.max, \n i, (freelist[0][i], freelist[1][i])] for i in range(len(freelist[0]))] \n visited = set([])\n queue = PriorityQueue(candidate)\n paths = {}\n found = False\n\n #update initial cost\n queue.remove(startNode)\n queue.insert(startNode, initialcost)\n paths[startNode] = None\n updateInitial(occupancy_map, ROWS, COLS, start, startNode, 0, 1, queue, paths, x_spacing, y_spacing, initialcost)\n updateInitial(occupancy_map, ROWS, COLS, start, startNode, 0, -1, queue, paths, x_spacing, y_spacing, initialcost)\n updateInitial(occupancy_map, ROWS, COLS, start, startNode, 1, 0, queue, paths, x_spacing, y_spacing, initialcost)\n updateInitial(occupancy_map, ROWS, COLS, start, startNode, -1, 0, queue, paths, x_spacing, y_spacing, initialcost)\n while queue.size() > 0:\n priority, current = queue.pop()\n if current == goalNode:\n found = True\n break\n #not reaching goal node yet, for each of its neighbor, update the weight\n visited.add(current)\n update(occupancy_map, ROWS, COLS, current, 0, 1, priority, queue, paths, visited, x_spacing, y_spacing)\n update(occupancy_map, ROWS, COLS, current, 0, -1, priority, queue, paths, visited, x_spacing, y_spacing)\n update(occupancy_map, ROWS, COLS, current, 1, 0, priority, queue, paths, visited, x_spacing, y_spacing)\n update(occupancy_map, ROWS, COLS, current, -1, 0, priority, queue, paths, visited, x_spacing, y_spacing)\n \n if not found:\n raise ValueError(\"fail to find shortest path\")\n node = goalNode\n shortestpath = []\n while node is not None:\n shortestpath.append(node)\n node = paths[node]\n #shortestpath.append(startNode)\n #print (startNode)\n #print ('*', list(reversed(shortestpath)))\n #print (goalNode)\n p = list(reversed([ indexToLoc(n, x_spacing, y_spacing) for n in shortestpath]))\n #start and final position may not fall on center of the grid\n if abs(p[0][0] - start[0]) > 0.0005 or abs(p[0][1] - start[1]) > 0.0005:\n p.insert(0, [start[0][0], start[1][0]])\n if abs(p[-1][0] - goal[0]) > 0.0005 or abs(p[-1][1] - goal[1]) > 0.0005:\n p.append([goal[0][0], goal[1][0]])\n res = np.array(p)\n print (res)\n return res", "def make_path(self, loop):\n path=Path(closed=True)\n for c in range(0,len(loop)):\n connection=loop[c]\n nextConnection = loop[(c+1)%len(loop)]\n lastConnection = loop[(c-1)%len(loop)]\n if self.dontFill(connection, nextConnection, lastConnection):\n return False\n cp = self.corner_pos(connection, nextConnection, lastConnection)\n 
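            # corner_pos returns an (offset, direction) pair; an offset returned as a list marks the two endpoints of a single rod end\n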
corner_offset = cp[0]\n corner_dir = cp[1]\n # catch case when it is the end of a single rod\n if type(corner_offset) is list:\n endpoints = corner_offset\n corner_offset = endpoints[0]\n else:\n endpoints = False\n if connection.other.radius is not None and ( corner_offset.length() < connection.other.radius or corner_dir >0):\n # catch case when it is the end of a single rod\n if endpoints:\n para=(connection.this.pos-connection.other.pos).normalize()\n d = math.sqrt(connection.other.radius**2 - corner_offset.length()**2)\n path.add_point(PSharp(connection.other.pos + corner_offset + d*para))\n path.add_point(PArc(connection.other.pos, radius=connection.other.radius, direction='cw'))\n path.add_point(PSharp(connection.other.pos - corner_offset+d*para))\n else:\n path.add_point(PAroundcurve(connection.other.pos + corner_offset, centre=connection.other.pos, radius=connection.other.radius, direction='cw'))\n\n elif self.get_intRadius(connection, connection.other) is not None:\n # path.add_point(PIncurve(connection.other.pos + corner_offset, radius=self.get_intRadius(connection, connection.other)))\n # path.add_point(PIncurve(connection.other.pos - corner_offset, radius=self.get_intRadius(connection, connection.other)))\n # path.add_point(PIncurve(connection.other.pos - corner_offset, radius=self.get_intRadius(connection, connection.other)))\n path.add_point(PIncurve(connection.other.pos + corner_offset, radius=self.get_intRadius(connection, connection.other)))\n else:\n cornerpos = self.corner_pos(connection, nextConnection, lastConnection)\n if type(cornerpos) is list:\n path.add_point(PSharp(connection.other.pos + cornerpos[0]))\n path.add_point(PSharp(connection.other.pos + cornerpos[1]))\n else:\n path.add_point(PSharp(connection.other.pos + cornerpos))#self.corner_pos(connection, nextConnection, lastConnection)))\n # path.add_point(PSharp(connection.other.pos - cornerpos))#self.corner_pos(connection, nextConnection, lastConnection)))\n if connection.other.holeRad is not None:\n if type(connection.other.holeRad) is int or type(connection.other.holeRad) is float:\n self.otherpaths.append(Circle(connection.other.pos, rad=connection.other.holeRad, side='in'))\n else:\n t=copy.deepcopy(connection.other.holeRad)\n t.translate(connection.other.pos)\n self.otherpaths.append(t)\n return path", "def get_path(self, grid, start_wp, end_wp):\n # The open and closed sets\n openset = set()\n closedset = set()\n\n # Add the starting point to the open set\n openset.add(start_wp)\n\n # While the open set is not empty\n while openset:\n # Find the waypoint in the open set with the lowest G + H score\n current_wp = min(openset, key=lambda o: o.G + o.H)\n # Found the goal\n if current_wp == end_wp:\n path = []\n while current_wp.parent:\n path.append(current_wp)\n current_wp = current_wp.parent\n path.append(current_wp)\n print(\"Path found in {} moves: {}\".format(len(path), path))\n return path[::-1]\n\n # Remove the waypoint from the open set\n openset.remove(current_wp)\n # Add it to the closed set\n closedset.add(current_wp)\n\n # Generate children\n children = current_wp.generate_children(grid)\n\n for waypoint in children:\n # If it is already in the closed set, skip it\n if waypoint in closedset:\n continue\n # Otherwise if it is already in the open set\n if waypoint in openset:\n # Check if we beat the G score\n new_g = current_wp.G + 1\n\n if waypoint.G > new_g:\n # If so, update the waypoint to have a new parent\n waypoint.G = new_g\n waypoint.parent = current_wp\n else:\n # If it isn't in the 
open set, calculate the G and H score for the waypoint\n if waypoint.orientation != current_wp.orientation:\n waypoint.G = current_wp.G + 1.5 # Avoiding zigzag move by increase the cost of a rotation\n else:\n waypoint.G = current_wp.G + 1\n\n waypoint.H = abs(waypoint.x - end_wp.x) + abs(waypoint.y - end_wp.y)\n # Set the parent to our current_wp\n waypoint.parent = current_wp\n # Add it to the set\n openset.add(waypoint)\n\n # If there is no solution\n return [start_wp, end_wp]", "def __generate_octagon_obstacles(self, world):\n obs_radius = self.cfg[\"obstacle\"][\"octagon\"][\"radius\"]\n obs_min_count = self.cfg[\"obstacle\"][\"octagon\"][\"min_count\"]\n obs_max_count = self.cfg[\"obstacle\"][\"octagon\"][\"max_count\"]\n obs_min_dist = self.cfg[\"obstacle\"][\"octagon\"][\"min_distance\"]\n obs_max_dist = self.cfg[\"obstacle\"][\"octagon\"][\"max_distance\"]\n\n # generate the obstacles\n obstacles = []\n obs_dist_range = obs_max_dist - obs_min_dist\n num_obstacles = randrange(obs_min_count, obs_max_count + 1)\n\n test_geometries = [r.global_geometry for r in world.robots]\n while len(obstacles) < num_obstacles:\n\n # generate position\n dist = obs_min_dist + (random() * obs_dist_range)\n phi = -pi + (random() * 2 * pi)\n x = dist * sin(phi)\n y = dist * cos(phi)\n\n # generate orientation\n theta = -pi + (random() * 2 * pi)\n\n # test if the obstacle overlaps the robots or the goal\n obstacle = OctagonObstacle(obs_radius, Pose(x, y, theta))\n intersects = False\n for test_geometry in test_geometries:\n intersects |= geometrics.convex_polygon_intersect_test(test_geometry, obstacle.global_geometry)\n if not intersects:\n obstacles.append(obstacle)\n return obstacles", "def search(world_state, robot_pose, goal_pose):\n if world_state.shape[0] == 0 or world_state.shape[1] == 0:\n print(\"Error, empty world_state!!!\")\n return None\n if not is_pos_valid(robot_pose, world_state.shape):\n print(\"Error, invalid robot_pose!!!\", robot_pose)\n return None\n if not is_pos_valid(goal_pose, world_state.shape):\n print(\"Error, invalid goal_pose!!!\", goal_pose)\n return None\n\n directions = [(-1, 0), (1, 0), (0, -1), (0, 1)] # orthogonal directions\n found = False\n\n x, y = robot_pose\n g = 0\n h = heuristic(robot_pose, goal_pose)\n f = g + h\n open = [[f, x, y]]\n came_from = {}\n came_from[robot_pose] = None\n cost_so_far = {}\n cost_so_far[robot_pose] = 0\n\n while open:\n open.sort() # sort based on f value\n current = open.pop(0)\n\n x, y = current[1:]\n g = cost_so_far[(x, y)]\n\n if (x, y) == goal_pose:\n found = True\n break\n else:\n # find available next positions\n for direction in directions:\n x2 = x + direction[0]\n y2 = y + direction[1]\n\n # check whether x2 and y2 are valid\n if not is_pos_valid((x2, y2), world_state.shape):\n continue\n\n g2 = g + 1\n if world_state[x2, y2] == 0 and ((x2, y2) not in cost_so_far or g2 < cost_so_far[(x2, y2)]):\n\n h2 = heuristic((x2, y2), goal_pose)\n f2 = g2 + h2\n open.append([f2, x2, y2])\n came_from[(x2, y2)] = (x, y)\n cost_so_far[(x2, y2)] = g2\n if found:\n path = [goal_pose]\n current = goal_pose\n while came_from[current]:\n current = came_from[current]\n path.append(current)\n\n path.reverse()\n return path\n\n else:\n return None", "def depthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n\n start = problem.getStartState()\n stack = util.Stack() # stack to keep track of frontier nodes where pacman has move\n stack.push(start)\n explored = set() # to keep track of explored areas\n route = []\n\n while not stack.isEmpty():\n 
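        # expand the most recently pushed state first (LIFO), which is what makes the search depth-first\n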
current_position = stack.pop()\n explored.add(current_position)\n\n if problem.isGoalState(current_position):\n break\n for each in problem.getSuccessors(current_position):\n if each[0] not in explored: # x,y coordinates of positions we haven't visited are pushed onto stack\n # print(each)\n stack.push(each[0])\n route.append((current_position, each[0], each[1])) # record of movements to rebuild path (from,to,how)\n\n x = len(route)\n while x - 1 != 0: # loop clears out actions that dont come from previous position\n if route[x - 1][0] != route[x - 2][1]: # starts from goal and works backwards\n route.remove(route[x - 2])\n x = len(route)\n else:\n x -= 1\n # print(route)\n return [action[2] for action in route]", "def getPathPair(\n self, speed=2, smoothPoints=5, collisionShrink=0.05, pathDelay=1,\n epsilon=None\n ):\n if epsilon is None:\n epsilon = self.epsilon\n\n toDestination = {}\n fromDestination = {}\n\n self.maxSpeed = speed * 360 / 60.\n _dt = self.stepSize / self.maxSpeed\n self.maxAccel = 2*self.maxSpeed / _dt\n\n for r in self.robotDict.values():\n # if robot is offline, don't get a path for it\n if r.isOffline:\n continue\n\n ap = [x[1] for x in r.alphaPath]\n bp = [x[1] for x in r.betaPath]\n # buffer ends\n ap = np.array([[ap[0]]*smoothPoints + ap + [ap[-1]]*smoothPoints]).flatten()\n bp = np.array([[bp[0]]*smoothPoints + bp + [bp[-1]]*smoothPoints]).flatten()\n steps = np.arange(len(ap))\n # smooth\n aps = savgol_filter(ap, smoothPoints, polyorder=3)\n bps = savgol_filter(bp, smoothPoints, polyorder=3)\n\n # simplify\n apss = simplify_coords(np.array([steps, aps]).T, epsilon)\n bpss = simplify_coords(np.array([steps, bps]).T, epsilon)\n # linearly interpolate back to original density\n # (for collision checking after smoothing/simplifying)\n apssi = np.interp(steps, apss[:,0], apss[:,1])\n bpssi = np.interp(steps, bpss[:,0], bpss[:,1])\n # set on the robot object for checking for collisions\n\n r.alphaPathBuffered = [[t,x] for t,x in zip(steps, ap)]\n r.betaPathBuffered = [[t,x] for t,x in zip(steps, bp)]\n\n r.smoothedAlphaPath = [[t,x] for t,x in zip(steps, aps)]\n r.smoothedBetaPath = [[t,x] for t,x in zip(steps, bps)]\n\n r.simplifiedAlphaPath = [[t,x] for t,x in apss]\n r.simplifiedBetaPath = [[t,x] for t,x in bpss]\n\n r.interpSimplifiedAlphaPath = [[t,x] for t,x in zip(steps, apssi)]\n r.interpSimplifiedBetaPath = [[t,x] for t,x in zip(steps, bpssi)]\n\n # calculate velocity and acceleration for each arm\n denseTimes = steps * self.stepSize / (speed * 360 / 60.)\n alphaVel = np.gradient(apssi)/_dt\n betaVel = np.gradient(bpssi)/_dt\n alphaAcc = np.gradient(alphaVel)/_dt\n betaAcc = np.gradient(betaVel)/_dt\n\n r.interpVelocityAlphaPath = [[t,x] for t,x in zip(denseTimes, alphaVel)]\n r.interpVelocityBetaPath = [[t,x] for t,x in zip(denseTimes, betaVel)]\n\n r.interpAccelerationAlphaPath = [[t,x] for t,x in zip(denseTimes, alphaAcc)]\n r.interpAccelerationBetaPath = [[t,x] for t,x in zip(denseTimes, betaAcc)]\n\n\n armPathToDest = {}\n armPathFromDest = {}\n\n for axis, data in zip([\"alpha\", \"beta\"], [apss, bpss]):\n # to destination path\n times = data[:, 0] * self.stepSize / (speed * 360 / 60.)\n angle = data[:, 1]\n armPathToDest[axis] = [(pos, time + pathDelay) for pos, time in zip(angle, times)]\n\n # from destination path, a reverse of the same thing\n timesR = np.abs(times - times[-1])[::-1]\n angleR = angle[::-1]\n armPathFromDest[axis] = [(pos, time + pathDelay) for pos, time in zip(angleR, timesR)]\n\n toDestination[r.id] = armPathToDest\n 
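            # the from-destination entry is the time-reversed copy built above, so both directions share one smoothed path\n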
fromDestination[r.id] = armPathFromDest\n\n # self.shrinkCollisionBuffer(collisionShrink)\n # self.verifySmoothed(len(steps))\n # self.growCollisionBuffer(collisionShrink)\n\n return toDestination, fromDestination\n\n\n # alphaPaths.append(ap)\n # betaPaths.append(bp)\n\n # alphaPaths = np.array(alphaPaths)\n # betaPaths = np.array(betaPaths)\n # # smooth them\n # alphaSmooth = savgol_filter(alphaPaths, smoothPoints, polyorder=3, axis=1)\n # betaSmooth = savgol_filter(betaPaths, smoothPoints, polyorder=3, axis=1)\n\n # steps = numpy.arrange(alphaSmooth.shape[1])\n # # could potentially multiprocess this...\n # for ii, robotID in enumerate(robotIDs):\n # _alphaSmooth = alphaSmooth[ii]\n # _betaSmooth = betaSmooth[ii]\n # alphaSimp =", "def get_number_of_paths(node, x, y, current_player):\n paths = 0\n\n if current_player == PLAYER_A:\n # Possible downwards path\n if y != node.board_size - 1:\n for i in range(1, node.board_size - y):\n if node.board[y+i][x] != BLANK_SPACE_CHAR :\n break\n elif y + i == node.board_size - 1: \n paths += 1\n break \n\n # Possible downwards-right path\n if x != node.board_size - 1 and y != node.board_size - 1:\n limit = min([node.board_size - x, node.board_size - y])\n for i in range(1, limit):\n if node.board[y+i][x+i] != BLANK_SPACE_CHAR:\n break \n elif y + i == node.board_size - 1: \n paths += 1\n break \n\n # Possible downwards-left path\n if x != 0 and y != node.board_size - 1:\n limit = min([x + 1, node.board_size - y])\n for i in range(1, limit):\n if node.board[y+i][x-i] != BLANK_SPACE_CHAR:\n break \n elif y + i == node.board_size - 1: \n paths += 1\n break \n\n else:\n # Possible upwards path\n if y != 0:\n for i in range(1, y + 1):\n if node.board[y- i][x] != BLANK_SPACE_CHAR:\n break \n elif y - i == 0: \n paths += 1\n break \n\n # Possible upwards-right path\n if x != node.board_size - 1 and y != 0:\n limit = min([node.board_size - x, y + 1])\n for i in range(1, limit):\n if node.board[y-i][x+i] != BLANK_SPACE_CHAR:\n break \n elif x + i == node.board_size - 1 or y - i == 0: \n paths += 1\n break \n\n # Possible upwards-left path\n if x != 0 and y != 0:\n limit = min([x + 1, y + 1])\n for i in range(1, limit):\n if node.board[y-i][x-i] != BLANK_SPACE_CHAR:\n break \n elif x - i == 0 or y - i == 0: \n paths += 1\n break \n\n return paths", "def test_make_pathways(self):\n basic_test_runner(self, 'pathways')", "def get_connected_nodes(node, current_path_len) :\r\n\r\n connected_nodes = [] #A list of the connected nodes\r\n closed_list_coords = get_path_coordinates(closed_list)\r\n\r\n #Checking if the node belongs to the 1st row\r\n if(node.coords[0] != 0) :\r\n connected_node = Node((node.coords[0] - 1, node.coords[1]), goal_pos, current_path_len)\r\n #Checking if the node has already been traversed or is it is an obstacle\r\n if(not connected_node.coords in closed_list_coords and not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the last row\r\n if(node.coords[0] != grid_dims[0] - 1) :\r\n connected_node = Node((node.coords[0] + 1, node.coords[1]), goal_pos, current_path_len)\r\n #Checking if the node has already been traversed or is it is an obstacle\r\n if(not connected_node.coords in closed_list_coords and not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the 1st column\r\n if(node.coords[1] != 0) :\r\n connected_node = Node((node.coords[0], node.coords[1] - 1), goal_pos, 
current_path_len)\r\n        #Checking if the node has already been traversed or if it is an obstacle\r\n        if(not connected_node.coords in closed_list_coords and not connected_node.coords in obstacle_coords) :\r\n            connected_nodes.append(connected_node)\r\n\r\n    #Checking if the node belongs to the last column\r\n    if(node.coords[1] != grid_dims[1] - 1) :\r\n        connected_node = Node((node.coords[0], node.coords[1] + 1), goal_pos, current_path_len)\r\n        #Checking if the node has already been traversed or if it is an obstacle\r\n        if(not connected_node.coords in closed_list_coords and not connected_node.coords in obstacle_coords) :\r\n            connected_nodes.append(connected_node)\r\n\r\n    return connected_nodes", "def reconstruct_path(came_from, start, goal):\n    current = goal\n    path = [current]\n\n    # Append configuration to board as a step until the starting situation is reached\n    while current != start:\n        current = came_from[current][0]\n        path.append(current)\n    path.append(start)\n    path.reverse()\n    return [path[1:]]", "def get_path_to(self, dest_x, dest_y) -> List[Tuple[int, int]]:\n\t\t# Copy the 'walkable' list.\n\t\t# Note `cost` because we look at how much time it costs to get over to the goal.\n\t\tcost = np.array(self.entity.game_map.tiles['walkable'], dtype=np.int8)\n\n\t\tfor entity in self.entity.game_map.entities:\n\t\t\t# Check that an entity blocks movement and that cost isn't zero (blocking)\n\t\t\tif entity.blocks_movement and cost[entity.x, entity.y]:\n\t\t\t\t# Add to the cost of a blocked position\n\t\t\t\t# A lower number means more enemies will crowd behind each other in hallways.\n\t\t\t\t# A higher number means enemies will take longer paths in order to surround the player\n\t\t\t\tcost[entity.x, entity.y] += 10 # This encourages the entity to move around that area, since the entity will try to go the path with the smallest cost\n\n\t\t# Create a graph from the cost array and pass that graph to a new pathfinder\n\t\tgraph = tcod.path.SimpleGraph(cost=cost, cardinal=2, diagonal=3)\n\t\tpathfinder = tcod.path.Pathfinder(graph)\n\n\t\tpathfinder.add_root((self.entity.x, self.entity.y)) # Start position\n\n\t\t# Compute the path to the destination and remove the starting point\n\t\tpath: List[List[int]] = pathfinder.path_to((dest_x, dest_y))[1:].tolist()\n\n\t\t# Convert from List[List[int]] to List[Tuple[int, int]]\n\t\treturn [(index[0], index[1]) for index in path]", "def pathfinder(starting_position: tuple, target_position: tuple, grid: np.ndarray) -> List[tuple] or None:\n    moves_dict = {(1, 0): \"DOWN\", (-1, 0): \"UP\", (0, 1): \"RIGHT\", (0, -1): \"LEFT\"}\n\n    moves = []\n    path = []\n    dead_ends = []\n\n    def rate_position(current, target):\n        \"\"\"\n        Helper function to calculate distance to target\n        \"\"\"\n        return (target[0] - current[0]) ** 2 + (target[1] - current[1]) ** 2\n\n    # Setting starting position\n    current_position = starting_position\n    while current_position != target_position:\n        possible_moves = {}\n        # Checking for each possible move and rating them\n        for m in moves_dict.keys():\n            if check_valid_move(grid, current_position, m):\n                new_position = tuple(np.add(current_position, m))\n                new_position_rating = rate_position(new_position, target_position)\n                if new_position not in path and new_position not in dead_ends:\n                    possible_moves[new_position_rating] = m\n\n        # if there are possible moves, select the one that would move us closest to the target\n        if possible_moves:\n            path.append(current_position) # save position to path\n            moves.append(possible_moves[min(possible_moves)]) # save move to
move list\n current_position = tuple(np.add(current_position, possible_moves[min(possible_moves)]))\n # if not, go back one move and add current position to dead ends\n else:\n # if no moves available from the start, return None\n if current_position == starting_position:\n return None\n dead_ends.append(current_position) # save position to dead ends\n current_position = path[-1] # move back one step\n path.pop(-1) # delete step from path\n moves.pop(-1) # delete move from move list\n\n return [tuple(moves_dict[move] for move in moves)]", "def _find_path(maze, from_x, from_y, to_x, to_y):\n distance_map = _create_distance_map(maze, to_x, to_y)\n if distance_map is None or distance_map[from_x, from_y] < 0:\n return None\n\n path = []\n current_pos = [from_x, from_y]\n distance = distance_map[current_pos[0], current_pos[1]]\n\n offsets = [[-1, 0], [1, 0], [0, -1], [0, 1]]\n\n while distance != 0:\n for (offset_x, offset_y) in offsets:\n x = current_pos[0] + offset_x\n y = current_pos[1] + offset_y\n if (0 <= x < maze.shape[0] and 0 <= y < maze.shape[1] and\n distance_map[x, y] == distance - 1):\n path.append((x, y))\n current_pos = [x, y]\n distance -= 1\n break\n\n return path", "def __search_path(self, start_node, goal_node):\n\n path = []\n queue = PriorityQueue()\n queue.put((0, start_node))\n visited = set(start_node)\n\n branch = {}\n found = False\n \n while not queue.empty():\n item = queue.get()\n current_cost = item[0]\n current_node = item[1]\n\n if current_node == goal_node: \n found = True\n break\n else:\n for next_node in self._route_graph[current_node]:\n cost = self._route_graph.edges[current_node, next_node]['weight']\n new_cost = current_cost + cost + self.__heuristic(next_node, goal_node)\n\n if next_node not in visited: \n visited.add(next_node) \n queue.put((new_cost, next_node))\n\n branch[next_node] = (new_cost, current_node)\n\n path = []\n path_cost = 0\n if found:\n # retrace steps\n path = []\n n = goal_node\n path_cost = branch[n][0]\n while branch[n][1] != start_node:\n path.append(branch[n][1])\n n = branch[n][1]\n path.append(branch[n][1])\n else:\n print(\"Path Not Found\")\n\n return path[::-1], path_cost", "def CalculatePaths(self):\n agrid = self.agrid \n self.apath = array([0 for y in range(self.T)], dtype=float)\n self.cpath = array([0 for y in range(self.T)], dtype=float)\n self.npath = array([0 for y in range(self.T)], dtype=float)\n # generate each generation's asset, consumption and labor supply forward\n for y in range(self.T-1): # y = 0, 1,..., 58\n self.apath[y+1] = max(0,interp1d(agrid, self.a[y], kind='cubic')(self.apath[y]))\n if y >= self.W:\n self.cpath[y], self.npath[y] = (1+self.r)*self.apath[y] + self.b - self.apath[y+1], 0\n else:\n self.cpath[y], self.npath[y] = self.solve(self.apath[y], self.apath[y+1])\n self.upath[y] = self.util(self.cpath[y], self.npath[y])\n # the oldest generation's consumption and labor supply\n self.cpath[self.T-1], self.npath[self.T-1] = (1+self.r)*self.apath[self.T-1]+self.b, 0\n self.upath[self.T-1] = self.util(self.cpath[self.T-1], self.npath[self.T-1])", "def a_star(self, mapdata, start, goal):\n\n print \"Inside A star\"\n rospy.loginfo(\"Generate path from (%d,%d) to (%d,%d)\" % (start[0], start[1], goal[0], goal[1]))\n if not PathPlanner.is_cell_walkable(mapdata, goal[0], goal[1]):\n rospy.logerr(\"not walkable goal\")\n return[]\n #calculated from goal\n frontier = PriorityQueue()\n frontier.put(start, 0)\n came_from = {}\n cost_so_far = {}\n came_from[start] = None\n cost_so_far[start] = 0\n\n while not 
frontier.empty():\n frontier_msg = GridCells()\n frontier_cells = []\n for e in frontier.elements:\n frontier_cells.append(PathPlanner.grid_to_world(mapdata, e[1][0], e[1][1]))\n frontier_msg.header = mapdata.header\n frontier_msg.header.stamp = rospy.get_rostime()\n frontier_msg.cell_width = mapdata.info.resolution\n frontier_msg.cell_height = mapdata.info.resolution\n frontier_msg.cells = frontier_cells\n expanded_msg = GridCells()\n expanded_cells = []\n for e in cost_so_far: \n expanded_cells.append(PathPlanner.grid_to_world(mapdata, e[0], e[1]))\n \n expanded_msg.header = mapdata.header\n expanded_msg.header.stamp = rospy.get_rostime()\n expanded_msg.cell_width = mapdata.info.resolution\n expanded_msg.cell_height = mapdata.info.resolution\n expanded_msg.cells = expanded_cells\n self.expanded_pub.publish(expanded_msg)\n rospy.sleep(0.01)\n\n current = frontier.get()\n\n #creates path\n if current == goal:\n entry = goal\n listOfCoord = []\n while entry != None:\n listOfCoord.append(entry)\n entry = came_from[entry]\n listOfCoord.reverse()\n self.expanded_pub.publish(PathPlanner.createGridcells(mapdata, listOfCoord))\n return listOfCoord\n \n for next in PathPlanner.neighbors_of_8(mapdata, current[0], current[1]):\n new_cost = cost_so_far[current] + 1 #assume cost to move each unit is 1\n if next not in cost_so_far or new_cost < cost_so_far[next]:\n cost_so_far[next] = new_cost\n priority = new_cost + PathPlanner.euclidean_distance(next[0], next[1], goal[0], goal[1])\n frontier.put(next, priority)\n came_from[next] = current\n\n \n return[]", "def generate_paths(self, paths: list) -> list or int:\n path_count = 0\n new_paths = {}\n # follow each path in the paths list\n for path in paths:\n # find neighbours from last position in each path\n neighbours = self.neighbours(path[-1])\n for neighbour in neighbours:\n if neighbour in self.queue.values():\n continue # if neighbour in queue, go to next neighbour\n # find grid value of neighbour\n grid_value = self.grid[neighbour[1]][neighbour[0]]\n if grid_value == 1:\n new_paths[path_count] = path.copy()\n new_paths[path_count].append(neighbour) # add path to dict\n self.queue[path_count + 1] = neighbour # add neighbour to queue\n path_count += 1 # increase count of number of paths\n if grid_value == 9:\n return len(path)\n\n # roll dict out into list of paths\n paths_store = [new_paths[key] for key in new_paths]\n\n return paths_store", "def adjPaths(imgR,location):\n directions = [(1,0),(-1,0),(0,1),(0,-1)] # up, down, left, right \n possiblePaths = [] \n for direction in directions:\n iPlus,jPlus = direction\n if imgR[location[0]+iPlus,location[1]+jPlus] == 0: \n possiblePaths.append(direction)\n return possiblePaths", "def a_star_planning(start_x, start_y, goal_x, goal_y, id):\n # extract the index of start node, goal node and obstacles\n start = Point(round(start_x/grid_size), round(start_y/grid_size), 0.0, -1, [0,0,0])\n goal = Point(round(goal_x/grid_size), round(goal_y/grid_size), 0.0, -1, [0,0,0])\n if not_legal(goal, id):\n print ('not a legal goal')\n return False\n \n # time.sleep(10)\n\n # create the open list and close list to store nodes\n openset, closeset = deque(), deque()\n openset.append(start)\n\n while True:\n # find out the min f node to explore\n\n current_node = min(openset,\n key=lambda node: node.g + calculate_heuristic(node,goal))\n\n # pltplt.plot(current_node.x, current_node.y, \"b*\")\n if len(closeset) % 10 == 0:\n plt.pause(0.001)\n\n if current_node.x == goal.x and current_node.y == goal.y:\n 
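            # goal cell reached; link it to the current node so the route can be rebuilt by walking parents\n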
print(\"Congratulations! You have found the goal!\")\n goal.parent = current_node\n break\n\n # Remove it from the open list\n openset.remove(current_node)\n # Add it to the close list\n closeset.append(current_node)\n\n # Explore the neighbour\n for motion in motions:\n if motion == current_node.parent_motion:\n turn_cost = 0\n elif (motion[0] == -1 * current_node.parent_motion[0]) and (motion[1] == -1 * current_node.parent_motion[1]):\n turn_cost = 1.5\n else:\n turn_cost = 1\n\n node = Point(current_node.x + motion[0],\n current_node.y + motion[1],\n current_node.g + motion[2] + turn_cost,\n current_node,\n motion,\n )\n\n # ignore it if it is in the close list\n flag = False\n for item in closeset:\n if item.x == node.x and item.y == node.y:\n flag = True\n break\n if flag:\n continue\n # ignore it if it is obstacle\n\n if not_legal(node, id):\n continue\n # update its parent if it is the open list\n flag = True\n for item in openset:\n if item.x == node.x and item.y == node.y:\n flag = False\n # if closer, update the parent\n if node.g <= item.g:\n item.g = node.g\n item.parent = node.parent\n item.parent_motion = node.parent_motion\n break\n # add to the open list if it is not in the open list\n if flag:\n openset.append(node)\n\n # generate the final path\n while True:\n route = deque()\n route.append(goal)\n plt.plot(goal.x, goal.y, \"rx\")\n if goal.parent == -1:\n break\n else:\n goal = goal.parent\n route.appendleft(goal)\n # return route\n # return False\n if NEED_DRAW:\n # draw map\n for i in range(map.gridwidth):\n for j in range(map.gridheight):\n if map.grid[1,i,j] >0:\n plt.plot(i, j, \"xc\")\n\n plt.plot(start.x, start.y, \"ro\")\n plt.plot(goal.x, goal.y, \"go\")\n\n for goal in route:\n plt.plot(goal.x, goal.y, \"rx\")\n plt.show()", "def get_easy_possible_paths(mario_position: tuple, princess_position: tuple) -> Set[tuple]:\n # getting 'distance' between Mario and princess in coordinates\n distance = np.subtract(princess_position, mario_position)\n\n # create required moves from distance\n moves_list = []\n for i in range(abs(distance[0])):\n moves_list.append((1, 0) if distance[0] >= 0 else (-1, 0))\n for i in range(abs(distance[1])):\n moves_list.append((0, 1) if distance[1] >= 0 else (0, -1))\n\n # create possible paths from possible moves, using set to remove duplicates\n all_paths_list = set(itertools.permutations(moves_list))\n return all_paths_list", "def getPaths(db,i,j):\n if i+j==0:\n return [[(0,0)]]\n paths = []\n if \"N\" in db[i,j]:\n paths += getPaths(db,i-1,j)\n if \"W\" in db[i,j]:\n paths += getPaths(db,i,j-1)\n if \"NW\" in db[i,j]:\n paths += getPaths(db,i-1,j-1)\n for path in paths:\n path.append((i,j))\n return paths", "def get_possible_paths(graph, _from, _to):\n # Mark all the nodes as not visited\n visited = []\n\n # Create an array to store paths\n paths = []\n\n # Call the recursive helper function to get all paths\n get_all_paths_helper(graph, _from, _to, visited, [], paths)\n\n return paths", "def calculate_paths(shape: Tuple[int, int], point: Tuple[int, int]) -> int:\n\tn, m = map(int, input().split())\n\tf = [[0] * (m+1) for i in range(n+1)]\n\tf[1][1] = 1\n\tfor i in range(2, n+1):\n\t\tfor j in range(2, m + 1):\n\t\t\tf[i][j] = f[i-2][j-2] + f[i-2][j-1]\n\treturn n + m", "def obstacles(p):\n c1 = np.array([-0.5,-1.])\n r1 = 1.\n c2 = np.array([0.75,0.5])\n r2 = 0.5\n return [\n (p[0] + 2, np.array([1.,0.])), # left\n (2 - p[0], np.array([-1.,0.])), # right\n (p[1] + 1, np.array([0.,1.])), # bottom\n (1 - p[1], np.array([0.,-1.])), # 
top\n (norm(p - c1) - r1, (p - c1)/norm(p - c1)), # circle 1\n (norm(p - c2) - r2, (p - c2)/norm(p - c2)) # circle 2\n ]", "def prepare_gates(chip, source_gate, target_gate):\n crossroad = []\n travelled_path = []\n\n # Source and target always on z-axis 0\n source_coords = [chip.gates[source_gate][\"x\"], chip.gates[source_gate][\"y\"], 0]\n target_coords = [chip.gates[target_gate][\"x\"], chip.gates[target_gate][\"y\"], 0]\n\n chip = calculate_distance(target_coords, chip)\n\n start = chip.coordinates[0][source_coords[1]][source_coords[0]]\n start_node = nd.Node(source_coords, None, 1, start.cost + start.distance_to_goal)\n goal_node = nd.Node(target_coords, None, 1, 0)\n crossroad.append(start_node)\n\n return run_algorithm(target_coords, start_node, goal_node, chip, crossroad, travelled_path)", "def find_shortest_path(self, box_indicator):\n # To be implemented\n dict = {}\n shortest_path = []\n visited = set()\n queue = deque()\n queue.append(self.grid_pos)\n goal_node = None\n while queue:\n node = Vec2d(queue.popleft())\n if node == self.get_target_tile().int_tuple:\n goal_node = node.int_tuple\n break\n for neighbor in self.get_tile_neighbors(node, box_indicator):\n neighbor = neighbor.int_tuple\n if neighbor not in visited:\n queue.append(neighbor)\n visited.add(neighbor)\n dict[neighbor] = node.int_tuple\n if goal_node is None:\n return deque([])\n else:\n key = goal_node\n while key != self.grid_pos.int_tuple:\n shortest_path.append(Vec2d(key))\n parent_node = dict[key]\n key = parent_node\n return deque(shortest_path)", "def routes_with_criteria(self, src, target, criteria):\n\n # BFS\n routes = []\n q = deque() # <- [ ... ] <-\n stops = 0\n distance = 0 # not true for this app, but it works out in the conditional check\n q.append((src, stops, distance, [src]))\n\n while q:\n # this city, stops to this city, distance to this city, route to this city\n city, stops, distance, route = q.popleft()\n if target == city and distance: # no self-loops!\n r = list(route)\n routes.append(r)\n for dest, cost in self.G[city].items():\n if criteria(stops + 1, distance + cost):\n new_route = list(route)\n new_route.append(dest)\n q.append((dest, stops + 1, distance + cost, new_route))\n return routes", "def reconstruct_path(goal: Vector2D, prev_node: dict) -> list:\n path = []\n prev = prev_node[goal] # remove 'goal' from path\n \n while prev != None:\n path.append(prev)\n prev = prev_node[prev]\n \n path = path[:-1] # remove 'start' from path\n path.reverse()\n return path", "def A_Star(start, goal, final_occupancy_grid):\n x, y = np.mgrid[0:LENGTH:1, 0:WIDTH:1]\n pos = np.empty(x.shape + (2,))\n # x.shape = (LENGTH,WIDTH)\n # x.shape + (2,) = (LENGTH,WIDTH,2)\n pos[:, :, 0] = x\n pos[:, :, 1] = y\n # pos.shape = (1890, 2)\n pos = np.reshape(pos, (x.shape[0] * x.shape[1], 2))\n coords = list([(int(x[0]), int(x[1])) for x in pos])\n # Define the heuristic:\n # h: dictionary containing the distance to goal ignoring obstacles for all coordinates in the grid (heuristic function)\n h = np.linalg.norm(pos - goal, axis=1)\n # If axis is an integer, it specifies the axis of x along which to compute the vector norms\n # axis = 1: h.shape = 1890\n # axis = 0: h.shape = 2\n h = dict(zip(coords, h))\n\n # Check if the start and goal are within the boundaries of the map\n for point in [start, goal]:\n\n if point[0] < 0 and point[0] >= final_occupancy_grid.shape[0]:\n raise Exception('Start node/goal node is not contained in the map')\n\n if point[1] < 0 and point[1] >= final_occupancy_grid.shape[1]:\n 
raise Exception('Start node/goal node is not contained in the map')\n\n # check if start and goal nodes correspond to free spaces\n if final_occupancy_grid[start[0], start[1]]:\n raise Exception('Start node is not traversable')\n\n if final_occupancy_grid[goal[0], goal[1]]:\n raise Exception('Goal node is not traversable')\n\n # get the possible movements\n movements = get_movements_8n()\n\n # The set of visited nodes that need to be (re-)expanded, i.e. for which the neighbors need to be explored\n # Initially, only the start node is known.\n openSet = [start]\n\n # The set of visited nodes that no longer need to be expanded.\n closedSet = []\n\n # For node n, cameFrom[n] is the node immediately preceding it on the cheapest path from start to n currently known.\n cameFrom = dict()\n\n # For node n, gScore[n] is the cost of the cheapest path from start to n currently known.\n gScore = dict(zip(coords, [np.inf for x in range(len(coords))]))\n gScore[start] = 0\n\n # For node n, fScore[n] := gScore[n] + h(n). map with default value of Infinity\n fScore = dict(zip(coords, [np.inf for x in range(len(coords))]))\n fScore[start] = h[start]\n\n # while there are still elements to investigate\n while openSet != []:\n\n # the node in openSet having the lowest fScore[] value\n fScore_openSet = {key: val for (key, val) in fScore.items() if key in openSet}\n current = min(fScore_openSet, key=fScore_openSet.get)\n del fScore_openSet\n\n # If the goal is reached, reconstruct and return the obtained path\n if current == goal:\n # print(\"Path\", closedSet)\n return reconstruct_path(cameFrom, current)\n\n openSet.remove(current)\n closedSet.append(current)\n\n # for each neighbor of current:\n for dx, dy, deltacost in movements:\n\n neighbor = (current[0] + dx, current[1] + dy)\n\n # if the node is not in the map, skip\n if (neighbor[0] >= final_occupancy_grid.shape[0]) or (neighbor[1] >= final_occupancy_grid.shape[1]) or (\n neighbor[0] < 0) or (neighbor[1] < 0):\n continue\n\n # if the node is occupied, skip\n if (final_occupancy_grid[neighbor[0], neighbor[1]]):\n continue\n\n # if the has already been visited, skip\n if (neighbor in closedSet):\n continue\n # d(current,neighbor) is the weight of the edge from current to neighbor\n # tentative_gScore is the distance from start to the neighbor through current\n tentative_gScore = gScore[current] + deltacost\n\n if neighbor not in openSet:\n openSet.append(neighbor)\n\n if tentative_gScore < gScore[neighbor]:\n # This path to neighbor is better than any previous one. 
Record it!\n cameFrom[neighbor] = current\n gScore[neighbor] = tentative_gScore\n fScore[neighbor] = gScore[neighbor] + h[neighbor]\n\n # Open set is empty but goal was never reached\n print(\"No path found to goal\")\n return [], closedSet", "def find_path(maze_map, start, target):\n path = [] # path list\n tried = set() # set for faster membership checks\n done = False\n curr_tile = start\n while not done:\n if curr_tile == target:\n done = True # if at target tile, we are done\n else:\n options = [ # possible moves\n (curr_tile[0] + 1, curr_tile[1]),\n (curr_tile[0] - 1, curr_tile[1]),\n (curr_tile[0], curr_tile[1] + 1),\n (curr_tile[0], curr_tile[1] - 1)\n ]\n test = (abs(target[0] - start[0]), abs(target[1] - start[0]))\n prefer = test.index(max(test[0], test[1]))\n if prefer == 0:\n options.sort(key=lambda x: x[0], reverse=True)\n else:\n options.sort(key=lambda x: x[1], reverse=True)\n backtrack = True # assume we must backtrack\n for opt in options:\n try:\n if maze_map[opt[0]][opt[1]] not in ('x', ) and opt not in tried:\n backtrack = False # if we haven't tried this option before, and it's not blocked\n path.append(opt) # then add to the path, and remember that it's been tried\n tried.add(opt)\n curr_tile = opt\n break\n except IndexError:\n continue\n if backtrack: # backtrack to the previous position in the path\n curr_tile = path.pop()\n return path", "def __routes(self, with_return):\n nonzeo_pois = list(filter(None, self.pois.keys()))\n\n for path in itertools.permutations(nonzeo_pois):\n steps = self.poi_distance(0, path[0])\n for i, j in zip(path, path[1:]):\n steps += self.poi_distance(i, j)\n if with_return:\n steps += self.poi_distance(path[-1], 0)\n yield steps", "def dfs_path(graph, s, goals=[]):\n visited = []\n parents = [None]*(len(graph))\n boundary = [s]\n while len(boundary) > 0:\n v = boundary.pop()\n visited += [v]\n for w in neighbours(v, graph):\n if w not in visited and w not in boundary:\n boundary.append(w)\n parents[w] = v\n return get_path(parents, goals[0], goals[1])", "def find_path(sources, goals, connections):\n visited = set()\n expanded = set()\n queue = deque()\n\n for s in sources:\n queue.appendleft([s])\n\n while queue:\n path = queue.pop()\n head = path[-1]\n visited.add(head)\n\n neighbours = [o for (i, o) in connections if i == head]\n for neighbour in neighbours:\n if neighbour in goals:\n return path + [neighbour]\n elif neighbour not in visited:\n queue.appendleft(path + [neighbour])\n\n return []", "def bfs_paths(self, start: str, goal: str) -> List[Path]:\n queue = [(start, [start])]\n while queue:\n (node, path) = queue.pop(0)\n if node not in self.graph:\n yield []\n for _next in set(self.graph[node]) - set(path):\n if _next == goal:\n yield path + [_next]\n elif _next in self.graph:\n queue.append((_next, path + [_next]))", "def A_Star(start, goal, final_occupancy_grid):\n x, y = np.mgrid[0:45:1, 0:42:1]\n pos = np.empty(x.shape + (2,))\n pos[:, :, 0] = x;\n pos[:, :, 1] = y\n pos = np.reshape(pos, (x.shape[0] * x.shape[1], 2))\n coords = list([(int(x[0]), int(x[1])) for x in pos])\n\n # Define the heuristic:\n # h: dictionary containing the distance to goal ignoring obstacles for all coordinates in the grid (heuristic function)\n h = np.linalg.norm(pos - goal, axis=1)\n h = dict(zip(coords, h))\n\n # Check if the start and goal are within the boundaries of the map\n for point in [start, goal]:\n\n if point[0] < 0 and point[0] >= final_occupancy_grid.shape[0]:\n raise Exception('Start node/goal node is not contained in the map')\n\n 
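        # repeat the bounds check for the y coordinate\n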
if point[1] < 0 and point[1] >= final_occupancy_grid.shape[1]:\n raise Exception('Start node/goal node is not contained in the map')\n\n # check if start and goal nodes correspond to free spaces\n if final_occupancy_grid[start[0], start[1]]:\n raise Exception('Start node is not traversable')\n\n if final_occupancy_grid[goal[0], goal[1]]:\n raise Exception('Goal node is not traversable')\n\n # get the possible movements\n movements = _get_movements_8n()\n\n # The set of visited nodes that need to be (re-)expanded, i.e. for which the neighbors need to be explored\n # Initially, only the start node is known.\n openSet = [start]\n\n # The set of visited nodes that no longer need to be expanded.\n closedSet = []\n\n # For node n, cameFrom[n] is the node immediately preceding it on the cheapest path from start to n currently known.\n cameFrom = dict()\n\n # For node n, gScore[n] is the cost of the cheapest path from start to n currently known.\n gScore = dict(zip(coords, [np.inf for x in range(len(coords))]))\n gScore[start] = 0\n\n # For node n, fScore[n] := gScore[n] + h(n). map with default value of Infinity\n fScore = dict(zip(coords, [np.inf for x in range(len(coords))]))\n fScore[start] = h[start]\n\n # while there are still elements to investigate\n while openSet != []:\n\n # the node in openSet having the lowest fScore[] value\n fScore_openSet = {key: val for (key, val) in fScore.items() if key in openSet}\n current = min(fScore_openSet, key=fScore_openSet.get)\n del fScore_openSet\n\n # If the goal is reached, reconstruct and return the obtained path\n if current == goal:\n return reconstruct_path(cameFrom, current)\n\n openSet.remove(current)\n closedSet.append(current)\n\n # for each neighbor of current:\n for dx, dy, deltacost in movements:\n\n neighbor = (current[0] + dx, current[1] + dy)\n\n # if the node is not in the map, skip\n if (neighbor[0] >= final_occupancy_grid.shape[0]) or (neighbor[1] >= final_occupancy_grid.shape[1]) or (\n neighbor[0] < 0) or (neighbor[1] < 0):\n continue\n\n # if the node is occupied, skip\n if (final_occupancy_grid[neighbor[0], neighbor[1]]):\n continue\n\n # if the has already been visited, skip\n if (neighbor in closedSet):\n continue\n # d(current,neighbor) is the weight of the edge from current to neighbor\n # tentative_gScore is the distance from start to the neighbor through current\n tentative_gScore = gScore[current] + deltacost\n\n if neighbor not in openSet:\n openSet.append(neighbor)\n\n if tentative_gScore < gScore[neighbor]:\n # This path to neighbor is better than any previous one. 
Record it!\n cameFrom[neighbor] = current\n gScore[neighbor] = tentative_gScore\n fScore[neighbor] = gScore[neighbor] + h[neighbor]\n\n # Open set is empty but goal was never reached\n print(\"No path found to goal\")\n return []", "def path_search(start, goal):\n if start == goal:\n return [start]\n explored = set() \n queue = [ [start, ('', 0)] ]\n while queue:\n path = queue.pop(0)\n s = path[-2]\n linenum, changetimes = path[-1]\n if s == goal:\n print changetimes\n print path\n for x in queue:\n print x\n return path\n for state, action in bj_subway[s].items():\n if state not in explored:\n linechange = changetimes + 1\n explored.add(state)\n if linenum != action:\n linechange += 2\n path2 = path[:-1] + [action, state, (action, linechange)]\n queue.append(path2)\n queue.sort(key=lambda path:path[-1][-1])\n return []", "def get_path(input_dictionary, output_dictionary,\n input_species_list, output_species_list):\n\n input_operon_list = []\n path_queue = [(input_operon_list, input_species_list) ]\n\n final_operon_path_list = []\n final_species_path_list = []\n\n while path_queue != []:\n\n ###print \"\\nget_path: path queue:\",path_queue\n\n path_queue,\\\n final_operon_path_list,\\\n final_species_path_list = traverse(input_dictionary,\n output_dictionary,\n input_species_list,\n output_species_list,\n path_queue,\n final_operon_path_list,\n final_species_path_list)\n\n return final_operon_path_list, final_species_path_list", "def solve_maze(self):\r\n # if there is no maze to solve, cut the method\r\n if not self.generated:\r\n return None\r\n\r\n # initialize with empty path at starting cell\r\n self.path = dict()\r\n current = self.start\r\n\r\n # loop until the ending cell is reached\r\n while True:\r\n while True:\r\n # choose valid direction\r\n # must remain in the grid\r\n # also must not cross a wall\r\n dirNum = random.randint(0,3)\r\n adjacent = self.get_next_cell(current,dirNum,1)\r\n if self.is_valid_direction(current,dirNum):\r\n hasWall = (self.grid[adjacent[0]][adjacent[1]] == 0)\r\n if not hasWall:\r\n break\r\n # add cell and direction to path\r\n self.path[current] = dirNum\r\n\r\n # get next cell\r\n current = self.get_next_cell(current,dirNum,2)\r\n if current == self.end: \r\n break # break if ending cell is reached\r\n\r\n # go to start of path\r\n current = self.start\r\n self.solution.append(current)\r\n # loop until end of path is reached\r\n while not (current == self.end):\r\n dirNum = self.path[current] # get direction\r\n # add adjacent and crossed cells to solution\r\n crossed = self.get_next_cell(current,dirNum,1)\r\n current = self.get_next_cell(current,dirNum,2)\r\n self.solution.append(crossed)\r\n self.solution.append(current)\r\n\r\n self.path = dict()", "def search_paths_agent_to_goal(self, robot_x, robot_y, goal_x, goal_y, G, road_node_Nos, road_node_info,\n road_lines, road_directions, road_lines_num, node_edges):\n # add target node\n target_node_coordinate = np.zeros((1, 2))\n target_node_coordinate[0][0] = goal_x\n target_node_coordinate[0][1] = goal_y\n target_node = None\n\n for (key, value) in road_node_info.items():\n if math.sqrt((value[0]-target_node_coordinate[0][0])**2 + (value[1]-target_node_coordinate[0][1])**2) <= 0.01:\n target_node = key\n\n if target_node == 0:\n print(target_node)\n raise Exception(\"wrong target node\", target_node)\n\n # Check whether the robot is on the road node or not\n at_node = False\n for (key, value) in road_node_info.items():\n if key == 0:\n continue\n if value[0] == robot_x and value[1] == robot_y:\n 
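                # the robot already sits on a road node, so that node can serve directly as the search start\n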
at_node = True\n agent_node_No = key\n\n if at_node == False:\n # add agent node\n agent_node_No = 0\n agent_node_coordinate = np.zeros((1, 2))\n agent_node_coordinate[0][0] = robot_x\n agent_node_coordinate[0][1] = robot_y\n agent_node = dict(zip([agent_node_No], agent_node_coordinate))\n road_node_info.update(agent_node)\n\n # add node\n env_node_Nos = [agent_node_No] + road_node_Nos\n G.add_nodes_from(env_node_Nos)\n\n # add edges from agent to the nearest road line\n # calculate the distance from the agent to the lines\n agent_line_dist = []\n for i in range(road_lines_num):\n cross = (road_lines[i][2] - road_lines[i][0]) * (agent_node_coordinate[0][0] - road_lines[i][0]) \\\n + (road_lines[i][3] - road_lines[i][1]) * (agent_node_coordinate[0][1] - road_lines[i][1])\n if cross <= 0:\n agent_line_dist.append(np.sqrt((agent_node_coordinate[0][0] - road_lines[i][0]) ** 2\n + (agent_node_coordinate[0][1] - road_lines[i][1]) ** 2))\n continue\n\n d2 = (road_lines[i][2] - road_lines[i][0]) ** 2 + (road_lines[i][3] - road_lines[i][1]) ** 2\n if cross >= d2:\n agent_line_dist.append(np.sqrt((agent_node_coordinate[0][0] - road_lines[i][2]) ** 2\n + (agent_node_coordinate[0][1] - road_lines[i][3]) ** 2))\n continue\n r = cross / d2\n p0 = road_lines[i][0] + (road_lines[i][2] - road_lines[i][0]) * r\n p1 = road_lines[i][1] + (road_lines[i][3] - road_lines[i][1]) * r\n agent_line_dist.append(\n np.sqrt((agent_node_coordinate[0][0] - p0) ** 2 + (agent_node_coordinate[0][1] - p1) ** 2))\n\n # find the nearest line index\n agent_line_dist_shortest = float(\"inf\")\n agent_line_shortest_index = 0\n\n for index, item in enumerate(agent_line_dist):\n if item < agent_line_dist_shortest:\n agent_line_shortest_index = index\n agent_line_dist_shortest = item\n\n # find the shortest line's node\n agent_line_shortest_node0 = None\n agent_line_shortest_node1 = None\n\n for (key, value) in road_node_info.items():\n if value[0] == road_lines[agent_line_shortest_index][0] and value[1] == \\\n road_lines[agent_line_shortest_index][1]:\n agent_line_shortest_node0 = key\n if value[0] == road_lines[agent_line_shortest_index][2] and value[1] == \\\n road_lines[agent_line_shortest_index][3]:\n agent_line_shortest_node1 = key\n\n # add new edges from the agent node to road note\n if road_directions[agent_line_shortest_index] == 0:\n node_edges.append([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 1:\n node_edges.append([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 2:\n node_edges.append([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n node_edges.append([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n else:\n raise ValueError('wrong direction')\n\n G.add_edges_from(node_edges)\n 
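            # with the temporary agent-to-road edges in place, enumerate candidate routes from agent to target\n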
simple_paths_list = list()\n if agent_node_No not in G or target_node not in G:\n has_path = False\n G.clear()\n else:\n if nx.has_path(G, source=agent_node_No, target=target_node):\n simple_paths = nx.shortest_simple_paths(G, source=agent_node_No, target=target_node, weight='len')\n\n for path in simple_paths:\n simple_paths_list.append(path)\n\n for path in simple_paths_list:\n if path[1] == agent_line_shortest_node1:\n path[0] = agent_line_shortest_node0\n elif path[1] == agent_line_shortest_node0:\n path[0] = agent_line_shortest_node1\n else:\n raise ValueError('First node Error!')\n\n remove_paths_list = list()\n for path in simple_paths_list:\n for path_rest in simple_paths_list[simple_paths_list.index(path) + 1:]:\n if path == path_rest[- len(path):]:\n remove_paths_list.append(path_rest)\n\n for remove_path in remove_paths_list:\n if remove_path in simple_paths_list:\n simple_paths_list.remove(remove_path)\n\n # Choose 1 simple paths\n if len(simple_paths_list) > 1:\n simple_paths_list = simple_paths_list[0:1]\n\n # remove edges from the agent node to road note\n if road_directions[agent_line_shortest_index] == 0:\n node_edges.remove([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 1:\n node_edges.remove([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 2:\n node_edges.remove([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n node_edges.remove([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n else:\n raise ValueError('wrong direction')\n\n has_path = True\n G.clear()\n else:\n # remove edges from the agent node to road note\n if road_directions[agent_line_shortest_index] == 0:\n node_edges.remove([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 1:\n node_edges.remove([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 2:\n node_edges.remove([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n node_edges.remove([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] 
- agent_node_coordinate[0][1]) ** 2)}])\n else:\n raise ValueError('wrong direction')\n\n has_path = False\n G.clear()\n else:\n G.add_edges_from(node_edges)\n simple_paths_list = list()\n # 判断站点是否在路网上\n if agent_node_No not in G or target_node not in G:\n has_path = False\n G.clear()\n else:\n # 判断站点和目标间是否存在路径\n if nx.has_path(G, source=agent_node_No, target=target_node):\n # 提取所有简单路径\n simple_paths = nx.shortest_simple_paths(G, source=agent_node_No, target=target_node, weight='len')\n\n for path in simple_paths:\n simple_paths_list.append(path)\n\n # 移除带有回环的路网\n remove_paths_list = list()\n for path in simple_paths_list:\n for path_rest in simple_paths_list[simple_paths_list.index(path) + 1:]:\n if path == path_rest[- len(path):]:\n remove_paths_list.append(path_rest)\n\n for remove_path in remove_paths_list:\n if remove_path in simple_paths_list:\n simple_paths_list.remove(remove_path)\n\n # 提取最多2条路径\n if len(simple_paths_list) > 2:\n simple_paths_list = simple_paths_list[0:2]\n\n # 确认存在路径\n has_path = True\n G.clear()\n else:\n # 不存在路径\n has_path = False\n G.clear()\n\n return simple_paths_list, has_path", "def __a_star(puzzle, goal):\n # Make the goal state.\n goal_state = State(goal, 0, '', goal)\n # Make the first state.\n init_state = State(puzzle, 0, '', goal)\n\n # Make all the lists.\n open_list = list()\n temp_open_list = set()\n closed_list = set()\n\n # Push initial state to open list.\n heapq.heappush(open_list, init_state)\n temp_open_list.add(init_state)\n\n # While elements exist in the open list, loop.\n while open_list:\n # Pop the node and add it to the closed list.\n popped_node = heapq.heappop(open_list)\n list_states = popped_node._State__get_neighbors(goal) # Must add _State in front to call private method.\n temp_open_list.remove(popped_node)\n closed_list.add(popped_node)\n\n # If state is goal, return the path.\n if(popped_node == goal_state):\n return popped_node._State__get_path() # Must add _State in front to call private method.\n\n # Check all the child states (neighbors).\n for state in list_states:\n if state in temp_open_list:\n continue\n elif state in closed_list:\n continue\n else:\n # Combine the paths.\n state._State__combine_path(popped_node) # Must add _State in front to call private method.\n # Push the new state to the open list.\n heapq.heappush(open_list, state)\n temp_open_list.add(state)\n\n # Check for errors.\n if state != goal_state:\n sys.exit(\"The OPEN list is empty.\")", "def move_obstacles(obstacles_poses, obstacles_goal_poses):\n # for pose in obstacles_poses:\n # dx = random.uniform(0, 0.03); dy = random.uniform(0,0.03);\n # pose[0] -= np.sign(pose[0])*dx; pose[1] -= np.sign(pose[1])*dy;\n\n \"\"\" Each obstacles tends to go to its selected goal point with random speed \"\"\"\n for p in range(len(obstacles_poses)):\n pose = obstacles_poses[p]; goal = obstacles_goal_poses[p]\n dx, dy = (goal - pose) / norm(goal-pose) * 0.05#random.uniform(0,0.05)\n pose[0] += dx; pose[1] += dy;\n\n return obstacles_poses", "def optimizedRoutePossibilities(routes,cities):\n\tgraph = createOptimizedGraph(routes)\n\tfor couple in permutationsFromOrigin(cities):\n\t\tif couple is not None:\n\t\t\t#yield find_all_paths2(graph,couple[0],couple[1])[0]\n\t\t\tprint(find_all_paths2(graph,couple[0],couple[1])[0])", "def robotInAGridHelper(grid, path, start):\n\n\tif not grid or not grid[0]:\n\t\treturn []\n\n\ti, j = start\n\n\tif path[i][j]:\n\t\treturn path[i][j]\n\n\tR = len(grid)\n\tC = len(grid[0])\n\n\tif i == R - 1 and j == C - 1:\n\t\tpath[i][j] = 
[(i,j)]\n\t\treturn path[i][j]\n\n\tdownPath = []\n\trightPath = []\n\n\tif i + 1 < R and grid[i+1][j]:\n\t\tdownPath = robotInAGridHelper(grid, path, (i+1, j))\n\n\tif j + 1 < C and grid[i][j+1]:\n\t\trightPath = robotInAGridHelper(grid, path, (i, j+1))\n\n\tif not downPath and not rightPath:\n\t\treturn []\n\n\tif not downPath:\n\t\tif rightPath[-1] == (R-1, C-1):\n\t\t\tpath[i][j] = [(i,j)] + rightPath\n\t\t\treturn path[i][j]\n\t\telse:\n\t\t\treturn []\n\n\tif not rightPath:\n\t\tif downPath[-1] == (R-1, C-1):\n\t\t\tpath[i][j] = [(i,j)] + downPath\n\t\t\treturn path[i][j]\n\t\telse:\n\t\t\treturn []\n\n\tif downPath[-1] == (R-1, C-1) and rightPath[-1] == (R-1, C-1):\n\t\tif len(downPath) <= len(rightPath):\n\t\t\tpath[i][j] = [(i,j)] + downPath + path[i][j]\n\t\telse:\n\t\t\tpath[i][j] = [(i,j)] + rightPath + path[i][j]\n\t\treturn path[i][j]\n\n\tif downPath[-1] == (R-1, C-1):\n\t\tpath[i][j] = [(i, j)] + downPath + path[i][j]\n\t\treturn path[i][j]\n\n\tif rightPath[-1] == (R-1, C-1):\n\t\tpath[i][j] = [(i, j)] + rightPath + path[i][j]\n\n\treturn []", "def create_path(self, board: np.array, my_location: tuple, goal_location: tuple) -> bool:\n to_visit = [my_location]\n visited = []\n\n came_from = dict()\n came_from[my_location] = None\n\n came_from_direction = dict()\n came_from_direction[my_location] = Directions.ZERO\n\n while to_visit:\n point = to_visit.pop(0)\n if point == goal_location: break\n for direction in Directions.NEIGHBOURS:\n # By making it a numpy can add the values\n new_point = tuple(np.array(point) + direction.array)\n\n # Either the row or column value is not on the board\n if not self.in_bounds(new_point):\n continue\n\n # Has already visited that point\n if new_point in visited:\n continue\n\n # Can it reach this point? 
-> Yes the add to visit list\n if self.check_direction(board, point, direction):\n to_visit.append(new_point)\n came_from[point] = new_point\n came_from_direction[point] = direction\n\n visited.append(point)\n return self.reverse_path(came_from, came_from_direction, goal_location)", "def path_between_states(self):\n\n start_given = (self.row_before, self.col_before) # row, col before state transition\n finish_given = (self.row_after, self.col_after) # row, col after state transition\n\n # find_path based on a* algorithm\n path = find_path(Customer.GRID, start_given, finish_given, Customer.POSSIBLE_MOVES)\n\n # if empty path fillin values to enable next step interpolation into 1s resolution\n if start_given == finish_given:\n path = [(self.row_before, self.col_before), (self.row_after, self.col_after)]\n\n self.path = path", "def _find_obstacle(self, obstacle_type='*traffic_light*'): \r\n obst = list()\r\n \r\n _actors = self._world.get_actors()\r\n _obstacles = _actors.filter(obstacle_type)\r\n\r\n\r\n for _obstacle in _obstacles:\r\n trigger = _obstacle.trigger_volume\r\n\r\n _obstacle.get_transform().transform(trigger.location)\r\n \r\n distance_to_car = trigger.location.distance(self._vehicle.get_location())\r\n\r\n a = np.sqrt(\r\n trigger.extent.x ** 2 +\r\n trigger.extent.y ** 2 +\r\n trigger.extent.z ** 2)\r\n b = np.sqrt(\r\n self._vehicle.bounding_box.extent.x ** 2 +\r\n self._vehicle.bounding_box.extent.y ** 2 +\r\n self._vehicle.bounding_box.extent.z ** 2)\r\n\r\n s = a + b + 10\r\n \r\n if distance_to_car <= s:\r\n # the actor is affected by this obstacle.\r\n obst.append(_obstacle)\r\n\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(_obstacle.get_transform().location, carla.Vector3D(0.5,0.5,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,255,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,10)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(trigger,\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n\r\n return obst", "def __generate_all_shortest_paths(self,cutoff = 10):\n if cutoff < 1:\n cutoff = 10\n self.__logger.info(\"cutoff value must be a positive integer. 
Set back to default value: 10\")\n\n all_pair_shortest_paths = nx.all_pairs_shortest_path(self.G, cutoff=cutoff)\n for item in all_pair_shortest_paths:\n from_node = item[0]\n paths = item[1]\n for destination,path in paths.items():\n yield (len(path),path)", "def paths(self, x, y):\n return [direction for direction in Compass if ~self._grid[y][x] & direction]", "def find_working_paths(paths: Iterable[tuple], starting_position: tuple, grid: np.ndarray) -> List[tuple]:\n moves_dict = {(1, 0): \"DOWN\", (-1, 0): \"UP\", (0, 1): \"RIGHT\", (0, -1): \"LEFT\"}\n successful_paths = []\n for path in paths:\n # starting from starting position\n new_pos = starting_position\n path_check = []\n for move in path:\n # moving to new position and checking if it is valid\n new_pos = tuple(np.add(new_pos, move))\n path_check.append(True if grid[new_pos] != 'x' else False)\n\n # if all moves are valid, adding path to success list\n if all(path_check):\n successful_paths.append(tuple(moves_dict[move] for move in path))\n\n return successful_paths", "def optimize_path(path):\n rospy.loginfo(\"Optimizing path\")\n\n opt_path = []\n current_direction = (0, 0)\n last_direction = (0, 0)\n\n for i in range(len(path) -1):\n current_direction = (path[i+1][0] - path[i][0], path[i+1][1] - path[i][1])\n if current_direction != last_direction:\n opt_path.append(path[i])\n last_direction = current_direction\n \n opt_path.append(path[-1]) #add the last coordinate back\n\n return opt_path", "def getPaths(self):\n\n trafficEndPoints = []\n # A job denotes a traffic flow, which corresponds to an iperf task.\n for job in self.config.trace.jobs:\n trafficEndPoints.append((job['src'], job['dst']))\n\n # Obtain details about user-specified non-default links.\n configuredLinks = []\n for linkInfo in self.config.topoData['linkInfos']:\n configuredLinks.append((linkInfo['src'], linkInfo['dst']))\n\n paths = None\n spec = self.config.topoData['flowSpec']\n if spec == 'shortest_path':\n # export paths info and create routing conf using shortest paths\n adjFile = self.config.adjacencyFile\n writeAdjList(self.net, adjFile)\n info(\"**** [G2]: adjacency list written to file\", adjFile, \"\\n\")\n\n outfile = os.path.join(self.config.outPath, SHORTEST_PATH_FILE)\n paths = generateShortestPaths(adjFile, outfile, trafficEndPoints, configuredLinks)\n info(\"**** [G2]: shortest paths written to file\", outfile, \"\\n\")\n # Note: Since there can be multiple shortest paths between two endpoints, solution could vary.\n elif \".json\" in spec:\n info(\"**** [G2]: reading path info from\", spec, \"\\n\")\n paths = readFromPathFile(spec)\n else:\n paths = None\n return paths", "def get_heuristic(self, state):\n\n def get_manhattan_distance(coord_a, coord_b):\n \"\"\"Returns the manhattan distance between coord_a and coord_b.\"\"\"\n return abs(coord_a.x - coord_b.x) + abs(coord_a.y - coord_b.y)\n\n \n def get_num_obstacles(coord_a, coord_b):\n \"\"\"Returns the number of obstacles (wriggler segments or walls) between\n coord_a and coord_b.\n \n This function assumes that coord_b is larger (in either/both x and y)\n than coord_a.\n \"\"\"\n obstacle_count = 0\n \n for x in range(coord_a.x, coord_b.x + 1):\n for y in range(coord_a.y, coord_b.y + 1):\n coord = Coordinate(x, y)\n if coord in self.wall_coords or coord in state:\n obstacle_count += 1\n \n return obstacle_count\n\n\n head_coord = state.wriggler_list[0].get_head()\n tail_coord = state.wriggler_list[0].get_tail()\n \n head_manhattan_distance = get_manhattan_distance(head_coord, 
self.goal_coord)\n tail_manhattan_distance = get_manhattan_distance(tail_coord, self.goal_coord)\n \n # Calculate and return heuristic value depending on which heuristic to use\n if self.heuristic == Heuristic.MANHATTAN_DIST:\n # Return the shortest Manhattan distance of wriggler0's tail or head to the goal\n return min(head_manhattan_distance, tail_manhattan_distance)\n \n else: # self.heuristic == Heuristic.NUM_OBSTACLES:\n # Return the number of obstacles between wriggler0's tail/head to the goal\n # The tail/head is selected based on which is closer to the goal\n if head_manhattan_distance <= tail_manhattan_distance:\n # The head is closer or the same distance away\n return get_num_obstacles(head_coord, self.goal_coord)\n \n else:\n # The tail is closer\n return get_num_obstacles(tail_coord, self.goal_coord)", "def get_shortest_paths(distance_map: DistanceMap, agent_pos, agent_dir, max_depth: Optional[int] = None, agent_handle: Optional[int] = None) \\\n -> Dict[int, Optional[List[Waypoint]]]:\n shortest_paths = dict()\n\n def _shortest_path_for_agent(agent,agent_pos,agent_dir):\n if agent_pos is None :\n if agent.status == RailAgentStatus.READY_TO_DEPART:\n position = agent.initial_position\n elif agent.status == RailAgentStatus.ACTIVE:\n position = agent.position\n elif agent.status == RailAgentStatus.DONE:\n position = agent.target\n else:\n shortest_paths[agent.handle] = None\n return\n direction = agent.direction\n else :\n position = agent_pos\n direction = agent_dir \n shortest_paths[agent.handle] = []\n distance = math.inf\n depth = 0\n while (position != agent.target and (max_depth is None or depth < max_depth)):\n next_actions = get_valid_move_actions_(direction, position, distance_map.rail)\n best_next_action = None\n for next_action in next_actions:\n next_action_distance = distance_map.get()[\n agent.handle, next_action.next_position[0], next_action.next_position[\n 1], next_action.next_direction]\n if next_action_distance < distance:\n best_next_action = next_action\n distance = next_action_distance\n\n shortest_paths[agent.handle].append(Waypoint(position, direction))\n depth += 1\n\n # if there is no way to continue, the rail must be disconnected!\n # (or distance map is incorrect)\n if best_next_action is None:\n shortest_paths[agent.handle] = None\n return\n\n position = best_next_action.next_position\n direction = best_next_action.next_direction\n if max_depth is None or depth < max_depth:\n shortest_paths[agent.handle].append(Waypoint(position, direction))\n\n if agent_handle is not None:\n _shortest_path_for_agent(distance_map.agents[agent_handle],agent_pos,agent_dir)\n else:\n for agent in distance_map.agents:\n _shortest_path_for_agent(agent,agent_pos,agent_dir)\n\n return shortest_paths", "def cornersHeuristic(state, problem):\n corners = problem.corners # These are the corner coordinates\n walls = problem.walls # These are the walls of the maze, as a Grid (game.py)\n \"*** YOUR CODE HERE ***\"\n \"\"\"\n En este ejercicio me he dado cuenta de un problema de mi definición del espacio de estados:\n - El espacio de estados consiste en tuplas ((x,y), grid), donde (x,y) es la posición en coordenadas\n y grid es la tabla de true/false.\n - El problema es que yo he pensado la tabla grid en forma de matriz matemática, de manera que los índices\n no van de acuerdo con la posición de las esquinas, sinó con los índices de una matriz.\n Para solucionar este problema sin tener que modificar todo lo anterior (dado que no me queda tiempo) lo que he\n tenido que hacer es crear 
una lista y añadir de forma ordenada los valores true/false, para que se corresponda\n cada uno con su esquina.\n \n Mi heurística consiste en lo siguiente:\n * Calculo la distancia desde la posición en la que me sitúo hasta todos los corners no visitados (los que aún\n tienen comida) y me quedo con la mínima de estas distancias, y con el corner que me de esa mínima.\n * Calculo la distancia desde ese corner (el mínimo de antes) hasta todos los otros posibles corners no visitados\n y de nuevo me quedo con la mínima distancia y con el corner que me da esa mínima.\n * Repito este proceso hasta que no queden corners.\n Entonces lo que hago es definir una nueva lista de corners, newListOfCorners que irá extrayendo los corners a medida\n que su distanca sea calculada. Por ejemplo, si tengo los cuatro corners con comida y estoy en una posición \n aleatoria, la lista newListOfCorners estará llena. Se calculará la distancia a cada corner y el corner que de la \n mínima será extraído de newListOfCorners. Entonces se calculará la distancia desde este corner hasta los restantes\n tres corners de newListOfCorners y el corner de esos tres que me de la mínima será extraído de la lista. Etc...\n \"\"\"\n\n # Ordenamos la lista de True's y False's para que vaya acorde con el orden de la lista corners:\n visitedCorners = []\n visitedCorners.append(state[1][1][0])\n visitedCorners.append(state[1][0][0])\n visitedCorners.append(state[1][1][1])\n visitedCorners.append(state[1][0][1])\n corners = list(corners) # De aquí saco una lista que contenga los corners ordenados.\n # Ahora los corners y la lista de visitedCorners contendrán la información de forma ordenada y coherente\n minimum = 9999999999999999 # Defino un mínimo muy grande para asegurarme que nunca sea superado\n total = 0 # Inicializo el total a cero\n newListOfCorners = [] # Creo una nueva lista para añadir los corners no estudiados\n for corner in corners: # Primero vamos a llenar la lista de corners con los que me interesen: los que tienen comida\n if visitedCorners[corners.index(corner)]: # Miramos que el corner tenga comida, sino pasamos\n newListOfCorners.append(corner) # Si tiene comida, lo añadimos\n minimCorner = corners[0] # Inicializo el minimCorner a un corner aleatorio para que no me de problemas más tarde\n actualState = state[0] # Lo mismo\n\n while not len(newListOfCorners) == 0: # Mientras la lista no esté vacía...\n for corner in newListOfCorners: # Cogemos un corner de la lista\n distanceToCorner = manhattanHeuristicToCorners(actualState, corner) # Calculamos dist. 
a corner\n if distanceToCorner < minimum: # Calculamos el mínimo\n minimum = distanceToCorner\n minimCorner = corner\n total += minimum # Y lo añadimos al total\n actualState = minimCorner # Reactualizamos cada variable para volver a empezar el bucle\n minimum = 9999999999999999999999999999999\n newListOfCorners.remove(minimCorner)\n return total", "def extra(maze):\n # TODO: Write your code here\n heuristic_lookup = {} \n objs = maze.getObjectives()\n corner_list = maze.getObjectives()\n start = maze.getStart()\n path = []\n dim = maze.getDimensions()\n visited = {}\n lookup_table = {}\n p_queue = []\n edgeset = []\n mintree = {}\n start_heuristic = 0 + multi_dot_heuristic_query(maze, start, objs, edgeset, mintree) * 2\n heuristic_lookup[(start, tuple(objs))] = start_heuristic\n start_state = state(start, corner_list)\n lookup_table[state(start, corner_list)] = (start_heuristic, 0, state((-2, -2)))\n p_queue.append((start_heuristic, state(start, corner_list)))\n while p_queue:\n pair = p_queue.pop(0)\n visited[pair[1]] = lookup_table.get(pair[1])[2]\n if not pair[1].getlist():\n current_state = pair[1]\n while current_state != start_state:\n path.append(current_state.getpos())\n current_state = visited.get(current_state)\n path.append(start)\n path.reverse()\n return path\n else: \n list_of_neighbors = maze.getNeighbors(pair[1].getpos()[0], pair[1].getpos()[1])\n for coordinates in list_of_neighbors:\n current_state = state(coordinates)\n if coordinates in pair[1].getlist():\n new_list = copy.copy(pair[1].getlist())\n new_list.remove(coordinates)\n current_state = state(coordinates, new_list)\n else:\n current_state = state(coordinates, pair[1].getlist()) \n if current_state in visited:\n continue\n if current_state in lookup_table:\n if (lookup_table.get(current_state)[0], current_state) in p_queue:\n cost = lookup_table.get(pair[1])[1] + 1\n queried_heuristic = 0\n if (current_state.getpos(), tuple(current_state.getlist())) in heuristic_lookup:\n queried_heuristic = heuristic_lookup.get((current_state.getpos(), tuple(current_state.getlist())))\n else:\n queried_heuristic = multi_dot_heuristic_query(maze, current_state.getpos(), current_state.getlist(), edgeset, mintree) * 2\n heuristic_lookup[(current_state.getpos(), tuple(current_state.getlist()))] = queried_heuristic\n heuristic = queried_heuristic + cost\n old_heuristic = lookup_table.get(current_state)[0]\n if heuristic < lookup_table.get(current_state)[0]:\n lookup_table[current_state] = (heuristic, cost, pair[1])\n p_queue.remove((old_heuristic, current_state))\n bisect.insort(p_queue, (heuristic, current_state))\n else:\n cost = lookup_table.get(pair[1])[1] + 1\n queried_heuristic = 0\n if (current_state.getpos(), tuple(current_state.getlist())) in heuristic_lookup:\n queried_heuristic = heuristic_lookup.get((current_state.getpos(), tuple(current_state.getlist()))) \n else:\n queried_heuristic = multi_dot_heuristic_query(maze, current_state.getpos(), current_state.getlist(), edgeset, mintree) * 2\n heuristic_lookup[(current_state.getpos(), tuple(current_state.getlist()))] = queried_heuristic\n heuristic = queried_heuristic + cost\n lookup_table[current_state] = (heuristic, cost, pair[1])\n bisect.insort(p_queue, (heuristic, current_state))\n\n return []", "def get_robo_route(start=(41.814884, -87.664603), chicago_path=\"chicago.xml\", pothole_path=\"potholes.xml\"):\n\n\t#Acquire the graph of Chicago\n\tprint(\"Opening Chicago\")\n\tChicago = open_chicago_graph(chicago_path)\n\tCpp = 
CppGraph(Chicago)\n\tCpp.set_start(start)\n\t\n\t#Acquire the set of all potholes that need to be filled\n\tprint(\"Opening Potholes\")\n\tpotholes = open_potholes(pothole_path)\n\t\n\t#Get the shortest paths connecting the potholes and the robot facility\n\tprint(\"Adding potholes to Chicago\")\n\tshort_pathN(Chicago, Cpp, potholes)\n\t\n\t#Find the optimal way to traverse the graph starting from the robot's facility\n\tprint(\"Finding the route\")\n\troute = Cpp.solve()\n\t\n\t#Save the route the robot will take as an image\n\tfig, ax = ox.plot_graph_route(G, route, save=True, filename=\"graph\")\n\treturn route", "def find_path(source_point, destination_point, mesh):\r\n\r\n path = []\r\n boxes = {}\r\n\r\n sourceBox = None\r\n destBox = None\r\n\r\n # print(mesh)\r\n\r\n for box in mesh['boxes']:\r\n if box not in boxes and inBox(box, source_point):\r\n sourceBox = box\r\n # print(sourceBox)\r\n if box not in boxes and inBox(box, destination_point):\r\n destBox = box\r\n # print(destBox)\r\n\r\n if (sourceBox is None) or (destBox is None):\r\n print(\"No Path!\")\r\n return [], []\r\n\r\n dist = {}\r\n prev = {}\r\n boxCoords = {}\r\n\r\n dist[sourceBox] = 0\r\n prev[sourceBox] = None\r\n boxCoords[sourceBox] = source_point\r\n\r\n priorityQueue = []\r\n heappush(priorityQueue, (dist[sourceBox], sourceBox))\r\n\r\n adj = mesh[\"adj\"]\r\n\r\n while priorityQueue:\r\n\r\n currentCost, currentPos = heappop(priorityQueue)\r\n #neighbors = adj(graph, currentPos)\r\n\r\n # if currentPos == destination:\r\n # path = []\r\n # currPath = destination\r\n # while currPath is not None:\r\n # path.insert(0, currPath)\r\n # currPath = prev[currPath]\r\n # return path\r\n\r\n if currentPos == destBox:\r\n path = [boxCoords[currentPos], destination_point]\r\n\r\n backBox = prev[currentPos]\r\n backCoord = boxCoords[currentPos]\r\n\r\n while backBox is not None:\r\n path.insert(0, [boxCoords[backBox], backCoord])\r\n backBox = prev[backBox]\r\n backCoord = boxCoords[backBox]\r\n print(backCoord)\r\n\r\n return path, boxes.keys()\r\n\r\n # for neighborPos, neighborCost in neighbors:\r\n\r\n # alt = dist[currentPos] + neighborCost\r\n\r\n # if neighborPos not in dist or alt < dist[neighborPos]:\r\n # dist[neighborPos] = alt\r\n # prev[neighborPos] = currentPos\r\n # heappush(priorityQueue, (alt, neighborPos))\r\n\r\n for neighbor in adj[currentPos]:\r\n\r\n boxes[neighbor] = currentPos\r\n\r\n xRange = [max(currentPos[0], neighbor[0]),\r\n min(currentPos[1], neighbor[1])]\r\n yRange = [max(currentPos[2], neighbor[2]),\r\n min(currentPos[3], neighbor[3])]\r\n\r\n firstCost = euclideanDistance(\r\n (xRange[0], yRange[0]), boxCoords[currentPos])\r\n secondCost = euclideanDistance(\r\n (xRange[1], yRange[1]), boxCoords[currentPos])\r\n\r\n if firstCost <= secondCost:\r\n finalCost = firstCost\r\n finalPoint = (xRange[0], yRange[0])\r\n else:\r\n finalCost = secondCost\r\n finalPoint = (xRange[1], yRange[1])\r\n\r\n alt = currentCost + finalCost\r\n if neighbor not in dist or alt < dist[neighbor]:\r\n dist[neighbor] = alt\r\n prev[neighbor] = currentPos\r\n boxCoords[neighbor] = finalPoint\r\n heappush(priorityQueue, (alt, neighbor))\r\n return None", "def findRoute(self, x1, y1, x2, y2):\r\n\r\n\t\t# Check to see if the start and end node are the same\r\n\t\tif x1 == x2 and y1 == y2:\r\n\t\t\treturn [(x1, y1)]\r\n\r\n\t\troot_node = DijkstraNode(x1, y1, None, 0)\r\n\t\troot_node.neighbours = self.getNeighbours(x1, y1)\r\n\r\n\t\t# Create a dictionary to store all of the nodes\r\n\t\tall_nodes = {(x1, y1): 
root_node}\r\n\t\t# If no starting place is found return nothing\r\n\t\tif len(root_node.neighbours) == 0:\r\n\t\t\treturn []\r\n\t\tcurrent_node = root_node\r\n\t\twhile (x2, y2) not in all_nodes:\r\n\r\n\t\t\t# If the algorithm hasn't found the target node and cannot explore further then return empty path\r\n\t\t\tif current_node is None:\r\n\t\t\t\treturn []\r\n\r\n\t\t\tcurrent_node.neighbours = self.getNeighbours(current_node.x, current_node.y)\r\n\r\n\t\t\t# The distance from the root node through the current node to the neighbour\r\n\t\t\tcurrent_neighbour_dist = current_node.dist + 1\r\n\r\n\t\t\tfor neighbour in current_node.neighbours:\r\n\t\t\t\tif neighbour in all_nodes:\r\n\t\t\t\t\tneighbour_node = all_nodes[neighbour]\r\n\t\t\t\t\tif current_neighbour_dist < neighbour_node.dist:\r\n\t\t\t\t\t\t# The new best path is through the current node\r\n\t\t\t\t\t\tneighbour_node.parent = current_node\r\n\t\t\t\t\t\tneighbour_node.dist = current_neighbour_dist\r\n\t\t\t\telse:\r\n\t\t\t\t\t# Add a new node if it doesn't exist within the currently explored nodes\r\n\t\t\t\t\tall_nodes[neighbour] = DijkstraNode(neighbour[0], neighbour[1], current_node, current_neighbour_dist)\r\n\r\n\t\t\t# Mark the current node as being explored as you have checked all the neighbours\r\n\t\t\tcurrent_node.explored = True\r\n\r\n\t\t\t# Gets a list of all of the unexplored nodes to check for the next node to explore\r\n\t\t\tunexplored_nodes = [node for _, node in all_nodes.items() if not node.explored]\r\n\r\n\t\t\tif len(unexplored_nodes) > 0:\r\n\t\t\t\t# Go to the next node with the smallest distance that hasn't been explored\r\n\t\t\t\tcurrent_node = min(unexplored_nodes, key=lambda node: node.dist)\r\n\t\t\telse:\r\n\t\t\t\tcurrent_node = None\r\n\r\n\t\t# Make your way back from the target node\r\n\t\tcurrent_node = all_nodes[(x2, y2)]\r\n\t\t# Initialise a list to hold the path going from the target to the root\r\n\t\treversed_path = []\r\n\t\t# This will end when the root node tries to travel to a None node\r\n\t\twhile current_node is not None:\r\n\t\t\t# Add the current node to the list\r\n\t\t\treversed_path.append((current_node.x, current_node.y))\r\n\t\t\t# Travel to the parent node\r\n\t\t\tcurrent_node = current_node.parent\r\n\t\t\t# current_node will be None at the root because the parent of the root node is 'None'\r\n\r\n\t\t# Return the list in the correct order\r\n\t\treturn list(reversed(reversed_path))", "def TSP_ILP(G):\n V1 = range(len(G))\n n, V = len(G), set(V1)\n model = Model() # binary variables indicating if arc (i,j) is used\n # on the route or not\n x = [[model.add_var(var_type=BINARY) for j in V] for i in V]\n # continuous variable to prevent subtours: each city will have a\n # different sequential id in the planned route except the 1st one\n y = [model.add_var() for i in V]\n # objective function: minimize the distance\n model.objective = minimize(xsum(G[i][j]*x[i][j] for i in V for j in V))\n\n # constraint : leave each city only once\n for i in V:\n model += xsum(x[i][j] for j in V - {i}) == 1\n # constraint : enter each city only once\n for i in V:\n model += xsum(x[j][i] for j in V - {i}) == 1 # subtour elimination\n for (i, j) in product(V - {0}, V - {0}):\n if i != j:\n model += y[i] - (n+1)*x[i][j] >= y[j]-n # optimizing\n\n model.verbose = 0\n model.optimize() # checking if a solution was found\n\n if model.num_solutions:\n nc = 0 # cycle starts from vertex 0\n cycle = [nc]\n while True:\n nc = [i for i in V if x[nc][i].x >= 0.99][0]\n cycle.append(nc)\n if nc 
== 0:\n break\n\n return (model.objective_value, cycle)", "def create_path(self):\n\n partials = []\n partials.append({})\n #print self.trip_id\n\n #this variable is true if we have not yet recorded the first edge of a path\n first_edge = True\n #this variable is false until we hit the midpoint\n hit_midpoint = False\n\n first_lasts = []\n first_lasts.append([0,0])\n matrices = []\n matrices.append([np.zeros((self.graph.rows,self.graph.cols)),0])\n edge_sets = []\n edge_sets.append([0 for i in range(self.graph.num_edges)])\n cur_line = self.line_num\n good_graphs = []\n good_graphs.append(True)\n nodes_visited = []\n nodes_visited.append([])\n #normalized = dg.normalize(self.graph.lines[cur_line])\n normalized = normalize_simple(self.graph.lines[cur_line])\n matrices_index = 0\n prev_coords = (-1,-1)\n while normalized[0] == self.trip_id:\n lat = normalized[1]\n lon = normalized[2]\n coords = self.graph.gps_to_coords(lat,lon)\n node = self.graph.coords_to_node(coords[0],coords[1])\n\n if prev_coords == (-1,-1) and coords[0] != -1:\n first_lasts[matrices_index][0] = node\n\n if coords[0] == -1 and prev_coords[0] != -1:\n prev_node = self.graph.coords_to_node(prev_coords[0],prev_coords[1])\n first_lasts[matrices_index][1] = prev_node\n\n if prev_coords != (-1,-1) and coords[0] != -1 and coords != prev_coords:\n edge_num = self.graph.edge_num(prev_coords[0],prev_coords[1],coords[0],coords[1])\n if edge_num == -1:\n good_graphs[matrices_index] = False\n else:\n edge_sets[matrices_index][edge_num] = 1\n if edge_num in partials[matrices_index] and partials[matrices_index][edge_num] == 0:\n del partials[matrices_index][edge_num]\n if not hit_midpoint:\n if first_edge:\n above = (prev_coords[0]-1,prev_coords[1])\n below = (prev_coords[0]+1,prev_coords[1])\n left = (prev_coords[0],prev_coords[1]-1)\n right = (prev_coords[0],prev_coords[1]+1)\n for next_coords in (above,below,left,right):\n other_edge = self.graph.edge_num(prev_coords[0],prev_coords[1],next_coords[0],next_coords[1])\n if other_edge != -1:\n partials[matrices_index][other_edge] = 0\n first_edge = False\n if self.graph.coords_to_node(prev_coords[0],prev_coords[1]) == self.midpoint:\n hit_midpoint = True\n partials[matrices_index][edge_num] = 1\n if self.graph.coords_to_node(coords[0],coords[1]) == self.midpoint:\n hit_midpoint = True\n\n\n\n if coords[0] == -1:\n matrices.append([np.zeros((self.graph.rows,self.graph.cols)),0])\n first_lasts.append([0,0])\n edge_sets.append([0 for i in range(self.graph.num_edges)])\n good_graphs.append(True)\n nodes_visited.append([])\n matrices_index += 1\n partials.append({})\n hit_midpoint = False\n first_edge = True\n \n elif coords[0] < self.graph.rows and coords[1] < self.graph.cols and not matrices[matrices_index][0][coords[0]][coords[1]]:\n matrices[matrices_index][1] += 1\n matrices[matrices_index][0][coords[0]][coords[1]] = 1\n nodes_visited[matrices_index].append(coords)\n\n prev_coords = coords\n\n cur_line += 1\n if cur_line == len(self.graph.lines):\n break\n #normalized = dg.normalize(self.graph.lines[cur_line])\n normalized = normalize_simple(self.graph.lines[cur_line])\n\n prev_node = self.graph.coords_to_node(prev_coords[0],prev_coords[1])\n first_lasts[matrices_index][1] = prev_node\n self.next_line = cur_line\n best_index = 0\n best_score = 0\n for matrix_index in range(len(matrices)):\n if matrices[matrix_index][1] > best_score:\n best_score = matrices[matrix_index][1]\n best_index = matrix_index\n\n for coords in nodes_visited[best_index]:\n 
self.graph.node_visit(self.trip_id,coords)\n \n\n if self.trip_id not in self.graph.trip_id2line_num:\n #if first_lasts[best_index] == [28,5]:\n # print \"a to b: %d\" % self.trip_id\n self.graph.first_last2trip_ids[tuple(first_lasts[best_index])].append(self.trip_id)\n\n return matrices[best_index][0],edge_sets[best_index],good_graphs[best_index],partials[best_index]", "def generate_obstacles(self):\r\n obstacles = self.get_obstable_metrics\r\n obstacle_arrays = []\r\n\r\n for nb_obstacle in obstacles:\r\n empty_array = np.zeros(shape=(self.WINDOW_HEIGHT,\r\n self.WINDOW_WIDTH))\r\n start_location = 0 if nb_obstacle[2] == 1 else self.WINDOW_HEIGHT\r\n y, x = start_location - 1, nb_obstacle[3]\r\n empty_array[y, x] = -1\r\n\r\n for w_value in range(nb_obstacle[0]):\r\n x_updated = x + w_value\r\n\r\n for h_value in range(nb_obstacle[1]):\r\n if nb_obstacle[2] == 1:\r\n y_updated = y + h_value\r\n else:\r\n y_updated = y - h_value\r\n # Replace Value\r\n empty_array[y_updated, x_updated] = -1\r\n\r\n new_array = self.trim_whitespace(empty_array,\r\n nb_obstacle[2],\r\n self.MIN_GAP)\r\n obstacle_arrays.append(new_array)\r\n\r\n return obstacle_arrays", "def optimized_path(coords, startid, mask):\n coords = np.column_stack((coords, mask))\n pass_by = np.asarray(coords)\n path = [coords[startid]]\n pass_by = np.delete(pass_by, startid, axis=0)\n while pass_by.any():\n nearest_id, nearest = min(\n enumerate(pass_by), key=lambda x: distance(path[-1][:2], x[1][:2]))\n path.append(nearest)\n pass_by = np.delete(pass_by, nearest_id, axis=0)\n\n return path", "def get_pathologic_covering_routes(n_pl, n_target, attacker_strategy, target_values):\n # computes the coefficient used by the greedy oracle to choose routes\n targets_coeff = np.transpose(np.multiply(attacker_strategy, target_values))\n\n # randomly selects the player for which the non optimal choice will be made\n wrong_pl = randint(1, n_pl)\n\n # generate the non optimal route randomly\n n_covered_targets = randint(n_pl,n_target-1)\n non_opt_action = np.zeros(n_target)\n for i in range(0, n_covered_targets):\n random_covered_target = randint(0, n_target-1)\n non_opt_action[random_covered_target] = 1\n\n # compute the value of the non optimal route\n non_opt_val = get_value_single_route(non_opt_action, targets_coeff)\n\n # generate routes that have, as a single, values smaller than the best greedy route but taken togher perform\n # at least as well. [[0,1,...],[...],...] 
a[r][t]=1 iff t is covered by r.\n # The returned list should have n_pl - 1 routes\n opt_routes = get_opt_routes(n_pl, non_opt_action)\n\n I={}\n for pl in range(1, n_pl+1):\n\n n_r = randint(0, MAX_ROUTES)\n temp = lil_matrix((n_r+1, n_target), dtype='int8')\n\n if pl == wrong_pl:\n # put the non opt route in the bucket\n for t in non_opt_action.nonzero():\n temp[0,t] = 1\n else:\n for t in opt_routes.pop().nonzero():\n temp[0,t] = 1\n\n # generate other random routes with single value less than the non_opt_value\n for r in range(1, n_r):\n new_route = get_r_limited_val(non_opt_val, targets_coeff)\n\n for t in new_route.nonzero():\n temp[r,t] = 1\n\n I[pl] = temp.tocsr()\n\n return I", "def _detect_obstacles(self):\n def _distance(point, line_point1, line_point2):\n \"\"\"calcuate the distance between a point and a line\"\"\"\n vec1 = line_point1 - point\n vec2 = line_point2 - point\n distance = np.abs(np.cross(vec1,vec2)) / np.linalg.norm(line_point1-line_point2)\n return distance\n\n def _acute_angle(point, line_point1, line_point2):\n \"\"\"detetrmine if the point is whithin the boundary of the line through law of cosines\"\"\"\n base_line = np.linalg.norm(line_point1-line_point2)\n assert base_line > 0, \"check the library useage\"\n line1 = np.linalg.norm(point - line_point1)\n line2 = np.linalg.norm(point - line_point2)\n cos_angle_1 = (base_line**2 + line1**2 - line2**2)/(2*base_line*line1)\n cos_angle_2 = (base_line**2 + line2**2 - line1**2)/(2*base_line*line2)\n if cos_angle_1 * cos_angle_2 > 0:\n return True\n else:\n return False\n\n if self.obstacles != \"None\": # if user assigned some obstacles\n for line in self.env_config: \n line_point1, line_point2 = np.array(line[0]), np.array(line[1])\n point = np.array(self.state[:2])\n distance = _distance(point, line_point1, line_point2)\n acute_angle = _acute_angle(point, line_point1, line_point2)\n if distance <= 0.02 and acute_angle:\n self.adsorption = True\n break\n else:\n self.adsorption = False", "def draw_best_route(final_route):\n shape('turtle')\n fillcolor('purple')\n pencolor('purple')\n pensize(4)\n speed(1)\n # Finds the start position of the node in the graphical grid\n start_pos_x = (final_route[0].x) * 30\n start_pos_y = (final_route[0].y - 1) * -30\n penup()\n # Sets the start position of the drawn path in the middle of the start node\n setpos(-500 + start_pos_x + 15, 200 + start_pos_y - 15)\n pendown()\n # Draws right, left, down or up based on the position of the next node in the list\n for i in range(0, len(final_route) - 1):\n if final_route[i].x < final_route[i + 1].x:\n goto(xcor() + 30, ycor())\n elif final_route[i].x > final_route[i + 1].x:\n goto(xcor() - 30, ycor())\n elif final_route[i].y < final_route[i + 1].y:\n goto(xcor(), ycor() - 30)\n else:\n goto(xcor(), ycor() + 30)\n done()", "def naiveGlobalRouting(self):\n for e_list in self.s2e.values():\n for e in e_list:\n slot_path = []\n src_slot = self.v2s[e.src]\n dst_slot = self.v2s[e.dst]\n slot_path.append(src_slot)\n\n curr = src_slot\n len_x = src_slot.getLenX()\n len_y = src_slot.getLenY()\n\n # first go in X direction\n x_diff = curr.getPositionX() - dst_slot.getPositionX()\n if x_diff:\n dir = 'LEFT' if x_diff > 0 else 'RIGHT'\n for i in range(int(abs(x_diff/len_x))):\n curr = self.slot_manager.createSlotForRouting(curr.getNeighborSlotName(dir))\n slot_path.append(curr)\n\n y_diff = curr.getPositionY() - dst_slot.getPositionY()\n if y_diff:\n dir = 'DOWN' if y_diff > 0 else 'UP'\n for i in range(int(abs(y_diff/len_y))):\n curr = 
self.slot_manager.createSlotForRouting(curr.getNeighborSlotName(dir))\n slot_path.append(curr)\n \n assert curr == dst_slot\n \n slot_path = slot_path[1:-1] # exclude the src and the dst\n logging.info(f'{e.name}: {self.v2s[e.src].getName()} -> {self.v2s[e.dst].getName()} : ' + ' '.join(s.getName() for s in slot_path))\n self.e_name2path[e.name] = slot_path", "def find_path(self):\n j, i = utl.pixel_coords_to_pos(\n self.xcor(), self.ycor(), self.maze_size)\n level_cpy = copy.deepcopy(self.level.maze)\n self.backtrack(level_cpy, i, j, [])", "def find_path(self, origin, destination, max_time = 1):\r\n \r\n # Before we start, let's check we need to do something\r\n if origin == destination or self._heuristic_weight(origin, destination) == 0:\r\n return None\r\n \r\n # Add the starting point to the \"open\" list\r\n self.open_list.append(origin)\r\n self.g_cost[origin] = 0\r\n self.h_cost[origin] = self.f_cost[origin] = self._heuristic_weight(origin, destination)\r\n \r\n self.start_time = lib.clock()\r\n nearest_parent = {}\r\n self.path = PATH_INEXISTENT\r\n \r\n #while (lib.clock() - self.start_time) < max_time:\r\n while len(self.open_list):\r\n # The \"parent\" node, around which we look, is always the first node of the \"open\" list\r\n # This node is transferred to the \"closed\" list\r\n current_parent = self.open_list[0]\r\n self.closed_list.append(current_parent)\r\n del self.open_list[0]\r\n\r\n # The \"parent\" node is the destination : the path has been found.\r\n if current_parent == destination:\r\n self.path = PATH_FOUND\r\n break\r\n\r\n # Set the first element of the open list as the one that has the smallest F-cost\r\n for (i, node) in enumerate(self.open_list):\r\n if self.f_cost[self.open_list[0]] > self.f_cost[node]:\r\n (self.open_list[i], self.open_list[0]) = (self.open_list[0], node)\r\n \r\n # Check the adjacent nodes\r\n children = [road.end for road in current_parent.leaving_roads]\r\n \r\n for child in children:\r\n # Not already in the closed list neither in the open list\r\n if not (child in self.closed_list) and not (child in self.open_list):\r\n # Compute its G-cost, H-cost and F-cost\r\n self.g_cost[child] = self.g_cost[current_parent] + road.weight\r\n self.h_cost[child] = self._heuristic_weight(child, destination)\r\n self.f_cost[child] = self.g_cost[child] + self.h_cost[child]\r\n \r\n nearest_parent[child] = current_parent\r\n \r\n # Add the node to the open list, keeping the order (the first node has the smallest F-cost)\r\n if len(self.open_list) and (self.f_cost[self.open_list[0]] > self.f_cost[child]):\r\n self.open_list.insert(0, child)\r\n else:\r\n self.open_list.append(child)\r\n\r\n # Already in the open list : check to see if this path is a better one than the currently known path\r\n elif child in self.open_list:\r\n # Compute the G-cost of this possible new path\r\n current_g_cost = self.g_cost[current_parent] + road.weight\r\n \r\n # This path is shorter (lower G-cost) : store this path as default to reach this node\r\n if current_g_cost < self.g_cost[child]:\r\n # Set this path as the shortest path to reach this node\r\n nearest_parent[child] = current_parent\r\n self.g_cost[child] = current_g_cost\r\n self.f_cost[child] = self.g_cost[current_parent] + self.h_cost[child] # Do not forget to update the F-cost !\r\n \r\n # Check if the open list is still in the right order\r\n if self.f_cost[self.open_list[0]] > self.f_cost[child]:\r\n i = self.open_list.index(child)\r\n (self.open_list[0], self.open_list[i]) = (self.open_list[i], 
self.open_list[0])\r\n\r\n # Save the path if it exists.\r\n if self.path == PATH_FOUND:\r\n \r\n current_node = destination\r\n self.path = []\r\n self.path_length = 0\r\n \r\n while current_node != origin:\r\n self.path.insert(0, current_node)\r\n if current_node in nearest_parent:\r\n current_node = nearest_parent[current_node]\r\n else:\r\n raise Exception('ERROR (in gps.find_path()): ill-formed parent list, a node has no parent.')\r\n \r\n self.path_length += 1\r\n return self._build_path()\r\n\r\n return None", "def through_obstacle(line, obstacles):\r\n noofpoints = 20\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def a_star(self, xy1, xy2):\n tile_col1, tile_row1 = self.the_map.xy_to_cr(xy1[0], xy1[1])\n tile_col2, tile_row2 = self.the_map.xy_to_cr(xy2[0], xy2[1])\n \n successor_to_parent_map = {}\n start_state = (tile_col1, tile_row1)\n #print('x=%d, y=%d to col=%d, row=%d (map row=%d, col= %d)' % (xy1[0], xy1[1], tile_col1, tile_row1, \n # self.the_map.tile_speeds.shape[0], self.the_map.tile_speeds.shape[1]))\n successor_to_parent_map[(start_state, None)] = None # (Successor, Action) -> (Parent, Action)\n \n open_list = PriorityQueue()\n open_list.update((start_state, None), 0)\n closed = []\n \n while not open_list.isEmpty():\n current_state, action_to_current_state = open_list.pop()\n \n if current_state == (tile_col2, tile_row2):\n return self.__get_action_path((current_state, action_to_current_state), successor_to_parent_map)\n \n if current_state not in closed:\n if current_state == start_state:\n current_cost = 0\n else:\n current_cost = len(self.__get_action_path((current_state, action_to_current_state),\n successor_to_parent_map))\n \n for successor_state, action, step_cost in self.__get_successors(current_state):\n cost = current_cost + step_cost + self.__cartesian_distance(current_state, successor_state)\n \n open_list.update((successor_state, action), cost)\n \n if successor_state not in closed:\n successor_to_parent_map[(successor_state, action)] = (current_state, action_to_current_state)\n \n closed.append(current_state)\n return []", "def update_trip_path(trip_mpois, paths, graph):\n n_nodes = len(trip_mpois)\n # adjacency matrix\n new_paths = np.zeros(shape=(n_nodes, n_nodes))\n\n # iterate through all the nodes and create a list of nodes with sequential id\n for i, node1 in enumerate(trip_mpois):\n for j, node2 in enumerate(trip_mpois):\n new_paths[i, j] = paths[node1, node2]\n\n # new_paths = new_paths/np.max(new_paths[new_paths < _INF])\n # new_paths[np.isinf(new_paths)] = _INF\n\n # create a dummy edge between end and start node with weight 0\n new_paths[1,0] = -_INF\n # new_paths[0,1] = _INF\n\n shortest_path = None\n if n_nodes > 5:\n shortest_path, dist = tsp.solve(n_nodes, new_paths)\n # shortest_path = range(n_nodes)\n else:\n shortest_path = range(n_nodes)\n\n trip_path = np.array(trip_mpois)[shortest_path]\n\n if ___DEBUG:\n fname = 'dump/' + str(n_nodes) + '.dist'\n np.savetxt(fname, new_paths, fmt='%.6f')\n \n mpoi_pos = np.zeros(shape=(n_nodes,2))\n \n for i, node in enumerate(trip_mpois):\n pos_3d = graph.vs[node]['position']\n assert node == graph.vs[node].index\n mpoi_pos[i,:] = pos_3d[:2]\n\n fname = 'dump/' + str(n_nodes) + '.pos'\n np.savetxt(fname, mpoi_pos)\n \n # print trip_mpois, trip_path\n\n return trip_path", "def a_star(grid, heuristic_func, start, goal):\n\n path = []\n path_cost = 0\n queue = 
PriorityQueue()\n queue.put((0, start))\n visited = set(start)\n\n branch = {}\n found = False\n\n while not queue.empty():\n item = queue.get()\n current_cost = item[0]\n current_node = item[1]\n\n if current_node == goal:\n print('Found a path.')\n found = True\n break\n else:\n # Get the new vertexes connected to the current vertex\n for a in valid_actions(grid, current_node):\n next_node = (current_node[0] + a.delta[0], current_node[1] + a.delta[1])\n new_cost = current_cost + a.cost + heuristic_func(next_node, goal)\n\n if next_node not in visited:\n visited.add(next_node)\n queue.put((new_cost, next_node))\n\n branch[next_node] = (new_cost, current_node, a)\n\n if found:\n # retrace steps\n n = goal\n path_cost = branch[n][0]\n while branch[n][1] != start:\n path.append(branch[n][1])\n n = branch[n][1]\n path.append(branch[n][1])\n\n return path[::-1], path_cost", "def OptimalWarpingPath( self, colStart=None ):\n rows = len(self.D)\n cols = len(self.D[0])\n n = rows-1\n m = cols-1\n if colStart:\n m=colStart\n path = [(n,m)]\n while n > 0 or m > 0:\n if n == 0 :\n path.insert(0,(0,m-1))\n m -= 1\n elif m == 0 :\n path.insert(0,(n-1,0))\n n -= 1\n else:\n minStep = min( self.D[n-1][m-1], self.D[n-1][m], self.D[n][m-1] )\n if self.D[n-1][m-1] == minStep:\n path.insert(0,(n-1,m-1))\n n -= 1\n m -= 1\n elif self.D[n-1][m] == minStep:\n path.insert(0,(n-1,m))\n n -= 1\n else: # self.D[n][m-1] == min:\n path.insert(0,(n,m-1))\n m -= 1\n return path, self.CostOfPath( path, self.D )", "def astar(grid, heuristic):\r\n\r\n class MapNode:\r\n def __init__(self, cell, cost, parent):\r\n self.cell = cell\r\n self.cost = cost\r\n self.parent = parent\r\n\r\n @functools.total_ordering\r\n class FrontierElement:\r\n def __init__(self, cell, cost, parent, estimatedCost):\r\n self.node = MapNode(cell, cost, parent)\r\n self.estimatedCost = estimatedCost\r\n def __lt__(self, other):\r\n return self.estimatedCost < other.estimatedCost\r\n def __eq__(self, other):\r\n return self.estimatedCost is other.estimatedCost\r\n\r\n frontier = PriorityQueue()\r\n visitedNodes = set()\r\n frontier.put(FrontierElement(grid.getStart(), 0, None, 0))\r\n\r\n path = []\r\n\r\n while not frontier.empty():\r\n currentElement = frontier.get()\r\n grid.addVisited(currentElement.node.cell)\r\n visitedNodes.add(currentElement.node.cell)\r\n\r\n if currentElement.node.cell in grid.getGoals():\r\n currentNode = currentElement.node\r\n while currentNode is not None:\r\n path.insert(0, currentNode.cell)\r\n currentNode = currentNode.parent\r\n break\r\n\r\n for neighbor in grid.getNeighbors(currentElement.node.cell):\r\n neighborCoord = neighbor[0]\r\n\r\n if neighborCoord in visitedNodes:\r\n continue\r\n\r\n neighborCost = neighbor[1]\r\n cheapestGoal = min(grid.getGoals(), key=lambda goal: Vector2.fromCell(neighborCoord).squaredDistanceTo(Vector2.fromCell(goal)))\r\n\r\n cost = currentElement.node.cost + neighborCost\r\n\r\n frontier.put(FrontierElement(neighborCoord, cost, currentElement.node, cost + heuristic(neighborCoord, cheapestGoal)))\r\n\r\n grid.setPath(path)", "def generate_obstacle_point(start, end):\n top_left = (start[0], start[1] - _OBSTACLE_SIZE)\n top_right = (end[0], end[1] - _OBSTACLE_SIZE)\n return start, end, top_right, top_left" ]
[ "0.7479054", "0.6964157", "0.6800724", "0.6652429", "0.65608233", "0.6515328", "0.6502793", "0.64869034", "0.64225703", "0.63867986", "0.62653744", "0.6237662", "0.61990416", "0.61127084", "0.61037284", "0.60940355", "0.6085445", "0.6071979", "0.6071137", "0.6048148", "0.6047975", "0.6042786", "0.6037737", "0.6025176", "0.60099244", "0.60093457", "0.5997415", "0.59826666", "0.5979279", "0.59777176", "0.597523", "0.59746295", "0.59723896", "0.594491", "0.5943486", "0.59366286", "0.5936183", "0.5922934", "0.5920275", "0.59096396", "0.59056884", "0.58948815", "0.5891924", "0.5882652", "0.58678025", "0.5863782", "0.5863295", "0.5855325", "0.5844479", "0.58427876", "0.5836324", "0.58349514", "0.5828393", "0.5826294", "0.5823025", "0.5820367", "0.5817971", "0.58077693", "0.5806355", "0.5804456", "0.58026576", "0.57974386", "0.5796185", "0.57898885", "0.57853264", "0.57824504", "0.57818365", "0.5776852", "0.57767934", "0.57763845", "0.5776053", "0.57652754", "0.5760867", "0.57520026", "0.57464135", "0.5746128", "0.57364374", "0.57358295", "0.5731047", "0.57303095", "0.5720426", "0.5719681", "0.5717499", "0.57140577", "0.5705735", "0.5700107", "0.5696109", "0.5686706", "0.56779134", "0.56732064", "0.5670348", "0.56682986", "0.5668121", "0.5665449", "0.5664291", "0.5662001", "0.56611425", "0.56593335", "0.5656427", "0.56543636" ]
0.82060087
0
Determine if the UAV intersects an obstacle on the vertical axis
def does_uav_intersect_obstacle_vertically(self, obstacle, drone_point, waypoint):
    if isinstance(obstacle, StationaryObstacle):
        if drone_point[2] < obstacle.height + Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS:
            return True

    return False
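A minimal, runnable sketch of the check above, adapted as a standalone function for illustration. The StationaryObstacle and Constants stand-ins below are assumptions inferred from the names in this record, not the original project's definitions; the only real logic is the altitude comparison on index 2 of drone_point.

class Constants:
    # Assumed safety margin around stationary obstacles (same units as altitude).
    STATIONARY_OBSTACLE_SAFETY_RADIUS = 10.0

class StationaryObstacle:
    def __init__(self, height):
        self.height = height

def does_uav_intersect_obstacle_vertically(obstacle, drone_point, waypoint):
    # Vertical-axis test only: the UAV conflicts with a stationary obstacle
    # when its altitude (drone_point[2]) is below the obstacle's height plus
    # the safety radius. The waypoint argument is unused by this check.
    if isinstance(obstacle, StationaryObstacle):
        if drone_point[2] < obstacle.height + Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS:
            return True
    return False

tower = StationaryObstacle(height=30.0)
print(does_uav_intersect_obstacle_vertically(tower, (0.0, 0.0, 25.0), None))  # True: 25 < 30 + 10
print(does_uav_intersect_obstacle_vertically(tower, (0.0, 0.0, 45.0), None))  # False: 45 >= 30 + 10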
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def inside_obstacle(point, obstacle):\r\n for obs in obstacle:\r\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][0] and point[1] < obs[1][2]:\r\n return 1\r\n return 0", "def inside_obstacle(point, obstacle):\r\n for obs in obstacle:\r\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][0] and point[1] < obs[1][2]:\r\n return 1\r\n return 0", "def interior_contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_positive( self.eval(Vobj) ) \n except AttributeError:\n pass\n \n if Vobj.is_line(): \n return self.polyhedron()._is_zero( self.eval(Vobj) )\n elif Vobj.is_vertex(): \n return self.polyhedron()._is_positive( self.eval(Vobj) ) \n else: # Vobj.is_ray()\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def does_path_intersect_obstacle_2d(self, obstacle, uav_point, waypoint):\n drone_point = uav_point[:-1]\n waypoint = waypoint[:-1]\n obstacle_point = obstacle.get_point()[:-1]\n\n waypoint_vector = np.subtract(waypoint, drone_point)\n obstacle_vector = np.subtract(obstacle_point, drone_point)\n obstacle_vector_magnitude = VectorMath.get_vector_magnitude(obstacle_vector)\n rejection_vector = VectorMath.get_vector_rejection(obstacle_vector, waypoint_vector)\n rejection_vector_magnitude = VectorMath.get_vector_magnitude(rejection_vector)\n\n # Uncomment for DEBUGGING ONLY\n print(\"Waypoint Vector: \" + str(waypoint_vector))\n print(\"Obstacle Vector: \" + str(obstacle_vector))\n print(\"Rejection Vector: \" + str(rejection_vector))\n print(\"Rejection Vector Magnitude: \" + str(rejection_vector_magnitude))\n print(\"Obstacle Radius: \" + str(obstacle.get_radius()))\n print(\"Distance From Obstacle: \" + str(VectorMath.get_vector_magnitude(np.subtract(uav_point, obstacle.get_point()))))\n\n if self.is_obstacle_in_path_of_drone(obstacle_vector, waypoint_vector):\n return rejection_vector_magnitude < obstacle.get_radius()\n\n return False", "def isCollidingWithWall(self, vert, ent1Index, ent2, u, v):\n status = NOCOLLISION\n ent1 = self.listOfEntities[ent1Index]\n \n pt = vert - ent1.body.x.v\n \n vel = ent1.body.velocity.v + np.cross(ent1.body.omega.v, pt)\n \n# vel = QVRotation(ent.body.q,vel)\n \n n = np.cross(u,v)\n n = n/np.linalg.norm(n)\n \n Vr = vel\n Vrn = np.dot(Vr, n)\n \n if Vrn < 0:\n self.listOfCollisions.append(Collision(ent1Index,ent2,n,vert,Vr,-(Vr - (np.dot(np.dot(Vr,n),n)))))\n status = COLLISION\n \n return status", "def interior_contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_positive( self.eval(Vobj) )\n except AttributeError:\n pass\n\n if Vobj.is_line():\n return self.polyhedron()._is_zero( self.eval(Vobj) )\n elif Vobj.is_vertex():\n return self.polyhedron()._is_positive( self.eval(Vobj) )\n else: # Vobj.is_ray()\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def inside(self, uv):\n result = self._trimmed.Perform(gp_Pnt2d(uv[0], uv[1]))\n return result == TopAbs_IN", "def _detect_obstacles(self):\n def _distance(point, line_point1, line_point2):\n \"\"\"calcuate the distance between a point and a line\"\"\"\n vec1 = line_point1 - point\n vec2 = line_point2 - point\n distance = np.abs(np.cross(vec1,vec2)) / np.linalg.norm(line_point1-line_point2)\n return distance\n\n def _acute_angle(point, line_point1, line_point2):\n \"\"\"detetrmine if the point is whithin the boundary of the line through law of cosines\"\"\"\n base_line = np.linalg.norm(line_point1-line_point2)\n 
assert base_line > 0, \"check the library useage\"\n line1 = np.linalg.norm(point - line_point1)\n line2 = np.linalg.norm(point - line_point2)\n cos_angle_1 = (base_line**2 + line1**2 - line2**2)/(2*base_line*line1)\n cos_angle_2 = (base_line**2 + line2**2 - line1**2)/(2*base_line*line2)\n if cos_angle_1 * cos_angle_2 > 0:\n return True\n else:\n return False\n\n if self.obstacles != \"None\": # if user assigned some obstacles\n for line in self.env_config: \n line_point1, line_point2 = np.array(line[0]), np.array(line[1])\n point = np.array(self.state[:2])\n distance = _distance(point, line_point1, line_point2)\n acute_angle = _acute_angle(point, line_point1, line_point2)\n if distance <= 0.02 and acute_angle:\n self.adsorption = True\n break\n else:\n self.adsorption = False", "def check_obstructed(r1,r2): \n \n if r1==r2:\n return False\n \n #Densely sample line connecting r1 and r2.\n #If any of those sampled points is inside the rectangle, then the \n #line of sight intersects the rectangle and the tower's view is\n #obstructed.\n NP = 1000\n sampled_x = np.linspace(r1[0],r2[0],NP)\n sampled_y = np.linspace(r1[1],r2[1],NP)\n for x,y,w,h in self.coordinates__obstacles:\n for pt in xrange(NP):\n if (sampled_x[pt] > x) and (sampled_x[pt] < x+w) and \\\n (sampled_y[pt] > y) and (sampled_y[pt] < y+h):\n return True\n return False", "def isOnInteriorSide(self, v):\n n = self.normalVect()\n return n.dotProduct(vector(self.vertices[0]) - vector(v)) > 0", "def is_linearly_independent_2x2(u, v):\n uv = get_uv(u, v)\n if uv[0][0] * uv[1][1] - uv[1][0] * uv[0][1] != 0:\n return True\n else:\n return False", "def is_in_obstacle(self, x: float, y: float) -> bool:\n for obstacle in self.obstacles:\n if obstacle.contains_point((x, y)):\n return True\n return False", "def has_collide(self, obj):\n rect1 = self.anim.getRect()\n rect2 = obj.anim.getRect()\n \n rect1.move_ip(self.pos)\n rect2.move_ip(obj.pos)\n \n return rect1.colliderect(rect2)", "def isColliding(self, vert, ent1Index, ent2Index, norm): #u, v\n \n ent1 = self.listOfEntities[ent1Index]\n ent2 = self.listOfEntities[ent2Index]\n status = NOCOLLISION\n \n pt1 = vert - ent1.body.x.v\n pt2 = vert - ent2.body.x.v\n \n vel1 = ent1.body.velocity.v + np.cross(ent1.body.omega.v, pt1)\n vel2 = ent2.body.velocity.v + np.cross(ent2.body.omega.v, pt2)\n \n# norm = np.cross(u,v)\n# norm = -norm/np.linalg.norm(norm)\n norm = norm/np.linalg.norm(norm)\n \n Vr = vel1 - vel2\n Vrn = np.dot(Vr, norm)\n \n if Vrn < 0:\n self.listOfCollisions.append(Collision(ent1Index,ent2Index,norm,vert,Vr,-(Vr - (np.dot(np.dot(Vr,norm),norm)))))\n status = COLLISION\n\n return status", "def hit(self, otherball):\r\n dx = (self.unif[0] + self.vx) - (otherball.unif[0] + otherball.vx)\r\n dy = (self.unif[1] + self.vy) - (otherball.unif[1] + otherball.vy)\r\n rd = self.radius + otherball.radius\r\n return dot(dx, dy) < (rd * rd)", "def _in_huc(shply, huc_shply):\n if huc_shply.contains(shply):\n return 2\n elif huc_shply.intersects(shply):\n return 1\n else:\n return 0", "def collide(obj1, obj2):\n offset_x = obj2.x - obj1.x #The difference between obj1 and obj 2\n offset_y = obj2.y - obj1.y \n return obj1.mask.overlap(obj2.mask, (int(offset_x), int(offset_y))) != None # (x,y)", "def has_intersection(self, obj):\r\n obj_x, obj_y = obj.get_location()\r\n x = self.__x\r\n y = self.__y\r\n # Distance formula\r\n distance = sqrt((obj_x - x) ** 2 + (obj_y - y) ** 2)\r\n if distance <= obj.get_radius() + self.__radius:\r\n return True\r\n return False", "def contains(self, Vobj):\n 
try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_nonneg( self.eval(Vobj) ) \n except AttributeError:\n pass\n \n if Vobj.is_line(): \n return self.polyhedron()._is_zero( self.eval(Vobj) )\n else:\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def origin_is_inside_hitbox(self, hitbox):\n if self.hitdetection.accurate:\n max_x = max(hitbox, key = lambda index: abs(index[0]))[0]\n max_y = max(hitbox, key = lambda index: abs(index[1]))[1]\n \n m = max(max_x, max_y)\n \n num_intersections = 0\n for i in range(0, len(hitbox), 1):\n if self.hitdetection.module.does_intersect([[m, m], [0, 0]], [hitbox[i], hitbox[(i + 1) % len(hitbox)]]):\n num_intersections += 1\n return [False, True][num_intersections % 2]\n else:\n has_smaller = False\n has_bigger = False\n for hx, hy in hitbox:\n if hx > 0 and hy > 0:\n has_bigger = True\n if hx < 0 and hy < 0:\n has_smaller = True\n return has_smaller and has_bigger", "def check_shot_on_target(self, shot):\n # Defining a few variables to ease the reading\n # Here we define the x and y interval of the goal's segment\n x_min = min(self.s_pos.x, self.e_pos.x)\n x_max = max(self.s_pos.x, self.e_pos.x)\n\n y_min = min(self.s_pos.y, self.e_pos.y)\n y_max = max(self.s_pos.y, self.e_pos.y)\n\n # Shortening variable names\n o_x = shot.opponent.pos.x\n o_y = shot.opponent.pos.y\n\n # If the angle = pi / 2 or - pi / 2, then tan(angle) is undefined\n # In these cases, the shot is vertical, therefore it is valid\n # iff the x coordinate of the opponent is in the goal's x interval\n if abs(shot.angle) == math.pi / 2:\n return self.is_in_interval(x_min, x_max, o_x)\n\n # If the angle = 0, pi or -pi, then tan(angle) is 0 which can lead to \n # undefined intersection points (if the goal is vertical for example)\n # although there is an intersection point\n # \n # In these cases, the shot is horizontal, therefore it is valid\n # iff the y coordinate of the opponent is in the goal's y interval\n if abs(shot.angle) == math.pi or shot.angle == 0:\n return self.is_in_interval(y_min, y_max, o_y)\n\n # Using tan the least amount of time possible, for this is a slow function\n tan_theta = math.tan(shot.angle)\n\n # Define the LE of the shot\n le1 = LinearEquation(tan_theta, o_y - tan_theta * o_x)\n le2 = None\n\n # If the goal is vertical, finding the intersection point\n # is not possible using the normal way\n #\n # That being said, unless the LE of the shot is vertical too (which it \n # isn't as it is checked beforehand) there has to be an intersection point\n # This intersection must happen at the x coordinate of the goal's segment\n # therefore, it is possible to compute the y coordinate of the intersection by\n # computing the application of the shot's LE on this x coordinate\n #\n # Then, the resulting y is valid iff it is in the goal's segment interval\n if self.e_pos.x - self.s_pos.x == 0:\n y = le1.apply(self.e_pos.x)\n return self.is_in_interval(y_min, y_max, y)\n\n # The normal way of solving the intersection of these two LEs\n else:\n\n # Shortening variables by computing the coefficient of the goal's LE\n ratio = (self.e_pos.y - self.s_pos.y) / (self.e_pos.x - self.s_pos.x)\n\n # If the lines are parallel (have the same coefficient) return False\n if math.tan(shot.angle) == ratio:\n return False\n\n # Defining the goal's LE\n le2 = LinearEquation(ratio, self.e_pos.y - self.e_pos.x * ratio)\n\n # Finding the intersection point of the two LEs\n # If there isn't one, return False (but there should be one\n # 
given all the asserts we do beforehand, this is just for completeness' sake)\n p_intersect = le1.intersection(le2)\n if p_intersect == None:\n return False\n\n # If the intersection point's abscissa is in the goal's x interval, then it is\n # a valid abstracted shot going \n return self.is_in_interval(x_min, x_max, p_intersect.x)", "def intersect(self, sprite):\n return not ((self.left > sprite.right)\n or (self.right < sprite.left)\n or (self.top < sprite.bottom)\n or (self.bottom > sprite.top))", "def check_intersection(obj1, obj2):\n (x1, y1, w1, h1) = obj1.get_box()\n (x2, y2, w2, h2) = obj2.get_box()\n if x2 + w2 - 1 < x1 or x2 >= x1 + w1:\n return False\n if y2 + h2 - 1 < y1 or y2 >= y1 + h1:\n return False\n \n return True", "def check_for_obstacles(self):\n obs = False\n obs_p = []\n for point in self.obstacles:\n if -0.15 <= point[1] <= 0.15: # robot is 178mm wide\n # Obstacles should be less than or equal to 0.2 m away before being detected\n if 0 <= point[0] <= .2:\n obs_p.append(point)\n obs = True\n if obs:\n pos = self.determine_pos_of_obstacle(obs_p)\n data = Obstacle()\n data.x = pos[0]\n data.y = pos[1]\n data.obstacle = True\n self.obs_pub.publish(data)", "def is_rectangle_colliding(self, rectangle):\n for obstacle in self.obstacle_iterator():\n if rectangle.colliderect(obstacle.rect):\n return True\n return False", "def intersects(self, cuboid):\n\t\treturn ( cuboid.front >= self.back and cuboid.back < self.front\n\t\t\tand cuboid.right >= self.left and cuboid.left < self.right\n\t\t\tand cuboid.bottom >= self.top and cuboid.top < self.bottom )", "def _is_collider(self, u, v, w):\n if v in self._children[u] and v in self._children[w]:\n return True\n elif v in self._children[u] and v in self._spouses[w]:\n return True\n elif v in self._spouses[u] and v in self._children[w]:\n return True\n elif v in self._spouses[u] and v in self._spouses[w]:\n return True\n else:\n return False", "def is_obstacle_in_path(self):\n for obstacle in self.obstacles.tolist():\n print(\"obstacle.get_point():\", obstacle.get_point())\n dist_to_obstacle = VectorMath.get_vector_magnitude(np.subtract(obstacle.get_point(), self.drone.get_point()))\n if dist_to_obstacle < obstacle.get_radius() + Constants.DETECTION_THRESHOLD:\n if isinstance(obstacle, StationaryObstacle):\n paths = self.generate_possible_paths(obstacle)\n\n if len(paths) != 0:\n return True, np.array(paths)\n elif isinstance(obstacle, MovingObstacle):\n pass\n\n return False, None", "def is_collision_by_map_obstacle(self):\n for content in self.contents:\n if self.content.y == self.y and self.content.x == self.x:\n return True\n else:\n return False", "def contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )\n except AttributeError:\n pass\n\n if Vobj.is_line():\n return self.polyhedron()._is_zero( self.eval(Vobj) )\n else:\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def doesArmTouchObstacles(armPos, obstacles):\n for i in range(len(armPos)):\n cur_arm = armPos[i]\n arm_x = [cur_arm[0][0],cur_arm[1][0]]\n arm_y = [cur_arm[0][1],cur_arm[1][1]]\n if (arm_x[0] != arm_x[1]):\n arm_a = (arm_y[1]-arm_y[0])/(arm_x[1]-arm_x[0])\n arm_b = arm_y[1]-arm_a*arm_x[1]\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n x_range = np.linspace(arm_x[0],arm_x[1],1000)\n y_range = arm_a * x_range + arm_b\n for j in range(1000):\n cur_x = x_range[j]\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n 
return True\n if (arm_x[0] == arm_x[1]):\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n y_range = np.linspace(arm_y[0],arm_y[1],1000)\n cur_x = arm_x[0]\n for j in range(1000):\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n\n\n #print(obstacles)\n\n return False", "def check_collisions(self, offset, index, obstacles):\n unaltered = True\n self.rect.move_ip(offset)\n while pygame.sprite.spritecollideany(self, obstacles):\n\n # First of all, check if it is a motile transparent block.\n # if so, do nothin\n col_spr = pygame.sprite.spritecollideany(self, obstacles)\n if hasattr(col_spr, \"inertia\"):\n if col_spr.inertia:\n break\n\n if self.climb:\n\t self.climb_mobility = False\n else:\n self.climb_mobility = True\n\n self.rect[index] += (1 if offset[index] < 0 else -1)\n unaltered = False\n #print(\"DEBUG: PLAYERCOL, {}\".format(index))\n\n # stop walking animation\n if index == 0:\n self.walk = False\n\n\n return unaltered", "def collides(self, other):\r\n for block in self.blocks:\r\n for obstacle in other.blocks:\r\n if block.col == obstacle.col and block.row == obstacle.row:\r\n return True\r\n return False", "def is_incident(self, Vobj):\n return self.polyhedron().incidence_matrix()[Vobj.index(), self.index()] == 1", "def is_incident(self, Vobj):\n return self.polyhedron().incidence_matrix()[Vobj.index(), self.index()] == 1", "def is_on(self, obj1_loc, obj1_dims, obj2_loc, obj2_dims):\n VERT_MEASUREMENT_TOLERANCE = self.VERT_MEASUREMENT_TOLERANCE\n result = None\n obj1_x = obj1_loc[0]\n obj1_y = obj1_loc[1]\n obj1_zmin = obj1_loc[2] - (.5 * obj1_dims[2])\n obj2_xmin, obj2_xmax, obj2_ymin, obj2_ymax, obj2_zmin, obj2_zmax = self.get_corners(obj2_loc, obj2_dims)\n if obj1_x >= obj2_xmin and obj1_x <= obj2_xmax:\n if obj1_y >= obj2_ymin and obj1_y <= obj2_ymax:\n if obj1_zmin >= obj2_zmax-VERT_MEASUREMENT_TOLERANCE and obj1_zmin <= obj2_zmax+VERT_MEASUREMENT_TOLERANCE:\n result = 'on'\n return result", "def intersects(self):\n match = False\n for i in range(len(self.__points) - 1):\n p1 = self.__points[i]\n p2 = self.__points[i + 1]\n bounds = self.__line_segment(p1, p2)\n if not bounds is None:\n xmin = bounds[0]\n ymin = bounds[1]\n xmax = bounds[0]\n ymax = bounds[1]\n for j in range(len(bounds)):\n if not (j % 2):\n if bounds[j] < xmin:\n xmin = bounds[j]\n elif bounds[j] > xmax:\n xmax = bounds[j]\n else:\n if bounds[j] < ymin:\n ymin = bounds[j]\n elif bounds[j] > ymax:\n ymax = bounds[j]\n x = self.x\n y = self.y\n # TODO: Determine direction, and check two leading edge points; ie. 
last vector ----> then points are x+width,y+width x+width,y-width\n if x > xmin and x < xmax and y > ymin and y < ymax:\n match = True\n break\n return match", "def collision_detect(self):\n\n # Check if the collision was with a map\n # Rect-based collision code\n for map_rect in Map.current_map.collision_rects:\n collision_time, norm_x, norm_y = collision.aabb_swept_collision(self.rect, (self.vx, self.vy), map_rect)\n if collision_time != 1:\n if DEBUG: print(\"[collision]\", collision_time)\n break\n self.x += self.vx * collision_time\n self.y += self.vy * collision_time\n\n remaining_time = 1 - collision_time\n \"\"\"\n if remaining_time > 0:\n self.vx *= remaining_time;\n self.vy *= remaining_time;\n \"\"\"\n if collision_time != 1:\n if abs(norm_x) > .0001:\n self.vx = -self.vx * COLLISION_DAMPING\n if abs(norm_y) > .0001:\n self.vy = -self.vy * COLLISION_DAMPING\n self.collision_counter += 1\n return True\n return False\n\n # Old, mask-based collision code\n \"\"\"\n self.mask = pygame.mask.from_surface(self.image)\n point = pygame.sprite.collide_mask(Map.current_map, self)\n if point:\n if COLLISION_ALGORITHM_EXPERIMENTAL:\n self.vx, self.vy = collision.calculate_reflection_angle(Map.current_map.mask, point, (self.vx, self.vy))\n else: \n self.vx, self.vy = collision.simple_collision(Map.current_map.mask, point, (self.vx, self.vy))\n self.vx, self.vy = self.vx * COLLISION_DAMPING, self.vy * COLLISION_DAMPING\n \n self.collision_counter += 1\n return True\n return False\n \"\"\"", "def through_obstacle(line, obstacles):\r\n noofpoints = 20\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def is_approaching(self, other_particle):\n if self.pos_x < other_particle.pos_x:\n d_v_x = self.velocity_x - other_particle.velocity_x\n else:\n d_v_x = other_particle.velocity_x - self.velocity_x\n\n if self.pos_y < other_particle.pos_y:\n d_v_y = self.velocity_y - other_particle.velocity_y\n else:\n d_v_y = other_particle.velocity_y - self.velocity_y\n\n return d_v_x > 0 or d_v_y > 0", "def is_hit(ball, r_ball, v, target, r_target):\n v_norm = norm_2d(v)\n dr = (target[0] - ball[0], target[1] - ball[1])\n dr_norm = norm_2d(dr)\n\n p = project(dr, v)\n p_norm = norm_2d(p)\n\n if p_norm > v_norm:\n c = (v_norm ** 2 + dr_norm ** 2 - 2 * sc_mul(v, dr)) ** 0.5\n return c <= r_ball + r_target\n\n h = get_point_line_distance(target, ball, (-v[1], v[0]))\n return abs(h) <= r_ball + r_target", "def collision(self):\n # Check collision with walls\n (x_coord, y_coord) = (self.x_coord[0], self.y_coord[0])\n if x_coord <= EDGE or x_coord >= SCREEN_X - self.size - EDGE or \\\n y_coord <= EDGE or y_coord >= SCREEN_Y - self.size - EDGE:\n return True\n # Check collision with self\n corners = self.get_corners()\n if self.heading == \"right\":\n (frontleft_x, frontleft_y) = (corners[1][0], corners[1][1])\n (frontright_x, frontright_y) = (corners[2][0], corners[2][1])\n elif self.heading == \"left\":\n (frontleft_x, frontleft_y) = (corners[3][0], corners[3][1])\n (frontright_x, frontright_y) = (corners[0][0], corners[0][1])\n elif self.heading == \"up\":\n (frontleft_x, frontleft_y) = (corners[0][0], corners[0][1])\n (frontright_x, frontright_y) = (corners[1][0], corners[1][1])\n elif self.heading == \"down\":\n (frontleft_x, frontleft_y) = (corners[2][0], corners[2][1])\n (frontright_x, frontright_y) = (corners[3][0], corners[3][1])\n for i in range(len(self.x_coord)):\n if 
self.x_coord[i] < frontleft_x < self.x_coord[i] + self.size and \\\n self.y_coord[i] < frontleft_y < self.y_coord[i] + self.size:\n return True\n if self.x_coord[i] < frontright_x < self.x_coord[i] + self.size and \\\n self.y_coord[i] < frontright_y < self.y_coord[i] + self.size:\n return True\n return False", "def intersects(self, other): # -> bool:\n ...", "def accurate_collision(self, other) -> bool:\r\n if self.collide:\r\n if self.bbox_intersect(other):\r\n offset = round(self.x - other.x), \\\r\n round(self.y - other.y)\r\n if self.mask.overlap(other.mask, offset): # Overlap returns None or 1 point\r\n return True\r\n return False\r\n else:\r\n return False", "def inside(i,j,im,h=H): #X\n return i-h >=0 and j-h >=0 and i+h+1<=im.shape[0] and j+h+1<=im.shape[1]", "def is_inside(inner_path, outer_path):\r\n if not hasattr(inner_path, 'bounding_box'):\r\n inner_path.bounding_box = CutPlanner.bounding_box(inner_path)\r\n if not hasattr(outer_path, 'bounding_box'):\r\n outer_path.bounding_box = CutPlanner.bounding_box(outer_path)\r\n if outer_path.bounding_box[0] > inner_path.bounding_box[0]:\r\n # outer minx > inner minx (is not contained)\r\n return False\r\n if outer_path.bounding_box[1] > inner_path.bounding_box[1]:\r\n # outer miny > inner miny (is not contained)\r\n return False\r\n if outer_path.bounding_box[2] < inner_path.bounding_box[2]:\r\n # outer maxx < inner maxx (is not contained)\r\n return False\r\n if outer_path.bounding_box[3] < inner_path.bounding_box[3]:\r\n # outer maxy < inner maxy (is not contained)\r\n return False\r\n if outer_path.bounding_box == inner_path.bounding_box:\r\n if outer_path == inner_path: # This is the same object.\r\n return False\r\n if not hasattr(outer_path, 'vm'):\r\n outer_path = Polygon([outer_path.point(i / 100.0, error=1e4) for i in range(101)])\r\n vm = VectorMontonizer()\r\n vm.add_cluster(outer_path)\r\n outer_path.vm = vm\r\n for i in range(101):\r\n p = inner_path.point(i / 100.0, error=1e4)\r\n if not outer_path.vm.is_point_inside(p.x, p.y):\r\n return False\r\n return True", "def is_at_intersection(self):\n directions = 0\n self.tile = (self.get_nearest_row(), self.get_nearest_col())\n if self.internal_map[self.tile[0] - 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0] + 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] - 1] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] + 1] not in ('x', ):\n directions += 1\n return True if directions > 2 else False", "def through_obstacle(line, obstacles):\r\n noofpoints = 100\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def checkObstacles(dist_compl, centerBoxCoordinate, box_width, box_height, threshold=2):\n # move the coordinate system to the center + box_height/2\n #plt.plot(dist_compl.real,dist_compl.imag,'g.')\n shift_dist_compl= dist_compl-( centerBoxCoordinate-np.array([box_height/2+0j]) )\n #plt.plot(dist_compl.real,dist_compl.imag,'r.')\n # now look in the box in front of you\n obstacleIdx = (shift_dist_compl.real<box_height)*(abs(shift_dist_compl.imag)<((box_width)))\n #plt.show()\n return sum(obstacleIdx)>threshold", "def is_collision_conf(self, q: np.ndarray) -> bool:\n for obs in self.obstacles:\n if np.fabs(q[2]-obs[0]) <= obs[3] and np.fabs(q[0]) <= obs[1] and np.fabs(q[1]) <= obs[2]:\n return True\n return False", 
"def is_in_collision_point(self, pos):\n x, y = pos\n return sqrt((self.x - x)**2 + (self.y - y)**2) < self.r", "def _is_obstacle_in_front(self):\n range_front = []\n range_front[:20] = self.lidar_data[-20:]\n range_front[20:] = self.lidar_data[:20]\n range_front = list(filter(lambda num: num != 0, range_front))\n min_front = min(range_front)\n if min_front < 0.4 and min_front != 0.0:\n\t\t\treturn True\n else:\n\t\t\treturn False", "def isOutsideBorder(self):\n if (self.posX < -self.myGalaxy.worldWidth or self.posX > self.myGalaxy.worldWidth or\n self.posY < -self.myGalaxy.worldHeight or self.posY > self.myGalaxy.worldHeight):\n return 1\n return 0", "def intersect(self, ray):\n # TODO A5 (Step1) implement this function\n # Copy your implementation from A4\n # Then calculate uv coordinates, to be passed into the Hit initializer\n vs = self.vs\n\n a = vs[0][0] - vs[1][0]\n b = vs[0][1] - vs[1][1]\n c = vs[0][2] - vs[1][2]\n d = vs[0][0] - vs[2][0]\n e = vs[0][1] - vs[2][1]\n f = vs[0][2] - vs[2][2]\n\n ray_dir = ray.direction\n ray_orig = ray.origin\n\n g = ray_dir[0]\n h = ray_dir[1]\n i = ray_dir[2]\n j = vs[0][0] - ray_orig[0]\n k = vs[0][1] - ray_orig[1]\n l = vs[0][2] - ray_orig[2]\n\n M = a * (e * i - h * f) + b * (g * f - d * i) + c * (d * h - e * g)\n\n t = -(f * (a * k - j * b) + e * (j * c - a * l) + d *\n (b * l - k * c)) / M\n\n if (t < ray.start or t > ray.end):\n return no_hit\n\n gamma = (i * (a * k - j * b) + h * (j * c - a * l) + g *\n (b * l - k * c)) / M\n\n if (gamma < 0 or gamma > 1):\n return no_hit\n\n beta = (j * (e * i - h * f) + k * (g * f - d * i) +\n l * (d * h - e * g)) / M\n\n if (beta < 0 or beta > 1 - gamma):\n return no_hit\n\n P = ray_orig + t * ray_dir\n\n unit_normal = normalize(np.cross(vs[0] - vs[2], vs[1] - vs[2]))\n\n A = np.linalg.norm(np.cross(vs[1] - vs[0], vs[2] - vs[0])) / 2\n areaA = np.linalg.norm(np.cross(vs[1] - P, vs[2] - P)) / 2\n areaB = np.linalg.norm(np.cross(vs[0] - P, vs[2] - P)) / 2\n areaC = np.linalg.norm(np.cross(vs[0] - P, vs[1] - P)) / 2\n u = areaB / A\n v = areaC / A\n return Hit(t, P, unit_normal, vec([u, v]), self.material)", "def _isInside(self, v, select, progress):\n # Compute on non-masked sources :\n xyz = self.xyz\n N = xyz.shape[0]\n inside = np.ones((xyz.shape[0],), dtype=bool)\n v = v.reshape(v.shape[0] * 3, 3)\n\n # Loop over sources :\n progress.show()\n for k in range(N):\n # Get the euclidian distance :\n eucl = cdist(v, xyz[[k], :])\n # Get the closest vertex :\n eucl_argmin = eucl.argmin()\n # Get distance to zero :\n xyz_t0 = np.sqrt((xyz[k, :] ** 2).sum())\n v_t0 = np.sqrt((v[eucl_argmin, :] ** 2).sum())\n inside[k] = xyz_t0 <= v_t0\n progress.setValue(100 * k / N)\n self.data.mask = False\n self.data.mask = inside if select != 'inside' else np.invert(inside)\n # Finally update data sources and text :\n self.update()\n self.text_update()\n progress.hide()", "def vector_equal(v1,v2):\n if (v2.x - 0.001 <= v1.x <= v2.x + 0.001) and \\\n (v2.y - 0.001 <= v1.y <= v2.y + 0.001) and \\\n (v2.z - 0.001 <= v1.z <= v2.z + 0.001):\n return True", "def interior_contains(self, Vobj):\n return False", "def interior_contains(self, Vobj):\n return False", "def is_in_collision_line(self, a, b):\n return abs((b[0]-a[0])*self.x + (a[1]-b[1])*self.y + (a[0]-b[0])*b[1] + (b[1]-a[1])*a[0]) /\\\n sqrt((b[0]-b[1])**2 + (a[1]-b[1])**2 + 0.0000001)< self.r", "def inframe(self, uv: np.ndarray) -> np.ndarray:\n # Ignore comparisons to NaN\n with np.errstate(invalid=\"ignore\"):\n return np.all((uv >= 0) & (uv <= self.imgsz), axis=1)", 
"def intersect_ext(self, line):\n c = line.cross_z\n d = self.v.dot(c)\n if d == 0:\n return False, 0, 0, 0\n dp = line.p - self.p\n c2 = self.cross_z\n u = c.dot(dp) / d\n v = c2.dot(dp) / d\n return u > 0 and v > 0 and u < 1 and v < 1, self.lerp(u), u, v", "def is_intersecting(self, y: int, x: int) -> bool:\n return (self.is_enabled\n and self.top_left_y <= y <= self.bottom_right_y and self.top_left_x <= x <= self.bottom_right_x)", "def __check_obstacle_intersections(self, goal):\n # generate a proximity test geometry for the goal\n min_clearance = self.cfg[\"goal\"][\"min_clearance\"]\n n = 6 # goal is n sided polygon\n goal_test_geometry = []\n for i in range(n):\n goal_test_geometry.append(\n [goal[0] + min_clearance * cos(i * 2 * pi / n),\n goal[1] + min_clearance * sin(i * 2 * pi / n)])\n goal_test_geometry = Polygon(goal_test_geometry)\n intersects = False\n for obstacle in self.current_obstacles:\n intersects |= geometrics.convex_polygon_intersect_test(goal_test_geometry, obstacle.global_geometry)\n return intersects", "def is_incident(self, Hobj):\n return self.polyhedron().incidence_matrix()[self.index(), Hobj.index()] == 1", "def is_incident(self, Hobj):\n return self.polyhedron().incidence_matrix()[self.index(), Hobj.index()] == 1", "def intersects(self, rect):\n\t\treturn ( rect.right >= self.left and rect.left < self.right\n\t\t\tand rect.bottom >= self.top and rect.top < self.bottom )", "def is_vertical(self):\n return self.start.y == self.end.y", "def prekryvaSa(self,inyPrekryvac):\n offset = (int(inyPrekryvac.x-self.x),int(inyPrekryvac.y-self.y))\n if self.mask.overlap(inyPrekryvac.mask,offset) is None:\n return True\n return False", "def is_overlap(box_1, box_2, iou_th):\n return box_1.iou(box_2) > iou_th", "def is_collided_vertical(self):\n # bounce of vertical borders -> y-axis-check\n if self.position[1] <= config['globals']['BALL_RADIUS']:\n self.velocity[1] *= -1\n elif self.position[1] >= config['globals']['HEIGHT'] + 1 - config['globals']['BALL_RADIUS']:\n self.velocity[1] *= -1", "def collides(snake_object, food_object):\n snake_corners = snake_object.get_corners()\n (food_x, food_y) = food_object.pos\n for coord in snake_corners:\n if food_x <= coord[0] <= food_x + food_object.size and \\\n food_y <= coord[1] <= food_y + food_object.size:\n return True\n return False", "def intersects(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads\r\n return False", "def ifCollide( ball1, ball2 ):\n\t\n\tb1_x, b1_y = ball1.position.xy\n\tb2_x, b2_y = ball2.position.xy\n\t\n\t#vector connect center of particles\n\tdistant = Vector.from_points((b2_x, b2_y), (b1_x, b1_y))\n\t\n\t#if lenght of vector above is less( equal ) than sum of radius ( they overlapping )\n\tif ( ball1.r + ball2.r ) ** 2 >= distant.norm():\n\t\treturn True\n\telse:\n\t\treturn False", "def check_collisions(self):", "def IsPosViewedFromPos(self,eye,pos): \r\n p0x=eye[0] ;p0y=eye[1]\r\n p1x=pos[0] ;p1y=pos[1]\r\n dx=p1x-p0x ;dy=p1y-p0y\r\n coord0x=int(p0x) ;coord0y=int(p0y)\r\n coord1x=int(p1x) ;coord1y=int(p1y)\r\n \r\n #No more intersection\r\n while coord0x!=coord1x or coord0y!=coord1y:\r\n if not self.IsTransparent((coord0x,coord0y)):\r\n return False\r\n cornerx=float(coord0x+(dx>0))\r\n cornery=float(coord0y+(dy>0))\r\n det=dx*(cornery-p0y) - dy*(cornerx-p0x)\r\n \r\n vertical=((dx>0)==(dy>0))==(det>0)\r\n if vertical:\r\n #intersection with a vertial line\r\n p0y+=dy*(cornerx-p0x)/dx \r\n p0x=float(cornerx) # x is projected on the line\r\n if dx>0: coord0x+=1\r\n 
else: coord0x-=1\r\n \r\n else:\r\n #intersection with a horizontal line\r\n p0x+=dx*(cornery-p0y)/dy\r\n p0y=float(cornery) # y is projected on the line\r\n if dy>0: coord0y+=1\r\n else: coord0y-=1\r\n \r\n return True", "def intersect(self, ray):\n # TODO A5 (Step3 and Step4) implement this function\n # For step 4, check if uvs and normals are not None (respectively)\n # If so, then interpolate them\n\n # batch_intersect returns t, beta, gamma, i\n posns = self.posns\n uvs = self.uvs\n inds = self.inds\n normals = self.normals\n t, beta, gamma, i = batch_intersect(posns[inds[:, :]], ray)\n if (t == np.inf):\n return no_hit\n vs = posns[inds[i, :]]\n P = ray.origin + t * ray.direction\n\n if (t == np.inf):\n return no_hit\n else:\n\n alpha = 1 - beta - gamma\n\n if uvs is not None:\n\n uv0 = uvs[inds[i][0]]\n uv1 = uvs[inds[i][1]]\n uv2 = uvs[inds[i][2]]\n\n uv = alpha * uv0 + beta * uv1 + gamma * uv2\n\n else:\n\n A = np.linalg.norm(np.cross(vs[1] - vs[0], vs[2] - vs[0])) / 2\n areaA = np.linalg.norm(np.cross(vs[1] - P, vs[2] - P)) / 2\n areaB = np.linalg.norm(np.cross(vs[0] - P, vs[2] - P)) / 2\n areaC = np.linalg.norm(np.cross(vs[0] - P, vs[1] - P)) / 2\n u = areaB / A\n v = areaC / A\n uv = vec([u, v])\n\n if normals is not None:\n\n n0 = normals[inds[i][0]]\n n1 = normals[inds[i][1]]\n n2 = normals[inds[i][2]]\n\n unit_normal = normalize(alpha * n0 + beta * n1 + gamma * n2)\n\n else:\n unit_normal = normalize(np.cross(vs[0] - vs[2], vs[1] - vs[2]))\n\n return Hit(t, P, unit_normal, uv, self.material)", "def __contains__(self, item):\n try:\n pos = Vec2(*item)\n return pos.x >= self.origin.x and pos.y >= self.origin.y \\\n and pos.x < self.origin.x + self.size.x \\\n and pos.y < self.origin.y + self.size.y\n except TypeError:\n return False", "def has_intersection(self, game_object):\n distance = self.__get_distance(game_object)\n return distance <= (self._radius + game_object.get_radius())", "def _iou(self, obj_a, obj_b):\n # compute the area of both the prediction and ground-truth\n # rectangles\n box_a_area = (obj_a[2] - obj_a[0] + 1) * (obj_a[3] - obj_a[1] + 1)\n box_b_area = (obj_b[2] - obj_b[0] + 1) * (obj_b[3] - obj_b[1] + 1)\n\n # determine the (x, y)-coordinates of the intersection rectangle\n x_a = max(obj_a[0], obj_b[0])\n y_a = max(obj_a[1], obj_b[1])\n x_b = min(obj_a[2], obj_b[2])\n y_b = min(obj_a[3], obj_b[3])\n\n # compute the area of intersection rectangle\n inter_area = max(0, x_b - x_a + 1) * max(0, y_b - y_a + 1)\n\n # compute the intersection over union by taking the intersection\n # area and dividing it by the sum of prediction + ground-truth\n # areas - the interesection area\n iou = inter_area / float(box_a_area + box_b_area - inter_area)\n\n # return the intersection over union value\n return iou", "def hitTest( a, b ):\n r = a.radius + b.radius\n x = abs( a.x - b.x )\n y = abs( a.y - b.y )\n if x <= r and y <= r and x*x + y*y <= r*r:\n return 1\n return 0", "def _find_intersection(self):\n count = 0\n for each_list in self.lab.look():\n if each_list[1] == 'wall':\n count += 1\n if count < 2:\n return True\n else:\n return False", "def _find_obstacle(self, obstacle_type='*traffic_light*'): \r\n obst = list()\r\n \r\n _actors = self._world.get_actors()\r\n _obstacles = _actors.filter(obstacle_type)\r\n\r\n\r\n for _obstacle in _obstacles:\r\n trigger = _obstacle.trigger_volume\r\n\r\n _obstacle.get_transform().transform(trigger.location)\r\n \r\n distance_to_car = trigger.location.distance(self._vehicle.get_location())\r\n\r\n a = np.sqrt(\r\n trigger.extent.x 
** 2 +\r\n trigger.extent.y ** 2 +\r\n trigger.extent.z ** 2)\r\n b = np.sqrt(\r\n self._vehicle.bounding_box.extent.x ** 2 +\r\n self._vehicle.bounding_box.extent.y ** 2 +\r\n self._vehicle.bounding_box.extent.z ** 2)\r\n\r\n s = a + b + 10\r\n \r\n if distance_to_car <= s:\r\n # the actor is affected by this obstacle.\r\n obst.append(_obstacle)\r\n\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(_obstacle.get_transform().location, carla.Vector3D(0.5,0.5,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,255,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,10)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(trigger,\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n\r\n return obst", "def doBoundingBoxesIntersect(self, other):\n if(self.upperLeft.x <= other.lowerRight.x and\n self.lowerRight.x >= other.upperLeft.x and\n self.upperLeft.y >= other.lowerRight.y and\n self.lowerRight.y <= other.upperLeft.y):\n return True\n return False", "def intersects(self, other: RectangularRoom) -> bool:\n return (\n self.x1 <= other.x2\n and self.x2 >= other.x1\n and self.y1 <= other.y2\n and self.y2 >= other.y1\n )", "def is_obstacle_in_path_of_drone(self, obstacle_vector, waypoint_vector):\n obstacle_list = obstacle_vector.tolist()\n waypoint_list = waypoint_vector.tolist()\n\n for index in range(len(obstacle_list)):\n if all(item > 0 for item in [-1.0 * obstacle_list[index], waypoint_vector[index]]) or all(item < 0 for item in [-1.0 * obstacle_list[index], waypoint_vector[index]]):\n return False\n\n return True", "def intersect(self,ray:Ray):\n o = ray.o #ray origin\n d = ray.d #ray destination\n oc = o - self.center #vector from ray origin to center\n b = 2*(oc*d)\n c = oc*oc - self.r**2\n disc = b**2-4*c\n if disc<0:\n return False,-1\n else:\n disc **=0.5\n t0 = -b-disc\n t1 = -b+disc\n return True,max(t0,t1)", "def check_path_collision(self, X):\n\n #check collision with circular obstacles\n for i in range(X.shape[1]):\n p = X[0:3,i].flatten()\n\n for obs_loc in self.obs_locs:\n if np.linalg.norm(p[0:2] - obs_loc) < self.obs_rad:\n return True\n if i>0:\n p2 = X[0:3,i-1].flatten()\n #check collision with walls\n for win in self.windows:\n if win.check_collision(p, p2):\n return True\n\n return False", "def check_collision(self,node, obstacleList):\n if (node is None):\n return True\n\n for i in range(len(node.path_x)):\n if self.map[node.path_x[i]+self.mapwidth*node.path_y[i]]:\n return True\n for j in range(5):\n #check neighbouring\n if self.map[node.path_x[i]+self.mapwidth*node.path_y[i]+j]:\n return True\n if self.map[node.path_x[i]+self.mapwidth*node.path_y[i]-j]:\n return True\n # for i in range(len(node.path_x)):\n # if (node.path_x[i],node.path_y[i])in obstacleList:\n # return True\n # for i in range(len(node.path_x)):\n # for j in range(5):\n # if(node.path_x[i]+j,node.path_y[i]+j) in obstacleList:\n # return True\n # elif (node.path_x[i]-j,node.path_y[i]-j) in obstacleList:\n # return True\n\n return False # safe", "def _in_bounds(self, x, y):\r\n return 0 <= x < 8 and 0 <= y < 8", "def does_path_intersect_obstacle_3d(self, obstacle, drone_point, waypoint):\n 
waypoint_vector = np.subtract(waypoint, drone_point)\n obstacle_vector = np.subtract(obstacle.get_point(), drone_point)\n obstacle_vector_magnitude = VectorMath.get_vector_magnitude(obstacle_vector)\n rejection_vector = VectorMath.get_vector_rejection(obstacle_vector, waypoint_vector)\n rejection_vector_magnitude = VectorMath.get_vector_magnitude(rejection_vector)\n\n # Uncomment for DEBUGGING ONLY\n print(\"Waypoint Vector: \" + str(waypoint_vector))\n print(\"Obstacle Vector: \" + str(obstacle_vector))\n print(\"Rejection Vector: \" + str(rejection_vector))\n print(\"Rejection Vector Magnitude: \" + str(rejection_vector_magnitude))\n print(\"Obstacle Radius: \" + str(obstacle.get_radius()))\n print(\"Distance From Obstacle: \" + str(VectorMath.get_vector_magnitude(np.subtract(drone_point, obstacle.get_point()))))\n\n if self.is_obstacle_in_path_of_drone(obstacle_vector, waypoint_vector):\n return rejection_vector_magnitude < Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS\n\n return False", "def _overlapping(self, atom1, atom2):\n\n if np.linalg.norm(atom1.pos-atom2.pos) < (atom1.rad+atom2.rad):\n return True\n else:\n return False", "def is_intersecting(self, ray):\n\n intersecting_point = self._sympy_plane.intersection(ray.sympy_line)[0]\n\n if 'x' in self._name:\n\n if self._within_y_bounds(intersecting_point.y) and self._within_z_bounds(intersecting_point.z):\n return True, np.array(map(float, [intersecting_point.x, intersecting_point.y, intersecting_point.z]))\n\n\n\n elif 'y' in self._name:\n\n if self._within_x_bounds(intersecting_point.x) and self._within_z_bounds(intersecting_point.z):\n return True, np.array(map(float, [intersecting_point.x, intersecting_point.y, intersecting_point.z]))\n\n\n\n elif 'z' in self._name:\n\n if self._within_y_bounds(intersecting_point.y) and self._within_x_bounds(intersecting_point.x):\n return True, np.array(map(float, [intersecting_point.x, intersecting_point.y, intersecting_point.z]))\n\n return False, None", "def is_connected(object_one, object_two):\n\n for vert_one in object_one.Vertexes:\n for vert_two in object_two.Vertexes:\n if (vert_one.X == vert_two.X) and (vert_one.y == vert_two.y):\n return True\n\n return False", "def intersects(self, other):\n return (self.x1 <= other.x2 and self.x2 >= other.x1 and\n self.y1 <= other.y2 and self.y2 >= other.y1)", "def collision(self, model, new_location):\n\n within_bounds = all(model.boundaries[0] <= new_location) and all(new_location <= model.boundaries[1])\n\n if not within_bounds:\n\n collide = True\n\n elif self.neighbourhood(model, new_location):\n\n collide = True\n\n else:\n\n collide = False\n\n return collide", "def _check_sonar_obstacles(self):\n # TODO: what's a good number?\n BLOCKED_THRESHOLD = 0.7\n\n rate = rospy.Rate(10) # 10 hz\n count = 10\n left = 0\n center = 0\n right = 0\n\n for i in range(count):\n obstacle = self.swarmie.get_obstacle_condition()\n\n if obstacle & Obstacle.SONAR_LEFT == Obstacle.SONAR_LEFT:\n left += 1\n if (obstacle & Obstacle.SONAR_CENTER ==\n Obstacle.SONAR_CENTER):\n center += 1\n if obstacle & Obstacle.SONAR_RIGHT == Obstacle.SONAR_RIGHT:\n right += 1\n\n rate.sleep()\n\n left_blocked = left / count > BLOCKED_THRESHOLD\n center_blocked = center / count > BLOCKED_THRESHOLD\n right_blocked = right / count > BLOCKED_THRESHOLD\n\n return left_blocked, center_blocked, right_blocked", "def contains(outer, inner):\n return inner.tl.x >= outer.tl.x and inner.tl.y >= outer.tl.y and \\\n inner.br.x <= outer.br.x and inner.br.y <= outer.br.y", "def __hit_bricks(self, 
g_object):\n return type(g_object) == GRect and g_object != self.__paddle", "def check_collision(self, footprint):\n return self.upperleft[0] < footprint.upperleft[0] < footprint.upperright[0] < self.upperright[0] and \\\n self.upperleft[1] < footprint.upperleft[1] < footprint.bottomleft[1] < self.bottomleft[1]", "def inside_rectangle(self, x, y):\n if (self.pos.x - self.width < x < self.pos.x + self.width and\n self.pos.y - self.height < y < self.pos.y + self.height):\n return True" ]
[ "0.66017944", "0.66017944", "0.65201724", "0.6504722", "0.6439276", "0.6425067", "0.638491", "0.6364111", "0.63495326", "0.6324251", "0.6221943", "0.6213708", "0.6208691", "0.61606497", "0.6156494", "0.6152293", "0.6147638", "0.6138719", "0.6131911", "0.6120432", "0.6107103", "0.609772", "0.608096", "0.6080534", "0.6069908", "0.60662115", "0.6062206", "0.6060807", "0.6045898", "0.60382247", "0.60375416", "0.6023941", "0.6016574", "0.60037804", "0.60037804", "0.5997425", "0.59923464", "0.59911466", "0.5990798", "0.5982512", "0.5979446", "0.59599954", "0.5957017", "0.595354", "0.5926588", "0.5912591", "0.5910737", "0.5909938", "0.587539", "0.585701", "0.5851753", "0.585067", "0.5843726", "0.58328485", "0.58040446", "0.57997406", "0.5785566", "0.5785566", "0.5781954", "0.5774998", "0.57715786", "0.57634455", "0.57527626", "0.5742174", "0.5742174", "0.5726569", "0.5724584", "0.5716972", "0.5714466", "0.5698135", "0.56977034", "0.56944716", "0.56897426", "0.56768334", "0.56756204", "0.56719875", "0.5670189", "0.5669844", "0.5655663", "0.5655275", "0.56551915", "0.56503665", "0.5648798", "0.5645882", "0.5643718", "0.56434184", "0.5638952", "0.563762", "0.56326455", "0.562722", "0.5622306", "0.56212306", "0.56193024", "0.5619231", "0.5617277", "0.5616014", "0.5607865", "0.5607514", "0.5580008", "0.5578188" ]
0.71816885
0
Determine if the vector between a UAV's position and the current waypoint intersects an obstacle.
def does_path_intersect_obstacle_2d(self, obstacle, uav_point, waypoint): drone_point = uav_point[:-1] waypoint = waypoint[:-1] obstacle_point = obstacle.get_point()[:-1] waypoint_vector = np.subtract(waypoint, drone_point) obstacle_vector = np.subtract(obstacle_point, drone_point) obstacle_vector_magnitude = VectorMath.get_vector_magnitude(obstacle_vector) rejection_vector = VectorMath.get_vector_rejection(obstacle_vector, waypoint_vector) rejection_vector_magnitude = VectorMath.get_vector_magnitude(rejection_vector) # Uncomment for DEBUGGING ONLY print("Waypoint Vector: " + str(waypoint_vector)) print("Obstacle Vector: " + str(obstacle_vector)) print("Rejection Vector: " + str(rejection_vector)) print("Rejection Vector Magnitude: " + str(rejection_vector_magnitude)) print("Obstacle Radius: " + str(obstacle.get_radius())) print("Distance From Obstacle: " + str(VectorMath.get_vector_magnitude(np.subtract(uav_point, obstacle.get_point())))) if self.is_obstacle_in_path_of_drone(obstacle_vector, waypoint_vector): return rejection_vector_magnitude < obstacle.get_radius() return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def does_uav_intersect_obstacle_vertically(self, obstacle, drone_point, waypoint):\n if isinstance(obstacle, StationaryObstacle):\n if drone_point[2] < obstacle.height + Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS:\n return True\n\n return False", "def inside_obstacle(point, obstacle):\r\n for obs in obstacle:\r\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][0] and point[1] < obs[1][2]:\r\n return 1\r\n return 0", "def inside_obstacle(point, obstacle):\r\n for obs in obstacle:\r\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][0] and point[1] < obs[1][2]:\r\n return 1\r\n return 0", "def is_obstacle_in_path_of_drone(self, obstacle_vector, waypoint_vector):\n obstacle_list = obstacle_vector.tolist()\n waypoint_list = waypoint_vector.tolist()\n\n for index in range(len(obstacle_list)):\n if all(item > 0 for item in [-1.0 * obstacle_list[index], waypoint_vector[index]]) or all(item < 0 for item in [-1.0 * obstacle_list[index], waypoint_vector[index]]):\n return False\n\n return True", "def is_obstacle_in_path(self):\n for obstacle in self.obstacles.tolist():\n print(\"obstacle.get_point():\", obstacle.get_point())\n dist_to_obstacle = VectorMath.get_vector_magnitude(np.subtract(obstacle.get_point(), self.drone.get_point()))\n if dist_to_obstacle < obstacle.get_radius() + Constants.DETECTION_THRESHOLD:\n if isinstance(obstacle, StationaryObstacle):\n paths = self.generate_possible_paths(obstacle)\n\n if len(paths) != 0:\n return True, np.array(paths)\n elif isinstance(obstacle, MovingObstacle):\n pass\n\n return False, None", "def does_path_intersect_obstacle_3d(self, obstacle, drone_point, waypoint):\n waypoint_vector = np.subtract(waypoint, drone_point)\n obstacle_vector = np.subtract(obstacle.get_point(), drone_point)\n obstacle_vector_magnitude = VectorMath.get_vector_magnitude(obstacle_vector)\n rejection_vector = VectorMath.get_vector_rejection(obstacle_vector, waypoint_vector)\n rejection_vector_magnitude = VectorMath.get_vector_magnitude(rejection_vector)\n\n # Uncomment for DEBUGGING ONLY\n print(\"Waypoint Vector: \" + str(waypoint_vector))\n print(\"Obstacle Vector: \" + str(obstacle_vector))\n print(\"Rejection Vector: \" + str(rejection_vector))\n print(\"Rejection Vector Magnitude: \" + str(rejection_vector_magnitude))\n print(\"Obstacle Radius: \" + str(obstacle.get_radius()))\n print(\"Distance From Obstacle: \" + str(VectorMath.get_vector_magnitude(np.subtract(drone_point, obstacle.get_point()))))\n\n if self.is_obstacle_in_path_of_drone(obstacle_vector, waypoint_vector):\n return rejection_vector_magnitude < Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS\n\n return False", "def is_approaching(self, other_particle):\n if self.pos_x < other_particle.pos_x:\n d_v_x = self.velocity_x - other_particle.velocity_x\n else:\n d_v_x = other_particle.velocity_x - self.velocity_x\n\n if self.pos_y < other_particle.pos_y:\n d_v_y = self.velocity_y - other_particle.velocity_y\n else:\n d_v_y = other_particle.velocity_y - self.velocity_y\n\n return d_v_x > 0 or d_v_y > 0", "def check_position(self, player):\n\n # Mid point of the segment defining the goal\n mid = Point.mid_point(self.s_pos, self.e_pos)\n\n # Transposition of this point by the direction vector of the goal\n # to get the direction vector with its origin in the center of the goal\n mid_prime = self.dir + mid\n\n # Creating both needed vectors\n v1 = Vector.v_from_pp(mid, player.pos)\n v2 = Vector.v_from_pp(mid, mid_prime)\n\n # Getting the angle and checking if it is a valid 
one\n angle = v1.angle(v2)\n\n return self.is_in_interval(-math.pi / 2, math.pi / 2, angle)", "def has_uav_reached_current_waypoint(self):\n return self.drone.has_reached_waypoint()", "def check_for_obstacles(self):\n obs = False\n obs_p = []\n for point in self.obstacles:\n if -0.15 <= point[1] <= 0.15: # robot is 178mm wide\n # Obstacles should be less than or equal to 0.2 m away before being detected\n if 0 <= point[0] <= .2:\n obs_p.append(point)\n obs = True\n if obs:\n pos = self.determine_pos_of_obstacle(obs_p)\n data = Obstacle()\n data.x = pos[0]\n data.y = pos[1]\n data.obstacle = True\n self.obs_pub.publish(data)", "def is_in_obstacle(self, x: float, y: float) -> bool:\n for obstacle in self.obstacles:\n if obstacle.contains_point((x, y)):\n return True\n return False", "def through_obstacle(line, obstacles):\r\n noofpoints = 20\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def __contains__(self, item):\n try:\n pos = Vec2(*item)\n return pos.x >= self.origin.x and pos.y >= self.origin.y \\\n and pos.x < self.origin.x + self.size.x \\\n and pos.y < self.origin.y + self.size.y\n except TypeError:\n return False", "def IsPointInsideMesh2(obj, p, max_dist = 1.84467e+19):\n bResult, point, normal, face = obj.closest_point_on_mesh(p, max_dist)\n p2 = point-p\n v = p2.dot(normal)\n return not(v < 0.0)", "def goal_occupied(self, view):\n for line in view.obstacles:\n if linesegdist2(line.p1, line.p2, self.goal) < self.radius ** 2:\n return True\n\n for p in view.pedestrians:\n if p.velocity.length2() == 0.0:\n if p.position.distance_to2(self.goal) < p.radius:\n return True\n\n return False", "def through_obstacle(line, obstacles):\r\n noofpoints = 100\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def doesArmTouchObstacles(armPos, obstacles):\n for i in range(len(armPos)):\n cur_arm = armPos[i]\n arm_x = [cur_arm[0][0],cur_arm[1][0]]\n arm_y = [cur_arm[0][1],cur_arm[1][1]]\n if (arm_x[0] != arm_x[1]):\n arm_a = (arm_y[1]-arm_y[0])/(arm_x[1]-arm_x[0])\n arm_b = arm_y[1]-arm_a*arm_x[1]\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n x_range = np.linspace(arm_x[0],arm_x[1],1000)\n y_range = arm_a * x_range + arm_b\n for j in range(1000):\n cur_x = x_range[j]\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n if (arm_x[0] == arm_x[1]):\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n y_range = np.linspace(arm_y[0],arm_y[1],1000)\n cur_x = arm_x[0]\n for j in range(1000):\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n\n\n #print(obstacles)\n\n return False", "def __contains__(self, point, e=10e-10):\n v1 = self.vector\n v2 = Vector.createFromTwoPoints(self.point, point)\n return abs(v1.angle - v2.angle) < e", "def intersects(self):\n match = False\n for i in range(len(self.__points) - 1):\n p1 = self.__points[i]\n p2 = self.__points[i + 1]\n bounds = self.__line_segment(p1, p2)\n if not bounds is None:\n xmin = bounds[0]\n ymin = bounds[1]\n xmax = bounds[0]\n ymax = bounds[1]\n for j in range(len(bounds)):\n if not (j % 2):\n if bounds[j] < xmin:\n xmin = bounds[j]\n elif bounds[j] > xmax:\n xmax = bounds[j]\n else:\n if bounds[j] < ymin:\n ymin = bounds[j]\n elif bounds[j] 
> ymax:\n ymax = bounds[j]\n x = self.x\n y = self.y\n # TODO: Determine direction, and check two leading edge points; ie. last vector ----> then points are x+width,y+width x+width,y-width\n if x > xmin and x < xmax and y > ymin and y < ymax:\n match = True\n break\n return match", "def is_point_within(self, x, y):\n return abs(x - self._x_position) <= self._x_length / 2 and abs(y - self._y_position) <= self._y_length / 2", "def check_shot_on_target(self, shot):\n # Defining a few variables to ease the reading\n # Here we define the x and y interval of the goal's segment\n x_min = min(self.s_pos.x, self.e_pos.x)\n x_max = max(self.s_pos.x, self.e_pos.x)\n\n y_min = min(self.s_pos.y, self.e_pos.y)\n y_max = max(self.s_pos.y, self.e_pos.y)\n\n # Shortening variable names\n o_x = shot.opponent.pos.x\n o_y = shot.opponent.pos.y\n\n # If the angle = pi / 2 or - pi / 2, then tan(angle) is undefined\n # In these cases, the shot is vertical, therefore it is valid\n # iff the x coordinate of the opponent is in the goal's x interval\n if abs(shot.angle) == math.pi / 2:\n return self.is_in_interval(x_min, x_max, o_x)\n\n # If the angle = 0, pi or -pi, then tan(angle) is 0 which can lead to \n # undefined intersection points (if the goal is vertical for example)\n # although there is an intersection point\n # \n # In these cases, the shot is horizontal, therefore it is valid\n # iff the y coordinate of the opponent is in the goal's y interval\n if abs(shot.angle) == math.pi or shot.angle == 0:\n return self.is_in_interval(y_min, y_max, o_y)\n\n # Using tan the least amount of time possible, for this is a slow function\n tan_theta = math.tan(shot.angle)\n\n # Define the LE of the shot\n le1 = LinearEquation(tan_theta, o_y - tan_theta * o_x)\n le2 = None\n\n # If the goal is vertical, finding the intersection point\n # is not possible using the normal way\n #\n # That being said, unless the LE of the shot is vertical too (which it \n # isn't as it is checked beforehand) there has to be an intersection point\n # This intersection must happen at the x coordinate of the goal's segment\n # therefore, it is possible to compute the y coordinate of the intersection by\n # computing the application of the shot's LE on this x coordinate\n #\n # Then, the resulting y is valid iff it is in the goal's segment interval\n if self.e_pos.x - self.s_pos.x == 0:\n y = le1.apply(self.e_pos.x)\n return self.is_in_interval(y_min, y_max, y)\n\n # The normal way of solving the intersection of these two LEs\n else:\n\n # Shortening variables by computing the coefficient of the goal's LE\n ratio = (self.e_pos.y - self.s_pos.y) / (self.e_pos.x - self.s_pos.x)\n\n # If the lines are parallel (have the same coefficient) return False\n if math.tan(shot.angle) == ratio:\n return False\n\n # Defining the goal's LE\n le2 = LinearEquation(ratio, self.e_pos.y - self.e_pos.x * ratio)\n\n # Finding the intersection point of the two LEs\n # If there isn't one, return False (but there should be one\n # given all the asserts we do beforehand, this is just for completeness' sake)\n p_intersect = le1.intersection(le2)\n if p_intersect == None:\n return False\n\n # If the intersection point's abscissa is in the goal's x interval, then it is\n # a valid abstracted shot going \n return self.is_in_interval(x_min, x_max, p_intersect.x)", "def inside(self, uv):\n result = self._trimmed.Perform(gp_Pnt2d(uv[0], uv[1]))\n return result == TopAbs_IN", "def is_inside(inner_path, outer_path):\r\n if not hasattr(inner_path, 'bounding_box'):\r\n 
inner_path.bounding_box = CutPlanner.bounding_box(inner_path)\r\n if not hasattr(outer_path, 'bounding_box'):\r\n outer_path.bounding_box = CutPlanner.bounding_box(outer_path)\r\n if outer_path.bounding_box[0] > inner_path.bounding_box[0]:\r\n # outer minx > inner minx (is not contained)\r\n return False\r\n if outer_path.bounding_box[1] > inner_path.bounding_box[1]:\r\n # outer miny > inner miny (is not contained)\r\n return False\r\n if outer_path.bounding_box[2] < inner_path.bounding_box[2]:\r\n # outer maxx < inner maxx (is not contained)\r\n return False\r\n if outer_path.bounding_box[3] < inner_path.bounding_box[3]:\r\n # outer maxy < inner maxy (is not contained)\r\n return False\r\n if outer_path.bounding_box == inner_path.bounding_box:\r\n if outer_path == inner_path: # This is the same object.\r\n return False\r\n if not hasattr(outer_path, 'vm'):\r\n outer_path = Polygon([outer_path.point(i / 100.0, error=1e4) for i in range(101)])\r\n vm = VectorMontonizer()\r\n vm.add_cluster(outer_path)\r\n outer_path.vm = vm\r\n for i in range(101):\r\n p = inner_path.point(i / 100.0, error=1e4)\r\n if not outer_path.vm.is_point_inside(p.x, p.y):\r\n return False\r\n return True", "def is_obstacle(self, pos: tuple):\n if self.within_map(pos):\n return self.map[round(pos[0]), round(pos[1])] == OBSTACLE\n else:\n return False", "def __check_obstacle_intersections(self, goal):\n # generate a proximity test geometry for the goal\n min_clearance = self.cfg[\"goal\"][\"min_clearance\"]\n n = 6 # goal is n sided polygon\n goal_test_geometry = []\n for i in range(n):\n goal_test_geometry.append(\n [goal[0] + min_clearance * cos(i * 2 * pi / n),\n goal[1] + min_clearance * sin(i * 2 * pi / n)])\n goal_test_geometry = Polygon(goal_test_geometry)\n intersects = False\n for obstacle in self.current_obstacles:\n intersects |= geometrics.convex_polygon_intersect_test(goal_test_geometry, obstacle.global_geometry)\n return intersects", "def _detect_obstacles(self):\n def _distance(point, line_point1, line_point2):\n \"\"\"calculate the distance between a point and a line\"\"\"\n vec1 = line_point1 - point\n vec2 = line_point2 - point\n distance = np.abs(np.cross(vec1,vec2)) / np.linalg.norm(line_point1-line_point2)\n return distance\n\n def _acute_angle(point, line_point1, line_point2):\n \"\"\"determine if the point is within the boundary of the line through law of cosines\"\"\"\n base_line = np.linalg.norm(line_point1-line_point2)\n assert base_line > 0, \"check the library usage\"\n line1 = np.linalg.norm(point - line_point1)\n line2 = np.linalg.norm(point - line_point2)\n cos_angle_1 = (base_line**2 + line1**2 - line2**2)/(2*base_line*line1)\n cos_angle_2 = (base_line**2 + line2**2 - line1**2)/(2*base_line*line2)\n if cos_angle_1 * cos_angle_2 > 0:\n return True\n else:\n return False\n\n if self.obstacles != \"None\": # if user assigned some obstacles\n for line in self.env_config: \n line_point1, line_point2 = np.array(line[0]), np.array(line[1])\n point = np.array(self.state[:2])\n distance = _distance(point, line_point1, line_point2)\n acute_angle = _acute_angle(point, line_point1, line_point2)\n if distance <= 0.02 and acute_angle:\n self.adsorption = True\n break\n else:\n self.adsorption = False", "def interior_contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_positive( self.eval(Vobj) ) \n except AttributeError:\n pass\n \n if Vobj.is_line(): \n return self.polyhedron()._is_zero( self.eval(Vobj) )\n elif Vobj.is_vertex(): \n 
return self.polyhedron()._is_positive( self.eval(Vobj) ) \n else: # Vobj.is_ray()\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def has_intersection(self, obj):\r\n obj_x, obj_y = obj.get_location()\r\n x = self.__x\r\n y = self.__y\r\n # Distance formula\r\n distance = sqrt((obj_x - x) ** 2 + (obj_y - y) ** 2)\r\n if distance <= obj.get_radius() + self.__radius:\r\n return True\r\n return False", "def contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_nonneg( self.eval(Vobj) ) \n except AttributeError:\n pass\n \n if Vobj.is_line(): \n return self.polyhedron()._is_zero( self.eval(Vobj) )\n else:\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def isInPlane(self, p) -> bool:\n # Testing for zero is done with math.isclose, to avoid rounding/floating point errors.\n # Since we are testing near zero, abs_tol is set to 1e-09\n return math.isclose(\n math.fabs(\n dot(\n self.normal(),\n Vector.connect(p.x, p.y, p.z, self.p0.x, self.p0.y, self.p0.z),\n )\n ),\n 0,\n rel_tol=1e-09,\n abs_tol=1e-09,\n )", "def isNearTo(self, point):\n # BBB: I'm using a majored version of the collide rect to fix a problem with a charas-bouncing-effect on movement... :-|\n x, y = self.currentLevel.transformToScreenCoordinate(point)\n collide_rect = self.collide_rect\n collide_rect.height+=3\n return collide_rect.collidepoint(x, y)", "def is_point_in(self, point):\n return (self.upperleft[0] <= point[0] <= self.upperright[0] and self.upperleft[1] <= point[1] <= self.bottomleft[1])", "def is_point_in(self, point):\n return (self.upperleft[0] <= point[0] <= self.upperright[0] and self.upperleft[1] <= point[1] <= self.bottomleft[1])", "def interior_contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_positive( self.eval(Vobj) )\n except AttributeError:\n pass\n\n if Vobj.is_line():\n return self.polyhedron()._is_zero( self.eval(Vobj) )\n elif Vobj.is_vertex():\n return self.polyhedron()._is_positive( self.eval(Vobj) )\n else: # Vobj.is_ray()\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def isOnInteriorSide(self, v):\n n = self.normalVect()\n return n.dotProduct(vector(self.vertices[0]) - vector(v)) > 0", "def is_in_collision_point(self, pos):\n x, y = pos\n return sqrt((self.x - x)**2 + (self.y - y)**2) < self.r", "def contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )\n except AttributeError:\n pass\n\n if Vobj.is_line():\n return self.polyhedron()._is_zero( self.eval(Vobj) )\n else:\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def _is_at_position(pose_1, pose_2, atol):\n # type: (typing.Union[PoseStamped, PositionTarget, Waypoint], typing.Union[PoseStamped, PositionTarget, Waypoint], float) -> bool\n\n pos = [np.zeros(3), np.zeros(3)]\n for i, pose in enumerate((pose_1, pose_2)):\n if isinstance(pose, PoseStamped):\n pos[i][:] = np.array([pose.pose.position.x, pose.pose.position.y, pose.pose.position.z])\n elif isinstance(pose, PositionTarget):\n pos[i][:] = np.array([pose.position.x, pose.position.y, pose.position.z])\n elif isinstance(pose, Waypoint):\n pos[i][:] = np.array([pose.x_lat, pose.y_long, pose.z_alt])\n else:\n raise Warning(\"Wrong type\")\n\n return all(np.isclose(pos[0], pos[1], atol=atol))", "def is_at_intersection(self):\n directions = 0\n self.tile = (self.get_nearest_row(), self.get_nearest_col())\n if 
self.internal_map[self.tile[0] - 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0] + 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] - 1] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] + 1] not in ('x', ):\n directions += 1\n return True if directions > 2 else False", "def is_ahead_of(self, pose, x, y):\n x1 = pose.position.x\n y1 = pose.position.y\n orientation = pose.orientation\n euler = tf.transformations.euler_from_quaternion(\n [orientation.x, orientation.y, orientation.z, orientation.w])\n yaw = euler[2]\n return ((x - x1) * math.cos(yaw) + (y - y1) * math.sin(yaw)) > 0", "def isInGoal(self):\n coordx= self.playerPos.x\n coordy= self.playerPos.y\n target = 0 if self.id_team == 1 else 1\n\n if((((target == 0)and (coordx<=5))|\n ((target == 1) and(coordx>145))) \n and (coordy<=50 and coordy>=40)):\n return True\n else:\n return False", "def is_intersection(self, location):\n loc = to_carla_location(location)\n waypoint = self._map.get_waypoint(loc,\n project_to_road=False,\n lane_type=carla.LaneType.Any)\n if not waypoint:\n # The map didn't return a waypoint because the location not within\n # mapped location.\n return False\n else:\n # XXX(ionel): is_intersection will be deprecated in the future\n # Carla releases.\n return waypoint.is_intersection", "def origin_is_inside_hitbox(self, hitbox):\n if self.hitdetection.accurate:\n max_x = max(hitbox, key = lambda index: abs(index[0]))[0]\n max_y = max(hitbox, key = lambda index: abs(index[1]))[1]\n \n m = max(max_x, max_y)\n \n num_intersections = 0\n for i in range(0, len(hitbox), 1):\n if self.hitdetection.module.does_intersect([[m, m], [0, 0]], [hitbox[i], hitbox[(i + 1) % len(hitbox)]]):\n num_intersections += 1\n return [False, True][num_intersections % 2]\n else:\n has_smaller = False\n has_bigger = False\n for hx, hy in hitbox:\n if hx > 0 and hy > 0:\n has_bigger = True\n if hx < 0 and hy < 0:\n has_smaller = True\n return has_smaller and has_bigger", "def contains_pt(self, pt):\n x, y = pt\n if not self.x - self.radius < x < self.x + self.radius:\n return False\n if not self.y - self.radius < y < self.y + self.radius:\n return False\n return True", "def contains(self, position):\n return (position - self._position).dot(self.normal(position)) < 0", "def condition(o):\n\t\t\tv = o.pos() - self.pos()\n\t\t\treturn v.norm2() < dist2 and abs(angle_diff(v.angle(),self.angle())) < math.radians(45)", "def check_obstructed(r1,r2): \n \n if r1==r2:\n return False\n \n #Densely sample line connecting r1 and r2.\n #If any of those sampled points is inside the rectangle, then the \n #line of sight intersects the rectangle and the tower's view is\n #obstructed.\n NP = 1000\n sampled_x = np.linspace(r1[0],r2[0],NP)\n sampled_y = np.linspace(r1[1],r2[1],NP)\n for x,y,w,h in self.coordinates__obstacles:\n for pt in xrange(NP):\n if (sampled_x[pt] > x) and (sampled_x[pt] < x+w) and \\\n (sampled_y[pt] > y) and (sampled_y[pt] < y+h):\n return True\n return False", "def hit(self, otherball):\r\n dx = (self.unif[0] + self.vx) - (otherball.unif[0] + otherball.vx)\r\n dy = (self.unif[1] + self.vy) - (otherball.unif[1] + otherball.vy)\r\n rd = self.radius + otherball.radius\r\n return dot(dx, dy) < (rd * rd)", "def hit(self, origin, sightVector, hitError):\n distance = [0, 0, 0]\n for i in range(0, 3):\n distance[i] = self.translation[i] - origin[i]\n v1 = normalize(distance)\n v2 = normalize(sightVector)\n return abs(v1[0] - 
v2[0]) < hitError and \\\n abs(v1[1] - v2[1]) < hitError and abs(v1[2] - v2[2]) < hitError", "def is_on(self, obj1_loc, obj1_dims, obj2_loc, obj2_dims):\n VERT_MEASUREMENT_TOLERANCE = self.VERT_MEASUREMENT_TOLERANCE\n result = None\n obj1_x = obj1_loc[0]\n obj1_y = obj1_loc[1]\n obj1_zmin = obj1_loc[2] - (.5 * obj1_dims[2])\n obj2_xmin, obj2_xmax, obj2_ymin, obj2_ymax, obj2_zmin, obj2_zmax = self.get_corners(obj2_loc, obj2_dims)\n if obj1_x >= obj2_xmin and obj1_x <= obj2_xmax:\n if obj1_y >= obj2_ymin and obj1_y <= obj2_ymax:\n if obj1_zmin >= obj2_zmax-VERT_MEASUREMENT_TOLERANCE and obj1_zmin <= obj2_zmax+VERT_MEASUREMENT_TOLERANCE:\n result = 'on'\n return result", "def contains(self, point):\n return super().contains((point[0] - self.x, point[1] - self.y))", "def is_hit(ball, r_ball, v, target, r_target):\n v_norm = norm_2d(v)\n dr = (target[0] - ball[0], target[1] - ball[1])\n dr_norm = norm_2d(dr)\n\n p = project(dr, v)\n p_norm = norm_2d(p)\n\n if p_norm > v_norm:\n c = (v_norm ** 2 + dr_norm ** 2 - 2 * sc_mul(v, dr)) ** 0.5\n return c <= r_ball + r_target\n\n h = get_point_line_distance(target, ball, (-v[1], v[0]))\n return abs(h) <= r_ball + r_target", "def point_in_between(ob, row, cell, other_cell):\n if row:\n left = other_cell[0] < cell[0]\n if left:\n return ob.patt[0] == 1\n else:\n return ob.patt[2] == 1\n below = other_cell[1] < cell[1]\n if below:\n return ob.patt[1] == 0\n else:\n return ob.patt[1] == 2", "def is_in_desired_position(self, current_position, epsilon=0.05):\n\n is_in_desired_pos = False\n\n x_pos_plus = self.desired_point.x + epsilon\n x_pos_minus = self.desired_point.x - epsilon\n y_pos_plus = self.desired_point.y + epsilon\n y_pos_minus = self.desired_point.y - epsilon\n\n x_current = current_position.x\n y_current = current_position.y\n\n x_pos_are_close = (x_current <= x_pos_plus) and (\n x_current > x_pos_minus)\n y_pos_are_close = (y_current <= y_pos_plus) and (\n y_current > y_pos_minus)\n\n is_in_desired_pos = x_pos_are_close and y_pos_are_close\n\n rospy.logwarn(\"###### IS DESIRED POS ? 
######\")\n rospy.logwarn(\"current_position\"+str(current_position))\n rospy.logwarn(\"x_pos_plus\"+str(x_pos_plus) +\n \",x_pos_minus=\"+str(x_pos_minus))\n rospy.logwarn(\"y_pos_plus\"+str(y_pos_plus) +\n \",y_pos_minus=\"+str(y_pos_minus))\n rospy.logwarn(\"x_pos_are_close\"+str(x_pos_are_close))\n rospy.logwarn(\"y_pos_are_close\"+str(y_pos_are_close))\n rospy.logwarn(\"is_in_desired_pos\"+str(is_in_desired_pos))\n rospy.logwarn(\"############\")\n\n return is_in_desired_pos", "def contains(self, pt):\n x,y = pt.as_tuple()\n return (self.left <= x <= self.right and\n self.top <= y <= self.bottom)", "def vector_equal(v1,v2):\n if (v2.x - 0.001 <= v1.x <= v2.x + 0.001) and \\\n (v2.y - 0.001 <= v1.y <= v2.y + 0.001) and \\\n (v2.z - 0.001 <= v1.z <= v2.z + 0.001):\n return True", "def inside( self, point ):\n for i in range( 0, len(point) ):\n if math.fabs( self.center[i] - point[i] ) > self.dimLens[i]/2.0:\n return False;\n return True;", "def _has_arrived(self, context) -> bool:\n return self._target[0] == context.x and self._target[1] == context.y", "def is_perpendicular_to(self, vector):\n\n if abs(self.dot(vector)) < 0.01:\n return True\n return False", "def __contains__(self, point):\n if not isinstance(point, np.ndarray):\n point = np.array(point)\n test = self.A.dot(point.flatten()) - self.b < ABS_TOL\n return np.all(test)", "def is_within(point, surface, offset):\r\n return (point[0] >= offset[0] and point[0] < offset[0] + surface.get_width() \\\r\n and point[1] >= offset[1] and point[1] < offset[1] + surface.get_height())", "def has_intersection(self, game_object):\n distance = self.__get_distance(game_object)\n return distance <= (self._radius + game_object.get_radius())", "def __contains__(self, point, e=10e-10):\n v1 = self.vector\n v2 = Vector.createFromTwoPoints(self.point, point)\n return v1.colinear(v2, e)", "def isPointCollide(self, point):\n return self.p[0] <= point <= self.p[2]", "def is_within_distance(target_location, current_location, orientation, max_distance, d_angle_th_up, d_angle_th_low=0):\n target_vector = np.array([target_location.x - current_location.x, target_location.y - current_location.y])\n norm_target = np.linalg.norm(target_vector)\n\n # If the vector is too short, we can simply stop here\n if norm_target < 0.001:\n return True\n\n if norm_target > max_distance:\n return False\n\n forward_vector = np.array(\n [math.cos(math.radians(orientation)), math.sin(math.radians(orientation))])\n d_angle = math.degrees(math.acos(np.clip(np.dot(forward_vector, target_vector) / norm_target, -1., 1.)))\n\n return d_angle_th_low < d_angle < d_angle_th_up", "def is_within_distance(target_location, current_location, orientation, max_distance, d_angle_th_up, d_angle_th_low=0):\n target_vector = np.array([target_location.x - current_location.x, target_location.y - current_location.y])\n norm_target = np.linalg.norm(target_vector)\n\n # If the vector is too short, we can simply stop here\n if norm_target < 0.001:\n return True\n\n if norm_target > max_distance:\n return False\n\n forward_vector = np.array(\n [math.cos(math.radians(orientation)), math.sin(math.radians(orientation))])\n d_angle = math.degrees(math.acos(np.clip(np.dot(forward_vector, target_vector) / norm_target, -1., 1.)))\n\n return d_angle_th_low < d_angle < d_angle_th_up", "def IsPosViewedFromPos(self,eye,pos): \r\n p0x=eye[0] ;p0y=eye[1]\r\n p1x=pos[0] ;p1y=pos[1]\r\n dx=p1x-p0x ;dy=p1y-p0y\r\n coord0x=int(p0x) ;coord0y=int(p0y)\r\n coord1x=int(p1x) ;coord1y=int(p1y)\r\n \r\n #No more 
intersection\r\n while coord0x!=coord1x or coord0y!=coord1y:\r\n if not self.IsTransparent((coord0x,coord0y)):\r\n return False\r\n cornerx=float(coord0x+(dx>0))\r\n cornery=float(coord0y+(dy>0))\r\n det=dx*(cornery-p0y) - dy*(cornerx-p0x)\r\n \r\n vertical=((dx>0)==(dy>0))==(det>0)\r\n if vertical:\r\n #intersection with a vertial line\r\n p0y+=dy*(cornerx-p0x)/dx \r\n p0x=float(cornerx) # x is projected on the line\r\n if dx>0: coord0x+=1\r\n else: coord0x-=1\r\n \r\n else:\r\n #intersection with a horizontal line\r\n p0x+=dx*(cornery-p0y)/dy\r\n p0y=float(cornery) # y is projected on the line\r\n if dy>0: coord0y+=1\r\n else: coord0y-=1\r\n \r\n return True", "def intersect_ext(self, line):\n c = line.cross_z\n d = self.v.dot(c)\n if d == 0:\n return False, 0, 0, 0\n dp = line.p - self.p\n c2 = self.cross_z\n u = c.dot(dp) / d\n v = c2.dot(dp) / d\n return u > 0 and v > 0 and u < 1 and v < 1, self.lerp(u), u, v", "def accurate_collision(self, other) -> bool:\r\n if self.collide:\r\n if self.bbox_intersect(other):\r\n offset = round(self.x - other.x), \\\r\n round(self.y - other.y)\r\n if self.mask.overlap(other.mask, offset): # Overlap returns None or 1 point\r\n return True\r\n return False\r\n else:\r\n return False", "def _is_solvent_accessible(protein_coords, atm, min_distance=2):\n if str(atm.atomic_symbol) == 'H':\n atm_position = np.array(atm.coordinates)\n neighbour = np.array(atm.neighbours[0].coordinates)\n direction = np.subtract(atm_position, neighbour) * 2\n position = np.array([direction + atm_position])\n distance = min(np.linalg.norm(protein_coords - position, axis=1))\n if distance > min_distance:\n return True\n else:\n return False\n\n else:\n return True", "def contains_vect(self, v: Tuple[float, float]) -> bool:\n assert len(v) == 2\n return bool(lib.cpBBContainsVect(self, v))", "def contains_point(self, point) -> bool:\n return (self.pos.x <= point[0] <= self.pos.x + self.size.x and\n self.pos.y <= point[1] <= self.pos.y + self.size.y and\n self.visible)", "def is_collision_by_map_obstacle(self):\n for content in self.contents:\n if self.content.y == self.y and self.content.x == self.x:\n return True\n else:\n return False", "def if_near_boom(self, player_pos, boom_pos):\n if player_pos[0] == boom_pos[0]:\n if player_pos[1] in range(boom_pos[1]-2*30, boom_pos[1]+3*30, 30):\n return True\n else:\n return False\n elif player_pos[1] == boom_pos[1]:\n if player_pos[0] in range(boom_pos[0]-2*30, boom_pos[0]+3*30, 30):\n return True\n else:\n return False\n else:\n return False", "def IsPointInsideMesh(MeshObj, PointInObjectSpace):\n #direction is irellevant unless mesh is REALLY wierd shaped\n direction = mathutils.Vector((1,0,0)) \n epsilon = direction * 1e-6 \n count = 0 \n result, PointInObjectSpace, normal, index = MeshObj.ray_cast(PointInObjectSpace, direction) \n while result: \n count += 1 \n result, PointInObjectSpace, normal, index = MeshObj.ray_cast(PointInObjectSpace + epsilon, direction) \n return (count % 2) == 1", "def contains ( self, pos ):\n dr2 = (pos[0, :]-self.x)**2 + (pos[1, :]-self.y)**2\n # which points are in the circle?\n if self.include_border:\n inds = (dr2 - self.r**2) < self.abs_tol\n else:\n inds = (dr2 - self.r**2) < -self.abs_tol\n \n \n # if there's no poit inside\n if ~inds.any() and self.default_nearest: \n inds[argmin(dr2)] = True\n \n return inds", "def passable(self, point):\n return point not in self.obstacles", "def is_intersecting(self, ray):\n\n intersecting_point = self._sympy_plane.intersection(ray.sympy_line)[0]\n\n if 'x' in 
self._name:\n\n if self._within_y_bounds(intersecting_point.y) and self._within_z_bounds(intersecting_point.z):\n return True, np.array(map(float, [intersecting_point.x, intersecting_point.y, intersecting_point.z]))\n\n\n\n elif 'y' in self._name:\n\n if self._within_x_bounds(intersecting_point.x) and self._within_z_bounds(intersecting_point.z):\n return True, np.array(map(float, [intersecting_point.x, intersecting_point.y, intersecting_point.z]))\n\n\n\n elif 'z' in self._name:\n\n if self._within_y_bounds(intersecting_point.y) and self._within_x_bounds(intersecting_point.x):\n return True, np.array(map(float, [intersecting_point.x, intersecting_point.y, intersecting_point.z]))\n\n return False, None", "def player_physics(action, sprite, vector):\n area = []\n if ceil(sprite.width) > 1 or ceil(sprite.height) > 1:\n area = to_area(sprite.x, sprite.y, sprite.width, sprite.height)\n else:\n area.append(sprite.pos)\n\n # shift each point by the vector\n area = list((x + vector[0], y + vector[1]) for x,y in area)\n\n for pos in area:\n obj = at(pos)\n if obj and isinstance(obj, list):\n for x in obj:\n if x.tag == OBSTACLE:\n return False\n elif obj and obj.tag == OBSTACLE:\n return False\n elif not visible(pos):\n return False\n return True", "def _is_obstacle_in_front(self):\n range_front = []\n range_front[:20] = self.lidar_data[-20:]\n range_front[20:] = self.lidar_data[:20]\n range_front = list(filter(lambda num: num != 0, range_front))\n min_front = min(range_front)\n if min_front < 0.4 and min_front != 0.0:\n\t\t\treturn True\n else:\n\t\t\treturn False", "def contains ( self, pos ):\n \n poly = Polygon(array(self.edges).reshape(-1,2)[:,0],array(self.edges).reshape(-1,2)[:,1])\n dists = poly.is_inside(pos[0,:],pos[1,:]) \n if self.include_border:\n inds = dists >= -self.abs_tol\n else:\n inds = dists > 0\n \n \n # if none inside, take nearest\n if ~inds.any() and self.default_nearest:\n dr2 = array(self.edges).reshape(-1,2).mean(0)\n inds[argmin(dr2)] = True\n \n return inds", "def checkDirection(neighbour, current_point, end):\n\n for i in range(3):\n delta = abs(end[i] - current_point[i])\n if abs(end[i] - neighbour[i]) < delta and delta >= 0:\n return True, i\n\n return False, None", "def has_collide(self, obj):\n rect1 = self.anim.getRect()\n rect2 = obj.anim.getRect()\n \n rect1.move_ip(self.pos)\n rect2.move_ip(obj.pos)\n \n return rect1.colliderect(rect2)", "def within(point: tuple, box: tuple) -> bool:\r\n \r\n return box[0] < point[0] < box[2] and box[1] < point[1] < box[3]", "def intersect(self, line):\n c = line.cross_z\n d = self.v.dot(c)\n if d == 0:\n return False, 0, 0\n t = c.dot(line.p - self.p) / d\n return True, self.lerp(t), t", "def is_reachable_from(self, position: np.ndarray) -> bool:\n s, r = self.local_coordinates(position)\n return 0 <= s < self.length + CAR_LENGTH and np.abs(r) <= 2 * self.width", "def __contains__(self, point, e=1e-10):\n if point == self.p1:\n return True\n v1 = Vector.createFromTwoPoints(self.p1, point)\n v2 = self.getVector()\n return (abs(v1.angle - v2.angle) % (2 * math.pi) < e) and (v1.norm <= v2.norm)", "def inside(point, rectangle):\n\n ll = rectangle.getP1() # assume p1 is ll (lower left)\n ur = rectangle.getP2() # assume p2 is ur (upper right)\n\n return ll.getX() < point.getX() < ur.getX() and ll.getY() < point.getY() < ur.getY()", "def is_curr_location_corner(game, player_location):\n corner_positions = [(0, 0), (0, game.height - 1), (game.width - 1, 0), (game.width - 1, game.height - 1)]\n return player_location in 
corner_positions", "def is_at_target_position(self, position, tolerance=0.0):\n x, _ = position\n return x > self.corridor_length - tolerance", "def is_linearly_independent_2x2(u, v):\n uv = get_uv(u, v)\n if uv[0][0] * uv[1][1] - uv[1][0] * uv[0][1] != 0:\n return True\n else:\n return False", "def contains_point(self, x, y):\r\n if self.m == None:\r\n if abs(x - self.start[0]) > 0.6:\r\n return False\r\n else:\r\n if (y >= self.start[1] and y <= self.end[1]) or \\\r\n (y <= self.start[1] and y >= self.end[1]):\r\n return True\r\n else:\r\n return False\r\n else: \r\n y0 = int(self.m * x + self.n)\r\n if abs(y - y0) > 0.6: \r\n return False \r\n else: \r\n if ((x >= self.start[0] and x <= self.end[0]) or \\\r\n (x <= self.start[0] and x >= self.end[0])) and \\\r\n ((y >= self.start[1] and y <= self.end[1]) or \\\r\n (y <= self.start[1] and y >= self.end[1])): \r\n return True\r\n else:\r\n return False", "def __check_direction(self, vector, coordinate):\n inverse_vector = -vector[0], -vector[1]\n # Calculate hits to direction\n hits = self.__direction(vector,1,coordinate)\n if hits == 5:\n return True\n # After reaching the end, add hits towards the opposite direction\n hits = self.__direction(inverse_vector,hits,coordinate)\n if hits == 5:\n return True", "def collision_detect(self):\n\n # Check if the collision was with a map\n # Rect-based collision code\n for map_rect in Map.current_map.collision_rects:\n collision_time, norm_x, norm_y = collision.aabb_swept_collision(self.rect, (self.vx, self.vy), map_rect)\n if collision_time != 1:\n if DEBUG: print(\"[collision]\", collision_time)\n break\n self.x += self.vx * collision_time\n self.y += self.vy * collision_time\n\n remaining_time = 1 - collision_time\n \"\"\"\n if remaining_time > 0:\n self.vx *= remaining_time;\n self.vy *= remaining_time;\n \"\"\"\n if collision_time != 1:\n if abs(norm_x) > .0001:\n self.vx = -self.vx * COLLISION_DAMPING\n if abs(norm_y) > .0001:\n self.vy = -self.vy * COLLISION_DAMPING\n self.collision_counter += 1\n return True\n return False\n\n # Old, mask-based collision code\n \"\"\"\n self.mask = pygame.mask.from_surface(self.image)\n point = pygame.sprite.collide_mask(Map.current_map, self)\n if point:\n if COLLISION_ALGORITHM_EXPERIMENTAL:\n self.vx, self.vy = collision.calculate_reflection_angle(Map.current_map.mask, point, (self.vx, self.vy))\n else: \n self.vx, self.vy = collision.simple_collision(Map.current_map.mask, point, (self.vx, self.vy))\n self.vx, self.vy = self.vx * COLLISION_DAMPING, self.vy * COLLISION_DAMPING\n \n self.collision_counter += 1\n return True\n return False\n \"\"\"", "def is_inside(self, p):\n s, t = self.get_barycentric_coord(p)\n if 0 <= s <= 1 and 0 <= t <= 1 and s + t <= 1:\n return True\n else:\n return False", "def obstacle_between(self, node1, node2, agent):\n if self.obstacles[agent] is None:\n return False\n if self.is_inside(node1, self.obstacles, agent)[0] or self.is_inside(node2, self.obstacles, agent)[0]:\n return True\n\n for cords in self.xy_cords:\n x1 = node1.state[cords[0]]\n y1 = node1.state[cords[1]]\n x2 = node2.state[cords[0]]\n y2 = node2.state[cords[1]]\n p1 = Point(x1, y1)\n q1 = Point(x2, y2)\n for obstacle in self.obstacles[agent]:\n x_min = obstacle[0][0]\n x_max = obstacle[0][1]\n y_min = obstacle[1][0]\n y_max = obstacle[1][1]\n p2 = Point(x_min, y_min)\n q2 = Point(x_min, y_max)\n if doIntersect(p1, q1, p2, q2):\n return True\n p2 = Point(x_min, y_max)\n q2 = Point(x_max, y_max)\n if doIntersect(p1, q1, p2, q2):\n return True\n p2 = 
Point(x_max, y_max)\n q2 = Point(x_max, y_min)\n if doIntersect(p1, q1, p2, q2):\n return True\n p2 = Point(x_max, y_min)\n q2 = Point(x_min, y_min)\n if doIntersect(p1, q1, p2, q2):\n return True\n return False", "def check_inside(self, pos):\n x,y = pos\n return x >= self.posx and x <= self.posx + self.sizex and y >= self.posy and y <= self.posy + self.sizey", "def contains(self, point):\n return 0 <= point.x <= 1 \\\n and 0 <= point.y <= 1 \\\n and 0 <= point.z <= 1", "def __contains__(self,pos):\n # Permet de donner une contenance a l objet\n # Il devient comme une liste de point\n # Ainsi on peut le parcourir comme on le ferai avec une liste\n xmin=self.pos[0]\n xmax=self.pos[0]+self.dim[0]\n ymin=self.pos[1]\n ymax=self.pos[1]+self.dim[1]\n xpt=pos[0]\n ypt=pos[1]\n return (xpt>=xmin and xpt<=xmax and ypt>=ymin and ypt<=ymax)", "def __contains__(self,pos):\n # Permet de donner une contenance a l objet\n # Il devient comme une liste de point\n # Ainsi on peut le parcourir comme on le ferai avec une liste\n xmin=self.pos[0]\n xmax=self.pos[0]+self.dim[0]\n ymin=self.pos[1]\n ymax=self.pos[1]+self.dim[1]\n xpt=pos[0]\n ypt=pos[1]\n return (xpt>=xmin and xpt<=xmax and ypt>=ymin and ypt<=ymax)" ]
[ "0.7454063", "0.7005391", "0.7005391", "0.69903654", "0.6773237", "0.66110426", "0.6502717", "0.6439229", "0.64051235", "0.6336705", "0.6297145", "0.6216562", "0.621584", "0.6154906", "0.61373097", "0.6084647", "0.6082654", "0.6072066", "0.60683346", "0.60457075", "0.6034389", "0.60234964", "0.60185206", "0.5999616", "0.5978637", "0.5978229", "0.59609795", "0.59457964", "0.5936145", "0.59358144", "0.5923164", "0.5917276", "0.5917276", "0.5911114", "0.59068316", "0.5906811", "0.58885616", "0.587788", "0.5874754", "0.5863606", "0.5862768", "0.58543736", "0.585181", "0.5843388", "0.58356607", "0.58106065", "0.5807744", "0.5807312", "0.58017796", "0.5789631", "0.57868993", "0.5780597", "0.5772314", "0.57721925", "0.576394", "0.57610375", "0.5745635", "0.5733186", "0.5731844", "0.5728615", "0.57240945", "0.57232374", "0.5715483", "0.57117075", "0.5694945", "0.5694945", "0.56930476", "0.56908387", "0.56818616", "0.5671009", "0.566406", "0.5655837", "0.5653148", "0.5652157", "0.5648607", "0.5634096", "0.56333846", "0.56326884", "0.562396", "0.5623077", "0.5620098", "0.5617727", "0.56173956", "0.56135076", "0.5612203", "0.56102407", "0.55871725", "0.5584703", "0.5580518", "0.5573326", "0.5568292", "0.55671537", "0.5560163", "0.5552915", "0.55520254", "0.5546361", "0.55419165", "0.553643", "0.5526792", "0.5526792" ]
0.7407062
1
Determine if the vector between a UAV's position and the current waypoint intersects an obstacle.
def does_path_intersect_obstacle_3d(self, obstacle, drone_point, waypoint):
    waypoint_vector = np.subtract(waypoint, drone_point)
    obstacle_vector = np.subtract(obstacle.get_point(), drone_point)
    obstacle_vector_magnitude = VectorMath.get_vector_magnitude(obstacle_vector)
    rejection_vector = VectorMath.get_vector_rejection(obstacle_vector, waypoint_vector)
    rejection_vector_magnitude = VectorMath.get_vector_magnitude(rejection_vector)

    # Uncomment for DEBUGGING ONLY
    print("Waypoint Vector: " + str(waypoint_vector))
    print("Obstacle Vector: " + str(obstacle_vector))
    print("Rejection Vector: " + str(rejection_vector))
    print("Rejection Vector Magnitude: " + str(rejection_vector_magnitude))
    print("Obstacle Radius: " + str(obstacle.get_radius()))
    print("Distance From Obstacle: " + str(VectorMath.get_vector_magnitude(np.subtract(drone_point, obstacle.get_point()))))

    if self.is_obstacle_in_path_of_drone(obstacle_vector, waypoint_vector):
        return rejection_vector_magnitude < Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS

    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def does_uav_intersect_obstacle_vertically(self, obstacle, drone_point, waypoint):\n if isinstance(obstacle, StationaryObstacle):\n if drone_point[2] < obstacle.height + Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS:\n return True\n\n return False", "def does_path_intersect_obstacle_2d(self, obstacle, uav_point, waypoint):\n drone_point = uav_point[:-1]\n waypoint = waypoint[:-1]\n obstacle_point = obstacle.get_point()[:-1]\n\n waypoint_vector = np.subtract(waypoint, drone_point)\n obstacle_vector = np.subtract(obstacle_point, drone_point)\n obstacle_vector_magnitude = VectorMath.get_vector_magnitude(obstacle_vector)\n rejection_vector = VectorMath.get_vector_rejection(obstacle_vector, waypoint_vector)\n rejection_vector_magnitude = VectorMath.get_vector_magnitude(rejection_vector)\n\n # Uncomment for DEBUGGING ONLY\n print(\"Waypoint Vector: \" + str(waypoint_vector))\n print(\"Obstacle Vector: \" + str(obstacle_vector))\n print(\"Rejection Vector: \" + str(rejection_vector))\n print(\"Rejection Vector Magnitude: \" + str(rejection_vector_magnitude))\n print(\"Obstacle Radius: \" + str(obstacle.get_radius()))\n print(\"Distance From Obstacle: \" + str(VectorMath.get_vector_magnitude(np.subtract(uav_point, obstacle.get_point()))))\n\n if self.is_obstacle_in_path_of_drone(obstacle_vector, waypoint_vector):\n return rejection_vector_magnitude < obstacle.get_radius()\n\n return False", "def inside_obstacle(point, obstacle):\r\n for obs in obstacle:\r\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][0] and point[1] < obs[1][2]:\r\n return 1\r\n return 0", "def inside_obstacle(point, obstacle):\r\n for obs in obstacle:\r\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][0] and point[1] < obs[1][2]:\r\n return 1\r\n return 0", "def is_obstacle_in_path_of_drone(self, obstacle_vector, waypoint_vector):\n obstacle_list = obstacle_vector.tolist()\n waypoint_list = waypoint_vector.tolist()\n\n for index in range(len(obstacle_list)):\n if all(item > 0 for item in [-1.0 * obstacle_list[index], waypoint_vector[index]]) or all(item < 0 for item in [-1.0 * obstacle_list[index], waypoint_vector[index]]):\n return False\n\n return True", "def is_obstacle_in_path(self):\n for obstacle in self.obstacles.tolist():\n print(\"obstacle.get_point():\", obstacle.get_point())\n dist_to_obstacle = VectorMath.get_vector_magnitude(np.subtract(obstacle.get_point(), self.drone.get_point()))\n if dist_to_obstacle < obstacle.get_radius() + Constants.DETECTION_THRESHOLD:\n if isinstance(obstacle, StationaryObstacle):\n paths = self.generate_possible_paths(obstacle)\n\n if len(paths) != 0:\n return True, np.array(paths)\n elif isinstance(obstacle, MovingObstacle):\n pass\n\n return False, None", "def is_approaching(self, other_particle):\n if self.pos_x < other_particle.pos_x:\n d_v_x = self.velocity_x - other_particle.velocity_x\n else:\n d_v_x = other_particle.velocity_x - self.velocity_x\n\n if self.pos_y < other_particle.pos_y:\n d_v_y = self.velocity_y - other_particle.velocity_y\n else:\n d_v_y = other_particle.velocity_y - self.velocity_y\n\n return d_v_x > 0 or d_v_y > 0", "def check_position(self, player):\n\n # Mid point of the segment defining the goal\n mid = Point.mid_point(self.s_pos, self.e_pos)\n\n # Transposition of this point by the direction vector of the goal\n # to get the direction vector with its origin in the center of the goal\n mid_prime = self.dir + mid\n\n # Creating both needed vectors\n v1 = Vector.v_from_pp(mid, player.pos)\n v2 = 
Vector.v_from_pp(mid, mid_prime)\n\n # Getting the angle and checking if it is a valid one\n angle = v1.angle(v2)\n\n return self.is_in_interval(-math.pi / 2, math.pi / 2, angle)", "def has_uav_reached_current_waypoint(self):\n return self.drone.has_reached_waypoint()", "def check_for_obstacles(self):\n obs = False\n obs_p = []\n for point in self.obstacles:\n if -0.15 <= point[1] <= 0.15: # robot is 178mm wide\n # Obstacles should be less than or equal to 0.2 m away before being detected\n if 0 <= point[0] <= .2:\n obs_p.append(point)\n obs = True\n if obs:\n pos = self.determine_pos_of_obstacle(obs_p)\n data = Obstacle()\n data.x = pos[0]\n data.y = pos[1]\n data.obstacle = True\n self.obs_pub.publish(data)", "def is_in_obstacle(self, x: float, y: float) -> bool:\n for obstacle in self.obstacles:\n if obstacle.contains_point((x, y)):\n return True\n return False", "def through_obstacle(line, obstacles):\r\n noofpoints = 20\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def __contains__(self, item):\n try:\n pos = Vec2(*item)\n return pos.x >= self.origin.x and pos.y >= self.origin.y \\\n and pos.x < self.origin.x + self.size.x \\\n and pos.y < self.origin.y + self.size.y\n except TypeError:\n return False", "def IsPointInsideMesh2(obj, p, max_dist = 1.84467e+19):\n bResult, point, normal, face = obj.closest_point_on_mesh(p, max_dist)\n p2 = point-p\n v = p2.dot(normal)\n return not(v < 0.0)", "def goal_occupied(self, view):\n for line in view.obstacles:\n if linesegdist2(line.p1, line.p2, self.goal) < self.radius ** 2:\n return True\n\n for p in view.pedestrians:\n if p.velocity.length2() == 0.0:\n if p.position.distance_to2(self.goal) < p.radius:\n return True\n\n return False", "def through_obstacle(line, obstacles):\r\n noofpoints = 100\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def doesArmTouchObstacles(armPos, obstacles):\n for i in range(len(armPos)):\n cur_arm = armPos[i]\n arm_x = [cur_arm[0][0],cur_arm[1][0]]\n arm_y = [cur_arm[0][1],cur_arm[1][1]]\n if (arm_x[0] != arm_x[1]):\n arm_a = (arm_y[1]-arm_y[0])/(arm_x[1]-arm_x[0])\n arm_b = arm_y[1]-arm_a*arm_x[1]\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n x_range = np.linspace(arm_x[0],arm_x[1],1000)\n y_range = arm_a * x_range + arm_b\n for j in range(1000):\n cur_x = x_range[j]\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n if (arm_x[0] == arm_x[1]):\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n y_range = np.linspace(arm_y[0],arm_y[1],1000)\n cur_x = arm_x[0]\n for j in range(1000):\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n\n\n #print(obstacles)\n\n return False", "def __contains__(self, point, e=10e-10):\n v1 = self.vector\n v2 = Vector.createFromTwoPoints(self.point, point)\n return abs(v1.angle - v2.angle) < e", "def intersects(self):\n match = False\n for i in range(len(self.__points) - 1):\n p1 = self.__points[i]\n p2 = self.__points[i + 1]\n bounds = self.__line_segment(p1, p2)\n if not bounds is None:\n xmin = bounds[0]\n ymin = bounds[1]\n xmax = bounds[0]\n ymax = bounds[1]\n for j in range(len(bounds)):\n if not (j % 2):\n if bounds[j] < xmin:\n xmin = bounds[j]\n elif bounds[j] > 
xmax:\n xmax = bounds[j]\n else:\n if bounds[j] < ymin:\n ymin = bounds[j]\n elif bounds[j] > ymax:\n ymax = bounds[j]\n x = self.x\n y = self.y\n # TODO: Determine direction, and check two leading edge points; ie. last vector ----> then points are x+width,y+width x+width,y-width\n if x > xmin and x < xmax and y > ymin and y < ymax:\n match = True\n break\n return match", "def is_point_within(self, x, y):\n return abs(x - self._x_position) <= self._x_length / 2 and abs(y - self._y_position) <= self._y_length / 2", "def check_shot_on_target(self, shot):\n # Defining a few variables to ease the reading\n # Here we define the x and y interval of the goal's segment\n x_min = min(self.s_pos.x, self.e_pos.x)\n x_max = max(self.s_pos.x, self.e_pos.x)\n\n y_min = min(self.s_pos.y, self.e_pos.y)\n y_max = max(self.s_pos.y, self.e_pos.y)\n\n # Shortening variables names\n o_x = shot.opponent.pos.x\n o_y = shot.opponent.pos.y\n\n # If the angle = pi / 2 or - pi / 2, then tan(angle) is undefined\n # In these cases, the shot is vertical, therefore it is valid\n # iff the x coordinate of the opponent is in the goal's x interval\n if abs(shot.angle) == math.pi / 2:\n return self.is_in_interval(x_min, x_max, o_x)\n\n # If the angle = 0, pi or -pi, then tan(angle) is 0 which can lead to \n # undefined intersection points (if the goal is vertical for example)\n # although there is an intersection point\n # \n # In these cases, the shot is horizontal, therefore it is valid\n # iff the y coordinate of the opponent is in the goal's y interval\n if abs(shot.angle) == math.pi or shot.angle == 0:\n return self.is_in_interval(y_min, y_max, o_y)\n\n # Using tan the least amount of time possible, for this is a slow function\n tan_theta = math.tan(shot.angle)\n\n # Define the LE of the shot\n le1 = LinearEquation(tan_theta, o_y - tan_theta * o_x)\n le2 = None\n\n # If the goal is vertical, finding the intersection point\n # is not possible using the normal way\n #\n # That being said, unless the LE of the shot is vertical too (which it \n # isn't as it is checked before hand) there has to be an intersection point\n # This intersection must happen when at the x coodinate of the goal's segment\n # therefore, it is possible to compute the y coordinate of the intersection by\n # computing the application of the shot's LE on this ex coordinate\n #\n # Then, the resulting y is valid iff it is in the goal's segment interval\n if self.e_pos.x - self.s_pos.x == 0:\n y = le1.apply(self.e_pos.x)\n return self.is_in_interval(y_min, y_max, y)\n\n # The normal way of solving the intersection of these two LEs\n else:\n\n # Shortening variables by computing the coefficient of the goal's LE\n ratio = (self.e_pos.y - self.s_pos.y) / (self.e_pos.x - self.s_pos.x)\n\n # If the lines are parallels (have the same coefficient) return False\n if math.tan(shot.angle) == ratio:\n return False\n\n # Defining the goal's LE\n le2 = LinearEquation(ratio, self.e_pos.y - self.e_pos.x * ratio)\n\n # Finding the intersection point of the two LEs\n # If there isn't one, return False (but there should be one\n # given all the asserts we do before hand, this is just for completion sake)\n p_intersect = le1.intersection(le2)\n if p_intersect == None:\n return False\n\n # If the intersection point's abscissa is in the goal's x interval, then it is\n # a valid abstracted shot going \n return self.is_in_interval(x_min, x_max, p_intersect.x)", "def inside(self, uv):\n result = self._trimmed.Perform(gp_Pnt2d(uv[0], uv[1]))\n return result == TopAbs_IN", "def 
is_inside(inner_path, outer_path):\r\n if not hasattr(inner_path, 'bounding_box'):\r\n inner_path.bounding_box = CutPlanner.bounding_box(inner_path)\r\n if not hasattr(outer_path, 'bounding_box'):\r\n outer_path.bounding_box = CutPlanner.bounding_box(outer_path)\r\n if outer_path.bounding_box[0] > inner_path.bounding_box[0]:\r\n # outer minx > inner minx (is not contained)\r\n return False\r\n if outer_path.bounding_box[1] > inner_path.bounding_box[1]:\r\n # outer miny > inner miny (is not contained)\r\n return False\r\n if outer_path.bounding_box[2] < inner_path.bounding_box[2]:\r\n # outer maxx < inner maxx (is not contained)\r\n return False\r\n if outer_path.bounding_box[3] < inner_path.bounding_box[3]:\r\n # outer maxy < inner maxy (is not contained)\r\n return False\r\n if outer_path.bounding_box == inner_path.bounding_box:\r\n if outer_path == inner_path: # This is the same object.\r\n return False\r\n if not hasattr(outer_path, 'vm'):\r\n outer_path = Polygon([outer_path.point(i / 100.0, error=1e4) for i in range(101)])\r\n vm = VectorMontonizer()\r\n vm.add_cluster(outer_path)\r\n outer_path.vm = vm\r\n for i in range(101):\r\n p = inner_path.point(i / 100.0, error=1e4)\r\n if not outer_path.vm.is_point_inside(p.x, p.y):\r\n return False\r\n return True", "def is_obstacle(self, pos: tuple):\n if self.within_map(pos):\n return self.map[round(pos[0]), round(pos[1])] == OBSTACLE\n else:\n return False", "def __check_obstacle_intersections(self, goal):\n # generate a proximity test geometry for the goal\n min_clearance = self.cfg[\"goal\"][\"min_clearance\"]\n n = 6 # goal is n sided polygon\n goal_test_geometry = []\n for i in range(n):\n goal_test_geometry.append(\n [goal[0] + min_clearance * cos(i * 2 * pi / n),\n goal[1] + min_clearance * sin(i * 2 * pi / n)])\n goal_test_geometry = Polygon(goal_test_geometry)\n intersects = False\n for obstacle in self.current_obstacles:\n intersects |= geometrics.convex_polygon_intersect_test(goal_test_geometry, obstacle.global_geometry)\n return intersects", "def _detect_obstacles(self):\n def _distance(point, line_point1, line_point2):\n \"\"\"calcuate the distance between a point and a line\"\"\"\n vec1 = line_point1 - point\n vec2 = line_point2 - point\n distance = np.abs(np.cross(vec1,vec2)) / np.linalg.norm(line_point1-line_point2)\n return distance\n\n def _acute_angle(point, line_point1, line_point2):\n \"\"\"detetrmine if the point is whithin the boundary of the line through law of cosines\"\"\"\n base_line = np.linalg.norm(line_point1-line_point2)\n assert base_line > 0, \"check the library useage\"\n line1 = np.linalg.norm(point - line_point1)\n line2 = np.linalg.norm(point - line_point2)\n cos_angle_1 = (base_line**2 + line1**2 - line2**2)/(2*base_line*line1)\n cos_angle_2 = (base_line**2 + line2**2 - line1**2)/(2*base_line*line2)\n if cos_angle_1 * cos_angle_2 > 0:\n return True\n else:\n return False\n\n if self.obstacles != \"None\": # if user assigned some obstacles\n for line in self.env_config: \n line_point1, line_point2 = np.array(line[0]), np.array(line[1])\n point = np.array(self.state[:2])\n distance = _distance(point, line_point1, line_point2)\n acute_angle = _acute_angle(point, line_point1, line_point2)\n if distance <= 0.02 and acute_angle:\n self.adsorption = True\n break\n else:\n self.adsorption = False", "def interior_contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_positive( self.eval(Vobj) ) \n except AttributeError:\n pass\n \n if Vobj.is_line(): 
\n return self.polyhedron()._is_zero( self.eval(Vobj) )\n elif Vobj.is_vertex(): \n return self.polyhedron()._is_positive( self.eval(Vobj) ) \n else: # Vobj.is_ray()\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def has_intersection(self, obj):\r\n obj_x, obj_y = obj.get_location()\r\n x = self.__x\r\n y = self.__y\r\n # Distance formula\r\n distance = sqrt((obj_x - x) ** 2 + (obj_y - y) ** 2)\r\n if distance <= obj.get_radius() + self.__radius:\r\n return True\r\n return False", "def contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_nonneg( self.eval(Vobj) ) \n except AttributeError:\n pass\n \n if Vobj.is_line(): \n return self.polyhedron()._is_zero( self.eval(Vobj) )\n else:\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def isInPlane(self, p) -> bool:\n # Testing for zero is done with math.isclose, to avoid rounding/floating point errors.\n # Since we are testing near zero, abs_tol is set to 1e-09\n return math.isclose(\n math.fabs(\n dot(\n self.normal(),\n Vector.connect(p.x, p.y, p.z, self.p0.x, self.p0.y, self.p0.z),\n )\n ),\n 0,\n rel_tol=1e-09,\n abs_tol=1e-09,\n )", "def isNearTo(self, point):\n # BBB: I'm using a majored version of the collide rect to fix a problem with a charas-bouncing-effect on movement... :-|\n x, y = self.currentLevel.transformToScreenCoordinate(point)\n collide_rect = self.collide_rect\n collide_rect.height+=3\n return collide_rect.collidepoint(x, y)", "def is_point_in(self, point):\n return (self.upperleft[0] <= point[0] <= self.upperright[0] and self.upperleft[1] <= point[1] <= self.bottomleft[1])", "def is_point_in(self, point):\n return (self.upperleft[0] <= point[0] <= self.upperright[0] and self.upperleft[1] <= point[1] <= self.bottomleft[1])", "def interior_contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_positive( self.eval(Vobj) )\n except AttributeError:\n pass\n\n if Vobj.is_line():\n return self.polyhedron()._is_zero( self.eval(Vobj) )\n elif Vobj.is_vertex():\n return self.polyhedron()._is_positive( self.eval(Vobj) )\n else: # Vobj.is_ray()\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def isOnInteriorSide(self, v):\n n = self.normalVect()\n return n.dotProduct(vector(self.vertices[0]) - vector(v)) > 0", "def is_in_collision_point(self, pos):\n x, y = pos\n return sqrt((self.x - x)**2 + (self.y - y)**2) < self.r", "def contains(self, Vobj):\n try:\n if Vobj.is_vector(): # assume we were passed a point\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )\n except AttributeError:\n pass\n\n if Vobj.is_line():\n return self.polyhedron()._is_zero( self.eval(Vobj) )\n else:\n return self.polyhedron()._is_nonneg( self.eval(Vobj) )", "def _is_at_position(pose_1, pose_2, atol):\n # type: (typing.Union[PoseStamped, PositionTarget, Waypoint], typing.Union[PoseStamped, PositionTarget, Waypoint], float) -> bool\n\n pos = [np.zeros(3), np.zeros(3)]\n for i, pose in enumerate((pose_1, pose_2)):\n if isinstance(pose, PoseStamped):\n pos[i][:] = np.array([pose.pose.position.x, pose.pose.position.y, pose.pose.position.z])\n elif isinstance(pose, PositionTarget):\n pos[i][:] = np.array([pose.position.x, pose.position.y, pose.position.z])\n elif isinstance(pose, Waypoint):\n pos[i][:] = np.array([pose.x_lat, pose.y_long, pose.z_alt])\n else:\n raise Warning(\"Wrong type\")\n\n return all(np.isclose(pos[0], pos[1], atol=atol))", "def is_at_intersection(self):\n directions = 0\n self.tile 
= (self.get_nearest_row(), self.get_nearest_col())\n if self.internal_map[self.tile[0] - 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0] + 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] - 1] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] + 1] not in ('x', ):\n directions += 1\n return True if directions > 2 else False", "def is_ahead_of(self, pose, x, y):\n x1 = pose.position.x\n y1 = pose.position.y\n orientation = pose.orientation\n euler = tf.transformations.euler_from_quaternion(\n [orientation.x, orientation.y, orientation.z, orientation.w])\n yaw = euler[2]\n return ((x - x1) * math.cos(yaw) + (y - y1) * math.sin(yaw)) > 0", "def isInGoal(self):\n coordx= self.playerPos.x\n coordy= self.playerPos.y\n target = 0 if self.id_team == 1 else 1\n\n if((((target == 0)and (coordx<=5))|\n ((target == 1) and(coordx>145))) \n and (coordy<=50 and coordy>=40)):\n return True\n else:\n return False", "def is_intersection(self, location):\n loc = to_carla_location(location)\n waypoint = self._map.get_waypoint(loc,\n project_to_road=False,\n lane_type=carla.LaneType.Any)\n if not waypoint:\n # The map didn't return a waypoint because the location not within\n # mapped location.\n return False\n else:\n # XXX(ionel): is_intersection will be deprecated in the future\n # Carla releases.\n return waypoint.is_intersection", "def origin_is_inside_hitbox(self, hitbox):\n if self.hitdetection.accurate:\n max_x = max(hitbox, key = lambda index: abs(index[0]))[0]\n max_y = max(hitbox, key = lambda index: abs(index[1]))[1]\n \n m = max(max_x, max_y)\n \n num_intersections = 0\n for i in range(0, len(hitbox), 1):\n if self.hitdetection.module.does_intersect([[m, m], [0, 0]], [hitbox[i], hitbox[(i + 1) % len(hitbox)]]):\n num_intersections += 1\n return [False, True][num_intersections % 2]\n else:\n has_smaller = False\n has_bigger = False\n for hx, hy in hitbox:\n if hx > 0 and hy > 0:\n has_bigger = True\n if hx < 0 and hy < 0:\n has_smaller = True\n return has_smaller and has_bigger", "def contains_pt(self, pt):\n x, y = pt\n if not self.x - self.radius < x < self.x + self.radius:\n return False\n if not self.y - self.radius < y < self.y + self.radius:\n return False\n return True", "def contains(self, position):\n return (position - self._position).dot(self.normal(position)) < 0", "def condition(o):\n\t\t\tv = o.pos() - self.pos()\n\t\t\treturn v.norm2() < dist2 and abs(angle_diff(v.angle(),self.angle())) < math.radians(45)", "def check_obstructed(r1,r2): \n \n if r1==r2:\n return False\n \n #Densely sample line connecting r1 and r2.\n #If any of those sampled points is inside the rectangle, then the \n #line of sight intersects the rectangle and the tower's view is\n #obstructed.\n NP = 1000\n sampled_x = np.linspace(r1[0],r2[0],NP)\n sampled_y = np.linspace(r1[1],r2[1],NP)\n for x,y,w,h in self.coordinates__obstacles:\n for pt in xrange(NP):\n if (sampled_x[pt] > x) and (sampled_x[pt] < x+w) and \\\n (sampled_y[pt] > y) and (sampled_y[pt] < y+h):\n return True\n return False", "def hit(self, otherball):\r\n dx = (self.unif[0] + self.vx) - (otherball.unif[0] + otherball.vx)\r\n dy = (self.unif[1] + self.vy) - (otherball.unif[1] + otherball.vy)\r\n rd = self.radius + otherball.radius\r\n return dot(dx, dy) < (rd * rd)", "def hit(self, origin, sightVector, hitError):\n distance = [0, 0, 0]\n for i in range(0, 3):\n distance[i] = self.translation[i] - origin[i]\n v1 = 
normalize(distance)\n v2 = normalize(sightVector)\n return abs(v1[0] - v2[0]) < hitError and \\\n abs(v1[1] - v2[1]) < hitError and abs(v1[2] - v2[2]) < hitError", "def is_on(self, obj1_loc, obj1_dims, obj2_loc, obj2_dims):\n VERT_MEASUREMENT_TOLERANCE = self.VERT_MEASUREMENT_TOLERANCE\n result = None\n obj1_x = obj1_loc[0]\n obj1_y = obj1_loc[1]\n obj1_zmin = obj1_loc[2] - (.5 * obj1_dims[2])\n obj2_xmin, obj2_xmax, obj2_ymin, obj2_ymax, obj2_zmin, obj2_zmax = self.get_corners(obj2_loc, obj2_dims)\n if obj1_x >= obj2_xmin and obj1_x <= obj2_xmax:\n if obj1_y >= obj2_ymin and obj1_y <= obj2_ymax:\n if obj1_zmin >= obj2_zmax-VERT_MEASUREMENT_TOLERANCE and obj1_zmin <= obj2_zmax+VERT_MEASUREMENT_TOLERANCE:\n result = 'on'\n return result", "def contains(self, point):\n return super().contains((point[0] - self.x, point[1] - self.y))", "def is_hit(ball, r_ball, v, target, r_target):\n v_norm = norm_2d(v)\n dr = (target[0] - ball[0], target[1] - ball[1])\n dr_norm = norm_2d(dr)\n\n p = project(dr, v)\n p_norm = norm_2d(p)\n\n if p_norm > v_norm:\n c = (v_norm ** 2 + dr_norm ** 2 - 2 * sc_mul(v, dr)) ** 0.5\n return c <= r_ball + r_target\n\n h = get_point_line_distance(target, ball, (-v[1], v[0]))\n return abs(h) <= r_ball + r_target", "def point_in_between(ob, row, cell, other_cell):\n if row:\n left = other_cell[0] < cell[0]\n if left:\n return ob.patt[0] == 1\n else:\n return ob.patt[2] == 1\n below = other_cell[1] < cell[1]\n if below:\n return ob.patt[1] == 0\n else:\n return ob.patt[1] == 2", "def is_in_desired_position(self, current_position, epsilon=0.05):\n\n is_in_desired_pos = False\n\n x_pos_plus = self.desired_point.x + epsilon\n x_pos_minus = self.desired_point.x - epsilon\n y_pos_plus = self.desired_point.y + epsilon\n y_pos_minus = self.desired_point.y - epsilon\n\n x_current = current_position.x\n y_current = current_position.y\n\n x_pos_are_close = (x_current <= x_pos_plus) and (\n x_current > x_pos_minus)\n y_pos_are_close = (y_current <= y_pos_plus) and (\n y_current > y_pos_minus)\n\n is_in_desired_pos = x_pos_are_close and y_pos_are_close\n\n rospy.logwarn(\"###### IS DESIRED POS ? 
######\")\n rospy.logwarn(\"current_position\"+str(current_position))\n rospy.logwarn(\"x_pos_plus\"+str(x_pos_plus) +\n \",x_pos_minus=\"+str(x_pos_minus))\n rospy.logwarn(\"y_pos_plus\"+str(y_pos_plus) +\n \",y_pos_minus=\"+str(y_pos_minus))\n rospy.logwarn(\"x_pos_are_close\"+str(x_pos_are_close))\n rospy.logwarn(\"y_pos_are_close\"+str(y_pos_are_close))\n rospy.logwarn(\"is_in_desired_pos\"+str(is_in_desired_pos))\n rospy.logwarn(\"############\")\n\n return is_in_desired_pos", "def contains(self, pt):\n x,y = pt.as_tuple()\n return (self.left <= x <= self.right and\n self.top <= y <= self.bottom)", "def vector_equal(v1,v2):\n if (v2.x - 0.001 <= v1.x <= v2.x + 0.001) and \\\n (v2.y - 0.001 <= v1.y <= v2.y + 0.001) and \\\n (v2.z - 0.001 <= v1.z <= v2.z + 0.001):\n return True", "def inside( self, point ):\n for i in range( 0, len(point) ):\n if math.fabs( self.center[i] - point[i] ) > self.dimLens[i]/2.0:\n return False;\n return True;", "def _has_arrived(self, context) -> bool:\n return self._target[0] == context.x and self._target[1] == context.y", "def is_perpendicular_to(self, vector):\n\n if abs(self.dot(vector)) < 0.01:\n return True\n return False", "def __contains__(self, point):\n if not isinstance(point, np.ndarray):\n point = np.array(point)\n test = self.A.dot(point.flatten()) - self.b < ABS_TOL\n return np.all(test)", "def is_within(point, surface, offset):\r\n return (point[0] >= offset[0] and point[0] < offset[0] + surface.get_width() \\\r\n and point[1] >= offset[1] and point[1] < offset[1] + surface.get_height())", "def has_intersection(self, game_object):\n distance = self.__get_distance(game_object)\n return distance <= (self._radius + game_object.get_radius())", "def __contains__(self, point, e=10e-10):\n v1 = self.vector\n v2 = Vector.createFromTwoPoints(self.point, point)\n return v1.colinear(v2, e)", "def isPointCollide(self, point):\n return self.p[0] <= point <= self.p[2]", "def is_within_distance(target_location, current_location, orientation, max_distance, d_angle_th_up, d_angle_th_low=0):\n target_vector = np.array([target_location.x - current_location.x, target_location.y - current_location.y])\n norm_target = np.linalg.norm(target_vector)\n\n # If the vector is too short, we can simply stop here\n if norm_target < 0.001:\n return True\n\n if norm_target > max_distance:\n return False\n\n forward_vector = np.array(\n [math.cos(math.radians(orientation)), math.sin(math.radians(orientation))])\n d_angle = math.degrees(math.acos(np.clip(np.dot(forward_vector, target_vector) / norm_target, -1., 1.)))\n\n return d_angle_th_low < d_angle < d_angle_th_up", "def is_within_distance(target_location, current_location, orientation, max_distance, d_angle_th_up, d_angle_th_low=0):\n target_vector = np.array([target_location.x - current_location.x, target_location.y - current_location.y])\n norm_target = np.linalg.norm(target_vector)\n\n # If the vector is too short, we can simply stop here\n if norm_target < 0.001:\n return True\n\n if norm_target > max_distance:\n return False\n\n forward_vector = np.array(\n [math.cos(math.radians(orientation)), math.sin(math.radians(orientation))])\n d_angle = math.degrees(math.acos(np.clip(np.dot(forward_vector, target_vector) / norm_target, -1., 1.)))\n\n return d_angle_th_low < d_angle < d_angle_th_up", "def IsPosViewedFromPos(self,eye,pos): \r\n p0x=eye[0] ;p0y=eye[1]\r\n p1x=pos[0] ;p1y=pos[1]\r\n dx=p1x-p0x ;dy=p1y-p0y\r\n coord0x=int(p0x) ;coord0y=int(p0y)\r\n coord1x=int(p1x) ;coord1y=int(p1y)\r\n \r\n #No more 
intersection\r\n while coord0x!=coord1x or coord0y!=coord1y:\r\n if not self.IsTransparent((coord0x,coord0y)):\r\n return False\r\n cornerx=float(coord0x+(dx>0))\r\n cornery=float(coord0y+(dy>0))\r\n det=dx*(cornery-p0y) - dy*(cornerx-p0x)\r\n \r\n vertical=((dx>0)==(dy>0))==(det>0)\r\n if vertical:\r\n #intersection with a vertial line\r\n p0y+=dy*(cornerx-p0x)/dx \r\n p0x=float(cornerx) # x is projected on the line\r\n if dx>0: coord0x+=1\r\n else: coord0x-=1\r\n \r\n else:\r\n #intersection with a horizontal line\r\n p0x+=dx*(cornery-p0y)/dy\r\n p0y=float(cornery) # y is projected on the line\r\n if dy>0: coord0y+=1\r\n else: coord0y-=1\r\n \r\n return True", "def intersect_ext(self, line):\n c = line.cross_z\n d = self.v.dot(c)\n if d == 0:\n return False, 0, 0, 0\n dp = line.p - self.p\n c2 = self.cross_z\n u = c.dot(dp) / d\n v = c2.dot(dp) / d\n return u > 0 and v > 0 and u < 1 and v < 1, self.lerp(u), u, v", "def accurate_collision(self, other) -> bool:\r\n if self.collide:\r\n if self.bbox_intersect(other):\r\n offset = round(self.x - other.x), \\\r\n round(self.y - other.y)\r\n if self.mask.overlap(other.mask, offset): # Overlap returns None or 1 point\r\n return True\r\n return False\r\n else:\r\n return False", "def _is_solvent_accessible(protein_coords, atm, min_distance=2):\n if str(atm.atomic_symbol) == 'H':\n atm_position = np.array(atm.coordinates)\n neighbour = np.array(atm.neighbours[0].coordinates)\n direction = np.subtract(atm_position, neighbour) * 2\n position = np.array([direction + atm_position])\n distance = min(np.linalg.norm(protein_coords - position, axis=1))\n if distance > min_distance:\n return True\n else:\n return False\n\n else:\n return True", "def contains_vect(self, v: Tuple[float, float]) -> bool:\n assert len(v) == 2\n return bool(lib.cpBBContainsVect(self, v))", "def contains_point(self, point) -> bool:\n return (self.pos.x <= point[0] <= self.pos.x + self.size.x and\n self.pos.y <= point[1] <= self.pos.y + self.size.y and\n self.visible)", "def is_collision_by_map_obstacle(self):\n for content in self.contents:\n if self.content.y == self.y and self.content.x == self.x:\n return True\n else:\n return False", "def if_near_boom(self, player_pos, boom_pos):\n if player_pos[0] == boom_pos[0]:\n if player_pos[1] in range(boom_pos[1]-2*30, boom_pos[1]+3*30, 30):\n return True\n else:\n return False\n elif player_pos[1] == boom_pos[1]:\n if player_pos[0] in range(boom_pos[0]-2*30, boom_pos[0]+3*30, 30):\n return True\n else:\n return False\n else:\n return False", "def IsPointInsideMesh(MeshObj, PointInObjectSpace):\n #direction is irellevant unless mesh is REALLY wierd shaped\n direction = mathutils.Vector((1,0,0)) \n epsilon = direction * 1e-6 \n count = 0 \n result, PointInObjectSpace, normal, index = MeshObj.ray_cast(PointInObjectSpace, direction) \n while result: \n count += 1 \n result, PointInObjectSpace, normal, index = MeshObj.ray_cast(PointInObjectSpace + epsilon, direction) \n return (count % 2) == 1", "def contains ( self, pos ):\n dr2 = (pos[0, :]-self.x)**2 + (pos[1, :]-self.y)**2\n # which points are in the circle?\n if self.include_border:\n inds = (dr2 - self.r**2) < self.abs_tol\n else:\n inds = (dr2 - self.r**2) < -self.abs_tol\n \n \n # if there's no poit inside\n if ~inds.any() and self.default_nearest: \n inds[argmin(dr2)] = True\n \n return inds", "def passable(self, point):\n return point not in self.obstacles", "def is_intersecting(self, ray):\n\n intersecting_point = self._sympy_plane.intersection(ray.sympy_line)[0]\n\n if 'x' in 
self._name:\n\n if self._within_y_bounds(intersecting_point.y) and self._within_z_bounds(intersecting_point.z):\n return True, np.array(map(float, [intersecting_point.x, intersecting_point.y, intersecting_point.z]))\n\n\n\n elif 'y' in self._name:\n\n if self._within_x_bounds(intersecting_point.x) and self._within_z_bounds(intersecting_point.z):\n return True, np.array(map(float, [intersecting_point.x, intersecting_point.y, intersecting_point.z]))\n\n\n\n elif 'z' in self._name:\n\n if self._within_y_bounds(intersecting_point.y) and self._within_x_bounds(intersecting_point.x):\n return True, np.array(map(float, [intersecting_point.x, intersecting_point.y, intersecting_point.z]))\n\n return False, None", "def player_physics(action, sprite, vector):\n area = []\n if ceil(sprite.width) > 1 or ceil(sprite.height) > 1:\n area = to_area(sprite.x, sprite.y, sprite.width, sprite.height)\n else:\n area.append(sprite.pos)\n\n # shift each point by the vector\n area = list((x + vector[0], y + vector[1]) for x,y in area)\n\n for pos in area:\n obj = at(pos)\n if obj and isinstance(obj, list):\n for x in obj:\n if x.tag == OBSTACLE:\n return False\n elif obj and obj.tag == OBSTACLE:\n return False\n elif not visible(pos):\n return False\n return True", "def _is_obstacle_in_front(self):\n range_front = []\n range_front[:20] = self.lidar_data[-20:]\n range_front[20:] = self.lidar_data[:20]\n range_front = list(filter(lambda num: num != 0, range_front))\n min_front = min(range_front)\n if min_front < 0.4 and min_front != 0.0:\n\t\t\treturn True\n else:\n\t\t\treturn False", "def contains ( self, pos ):\n \n poly = Polygon(array(self.edges).reshape(-1,2)[:,0],array(self.edges).reshape(-1,2)[:,1])\n dists = poly.is_inside(pos[0,:],pos[1,:]) \n if self.include_border:\n inds = dists >= -self.abs_tol\n else:\n inds = dists > 0\n \n \n # if none inside, take nearest\n if ~inds.any() and self.default_nearest:\n dr2 = array(self.edges).reshape(-1,2).mean(0)\n inds[argmin(dr2)] = True\n \n return inds", "def checkDirection(neighbour, current_point, end):\n\n for i in range(3):\n delta = abs(end[i] - current_point[i])\n if abs(end[i] - neighbour[i]) < delta and delta >= 0:\n return True, i\n\n return False, None", "def has_collide(self, obj):\n rect1 = self.anim.getRect()\n rect2 = obj.anim.getRect()\n \n rect1.move_ip(self.pos)\n rect2.move_ip(obj.pos)\n \n return rect1.colliderect(rect2)", "def within(point: tuple, box: tuple) -> bool:\r\n \r\n return box[0] < point[0] < box[2] and box[1] < point[1] < box[3]", "def intersect(self, line):\n c = line.cross_z\n d = self.v.dot(c)\n if d == 0:\n return False, 0, 0\n t = c.dot(line.p - self.p) / d\n return True, self.lerp(t), t", "def is_reachable_from(self, position: np.ndarray) -> bool:\n s, r = self.local_coordinates(position)\n return 0 <= s < self.length + CAR_LENGTH and np.abs(r) <= 2 * self.width", "def __contains__(self, point, e=1e-10):\n if point == self.p1:\n return True\n v1 = Vector.createFromTwoPoints(self.p1, point)\n v2 = self.getVector()\n return (abs(v1.angle - v2.angle) % (2 * math.pi) < e) and (v1.norm <= v2.norm)", "def inside(point, rectangle):\n\n ll = rectangle.getP1() # assume p1 is ll (lower left)\n ur = rectangle.getP2() # assume p2 is ur (upper right)\n\n return ll.getX() < point.getX() < ur.getX() and ll.getY() < point.getY() < ur.getY()", "def is_curr_location_corner(game, player_location):\n corner_positions = [(0, 0), (0, game.height - 1), (game.width - 1, 0), (game.width - 1, game.height - 1)]\n return player_location in 
corner_positions", "def is_at_target_position(self, position, tolerance=0.0):\n x, _ = position\n return x > self.corridor_length - tolerance", "def is_linearly_independent_2x2(u, v):\n uv = get_uv(u, v)\n if uv[0][0] * uv[1][1] - uv[1][0] * uv[0][1] != 0:\n return True\n else:\n return False", "def contains_point(self, x, y):\r\n if self.m == None:\r\n if abs(x - self.start[0]) > 0.6:\r\n return False\r\n else:\r\n if (y >= self.start[1] and y <= self.end[1]) or \\\r\n (y <= self.start[1] and y >= self.end[1]):\r\n return True\r\n else:\r\n return False\r\n else: \r\n y0 = int(self.m * x + self.n)\r\n if abs(y - y0) > 0.6: \r\n return False \r\n else: \r\n if ((x >= self.start[0] and x <= self.end[0]) or \\\r\n (x <= self.start[0] and x >= self.end[0])) and \\\r\n ((y >= self.start[1] and y <= self.end[1]) or \\\r\n (y <= self.start[1] and y >= self.end[1])): \r\n return True\r\n else:\r\n return False", "def __check_direction(self, vector, coordinate):\n inverse_vector = -vector[0], -vector[1]\n # Calculate hits to direction\n hits = self.__direction(vector,1,coordinate)\n if hits == 5:\n return True\n # After reaching the end, add hits towards the opposite direction\n hits = self.__direction(inverse_vector,hits,coordinate)\n if hits == 5:\n return True", "def collision_detect(self):\n\n # Check if the collision was with a map\n # Rect-based collision code\n for map_rect in Map.current_map.collision_rects:\n collision_time, norm_x, norm_y = collision.aabb_swept_collision(self.rect, (self.vx, self.vy), map_rect)\n if collision_time != 1:\n if DEBUG: print(\"[collision]\", collision_time)\n break\n self.x += self.vx * collision_time\n self.y += self.vy * collision_time\n\n remaining_time = 1 - collision_time\n \"\"\"\n if remaining_time > 0:\n self.vx *= remaining_time;\n self.vy *= remaining_time;\n \"\"\"\n if collision_time != 1:\n if abs(norm_x) > .0001:\n self.vx = -self.vx * COLLISION_DAMPING\n if abs(norm_y) > .0001:\n self.vy = -self.vy * COLLISION_DAMPING\n self.collision_counter += 1\n return True\n return False\n\n # Old, mask-based collision code\n \"\"\"\n self.mask = pygame.mask.from_surface(self.image)\n point = pygame.sprite.collide_mask(Map.current_map, self)\n if point:\n if COLLISION_ALGORITHM_EXPERIMENTAL:\n self.vx, self.vy = collision.calculate_reflection_angle(Map.current_map.mask, point, (self.vx, self.vy))\n else: \n self.vx, self.vy = collision.simple_collision(Map.current_map.mask, point, (self.vx, self.vy))\n self.vx, self.vy = self.vx * COLLISION_DAMPING, self.vy * COLLISION_DAMPING\n \n self.collision_counter += 1\n return True\n return False\n \"\"\"", "def is_inside(self, p):\n s, t = self.get_barycentric_coord(p)\n if 0 <= s <= 1 and 0 <= t <= 1 and s + t <= 1:\n return True\n else:\n return False", "def obstacle_between(self, node1, node2, agent):\n if self.obstacles[agent] is None:\n return False\n if self.is_inside(node1, self.obstacles, agent)[0] or self.is_inside(node2, self.obstacles, agent)[0]:\n return True\n\n for cords in self.xy_cords:\n x1 = node1.state[cords[0]]\n y1 = node1.state[cords[1]]\n x2 = node2.state[cords[0]]\n y2 = node2.state[cords[1]]\n p1 = Point(x1, y1)\n q1 = Point(x2, y2)\n for obstacle in self.obstacles[agent]:\n x_min = obstacle[0][0]\n x_max = obstacle[0][1]\n y_min = obstacle[1][0]\n y_max = obstacle[1][1]\n p2 = Point(x_min, y_min)\n q2 = Point(x_min, y_max)\n if doIntersect(p1, q1, p2, q2):\n return True\n p2 = Point(x_min, y_max)\n q2 = Point(x_max, y_max)\n if doIntersect(p1, q1, p2, q2):\n return True\n p2 = 
Point(x_max, y_max)\n q2 = Point(x_max, y_min)\n if doIntersect(p1, q1, p2, q2):\n return True\n p2 = Point(x_max, y_min)\n q2 = Point(x_min, y_min)\n if doIntersect(p1, q1, p2, q2):\n return True\n return False", "def check_inside(self, pos):\n x,y = pos\n return x >= self.posx and x <= self.posx + self.sizex and y >= self.posy and y <= self.posy + self.sizey", "def contains(self, point):\n return 0 <= point.x <= 1 \\\n and 0 <= point.y <= 1 \\\n and 0 <= point.z <= 1", "def __contains__(self,pos):\n # Permet de donner une contenance a l objet\n # Il devient comme une liste de point\n # Ainsi on peut le parcourir comme on le ferai avec une liste\n xmin=self.pos[0]\n xmax=self.pos[0]+self.dim[0]\n ymin=self.pos[1]\n ymax=self.pos[1]+self.dim[1]\n xpt=pos[0]\n ypt=pos[1]\n return (xpt>=xmin and xpt<=xmax and ypt>=ymin and ypt<=ymax)", "def __contains__(self,pos):\n # Permet de donner une contenance a l objet\n # Il devient comme une liste de point\n # Ainsi on peut le parcourir comme on le ferai avec une liste\n xmin=self.pos[0]\n xmax=self.pos[0]+self.dim[0]\n ymin=self.pos[1]\n ymax=self.pos[1]+self.dim[1]\n xpt=pos[0]\n ypt=pos[1]\n return (xpt>=xmin and xpt<=xmax and ypt>=ymin and ypt<=ymax)" ]
[ "0.7454063", "0.7407062", "0.7005391", "0.7005391", "0.69903654", "0.6773237", "0.6502717", "0.6439229", "0.64051235", "0.6336705", "0.6297145", "0.6216562", "0.621584", "0.6154906", "0.61373097", "0.6084647", "0.6082654", "0.6072066", "0.60683346", "0.60457075", "0.6034389", "0.60234964", "0.60185206", "0.5999616", "0.5978637", "0.5978229", "0.59609795", "0.59457964", "0.5936145", "0.59358144", "0.5923164", "0.5917276", "0.5917276", "0.5911114", "0.59068316", "0.5906811", "0.58885616", "0.587788", "0.5874754", "0.5863606", "0.5862768", "0.58543736", "0.585181", "0.5843388", "0.58356607", "0.58106065", "0.5807744", "0.5807312", "0.58017796", "0.5789631", "0.57868993", "0.5780597", "0.5772314", "0.57721925", "0.576394", "0.57610375", "0.5745635", "0.5733186", "0.5731844", "0.5728615", "0.57240945", "0.57232374", "0.5715483", "0.57117075", "0.5694945", "0.5694945", "0.56930476", "0.56908387", "0.56818616", "0.5671009", "0.566406", "0.5655837", "0.5653148", "0.5652157", "0.5648607", "0.5634096", "0.56333846", "0.56326884", "0.562396", "0.5623077", "0.5620098", "0.5617727", "0.56173956", "0.56135076", "0.5612203", "0.56102407", "0.55871725", "0.5584703", "0.5580518", "0.5573326", "0.5568292", "0.55671537", "0.5560163", "0.5552915", "0.55520254", "0.5546361", "0.55419165", "0.553643", "0.5526792", "0.5526792" ]
0.66110426
6
Looks at the signs of the components of the vectors to determine whether the obstacle lies in the same direction (quadrant) as the waypoint
def is_obstacle_in_path_of_drone(self, obstacle_vector, waypoint_vector):
    obstacle_list = obstacle_vector.tolist()
    waypoint_list = waypoint_vector.tolist()
    for index in range(len(obstacle_list)):
        # Opposite signs in any component pair (i.e. -obstacle and waypoint
        # sharing a sign) mean the obstacle points away from the waypoint.
        if all(item > 0 for item in [-1.0 * obstacle_list[index], waypoint_list[index]]) or all(item < 0 for item in [-1.0 * obstacle_list[index], waypoint_list[index]]):
            return False
    return True
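A minimal stand-alone sketch of the component-sign test above, assuming plain numpy vectors; the function name same_direction and the sample values are hypothetical and only illustrate the quadrant comparison:

import numpy as np

def same_direction(obstacle_vector, waypoint_vector):
    # Mirrors the document's test: a component pair with opposite signs
    # (equivalently, -obstacle and waypoint sharing a sign) means the
    # obstacle points away from the waypoint.
    for o, w in zip(obstacle_vector.tolist(), waypoint_vector.tolist()):
        if (-o > 0 and w > 0) or (-o < 0 and w < 0):
            return False
    return True

print(same_direction(np.array([3.0, 4.0]), np.array([6.0, 8.0])))   # True: same quadrant
print(same_direction(np.array([-3.0, 4.0]), np.array([6.0, 8.0])))  # False: x components have opposite signs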
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __check_direction(self, vector, coordinate):\n inverse_vector = -vector[0], -vector[1]\n # Calculate hits to direction\n hits = self.__direction(vector,1,coordinate)\n if hits == 5:\n return True\n # After reaching the end, add hits towards the opposite direction\n hits = self.__direction(inverse_vector,hits,coordinate)\n if hits == 5:\n return True", "def does_path_intersect_obstacle_2d(self, obstacle, uav_point, waypoint):\n drone_point = uav_point[:-1]\n waypoint = waypoint[:-1]\n obstacle_point = obstacle.get_point()[:-1]\n\n waypoint_vector = np.subtract(waypoint, drone_point)\n obstacle_vector = np.subtract(obstacle_point, drone_point)\n obstacle_vector_magnitude = VectorMath.get_vector_magnitude(obstacle_vector)\n rejection_vector = VectorMath.get_vector_rejection(obstacle_vector, waypoint_vector)\n rejection_vector_magnitude = VectorMath.get_vector_magnitude(rejection_vector)\n\n # Uncomment for DEBUGGING ONLY\n print(\"Waypoint Vector: \" + str(waypoint_vector))\n print(\"Obstacle Vector: \" + str(obstacle_vector))\n print(\"Rejection Vector: \" + str(rejection_vector))\n print(\"Rejection Vector Magnitude: \" + str(rejection_vector_magnitude))\n print(\"Obstacle Radius: \" + str(obstacle.get_radius()))\n print(\"Distance From Obstacle: \" + str(VectorMath.get_vector_magnitude(np.subtract(uav_point, obstacle.get_point()))))\n\n if self.is_obstacle_in_path_of_drone(obstacle_vector, waypoint_vector):\n return rejection_vector_magnitude < obstacle.get_radius()\n\n return False", "def check_position(self, player):\n\n # Mid point of the segment defining the goal\n mid = Point.mid_point(self.s_pos, self.e_pos)\n\n # Transposition of this point by the direction vector of the goal\n # to get the direction vector with its origin in the center of the goal\n mid_prime = self.dir + mid\n\n # Creating both needed vectors\n v1 = Vector.v_from_pp(mid, player.pos)\n v2 = Vector.v_from_pp(mid, mid_prime)\n\n # Getting the angle and checking if it is a valid one\n angle = v1.angle(v2)\n\n return self.is_in_interval(-math.pi / 2, math.pi / 2, angle)", "def sameDirection(cls, *vectors, e=10e-10):\n l = len(vectors)\n if l == 2:\n v1 = vectors[0]\n v2 = vectors[1]\n return (abs(v1.angle - v2.angle) % (2 * math.pi)) < e\n else:\n for i in range(l):\n for j in range(i + 1, l):\n if not cls.sameDirection(vectors[i], vectors[j]):\n return False\n return True", "def does_path_intersect_obstacle_3d(self, obstacle, drone_point, waypoint):\n waypoint_vector = np.subtract(waypoint, drone_point)\n obstacle_vector = np.subtract(obstacle.get_point(), drone_point)\n obstacle_vector_magnitude = VectorMath.get_vector_magnitude(obstacle_vector)\n rejection_vector = VectorMath.get_vector_rejection(obstacle_vector, waypoint_vector)\n rejection_vector_magnitude = VectorMath.get_vector_magnitude(rejection_vector)\n\n # Uncomment for DEBUGGING ONLY\n print(\"Waypoint Vector: \" + str(waypoint_vector))\n print(\"Obstacle Vector: \" + str(obstacle_vector))\n print(\"Rejection Vector: \" + str(rejection_vector))\n print(\"Rejection Vector Magnitude: \" + str(rejection_vector_magnitude))\n print(\"Obstacle Radius: \" + str(obstacle.get_radius()))\n print(\"Distance From Obstacle: \" + str(VectorMath.get_vector_magnitude(np.subtract(drone_point, obstacle.get_point()))))\n\n if self.is_obstacle_in_path_of_drone(obstacle_vector, waypoint_vector):\n return rejection_vector_magnitude < Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS\n\n return False", "def is_solved(self):\n return self.to_grid == self.from_grid", "def 
is_solved(self):\n return (self.from_grid == self.to_grid)", "def checkDirection(neighbour, current_point, end):\n\n for i in range(3):\n delta = abs(end[i] - current_point[i])\n if abs(end[i] - neighbour[i]) < delta and delta >= 0:\n return True, i\n\n return False, None", "def is_solved(self):\n return self.from_grid == self.to_grid", "def is_solved(self):\n return self.from_grid == self.to_grid", "def is_solved(self):\n return self.from_grid == self.to_grid", "def is_same_waypoint(self, wp1, wp2, max_d=0.5, max_v=0.5):\n dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)\n ddif = dl(wp1.pose.pose.position, wp2.pose.pose.position)\n if ddif < max_d:\n return True\n return False", "def _hasChangedDirection(motionPts: list) -> tuple:\n dispPts = Ball._getDisplacements(motionPts)\n xDir = yDir = None\n xChange = yChange = False\n for dispPt in dispPts:\n # Compute differences\n xDirNow = RIGHT if dispPt[0] > 0 else LEFT\n yDirNow = DOWN if dispPt[1] > 0 else UP\n # Look for x changes\n if xDir is None:\n xDir = xDirNow\n elif xDirNow != xDir:\n xChange = True\n # Look for y changes\n if yDir is None:\n yDir = yDirNow\n elif yDirNow != yDir:\n yChange = True\n return xChange, yChange", "def is_obstacle_in_path(self):\n for obstacle in self.obstacles.tolist():\n print(\"obstacle.get_point():\", obstacle.get_point())\n dist_to_obstacle = VectorMath.get_vector_magnitude(np.subtract(obstacle.get_point(), self.drone.get_point()))\n if dist_to_obstacle < obstacle.get_radius() + Constants.DETECTION_THRESHOLD:\n if isinstance(obstacle, StationaryObstacle):\n paths = self.generate_possible_paths(obstacle)\n\n if len(paths) != 0:\n return True, np.array(paths)\n elif isinstance(obstacle, MovingObstacle):\n pass\n\n return False, None", "def vector_equal(v1,v2):\n if (v2.x - 0.001 <= v1.x <= v2.x + 0.001) and \\\n (v2.y - 0.001 <= v1.y <= v2.y + 0.001) and \\\n (v2.z - 0.001 <= v1.z <= v2.z + 0.001):\n return True", "def _point_in_tri(self, pos, tri):\n signs = np.sign([np.cross(tri[np.mod(i + 1, 3)] - tri[i],\n pos - tri[i]) for i in range(3)])\n if np.all(signs[1:] == signs[0]):\n return True\n else:\n return False", "def check_shot_direction(self, shot):\n return Vector.v_from_a(shot.angle) * self.dir < 0", "def R_will_change_direction(point0, point1, point2):\n\n x0, y0 = point0[0], point0[1]\n x1, y1 = point1[0], point1[1]\n x2, y2 = point2[0], point2[1]\n\n try:\n m1 = (x1 - x2) / (y2 - y1)\n m2 = (y2 - y1) / (x2 - x1)\n x3 = ((m2 * x1) - (m1 * x0) - y1 + y0) / (m2 - m1)\n y3 = m1 * (x3 - x0) + y0\n except ZeroDivisionError:\n (x3, y3) = (x0, y1) if y1 == y2 else (x1, y0)\n\n return ((min(x1, x2) <= x3 <= max(x1, x2)) and (min(y1, y2) <= y3 <= max(y1, y2))), (x3, y3)", "def reached_final_point():\n return all(point.constraints[b.atom_indexes] == b.final_dist\n for b in self.bonds)", "def condition(o):\n\t\t\tv = o.pos() - self.pos()\n\t\t\treturn v.norm2() < dist2 and abs(angle_diff(v.angle(),self.angle())) < math.radians(45)", "def isInPlane(self, p) -> bool:\n # Testing for zero is done with math.isclose, to avoid rounding/floating point errors.\n # Since we are testing near zero, abs_tol is set to 1e-09\n return math.isclose(\n math.fabs(\n dot(\n self.normal(),\n Vector.connect(p.x, p.y, p.z, self.p0.x, self.p0.y, self.p0.z),\n )\n ),\n 0,\n rel_tol=1e-09,\n abs_tol=1e-09,\n )", "def is_straight(distance_travel_x, distance_travel_y):\r\n if (distance_travel_x > 0 and distance_travel_y == 0) or (distance_travel_x == 0 and distance_travel_y > 0):\r\n return True\r\n 
else:\r\n return False", "def is_perpendicular_to(self, vector):\n\n if abs(self.dot(vector)) < 0.01:\n return True\n return False", "def check_directionality_viable(self):\n\n direction_viable = True\n nose_cords, ear_left_cords, ear_right_cords = [], [], []\n for animal_name in self.animal_bp_dict.keys():\n for bp_cord in [\"X_bps\", \"Y_bps\"]:\n bp_list = self.animal_bp_dict[animal_name][bp_cord]\n for bp_name in bp_list:\n bp_name_components = bp_name.split(\"_\")\n bp_name_components = [x.lower() for x in bp_name_components]\n if \"nose\" in bp_name_components:\n nose_cords.append(bp_name)\n elif (\"ear\" in bp_name_components) and (\n \"left\" in bp_name_components\n ):\n ear_left_cords.append(bp_name)\n elif (\"ear\" in bp_name_components) and (\n \"right\" in bp_name_components\n ):\n ear_right_cords.append(bp_name)\n else:\n pass\n\n for cord in [nose_cords, ear_left_cords, ear_right_cords]:\n if len(cord) != len(self.animal_bp_dict.keys()) * 2:\n direction_viable = False\n\n if direction_viable:\n nose_cords = [\n nose_cords[i * 2 : (i + 1) * 2]\n for i in range((len(nose_cords) + 2 - 1) // 2)\n ]\n ear_left_cords = [\n ear_left_cords[i * 2 : (i + 1) * 2]\n for i in range((len(ear_left_cords) + 2 - 1) // 2)\n ]\n ear_right_cords = [\n ear_right_cords[i * 2 : (i + 1) * 2]\n for i in range((len(ear_right_cords) + 2 - 1) // 2)\n ]\n\n return direction_viable, nose_cords, ear_left_cords, ear_right_cords", "def does_uav_intersect_obstacle_vertically(self, obstacle, drone_point, waypoint):\n if isinstance(obstacle, StationaryObstacle):\n if drone_point[2] < obstacle.height + Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS:\n return True\n\n return False", "def in_field(self, vec):\n return (abs(vec[0]) + abs(vec[1]) + abs(vec[2])) <= 2 * self.n", "def near_way(self):\r\n\r\n prey_position = np.array(self.prey.position)\r\n actual_position = np.array(self.previous_data[-1])\r\n previous_position = np.array(self.previous_data[-2])\r\n\r\n difference_actual = np.linalg.norm(prey_position - actual_position)\r\n difference_previous = np.linalg.norm(prey_position - previous_position)\r\n\r\n if difference_actual < difference_previous:\r\n return True\r\n else:\r\n return False", "def IsPointInsideMesh(MeshObj, PointInObjectSpace):\n #direction is irellevant unless mesh is REALLY wierd shaped\n direction = mathutils.Vector((1,0,0)) \n epsilon = direction * 1e-6 \n count = 0 \n result, PointInObjectSpace, normal, index = MeshObj.ray_cast(PointInObjectSpace, direction) \n while result: \n count += 1 \n result, PointInObjectSpace, normal, index = MeshObj.ray_cast(PointInObjectSpace + epsilon, direction) \n return (count % 2) == 1", "def arrived(self):\n \"\"\" Responsible for transformations \"\"\"\n \n if self.phase == 1:\n if self.closest_i_could_get is not None:\n return array_equal(self.closest_i_could_get, array([0,0]))\n else: \n return array_equal(self.destination, array([0,0]))\n elif self.phase > 1:\n if self.closest_i_could_get is not None:\n return array_equal(self.closest_i_could_get, self.position)\n else: \n return array_equal(self.destination, self.position)", "def are_torsions_same2(geo, geoi, idxs_lst):\n dtol = 0.09\n same_dihed = True\n for idxs in idxs_lst:\n val = dihedral_angle(geo, *idxs)\n vali = dihedral_angle(geoi, *idxs)\n valip = vali+2.*numpy.pi\n valim = vali-2.*numpy.pi\n vchk1 = abs(val - vali)\n vchk2 = abs(val - valip)\n vchk3 = abs(val - valim)\n if vchk1 > dtol and vchk2 > dtol and vchk3 > dtol:\n same_dihed = False\n return same_dihed", "def 
is_solved(level_map):\n shape = level_map.shape\n for x in range(shape[0]):\n for y in range(shape[1]):\n i = (x, y)\n tile = level_map[i]\n if tile == 0:\n continue\n left_index, up_index, right_index, down_index = get_direction_indices(i)\n if has_connection_left(tile) and \\\n (tile_is_out_of_borders(left_index, shape) or not has_connection_right(level_map[left_index])):\n return False\n if has_connection_up(tile) and \\\n (tile_is_out_of_borders(up_index, shape) or not has_connection_down(level_map[up_index])):\n return False\n if has_connection_right(tile) and \\\n (tile_is_out_of_borders(right_index, shape) or not has_connection_left(level_map[right_index])):\n return False\n if has_connection_down(tile) and \\\n (tile_is_out_of_borders(down_index, shape) or not has_connection_up(level_map[down_index])):\n return False\n return True", "def is_at_intersection(self):\n directions = 0\n self.tile = (self.get_nearest_row(), self.get_nearest_col())\n if self.internal_map[self.tile[0] - 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0] + 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] - 1] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] + 1] not in ('x', ):\n directions += 1\n return True if directions > 2 else False", "def is_pentagonal(x):\n solution = solve_quad(3, -1, -2*x)\n return max(solution) % 1 == 0", "def check_directions_find_waypoint(current_point, current_segment,\n delta_before_after, segmented_points):\n\n delta_lat_before_current = delta_before_after[0]\n delta_lng_before_current = delta_before_after[1]\n\n delta_lat_after_current = delta_before_after[2]\n delta_lng_after_current = delta_before_after[3]\n\n # check to see if the delta x's in both directions are longer\n # than the delta y's in both directions\n if (delta_lat_before_current > delta_lng_before_current) and \\\n (delta_lat_after_current > delta_lng_after_current):\n print \"inside first if\"\n # the latitudes are longer than the longitudes, get waypoints\n # in the longitude direction\n\n # don't forget to generate waypoints\n waypoint_e_w = inspect_waypoints(current_point, \"lngwise\")\n try_waypoints(waypoint_e_w, current_segment, segmented_points)\n elif (delta_lng_before_current > delta_lat_before_current) and \\\n (delta_lng_after_current > delta_lat_after_current):\n print \"inside elif, checks the north and south creation\"\n # the longitudes are longer than the latitudes, get waypoints\n # in the latitude direction\n\n # don't forget to generate waypoints\n waypoint_n_s = inspect_waypoints(current_point, \"latwise\")\n try_waypoints(waypoint_n_s, current_segment, segmented_points)\n else:\n print \"inside else, checks all directions NS-EW\"\n\n # don't forget to generate waypoints\n waypoint_all = inspect_waypoints(current_point, \"all\")\n try_waypoints(waypoint_all, current_segment, segmented_points)\n\n # return only the waypoints and start/end lat,lngs\n return segmented_points", "def isOnInteriorSide(self, v):\n n = self.normalVect()\n return n.dotProduct(vector(self.vertices[0]) - vector(v)) > 0", "def __direction(self, vector, hits, coordinate):\n try:\n assert hits is not None\n # Button at the end to the vector\n next_x = coordinate[0]+vector[0]\n next_y = coordinate[1]+vector[1]\n next_coordinate = [next_x,next_y]\n # Check token and save it as new\n if self._board_snapshot[next_y][next_x] == self._player:\n # Add hit and continue if next token is of the players\n 
return self.__direction(vector, hits+1 ,next_coordinate)\n else:\n return hits\n # Out of bounds\n except IndexError:\n return hits", "def checkAdjacent(self, direction, maze):\n\n y = self.y\n x = self.x\n\n # Shift x or y depending on the given direction.\n if direction in NS:\n y = shift[direction](y)\n elif direction in EW:\n x = shift[direction](x)\n\n # Check new location for obstacle or unwanted direction\n if maze[y][x] == 1 or ([x, y] in self.fullpath()) or (self.moved() is False and direction in self.dead_end_direction()[-1]):\n return False\n else:\n return True", "def check_allowed(i,j,DIR):\r\n #DIR is a unit vector pointing from i to j (eg: DIR = [0,1] indicates that j is 1 to the right of i)\r\n #check only specific arrangement on-demand\r\n if DIR == [1,0]: #i j #RIGHTWARD\r\n if get_blocks(i)[1][2] == get_blocks(j)[1][0]:\r\n return True\r\n elif DIR == [-1,0]: #LEFTWARD\r\n if get_blocks(j)[1][2] == get_blocks(i)[1][0]: #reverse indices\r\n return True\r\n elif DIR == [0,-1]: #UPWARD\r\n if get_blocks(j)[2][1] == get_blocks(i)[0][1]:\r\n return True \r\n elif DIR == [0,1]: #DOWNWARD\r\n if get_blocks(i)[2][1] == get_blocks(j)[0][1]:\r\n return True\r\n else:\r\n raise ValueError (\"Invalid DIR vector!\")\r\n return False", "def hit(self, origin, sightVector, hitError):\n distance = [0, 0, 0]\n for i in range(0, 3):\n distance[i] = self.translation[i] - origin[i]\n v1 = normalize(distance)\n v2 = normalize(sightVector)\n return abs(v1[0] - v2[0]) < hitError and \\\n abs(v1[1] - v2[1]) < hitError and abs(v1[2] - v2[2]) < hitError", "def __comparing_points(self, point1, point2) -> bool:\n return (abs(point1.x - point2.x) <= self.dirt_pos_tolerance and abs(\n point1.y - point2.y) <= self.dirt_pos_tolerance)", "def _is_solvent_accessible(protein_coords, atm, min_distance=2):\n if str(atm.atomic_symbol) == 'H':\n atm_position = np.array(atm.coordinates)\n neighbour = np.array(atm.neighbours[0].coordinates)\n direction = np.subtract(atm_position, neighbour) * 2\n position = np.array([direction + atm_position])\n distance = min(np.linalg.norm(protein_coords - position, axis=1))\n if distance > min_distance:\n return True\n else:\n return False\n\n else:\n return True", "def find_solution(self, motor_positions):\n\n # Valuto il tempo necessario alla trasformazione\n if self.search_base_angles(motor_positions):\n self.find_plane_angles(motor_positions[3])\n # logging.debug(\"Input: [{:+09.6f}, {:+09.6f}, {:+09.6f}, {:+09.6f}], \"\n # \"Position: [{:+06.2f}, {:+06.2f}, {:+06.2f}], \"\n # \"Process: {:4d}/{:.3f} ms\".format(\n # motor_positions[0],\n # motor_positions[1],\n # motor_positions[2],\n # motor_positions[3],\n # self.zyx3,\n # self.zyx2,\n # self.zyx1,\n # self.cycles,\n # process_time))\n self.isLastAnglesValid = True\n return True\n else:\n self.zyx1_r = 0.0\n self.zyx2_r = 0.0\n self.zyx3_r = 0.0\n self.zyx3 = 0.0\n self.zyx2 = 0.0\n self.zyx1 = 0.0\n self.isLastAnglesValid = False\n return False", "def filter_directions(self):\n dot_products = np.matmul(self.box, self.direction)\n return self.box[[True if dp > 0 else False for dp in dot_products]]", "def _infer_direction(self):\n data = self.get_data(None)\n if data is not None:\n # Infer the direction from the data\n if data._size > 1:\n data = data[0:2].array\n return bool(\n data.item(\n 0,\n )\n < data.item(\n 1,\n )\n )\n # --- End: if\n\n # Still here?\n data = self.get_bounds_data(None)\n if data is not None:\n # Infer the direction from the bounds\n b = data[(0,) * (data.ndim - 1)].array\n return bool(\n 
b.item(\n 0,\n )\n < b.item(\n 1,\n )\n )\n\n # Still here? Then infer the direction from the units.\n return not self.Units.ispressure", "def semi_plan_check(coords_list, normal_plane, point_on_plane, tol=1e-8):\n center_to_coords = coords_list - \\\n np.repeat(point_on_plane.reshape((-1,3)), len(coords_list), axis=0)\n normal_plane = \\\n np.repeat(normal_plane.reshape((-1,3)), len(coords_list), axis=0)\n inner_product = np.sum(center_to_coords*normal_plane,axis=1)\n flag = np.zeros(inner_product.shape, dtype=bool)\n flag[inner_product >= 0] = True\n return flag", "def identical_to(self, elem):\n \n return (self.n == elem.n) and (math.fabs(self.dx - elem.dx) < 0.001) and (math.fabs(self.dy - elem.dy) < 0.001) and (math.fabs(self.dz - elem.dz) < 0.001)", "def check_offset(self):\n\n for d in range(self.n_dmps):\n if abs(self.y0[d] - self.goal[d]) < 1e-4:\n self.goal[d] += 1e-4", "def check_around(self, x: int, y: int) -> Optional[List[str]]:\n\t\tfor direction in [[-1,0], [-1,-1], [0,-1], [1,-1]]:\n\t\t\tresults = self.check_direction(x, y, direction[0], direction[1])\n\t\t\tif results is not None:\n\t\t\t\treturn results\n\t\treturn None", "def _calc_side(self):\n\n # Calculation of the side of the car with respect to the trajectory\n next_index = self.index + 1\n\n if next_index == len(self.x_trajectory):\n next_index = self.index\n\n trajectory_vector = ((self.x_trajectory[next_index]\n - self.x_trajectory[self.index]),\n (self.y_trajectory[next_index]\n - self.y_trajectory[self.index]))\n\n x_diff = self.x - self.x_trajectory[self.index]\n y_diff = self.y - self.y_trajectory[self.index]\n\n ugv_vector = (x_diff, y_diff)\n\n vector_z = ugv_vector[0] * trajectory_vector[1] \\\n - ugv_vector[1] * trajectory_vector[0]\n\n if vector_z >= 0:\n\n # It is in the right side\n self.sign = 1\n\n else:\n\n # It is in the left side\n self.sign = -1\n\n return self.sign", "def legal_move(marker, x, y, direction):\n # first if statement determines the directions\n # second if statement checks if the \"potential move\" is within the index\n if direction == \"N\":\n if 0 <= y-2 < len(marker):\n return marker[y-2][x] == marker[y-1][x] == '*'\n if direction == \"S\":\n if 0 <= y+2 < len(marker):\n return marker[y+2][x] == marker[y+1][x] == '*'\n if direction == \"W\":\n if 0 <= x-2 < len(marker[0]):\n return marker[y][x-2] == marker[y][x-1] == '*'\n if direction == \"E\":\n if 0 <= x+2 < len(marker[0]):\n return marker[y][x+2] == marker[y][x+1] == '*'\n return False", "def is_solved(self):\n colors = ['green', 'blue', 'red', 'orange', 'white', 'yellow']\n for row in range(3):\n for column in range(3):\n if self.front[row][column] != colors[0]:\n return False\n for row in range(3):\n for column in range(3):\n if self.back[row][column] != colors[1]:\n return False\n for row in range(3):\n for column in range(3):\n if self.right[row][column] != colors[2]:\n return False\n for row in range(3):\n for column in range(3):\n if self.left[row][column] != colors[3]:\n return False\n for row in range(3):\n for column in range(3):\n if self.up[row][column] != colors[4]:\n return False\n for row in range(3):\n for column in range(3):\n if self.down[row][column] != colors[5]:\n return False\n return True", "def is_vertical(self, tangent_vec, base_point, atol=gs.atol):\n return gs.all(\n gs.isclose(\n 0.0,\n self.tangent_riemannian_submersion(tangent_vec, base_point),\n atol=atol,\n ),\n axis=(-2, -1),\n )", "def IsPointInsideMesh2(obj, p, max_dist = 1.84467e+19):\n bResult, point, normal, face = 
obj.closest_point_on_mesh(p, max_dist)\n p2 = point-p\n v = p2.dot(normal)\n return not(v < 0.0)", "def if_goal_reached(self, pose):\n dx = self.pos.x - pose.x\n dy = self.pos.y - pose.y\n dist = math.sqrt(dx ** 2 + dy ** 2)\n return dist < self.radiu", "def doesArmTouchObstacles(armPos, obstacles):\n for i in range(len(armPos)):\n cur_arm = armPos[i]\n arm_x = [cur_arm[0][0],cur_arm[1][0]]\n arm_y = [cur_arm[0][1],cur_arm[1][1]]\n if (arm_x[0] != arm_x[1]):\n arm_a = (arm_y[1]-arm_y[0])/(arm_x[1]-arm_x[0])\n arm_b = arm_y[1]-arm_a*arm_x[1]\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n x_range = np.linspace(arm_x[0],arm_x[1],1000)\n y_range = arm_a * x_range + arm_b\n for j in range(1000):\n cur_x = x_range[j]\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n if (arm_x[0] == arm_x[1]):\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n y_range = np.linspace(arm_y[0],arm_y[1],1000)\n cur_x = arm_x[0]\n for j in range(1000):\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n\n\n #print(obstacles)\n\n return False", "def __contains__(self, point, e=10e-10):\n v1 = self.vector\n v2 = Vector.createFromTwoPoints(self.point, point)\n return abs(v1.angle - v2.angle) < e", "def is_point_inside_mask(border, target):\n degree = 0\n for i in range(len(border) - 1):\n a = border[i]\n b = border[i + 1]\n\n # calculate distance of vector\n A = get_cartersian_distance(a[0], a[1], b[0], b[1])\n B = get_cartersian_distance(target[0], target[1], a[0], a[1])\n C = get_cartersian_distance(target[0], target[1], b[0], b[1])\n\n # calculate direction of vector\n ta_x = a[0] - target[0]\n ta_y = a[1] - target[1]\n tb_x = b[0] - target[0]\n tb_y = b[1] - target[1]\n\n cross = tb_y * ta_x - tb_x * ta_y\n clockwise = cross < 0\n\n # calculate sum of angles\n if clockwise:\n degree = degree + np.rad2deg(\n np.arccos((B * B + C * C - A * A) / (2.0 * B * C))\n )\n else:\n degree = degree - np.rad2deg(\n np.arccos((B * B + C * C - A * A) / (2.0 * B * C))\n )\n\n if abs(round(degree) - 360) <= 3:\n return True\n return False", "def is_edge_phase(x, x_last):\n _x = x/(2*np.pi)\n _x = round(_x - round(_x), 5)\n _x_last = x_last/(2*np.pi)\n _x_last = round(_x_last - round(_x_last), 5)\n if _x == 0.0 or (_x_last < 0.0 and _x > 0.0):\n return True\n else:\n return False", "def is_solved(self):\n return self._start == self._target", "def is_solved(self) -> bool:\n return set(self.boxes) == set(self.storage_locations)", "def _hasChangedAxisDirectionAt(motionPts: list, axis: int) -> tuple or None:\n dispPts = Ball._getDisplacements(motionPts)\n lastDir = None\n dirChange = False\n for i, dispPt in enumerate(dispPts):\n # Compute difference - 0 and 1 are arbitrary\n currentDir = 0 if dispPt[axis] > 0 else 1\n # Look for changes\n if lastDir is None:\n lastDir = currentDir\n elif currentDir != lastDir:\n return motionPts[i]\n return None", "def still_going(ball_stats):\n if ball_stats[3] <= 0: # if vy = vx = 0 we should stop\n return False\n\n if ball_stats[0] > 41 * 2.54 or ball_stats[0] < 0: # checking if we are out of the lane\n return False\n pins_loc = ORIG_PINS_LOC.copy()\n for p in pins_loc:\n if dist((ball_stats[0], ball_stats[1]), p) < R_BALL + R_PIN: # checking if we hit one of the balls\n return False\n return True", "def _is_valid_move(self, vector, current_piece, other_piece):\n # If direction is forward and the space is non-empty, break\n if vector[0] == 0 and other_piece != 
\"empty\":\n return False\n # If direction is diagonal and space is empty, break\n if vector[0] != 0 and other_piece == \"empty\":\n return False\n # If moving by 2 spaces, check if in starting row\n if vector[1] == 2 and current_piece.position[1] != 1:\n return False\n if vector[1] == -2 and current_piece.position[1] != 6:\n return False\n\n return True", "def is_versor(self) -> bool:\n return np.isclose(np.linalg.norm(self.A), 1.0)", "def isAcute(trpl):\n vd = vectorFormat(trpl)\n if angle_between(*vd) < np.pi/2:\n return True\n else:\n return False", "def is_solved(self):\n return self._from_word == self._to_word", "def origin_is_inside_hitbox(self, hitbox):\n if self.hitdetection.accurate:\n max_x = max(hitbox, key = lambda index: abs(index[0]))[0]\n max_y = max(hitbox, key = lambda index: abs(index[1]))[1]\n \n m = max(max_x, max_y)\n \n num_intersections = 0\n for i in range(0, len(hitbox), 1):\n if self.hitdetection.module.does_intersect([[m, m], [0, 0]], [hitbox[i], hitbox[(i + 1) % len(hitbox)]]):\n num_intersections += 1\n return [False, True][num_intersections % 2]\n else:\n has_smaller = False\n has_bigger = False\n for hx, hy in hitbox:\n if hx > 0 and hy > 0:\n has_bigger = True\n if hx < 0 and hy < 0:\n has_smaller = True\n return has_smaller and has_bigger", "def is_solved(self):\n self.solved = self.current_pos == self.finish_pos\n return self.solved", "def can_move_direction(entity, neighbor, game_map):\n new_x, new_y = neighbor\n if not game_map.in_bounds(x=new_x, y=new_y, margin=1):\n return False\n elif game_map.in_bounds(x=new_x, y=new_y) \\\n and game_map.terrain[new_x][new_y].elevation > Elevation.SHALLOWS \\\n and not entity.wings:\n return False\n return True", "def test_velocity_boundaries(self):\n L_x = self.x_edge[-1]\n np.testing.assert_array_almost_equal(self.v_box(self.t, 0), 0, decimal=4)\n np.testing.assert_array_almost_equal(self.v_box(self.t, L_x), 0, decimal=4)", "def goal_test(self, state):\n for x, y in state.alvos:\n if state.tabuleiro[x][y] is not BOX_ON_TARGET:\n return False\n return True", "def is_solvable(self) -> bool:\r\n inv_count = 0\r\n arr = self.current_state.flatten()\r\n for i in range(0, 9):\r\n for j in range(i + 1, 9):\r\n if arr[j] and arr[i] and arr[i] > arr[j]:\r\n inv_count += 1\r\n return inv_count % 2 == 0", "def is_ccw(point_a, point_b, point_c):\r\n return is_on_line(point_a, point_b, point_c) > 0", "def is_coplanar(points, tol=0.01):\n tol2 = tol ** 2\n if len(points) == 4:\n v01 = subtract_vectors(points[1], points[0])\n v02 = subtract_vectors(points[2], points[0])\n v23 = subtract_vectors(points[3], points[0])\n res = dot_vectors(v02, cross_vectors(v01, v23))\n return res**2 < tol2\n # len(points) > 4\n # compare length of cross product vector to tolerance\n u = subtract_vectors(points[1], points[0])\n v = subtract_vectors(points[2], points[1])\n w = cross_vectors(u, v)\n for i in range(1, len(points) - 2):\n u = v\n v = subtract_vectors(points[i + 2], points[i + 1])\n wuv = cross_vectors(w, cross_vectors(u, v))\n if wuv[0]**2 > tol2 or wuv[1]**2 > tol2 or wuv[2]**2 > tol2:\n return False\n return True", "def checkCoast1d(fieldset, x, y, direction=None, time=0):\n if direction == None:\n coast_x = checkCoast1d(fieldset, x, y, direction=\"x\")\n coast_y = checkCoast1d(fieldset, x, y, direction=\"y\")\n return coast_x, coast_y\n\n elif direction == \"x\":\n dims_U = fieldset.U.data.shape\n\n vector_U = fieldset.U.data[time, y, x-1:x+2]\n # vector_U = np.zeros(3)\n # vector_U[0] = fieldset.U.data[time, y, x-1%dims_U[-1]]\n # 
vector_U[1] = fieldset.U.data[time, y, x%dims_U[-1]]\n # vector_U[2] = fieldset.U.data[time, y, x+1%dims_U[-1]]\n #\n vector_U_trim = np.trim_zeros(vector_U)\n\n if len(vector_U_trim) == 1:\n # Checks if vector contains 2 zeros and one non-zero\n # and if the non_zero is at the begin or end\n if vector_U_trim == vector_U[0]:\n return [True, False]\n elif vector_U_trim == vector_U[-1]:\n return [False, True]\n else:\n return [False, False]\n else:\n return [False, False]\n\n elif direction == \"y\":\n dims_V = fieldset.V.data.shape\n\n vector_V = fieldset.V.data[time, y-1:y+2, x]\n # vector_V = np.zeros(3)\n # vector_V[0] = fieldset.V.data[time, y-1%dims[-2], x-1]\n # vector_V[1] = fieldset.V.data[time, y%dims[-2], x]\n # vector_V[2] = fieldset.V.data[time, y+1%dims[-2], x]\n\n vector_V_trim = np.trim_zeros(vector_V)\n\n if len(vector_V_trim) == 1:\n # Checks if vector contains 2 zeros and one non-zero\n # and if the non_zero is at the begin or end\n if vector_V_trim == vector_V[0]:\n return [True, False]\n elif vector_V_trim == vector_V[-1]:\n return [False, True]\n else:\n return [False, False]\n else:\n return [False, False]\n\n else:\n print \"checkCoast1d(): direction is not None, 'x' or 'y'.\"\n return False", "def _point_in_tris(self, pos, obj):\n these_tris = obj._tris['fill'].reshape(-1, 3)\n for tri in these_tris:\n if self._point_in_tri(pos, obj._points['fill'][tri]):\n return True\n return False", "def check_obstructed(r1,r2): \n \n if r1==r2:\n return False\n \n #Densely sample line connecting r1 and r2.\n #If any of those sampled points is inside the rectangle, then the \n #line of sight intersects the rectangle and the tower's view is\n #obstructed.\n NP = 1000\n sampled_x = np.linspace(r1[0],r2[0],NP)\n sampled_y = np.linspace(r1[1],r2[1],NP)\n for x,y,w,h in self.coordinates__obstacles:\n for pt in xrange(NP):\n if (sampled_x[pt] > x) and (sampled_x[pt] < x+w) and \\\n (sampled_y[pt] > y) and (sampled_y[pt] < y+h):\n return True\n return False", "def goal_occupied(self, view):\n for line in view.obstacles:\n if linesegdist2(line.p1, line.p2, self.goal) < self.radius ** 2:\n return True\n\n for p in view.pedestrians:\n if p.velocity.length2() == 0.0:\n if p.position.distance_to2(self.goal) < p.radius:\n return True\n\n return False", "def has_velocities(trajectory):\n\tfor point in trajectory.points:\n\t\tif len(point.velocities) != len(point.positions):\n\t\t\treturn False\n\treturn True", "def infront(self, xyz: np.ndarray, directions: bool = False) -> np.ndarray:\n dxyz = xyz if directions else xyz - self.xyz\n z = np.dot(dxyz, self.R.T[:, 2])\n return z > 0", "def is_origin(self) -> bool:\n return self.x == 0 and self.y == 0", "def isSolvableVect(a, b, c):\n return all(isSolvableBool(ai, bi, ci) for (ai, bi, ci) in zip(a, b, c))", "def right_of(self,v):\n x,y = v[0:2]\n if y < self.ylo: return False\n if y >= self.yhi: return False\n if x > self.xhi: return False\n if x > ((y * self.m) + self.b): return False\n return True", "def is_straight_line(self, arr):\n # First pair of point (x0, y0) \n x0 = arr[0][0]\n y0 = arr[0][1]\n\n # Second pair of point (x1, y1) \n x1 = arr[len(arr) - 1][0]\n y1 = arr[len(arr) - 1][1]\n\n dx = x1 - x0\n dy = y1 - y0\n\n # Loop to iterate over the points \n for i in range(len(arr)):\n x = arr[i][0]\n y = arr[i][1]\n\n if (dx * (y - y1) - dy * (x - x1)) > self.movement_tolerance:\n return False\n\n return True", "def in_box(coords, box):\n if box[0][0] < coords[0] < box[1][0] and box[1][1] < coords[1] < box[0][1]:\n return True\n return 
False", "def clockwise(p1, p2, p3):\n\tv1 = p2 - p1\n\tv2 = p3 - p2\n\tc = (v2.x * v1.y) - (v1.x * v2.y)\n\tif c > 0:\n\t\treturn True\n\telse:\n\t\treturn False", "def valid_directions(y, x):\n results = []\n if x % 2: # east side of intersection\n results.append((y, x-1, 1))\n if x+1 < M*2:\n results.append((y, x+1, 2)) # street\n else: # west side of intersection\n if x > 0:\n results.append((y, x-1, 2)) # street\n results.append((y, x+1, 1))\n if y % 2: # south side of intersection\n if y+1 < N*2:\n results.append((y+1, x, 2)) # street\n results.append((y-1, x, 1))\n else:\n results.append((y+1, x, 1))\n if y > 0:\n results.append((y-1, x, 2))\n return results", "def is_within_distance(target_location, current_location, orientation, max_distance, d_angle_th_up, d_angle_th_low=0):\n target_vector = np.array([target_location.x - current_location.x, target_location.y - current_location.y])\n norm_target = np.linalg.norm(target_vector)\n\n # If the vector is too short, we can simply stop here\n if norm_target < 0.001:\n return True\n\n if norm_target > max_distance:\n return False\n\n forward_vector = np.array(\n [math.cos(math.radians(orientation)), math.sin(math.radians(orientation))])\n d_angle = math.degrees(math.acos(np.clip(np.dot(forward_vector, target_vector) / norm_target, -1., 1.)))\n\n return d_angle_th_low < d_angle < d_angle_th_up", "def is_within_distance(target_location, current_location, orientation, max_distance, d_angle_th_up, d_angle_th_low=0):\n target_vector = np.array([target_location.x - current_location.x, target_location.y - current_location.y])\n norm_target = np.linalg.norm(target_vector)\n\n # If the vector is too short, we can simply stop here\n if norm_target < 0.001:\n return True\n\n if norm_target > max_distance:\n return False\n\n forward_vector = np.array(\n [math.cos(math.radians(orientation)), math.sin(math.radians(orientation))])\n d_angle = math.degrees(math.acos(np.clip(np.dot(forward_vector, target_vector) / norm_target, -1., 1.)))\n\n return d_angle_th_low < d_angle < d_angle_th_up", "def tooTight(self, row, col, i, j):\n return self.distanceToGoal[row + i][col] == self.infinity or \\\n self.distanceToGoal[row][col + j] == self.infinity", "def is_approaching(self, other_particle):\n if self.pos_x < other_particle.pos_x:\n d_v_x = self.velocity_x - other_particle.velocity_x\n else:\n d_v_x = other_particle.velocity_x - self.velocity_x\n\n if self.pos_y < other_particle.pos_y:\n d_v_y = self.velocity_y - other_particle.velocity_y\n else:\n d_v_y = other_particle.velocity_y - self.velocity_y\n\n return d_v_x > 0 or d_v_y > 0", "def check_neighbours(self):\n for p in self.targetCell.possibilities:\n if p != 0:\n if p not in self.targetCell.row_neighbour_possibilities:\n self.targetCell.solve(p)\n return True\n elif p not in self.targetCell.column_neighbour_possibilities:\n self.targetCell.solve(p)\n return True\n elif p not in self.targetCell.box_neighbour_possibilities:\n self.targetCell.solve(p)\n return True\n return False", "def isSolvable(self):\n tiles = []\n for i in range(len(self.tiles)):\n for j in range(len(self.tiles)):\n if self.tiles[j][1] * 3 + self.tiles[j][0] + 1 == i + 1:\n tiles.append(j + 1)\n count = 0\n for i in range(len(tiles) - 1):\n for j in range(i + 1, len(tiles)):\n if tiles[i] > tiles[j] and tiles[i] != 9:\n count += 1\n return count % 2 == 0 and count != 0", "def _check_neighbors(self):\n for direction, dir_info in self.DIRECTIONS.items():\n pos = Point(\n self.position.x + dir_info[\"mask\"][0],\n self.position.y + 
dir_info[\"mask\"][1]\n )\n status = self.move(direction)\n self.grid[status].add(pos)\n if status in (1, 2):\n # moved\n self.move(dir_info[\"opposite\"])\n yield pos", "def isInGoal(self):\n coordx= self.playerPos.x\n coordy= self.playerPos.y\n target = 0 if self.id_team == 1 else 1\n\n if((((target == 0)and (coordx<=5))|\n ((target == 1) and(coordx>145))) \n and (coordy<=50 and coordy>=40)):\n return True\n else:\n return False", "def check_path_collision(self, X):\n\n #check collision with circular obstacles\n for i in range(X.shape[1]):\n p = X[0:3,i].flatten()\n\n for obs_loc in self.obs_locs:\n if np.linalg.norm(p[0:2] - obs_loc) < self.obs_rad:\n return True\n if i>0:\n p2 = X[0:3,i-1].flatten()\n #check collision with walls\n for win in self.windows:\n if win.check_collision(p, p2):\n return True\n\n return False", "def _is_valid_move(self, vector, current_piece, other_piece):\n return True", "def within(point: tuple, box: tuple) -> bool:\r\n \r\n return box[0] < point[0] < box[2] and box[1] < point[1] < box[3]", "def verify_plane_endpoints(self):\n return [self.x0 + self.nx * self.dx, self.y0 + self.ny * self.dy, self.z0 + self.nz * self.dz]", "def in_box(coords, box):\n\tif box[0][0] < coords[0] < box[1][0] and box[1][1] < coords[1] < box[0][1]:\n\t\treturn True\n\treturn False" ]
[ "0.7193433", "0.65259945", "0.6495801", "0.6493202", "0.6424737", "0.6406378", "0.640443", "0.64031774", "0.6398309", "0.6398309", "0.6398309", "0.62759835", "0.6256729", "0.62326354", "0.6192946", "0.6183871", "0.6148293", "0.61077344", "0.6098693", "0.6095345", "0.60564905", "0.6047996", "0.6025313", "0.60187346", "0.5989811", "0.59807545", "0.5971254", "0.5966417", "0.59474695", "0.5942117", "0.5860061", "0.58599365", "0.585886", "0.58536595", "0.5835544", "0.58305556", "0.5830433", "0.58303493", "0.5815009", "0.5801189", "0.57989734", "0.57805103", "0.5749492", "0.5746343", "0.5742048", "0.5741155", "0.5740706", "0.5736277", "0.5725864", "0.57220405", "0.5721922", "0.57018757", "0.57012945", "0.5693748", "0.56911916", "0.56833154", "0.5673629", "0.56734747", "0.5670341", "0.5670166", "0.56696403", "0.5665869", "0.566167", "0.56593037", "0.5659205", "0.56478095", "0.56427324", "0.5642208", "0.56399846", "0.5639816", "0.56392944", "0.5630772", "0.5615404", "0.5614197", "0.56126434", "0.5601707", "0.5598527", "0.5596133", "0.5595507", "0.5577953", "0.5576343", "0.5575752", "0.557523", "0.5568286", "0.5566336", "0.5565962", "0.55650526", "0.5560612", "0.5560612", "0.55578625", "0.5554203", "0.5552225", "0.55495983", "0.5542392", "0.55421746", "0.5531563", "0.55262524", "0.5514593", "0.550787", "0.55047417" ]
0.72141993
0
Return the shortest path from the paths provided. This function assumes that the paths are candidate waypoint paths calculated by the is_obstacle_in_path() function
def get_min_path(self, paths):
    shortest_path = paths[0]
    shortest_distance = self.get_path_distance(paths[0])
    for path in paths[1:]:
        distance = self.get_path_distance(path)
        if distance < shortest_distance:
            shortest_path = path
            shortest_distance = distance
    return shortest_path
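A quick usage sketch of the selection loop above, written as a free function with a Euclidean get_path_distance helper; both the helper and the sample paths are assumptions for illustration (the original relies on self.get_path_distance):

import math

def get_path_distance(path):
    # Sum of straight-line segment lengths between consecutive waypoints.
    return sum(math.dist(a, b) for a, b in zip(path, path[1:]))

def get_min_path(paths):
    shortest_path = paths[0]
    shortest_distance = get_path_distance(paths[0])
    for path in paths[1:]:
        distance = get_path_distance(path)
        if distance < shortest_distance:
            shortest_path = path
            shortest_distance = distance
    return shortest_path

paths = [
    [(0, 0), (3, 4), (6, 8)],   # length 10.0
    [(0, 0), (6, 8)],           # length 10.0 (tie: the earlier path wins)
    [(0, 0), (0, 10), (6, 8)],  # length ~16.3
]
print(get_min_path(paths))  # [(0, 0), (3, 4), (6, 8)]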
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shortest_path(env, service, paths):\n for idp, path in enumerate(paths):\n if is_path_free(env.topology, path, service.number_units):\n return True, idp\n return False, env.k_paths # returns false and an index out of bounds if no path is available", "def constructShortestPath(self):\r\n sp = []\r\n v = self.t\r\n while self.preds[v]: # is not None\r\n sp.append(v)\r\n v = self.preds[v]\r\n sp.append(self.s) # source\r\n sp.reverse() # to have the path from source to dest and not t to s\r\n return sp, self.graph.getCoords(sp)", "def shortest_path(self, start: str, goal: str) -> Path:\n return next(self.bfs_paths(start, goal), [])", "def shortest_path(self, source, destination, parameter=None):\n paths = []\n for path in self.graph.shortest_paths(source, destination, parameter):\n paths.append({'hops': path})\n return jsonify({'paths': paths})", "def shortestPathsInLatency (cls, G, return_paths=False,\n id_connector_character='&'):\n exploded_G = NFFGToolBox.explodeGraphWithPortnodes(G,\n id_connector_character)\n\n exploded_dists = networkx.all_pairs_dijkstra_path_length(exploded_G,\n weight='delay')\n dists, min_dist_pairs = NFFGToolBox.extractDistsFromExploded(G,\n exploded_dists,\n id_connector_character)\n\n if return_paths:\n exploded_paths = networkx.all_pairs_dijkstra_path(exploded_G,\n weight='delay')\n paths = NFFGToolBox.extractPathsFromExploded(exploded_paths,\n min_dist_pairs,\n id_connector_character)\n return paths, dists\n else:\n return dists", "def get_shortest_as_path(self, routes) -> List[str]:\n if len(routes) <= 0:\n return []\n # start shortest path as the first route's path\n shortest_path = [routes[0]]\n # start the length of the shortest path as that\n # of the first route's path\n min_path = len(routes[0][APTH])\n # iterate through all routes in given list and\n # find the shortest AS Path\n for route in routes:\n r_len = len(route[APTH])\n if r_len < min_path:\n min_path = r_len\n shortest_path = [route]\n elif r_len == min_path:\n shortest_path.append(route)\n return shortest_path", "def find_shortest_path(self, start, end, path=[]):\n path = path+[start]\n if start == end:\n return path\n shortest_path = []\n for node in self.graph[start]:\n if node not in path:\n newpath = self.find_path(node, end, path)\n if not shortest_path or len(shortest_path) > len(newpath):\n shortest_path = newpath\n return shortest_path if shortest_path else None", "def shortest_path(N, a_0, a_1=None):\n path = HJ_path(a_1*N, a_0*N)\n path = [c/d/N for c, d in path]\n return path", "def shortest_path(self, source, target, via=None, weight='length', bbox=None):\n\n if self._graph_backend == 'networkx':\n return networkx.shortest_path(self._graph, source, target, weight=weight)\n else:\n if isinstance(via, list):\n return self._pgr.get_route(source, target, via_nodes=via, bbox_nodes=bbox)\n else:\n return self._pgr.get_route(source, target)", "def shortest_path(graph, source, target):\n return shortest_path_recursive(graph, source, target, set())", "def shortest_path_search(start, successors, is_goal):\r\n if is_goal(start):\r\n return [start]\r\n\r\n explored = set() # set of states we have visited\r\n frontier = [ [start] ] # ordered list of paths we have blazed\r\n while frontier:\r\n path = frontier.pop(0)\r\n s = path[-1]\r\n for (state, action) in successors(s).items():\r\n if state not in explored:\r\n explored.add(state)\r\n path2 = path + [action, state]\r\n if is_goal(state):\r\n return path2\r\n else:\r\n frontier.append(path2)\r\n return Fail", "def 
dijkstra_shortest_path(grid_obs, source, dest):\n #------------------------------------\n #\n # Fill and submit this code\n #\n predecessors = {source: float('inf')}\n visited_blocks = {source: 0}\n queue = PQ()\n queue.__setitem__(source, 0)\n goodIndices = []\n\n print len(grid_obs)\n\n for index in range(len(grid_obs)):\n if grid_obs[index] != \"air\":\n goodIndices.append(index)\n\n for index in goodIndices:\n if index != source:\n visited_blocks[index] = float('inf')\n\n while queue:\n blocks_to_go = []\n current_position = queue.smallest()\n del queue[current_position]\n\n for difference in [-81, -1, 1, 81]:\n if (current_position + difference) in goodIndices:\n blocks_to_go.append(current_position + difference)\n\n for block_Index in blocks_to_go:\n gap = visited_blocks[current_position] + 1\n if gap < visited_blocks[block_Index]:\n visited_blocks[block_Index] = gap\n predecessors[block_Index] = current_position\n queue.__setitem__(block_Index, gap)\n\n shortest_paths = []\n while dest != source:\n shortest_paths.append(dest)\n dest = predecessors[dest]\n shortest_paths.append(source)\n shortest_paths.reverse()\n\n return shortest_paths\n #-------------------------------------", "def get_shortest_as_path(self, routes):\n outroutes = []\n min_val = float('inf');\n # get shortest AS path first\n for r in routes:\n if len(r[MESG][APTH]) < min_val:\n min_val = len(r[MESG][APTH])\n # find all routes with that val\n for r in routes:\n if len(r[MESG][APTH]) == min_val:\n outroutes.append(r)\n\n return outroutes", "def _shortest_path(G, start, end, sp_cache):\n if (start, end) in SP_TABLE:\n return sp_cache[(start, end)]\n elif (end, start) in SP_TABLE:\n return sp_cache[(end, start)]\n else:\n D, P = _dijkstra(G, start, end)\n path = []\n temp = end\n while 1:\n path.append(end)\n if end == start: break\n end = P[end]\n path.reverse()\n sp_cache[(start, temp)] = path\n return path", "def shortest_path(M, start, goal):\n\n print(\"shortest path called\")\n\n came_from = {}\n g_score = {}\n came_from[start] = None\n g_score[start] = 0\n open_heap = []\n heappush(open_heap, (0, start))\n\n while open_heap:\n current = heappop(open_heap)[1]\n\n if current == goal:\n break\n\n for neighbor in M.roads[current]:\n new_g_score = g_score[current] + heuristic(M.intersections[current], M.intersections[neighbor])\n\n if neighbor not in g_score or new_g_score < g_score[neighbor]:\n came_from[neighbor] = current\n g_score[neighbor] = new_g_score\n heappush(open_heap, (new_g_score, neighbor))\n\n optimal_path = []\n node = goal\n\n while came_from[node]:\n optimal_path.append(node)\n node = came_from[node]\n else:\n optimal_path.append(node)\n\n optimal_path.reverse()\n\n return optimal_path", "def get_shortest_as_path(self, routes):\n # filter out any routes that don't have the shortest AS path\n outroutes = routes.copy()\n outroutes.sort(key=lambda r: len(r[MESG][APTH]))\n lowest = len(outroutes[0][MESG][APTH])\n outroutes = list(filter(lambda r: len(r[MESG][APTH]) == lowest, outroutes))\n return outroutes", "def shortestPath( self, source, target, weight = None ):\n if weight == None:\n return nx.shortest_path(self._G, source, target)\n else:\n return nx.shortest_path(self._G, source, target, weight = weight)", "def get_shortest(args_array):\n\n node, G, paths_list = args_array\n shortest_score = float(\"inf\")\n path = None\n for pred in G.predecessors(node):\n try:\n path_len,shortest_path = nx.bidirectional_dijkstra(G, node, pred, weight='cost')\n if path_len < shortest_score:\n path = 
shortest_path\n shortest_score = path_len\n except nx.exception.NetworkXNoPath:\n continue\n if path is not None: paths_list.append(path)\n # done", "def get_closest_distance_to_path(self, path):\n min_distance_to_line = float(\"inf\")\n for p in path:\n game_path = p[:]\n\n game_path.sort(key = lambda coord: calculate_distance(self, coord))\n point_A = game_path[0] # Closest point out of all the points on the path to to the tower\n\n try:\n point_after_A = p[p.index(point_A) + 1]\n point_before_A = p[p.index(point_A) - 1]\n closest_to_A = min(point_after_A, point_before_A, key = lambda point: calculate_distance(point_A, point))\n except:\n if p.index(point_A) == 0:\n closest_to_A = p[p.index(point_A) + 1]\n \n elif p.index(point_A) == len(p) - 1:\n closest_to_A = p[p.index(point_A) - 1]\n finally:\n if closest_to_A[0] != point_A[0]:\n m = (closest_to_A[1] - point_A[1]) / (closest_to_A[0] - point_A[0])\n else:\n m = 2\n\n b = point_A[1] - m * point_A[0]\n\n closest_distance = abs(-m * self.x + self.y - b) / math.sqrt((-m) ** 2 + 1)\n min_distance_to_line = min(closest_distance, min_distance_to_line)\n \n return min_distance_to_line", "def bfs_shortest_path(graph: dict=g2, start: str = \"1\", goal: str = \"4\") -> list:\n visited = []\n queue = [[start]]\n\n while queue:\n path = queue.pop(0)\n node = path[-1]\n if node not in visited:\n neighbours = graph[node]\n for neighbour in neighbours:\n new_path = path[:]\n new_path.append(neighbour)\n queue.append(new_path)\n if neighbour == goal:\n return new_path\n visited.append(node)\n # No path\n return [\"No Path\"]", "def shortest_path(source, target):\n #although lecture checks for goal when a node is popped off the frontier, efficiency of search can be improved\n #by checking for a goal as nodes are ADDED. 
If goal detected, don't add it to frontier, just return the solution\n #immediately\n\n #create start point\n start = Node(state = source, parent = None, action = None)\n frontier = QueueFrontier()\n frontier.add(start)\n\n #create explored set\n explored = set()\n\n while True:\n #if nothing left in frontier, no path exists\n if frontier.empty():\n return None\n\n #choose a node from the frontier\n node = frontier.remove()\n #if node is goal, we have solution\n\n #add neighbors 2 frontier using function THATS ALR THERE DUMMY\n for (movie, star) in neighbors_for_person(node.state):\n newNode = Node(state = star, parent = node, action=movie)\n if not frontier.contains_state(newNode) and newNode.state not in explored:\n if newNode.state == target:\n #reverse the solution\n solution = []\n while newNode.parent is not None:\n actionTuple = (newNode.action, newNode.state)\n solution.append(actionTuple)\n newNode = newNode.parent\n solution.reverse()\n return solution\n else: frontier.add(newNode)\n\n #mark state as explored\n explored.add(node.state)", "def get_attack_path(targets, map_, y, x):\n target_path = {}\n for t in targets:\n adjacent = map_.find_adjacent_open_squares(t.y, t.x)\n paths = []\n for (dy, dx) in adjacent:\n path = map_.bfs(y, x, dy, dx)\n if path is not None:\n paths.append(path)\n if not paths:\n continue\n target_path[dy, dx] = (t, min(paths, key=len))\n if not target_path:\n return None, None\n min_len = min([len(p[1]) for p in target_path.values()])\n min_paths = {k: v for (k, v) in target_path.items() if len(v[1]) == min_len}\n for k, v in sorted(min_paths.items()):\n return v[1][0]", "def shortest_path(self):\n\t\t#dict that will hold the cost of traveling to each station\n\t\t#add the initial cost of the starting station, which is 0\n\t\tD = {0:0}\n\n\t\t#add all of our dict keys (stations) to our queue\n\t\tstation_queue = self.station_graph.keys()\n\n\t\t#sort the keys! since the graph is directed and acyclic, the stations\n\t\t#can be explored one at a time, in order, without having to adjust\n\t\t#for the lowest distance value via priority queue.\n\t\t#\n\t\t#sort them with reverse=True so that they can be popped from the\n\t\t#end of the list instead of from the beginning. 
This should save\n\t\t#some cpu time.\n\t\tstation_queue.sort(reverse=True)\n\t\twhile len(station_queue) > 0:\n\n\t\t\tstation = station_queue.pop() #grab the next node in the queue\n\n\t\t\tfor next_st, next_cost in self.station_graph[station].iteritems():\n\t\t\t\t#loops through the current station's neighbors, and calculates\n\t\t\t\t#their costs from the starting node, making sure to store\n\t\t\t\t#the lowest cost in our D dict\n\t\t\t\talt = D[station] + next_cost #sum the costs\n\t\t\t\tif not D.has_key(next_st) or alt < D[next_st]:\n\t\t\t\t\t#if there is no cost on record, or if the newly calculated\n\t\t\t\t\t#cost is lower than the currently recorded one, then\n\t\t\t\t\t#record the newly calculated cost as the lowest\n\t\t\t\t\tD[next_st] = alt #set the cost to get to next_st\n\n\t\treturn D[self.final_stop]", "def shortest_path_search(start, successors, is_goal):\n if is_goal(start): return [start]\n explored = set()\n frontier = [[start]]\n while frontier:\n path = frontier.pop(0)\n s = path[-1]\n for (state,action) in successors(s).items():\n if state not in explored:\n explored.add(state)\n npath = path + [action,state]\n if is_goal(state): return npath\n else: frontier.append(npath)\n return []", "def shortest_path_search(start, successors, is_goal):\r\n if is_goal(start):\r\n return [start]\r\n explored = set()\r\n frontier = [ [start] ]\r\n while frontier:\r\n path = frontier.pop(0)\r\n s = path[-1]\r\n for (state, action) in successors(s).items():\r\n if state not in explored:\r\n explored.add(state)\r\n path2 = path + [action, state]\r\n if is_goal(state):\r\n return path2\r\n else:\r\n frontier.append(path2)\r\n return Fail", "def shortest_path_search(start, successors, is_goal):\n if is_goal(start):\n return [start]\n explored = set()\n frontier = [ [start] ] \n while frontier:\n path = frontier.pop(0)\n s = path[-1]\n for (state, action) in successors(s).items():\n if state not in explored:\n explored.add(state)\n path2 = path + [action, state]\n if is_goal(state):\n return path2\n else:\n frontier.append(path2)\n return Fail", "def FindShortestPath(graph, start, end, path=[]):\n path = path + [start]\n if start == end:\n return path\n if start not in graph:\n return None\n shortest = None\n for node in graph[start]:\n if node not in path:\n newpath = FindShortestPath(graph, node, end, path)\n if newpath:\n if not shortest or len(newpath) < len(shortest):\n shortest = newpath\n return shortest", "def build_shortest_path(self):\n # Explore paths until all nodes have been visited\n while self.nodes_heap:\n # Grab min distance node\n current_node = heappop(self.nodes_heap)\n # Grab each valid neighbor and update distance if shorter path is found\n for node_label, edge_weight in enumerate(current_node.edges):\n if edge_weight and node_label in self.nodes_heap:\n neighbor = self.nodes_heap[self.nodes_heap.index(node_label)]\n if neighbor.distance > (current_node.distance + edge_weight):\n neighbor.distance = current_node.distance + edge_weight\n neighbor.previous = current_node\n # Make sure our min heap invariant is met before continuing\n heapify(self.nodes_heap)", "def min_path(self, start, end, maxD=1e309):\n tdist, preceding_node = self.dijkstra(start, maxD)\n dist = tdist[end]\n backpath = [end]\n try:\n while end != start:\n end = preceding_node[end]\n backpath.append(end)\n path = list(reversed(backpath))\n except KeyError:\n path = None\n\n return dist, path", "def find_shortest_path(graph, start, end, path=[]):\n path = path + [start]\n if start == end:\n 
return path\n if not graph.has_key(start):\n return None\n shortest = None\n for node in graph[start]:\n if node not in path:\n newpath = find_shortest_path(graph, node, end, path)\n if newpath:\n if not shortest or len(newpath) < len(shortest):\n shortest = newpath\n return shortest", "def shortestPath(self, source, target):\n dist = {}\n prev = {}\n q = []\n for y,a in enumerate(self.sm):\n for x,b in enumerate(self.sm[y]):\n dist[(x,y)] = sys.maxint\n prev[(x,y)] = None\n q.append((x,y))\n dist[source] = 0\n\n while len(q) is not 0:\n # find the node with minimum value (u)\n d = deepcopy(dist)\n while True:\n b = dict(map(lambda item: (item[1],item[0]), d.items()))\n u = b[min(b.keys())]\n if u not in q:\n d.pop(u)\n else:\n break\n\n if dist[u] == sys.maxint: # remaining nodes are inaccessible\n break\n\n q.remove(u)\n\n\n if u == target: # target found\n break\n\n for v in self.getNeighbors(u):\n alt = dist[u] + 1\n if alt < dist[v]:\n dist[v] = alt\n prev[v] = u\n\n s = []\n u = target\n while prev[u] is not None:\n s.append(u)\n u = prev[u]\n s.reverse()\n\n return s", "def path(most_important_up, most_important_down, total_distance, to_source2, to_source1):\n\n if total_distance == min(total_distance, to_source2[0], to_source1[0]):\n return source_to_source(most_important_up, most_important_down), total_distance\n elif to_source2[0] == min(total_distance, to_source2[0], to_source1[0]):\n return most_important_to_source(to_source2[1]), to_source2[0]\n else:\n return most_important_to_source(to_source1[1], up=False), to_source1[0]", "def constructShortestPath(self):", "def one_way_path(most_important, total_distance, to_source2, to_source1):\n\n if total_distance == min(total_distance, to_source2[0], to_source1[0]):\n return most_important_to_source(most_important), total_distance\n elif to_source2[0] == min(total_distance, to_source2[0], to_source1[0]):\n return most_important_to_source(to_source2[1]), to_source2[0]\n else:\n return most_important_to_source(to_source1[1], up=False), to_source1[0]", "def find_short_path(aux_structures, loc1, loc2):\n node1 = get_closest_node(aux_structures, loc1)\n node2 = get_closest_node(aux_structures, loc2)\n p = find_min_cost_path(\n aux_structures,\n node1,\n lambda x: x == node2,\n lambda parent_id: aux_structures[parent_id]['adjacent'],\n get_dist_cost,\n lambda x: gcd_heuristic(aux_structures, x, node2))\n return get_coord_list(aux_structures, p) if p is not None else None", "def test_find_shortest_path():\n g = Graph()\n node_1 = Node({'A':['B','C']})\n g.add(node_1)\n node_2 = Node({'B':['C','D']})\n g.add(node_2)\n node_3 = Node({'C':['D']})\n g.add(node_3)\n node_4 = Node({'D':['C']})\n g.add(node_4)\n node_5 = Node({'E':['C']})\n g.add(node_5)\n\n # zero path between node_1 and node_5\n path_0 = g.find_shortest_path(node_1, node_5)\n assert path_0 == None\n # only one path between node_5 and node_4\n path_1 = g.find_shortest_path(node_5, node_4)\n assert [ node.name for node in path_1 ] == [ node_5.name, node_3.name, node_4.name ]\n # three paths between node_1 and node_3, verify the shortest one is returned\n path_3 = g.find_shortest_path(node_1, node_3)\n assert [ node.name for node in path_3 ] == [ node_1.name, node_3.name ]", "def Optimum_prun_based_routing(self, S, D, L):\n if self.has_path(S, D):\n \n Shortest_path = nx.dijkstra_path(self.G, S, D, weight='w') \n Opt_path = Shortest_path\n PathConcave_cost = self.max_path_cost(Shortest_path, 'c1') \n while len(Shortest_path) != 0:\n path_cost = 
self.additive_path_cost(Shortest_path, 'w') \n if path_cost <= L:\n \"\"\"go to concave cost\"\"\"\n PathConcave_cost = self.max_path_cost(Shortest_path, 'c1') \n self.G = self.rm_edge_constraint(PathConcave_cost) # remove all links where the concave link is greater than PathConcave_cost\n \n Opt_path = Shortest_path\n if self.has_path(S, D):\n Shortest_path = nx.dijkstra_path(self.G, S, D, weight='w')\n else:\n Shortest_path = [] \n else:\n break \n else:\n self.logger.info('No path from %s to %s', S, D)\n PathConcave_cost = 0\n Opt_path = []\n return PathConcave_cost, Opt_path", "def _bidirectional_shortest_path(G, source, target, exclude):\n # call helper to do the real work\n results = _bidirectional_pred_succ(G, source, target, exclude)\n pred, succ, w = results\n\n # build path from pred+w+succ\n path = []\n # from source to w\n while w is not None:\n path.append(w)\n w = pred[w]\n path.reverse()\n # from w to target\n w = succ[path[-1]]\n while w is not None:\n path.append(w)\n w = succ[w]\n\n return path", "def findFirstShortestPath(self, source, dest):\n\n # Used to initialize or reinitialize the algorithm\n # Computes the shortest path via Dijkstra\n\n self.kPath = None\n self.pathHeap = []\n self.pathList = []\n self.source = source\n self.dest = dest\n\n # Compute the shortest path\n # nodeList = nx.dijkstra_path(self.g, source, dest, self.wt)\n\n alg = ModifiedDijkstra(self.g, self.wt)\n nodeList = alg.getPath(source, dest, as_nodes=True)\n if len(nodeList) == 0:\n return None\n deletedLinks = set()\n self.kPath = WeightedPath(nodeList, deletedLinks, self.g, wt=self.wt,\n cap=self.cap)\n self.kPath.dNode = source\n self.pathList.append(self.kPath)\n return self.kPath", "def get_min_path(self, node):\r\n if self.have_min_distance(node):\r\n path = []\r\n while node != self.start:\r\n path.insert(0, node)\r\n node = self.table[node][\"prev\"]\r\n path.insert(0, node)\r\n return path\r\n return None", "def shortest_path(self, distance_graph: dict) -> list:\n # Start from start pos\n start = self.start\n path = []\n\n node = start\n while True:\n # Default min point is point itself\n min_node = node\n min_val = distance_graph[node]\n # Find a neighbor that has lowest distance from center\n for neighbor in self.graph[node]:\n # If neighbor is not mapped in distanceGraph then it is a member that is far away\n if neighbor not in distance_graph:\n continue\n val = distance_graph[neighbor]\n if min_val > val:\n min_val = val\n min_node = neighbor\n node = min_node\n # Add node to path\n path.append(node)\n\n if min_val == 0:\n # Center found\n break\n return path", "def find_shortest_path(graph, current, destination, path=[]):\n\n # create a new list so not to mess up the old ones in the stack \n path = path + [current]\n\n # base case: reached destination\n if current == destination:\n return path\n\n # base case: dead end\n if current not in graph:\n return None\n\n # difference: keep track of shortest path\n shortest = None\n\n # recursively progress for each possible step forward\n for node in graph[current]:\n if node not in path: \n newpath = find_shortest_path(graph, node, destination, path)\n # if recursion returns a path\n if newpath:\n # if we don't have shortest yet, or newpath is shorter\n if not shortest or len(newpath) < len(shortest):\n shortest = newpath\n\n return shortest", "def get_next_unvisited(visited, shortest_paths):\n min_dist = math.inf\n min_node = None\n for node_id in shortest_paths:\n dist = shortest_paths[node_id][0]\n if dist < min_dist and 
visited[node_id] == False:\n min_dist = dist\n min_node = node_id\n return min_node", "def min_path(vs, es, source, target):\n dijkstra(vs, es, source, stop = target)\n test = target\n result = []\n while test != source:\n e = test._ss_edge\n result.append(e)\n test = e.v1 if e.v1 != test else e.v2\n assert test == source and test._ss_edge is None\n return result[::-1]", "def shortest_path(graph, src, dest, modifiers):\r\n # Distances to source node\r\n distances = {vertex: float(\"inf\") for vertex in range(graph.num_vertices)}\r\n # Previous node in optimal path\r\n previous = {vertex: -1 for vertex in range(graph.num_vertices)}\r\n # Shortest path from source to source is 0\r\n distances[src] = 0\r\n # Initialize priority queue and vertex set\r\n pqueue = [(distances[src], src)]\r\n vertex_set = {src}\r\n\r\n while len(pqueue) != 0:\r\n vertex_added = False\r\n curr = heappop(pqueue)[1]\r\n vertex_set.remove(curr)\r\n for neighbor in graph.outgoing(curr):\r\n alt = distances[curr] + weight(neighbor, modifiers)\r\n other = neighbor.other(curr) # Opposite vertex\r\n if alt < distances[other]:\r\n distances[other] = alt\r\n previous[other] = curr\r\n if other not in vertex_set:\r\n vertex_added = True\r\n pqueue.append((alt, other))\r\n vertex_set.add(other)\r\n if vertex_added:\r\n heapify(pqueue)\r\n\r\n # Shortest path\r\n shortest_path = []\r\n shortest_path_distance = distances[dest]\r\n\r\n # Traverse previous[] to look for shortest path to target\r\n current_node = dest\r\n while previous[current_node] != -1:\r\n shortest_path.append(current_node)\r\n current_node = previous[current_node]\r\n if len(shortest_path) != 0:\r\n shortest_path.append(current_node)\r\n shortest_path.reverse()\r\n\r\n return shortest_path, shortest_path_distance", "def path(l_s, l_f, leg_list):\n # candidates =\n return min(heuristic_path([l_s], l_f, leg_list, []), key = len)", "def getPath(\n self,\n source,\n dest,\n as_nodes=False,\n ):\n\n self.dist = {} # A map from nodes to their labels (float)\n self.predecessor = {} # A map from a node to a node\n\n # Initialize the distance labels to \"infinity\"\n\n vertices = self.g.nodes()\n for vertex in vertices:\n self.dist[vertex] = self.inf\n self.predecessor[vertex] = source\n\n # Further set up the distance from the source to itself and\n # to all one hops away.\n\n self.dist[source] = 0.0\n if self.g.is_directed():\n outEdges = self.g.out_edges([source])\n else:\n outEdges = self.g.edges([source])\n for edge in outEdges:\n self.dist[edge[1]] = self.g[edge[0]][edge[1]][self.wt]\n\n s = set(vertices)\n s.remove(source)\n currentMin = self._findMinNode(s)\n if currentMin == None:\n return None\n s.remove(currentMin)\n while currentMin != dest and len(s) != 0 and currentMin != None:\n if self.g.is_directed():\n outEdges = self.g.out_edges([currentMin])\n else:\n outEdges = self.g.edges([currentMin])\n for edge in outEdges:\n opposite = edge[1]\n if self.dist[currentMin] + self.g[edge[0]][edge[1]][self.wt] \\\n < self.dist[opposite]:\n self.dist[opposite] = self.dist[currentMin] \\\n + self.g[edge[0]][edge[1]][self.wt]\n self.predecessor[opposite] = currentMin\n s.add(opposite)\n\n currentMin = self._findMinNode(s)\n\n # print \"Current min node {}, s = {}\".format(currentMin, s)\n\n if currentMin == None:\n return None\n s.remove(currentMin)\n\n # Compute the path as a list of edges\n\n currentNode = dest\n predNode = self.predecessor.get(dest)\n node_list = [dest]\n done = False\n path = []\n while not done:\n path.append((predNode, currentNode))\n 
currentNode = predNode\n predNode = self.predecessor[predNode]\n node_list.append(currentNode)\n done = currentNode == source\n node_list.reverse()\n if as_nodes:\n return node_list\n else:\n return path", "def getShortestPath(self, src, dest):\n vertices = self.floorGraph.getVertList()\n unvisitedQueue = []\n srcPath = Path()\n srcPath.addNode(src)\n srcPath.pathValue = 0\n unvisitedQueue.append(srcPath)\n connections = self.floorGraph.getVertex(src).getConnections()\n #initialisez distances\n for vertex in vertices:\n newPath = Path()\n newPath.nodeList = list(srcPath.nodeList)\n newPath.addNode(vertex)\n if self.floorGraph.getVertex(vertex) in connections:\n newPath.pathValue = self.floorGraph.getVertex(src).getWeight(self.floorGraph.getVertex(vertex))\n unvisitedQueue.append(newPath)\n else:\n newPath.pathValue = math.inf\n self.shortestDistanceMap[src+vertex] = newPath\n # updates distances as per shorter routes\n while len(unvisitedQueue) is not 0:\n unvisitedQueue = sorted(unvisitedQueue, key=functools.cmp_to_key(compareNodes))\n chkPath = unvisitedQueue.pop(0)\n chkNode = chkPath.nodeList[len(chkPath.nodeList)-1]\n for vertex in vertices:\n if(self.floorGraph.getVertex(vertex) in self.floorGraph.getVertex(chkNode).getConnections()):\n newWeight = chkPath.pathValue + self.floorGraph.getVertex(chkNode).getWeight(self.floorGraph.getVertex(vertex))\n if(newWeight < self.shortestDistanceMap[src+vertex].pathValue):\n self.shortestDistanceMap[src+vertex].pathValue = newWeight\n self.shortestDistanceMap[src+vertex].nodeList = list(chkPath.nodeList)\n self.shortestDistanceMap[src+vertex].nodeList.append(vertex)\n newPath = Path()\n newPath.nodeList = list(self.shortestDistanceMap[src+vertex].nodeList)\n newPath.pathValue = newWeight\n unvisitedQueue.append(newPath)\n print(self.shortestDistanceMap[src+dest].nodeList)\n print(self.shortestDistanceMap[src+dest].pathValue)", "def findShortestPath(self):\r\n pass", "def find_best_path(self, paths, sw, util, duration, time_now):\n bestpath = None\n bestpathmetric = None # [0,1] lower means better path\n bestpathlen = None # lower -> better path\n candidatepaths = []\n \n assert len(paths) == 2\n \n path_to_shift, shift_by = self.calculate_what_to_shift(paths, sw)\n\n pathmetrics = {}\n paths_by_length = {}\n metrics = []\n metricpaths = {}\n for path in paths:\n metric, length = self.compute_path_metric(sw, path, 0, 0, local_contrib=True)\n paths_by_length[length] = path\n metrics.append(metric)\n assert metric >= 0 \n pathmetrics[\" \".join(path)] = metric\n metricpaths[metric] = path\n\n logging.debug(\"SS FBP PATH METRICS:, %s\", str(metricpaths))\n if path_to_shift == None:\n # return shortest path\n logging.debug(\"SS FBP Returning LOCAL: %s\", str((paths_by_length[min(paths_by_length.keys())],0)))\n return (paths_by_length[min(paths_by_length.keys())], 0)\n \n \n path_to_shift_metric = pathmetrics.pop(\" \".join(path_to_shift))\n path_to_receive_metric = pathmetrics.pop(pathmetrics.keys()[0])\n logging.debug(\"SS FBP Path to Recv: %s\", str(metricpaths[path_to_receive_metric]))\n\n if (path_to_receive_metric == 0):\n logging.debug(\"SS FBP EARLY Returning : %s\", str((metricpaths[min(metrics)], 0)))\n return (metricpaths[min(metrics)], 0)\n else:\n current_ratio = path_to_shift_metric * 1.0 / path_to_receive_metric\n\n logging.debug(\"SS FBP CURRENT RATIO: %s\", str(current_ratio))\n\n\n goal_path_to_shift_metric = path_to_shift_metric * (1 - (shift_by * self.alpha))\n goal_path_to_receive_metric = path_to_receive_metric + 
(path_to_shift_metric * (shift_by * self.alpha))\n\n if (goal_path_to_receive_metric == 0):\n # large number for practical purposes\n goal_ratio = 100000\n else:\n goal_ratio = goal_path_to_shift_metric * 1.0 / goal_path_to_receive_metric\n\n logging.debug(\"SS FBP GOAL RATIO: %s\", str(goal_ratio))\n\n # FINALLY DECIDE WHICH PATH TO RETURN BASED ON GOAL-Current RATIO\n if goal_ratio - current_ratio < 0:\n # return path with lower utiliztion\n logging.debug(\"SS FBP LOWER Returning : %s\", str((metricpaths[min(metrics)], 0)))\n return (metricpaths[min(metrics)], 0)\n \n if goal_ratio - current_ratio > 0:\n # return path with higher utilization\n logging.debug(\"SS FBP HIGHER Returning : %s\", str((metricpaths[max(metrics)], 0)))\n return (metricpaths[max(metrics)], 0)\n\n if goal_ratio - current_ratio == 0:\n # return shortest path\n logging.debug(\"SS FBP Returning LOCAL: %s\",\n str((paths_by_length[min(paths_by_length.keys())], 0)))\n return (paths_by_length[min(paths_by_length.keys())], 0)", "def bfsShortestPath(graph, start, goal):\n\n # set up a path list\n path = [start]\n\n # return a simple path if start is the goal\n if start == goal:\n return path\n\n # list to keep track of all visited nodes\n explored = []\n\n # the FIFO queue\n queue = []\n\n # add the first path to the queue\n queue.append(path)\n\n # keep looping until there are no nodes still to be checked\n while len(queue) > 0:\n\n # pop first item from queue (FIFO)\n path = queue.pop(0)\n\n # retrieve the last node from the path list\n node = path[-1]\n\n # check if the node has already been explored\n if node not in explored:\n\n # add node to list of checked nodes\n explored.append(node)\n\n # get neighbours if node is present, otherwise default to empty list\n neighbours = graph.get(node, [])\n\n # go through all neighbour nodes\n for neighbour in neighbours:\n # make a copy of the current path\n path1 = path[:]\n\n # add this neighbour to the path\n path1.append(neighbour)\n\n # return path if neighbour is goal\n if neighbour == goal:\n return path1\n\n # push it onto the queue for further exploration\n queue.append(path1)\n\n # we couldn't find the goal... 
:(\n return None", "def find_shortest_path(g, n, s, e):\n dist, prev = lazy_dijkstra(g, n, s)\n path = []\n if (dist[e] == inf):\n return path\n # loop backwards from the end vertex\n path.append(e)\n i = e\n # prev[i] == None corresponds to the start node\n while prev[i] != None:\n path.append(prev[i])\n i = prev[i]\n return list(reversed(path))", "def topo_shortestpathij(self, i, j):\n pathlist = []\n self.pathij(i, j, pathlist)\n distance = []\n \n for i in range(len(pathlist)):\n distance.append(len(pathlist[i]) - 1)\n \n if(len(distance) == 0):\n return None\n else:\n return min(distance)", "def get_shortest_path(graph, origin, destination):\n visited, paths = dijkstra(graph, origin)\n full_path = deque()\n if destination in paths: \n _destination = paths[destination]\n \n while _destination != origin:\n full_path.appendleft(_destination)\n _destination = paths[_destination]\n\n full_path.appendleft(origin)\n full_path.append(destination)\n\n return list(full_path)", "def shortest_path(edges, start, end):\n visitedNodes = []\n queue = [[start]]\n if start == end:\n return [start]\n \n while queue:\n path = queue.pop(0)\n node = path[-1]\n if node not in visitedNodes:\n neighbors = get_neighbors(edges, node)\n for neighbor in neighbors:\n newPath = list(path)\n newPath.append(neighbor)\n queue.append(newPath)\n if neighbor == end:\n return fix_format(edges, newPath)\n visitedNodes.append(node)\n return None", "def get_shortest_path(self, src, dst):\n \n return self.get_sorted_paths(src, dst)[0]", "def compute_shortest_routes(start_ids, dest_ids, G):\n return ox.distance.shortest_path(G, start_ids, dest_ids, weight='travel_time', cpus=6)", "def shortest_flight(g):\n min_distance = sys.maxsize\n min_destination = None\n min_key = None\n \n for key in g.city_dict:\n for flight in g.city_dict[key].get_flights_out():\n if(flight[1] < min_distance):\n min_key = key\n min_destination = flight[0]\n min_distance = flight[1]\n return g.city_dict[min_key].get_name(), min_destination, min_distance", "def shortest(graph, a, b):\n try:\n s = nx.shortest_path(graph, a, b)\n return s\n except BaseException:\n # try traversing the DiGraph backwards\n s = nx.shortest_path(graph, b, a)\n return s[::-1]", "def optimized_path(coords, startid, mask):\n coords = np.column_stack((coords, mask))\n pass_by = np.asarray(coords)\n path = [coords[startid]]\n pass_by = np.delete(pass_by, startid, axis=0)\n while pass_by.any():\n nearest_id, nearest = min(\n enumerate(pass_by), key=lambda x: distance(path[-1][:2], x[1][:2]))\n path.append(nearest)\n pass_by = np.delete(pass_by, nearest_id, axis=0)\n\n return path", "def shortest(self, from_node, to_node):\n print \"Shortest path from {} to {}\".format(from_node.name, to_node.name)\n current = from_node\n solution = {current.name: 0}\n visited = []\n if from_node.name == to_node.name:\n return \"No route necessary\"\n\n while current:\n if current.name == to_node.name:\n return \"Solution {}\".format(solution.get(to_node.name))\n\n for edge in current.edges:\n # look at routes from this node\n if edge.from_node.name != current.name:\n continue\n weight = (solution.get(edge.from_node.name) or 0) + edge.weight\n if not solution.get(edge.to_node.name):\n solution.update({edge.to_node.name: weight})\n elif solution.get(edge.to_node.name) > weight:\n solution.update({edge.to_node.name: weight})\n\n # find the lowest weight, go to that node next\n lowest = None\n next_node = None\n for node_name, weight in solution.iteritems():\n if node_name in visited:\n continue\n if 
lowest is None or weight < lowest:\n lowest = weight\n next_node = self.graph.nodes.get(node_name)\n visited.append(current.name)\n current = next_node\n return \"No solution\"", "def find_shortest_path(self, box_indicator):\n # To be implemented\n dict = {}\n shortest_path = []\n visited = set()\n queue = deque()\n queue.append(self.grid_pos)\n goal_node = None\n while queue:\n node = Vec2d(queue.popleft())\n if node == self.get_target_tile().int_tuple:\n goal_node = node.int_tuple\n break\n for neighbor in self.get_tile_neighbors(node, box_indicator):\n neighbor = neighbor.int_tuple\n if neighbor not in visited:\n queue.append(neighbor)\n visited.add(neighbor)\n dict[neighbor] = node.int_tuple\n if goal_node is None:\n return deque([])\n else:\n key = goal_node\n while key != self.grid_pos.int_tuple:\n shortest_path.append(Vec2d(key))\n parent_node = dict[key]\n key = parent_node\n return deque(shortest_path)", "def fastest_path_estimation(sol):\n\n class Path:\n def __init__(self, places, graph):\n self.g = 0 # current cost\n self.graph = graph\n self.visited = [places[0]] # list of already visited attractions\n self.not_visited = copy.deepcopy(places[1:]) # list of attractions not yet visited\n\n def __lt__(self, other):\n return self.g < other.g\n\n def add(self, idx):\n # add the cost\n self.g += self.graph[self.visited[-1], idx]\n # add the to the visited place and remove from the unvisited places\n self.visited.append(idx)\n self.not_visited.remove(idx)\n\n def add_to_heap_queue(path):\n # custom function to add to heap queue sorted by the solution's cost\n heappush(h_queue, path)\n\n if len(sol.not_visited) == 0:\n return 0\n elif len(sol.not_visited) == 1:\n return sol.graph[sol.visited[-1], sol.not_visited[0]]\n\n c = sol.visited[-1]\n pm = sol.not_visited[-1]\n # the heap queue of solution sorted by their cost - change all to tuples with g for dijkstra\n h_queue = []\n\n # the places to use for the graph\n sub_search_places = [c]\n sub_search_places.extend(sol.not_visited)\n\n # push the first \"node\" in the queue\n add_to_heap_queue(Path(sub_search_places, sol.graph))\n while True:\n # take the next solution with the shortest cost\n path = heappop(h_queue)\n # if it contains destination, stop and return that solution\n if pm in path.visited:\n return path.g\n # create a new solution for each neighbor of the current vertex and add it to heap queue\n for place in path.not_visited:\n new_path = copy.deepcopy(path)\n new_path.add(place)\n add_to_heap_queue(new_path)", "def shortestPath(G: Matrix,\n sourceNode: int,\n **kwargs: Dict[str, VALID_INPUT_TYPES]):\n\n params_dict = {'G': G, 'sourceNode': sourceNode}\n params_dict.update(kwargs)\n return Matrix(G.sds_context,\n 'shortestPath',\n named_input_nodes=params_dict)", "def shorter_path(start, goal):\n if start == goal:\n return [start]\n explored = set() \n queue = [ [start] ] \n while queue:\n path = queue.pop(0)\n s = path[-1]\n for state, action in bj_subway[s].items():\n if state not in explored:\n explored.add(state)\n path2 = path + [action, state]\n if state == goal:\n\t\t\t\t\t# print path2\n\t\t\t\t\t# for x in queue:\n\t\t\t\t\t# print x\n\t\t\t\t\treturn path2\n else:\n queue.append(path2)\n return []", "def get_shortest_as_path(self, routes):\n if len(routes) == 0:\n return []\n\n min_len = min([ len(route[\"msg\"][\"ASPath\"]) for route in routes ])\n outroutes = [ route for route in routes if len(route[\"msg\"][\"ASPath\"]) == min_len ]\n return outroutes", "def test_correctly_identify_shortest_path(self):\r\n\r\n 
# Adjacency List of graph G\r\n G = {}\r\n G[0] = [1, 2]\r\n G[1] = [0, 3]\r\n G[2] = [0, 3, 4]\r\n G[3] = [1, 2, 4, 5]\r\n G[4] = [2, 3, 5]\r\n G[5] = [4, 5]\r\n\r\n # Start node\r\n s = 0\r\n \r\n dist = BFS.BFSShortestPath(G, s)\r\n expDist = {0:0, 1:1, 2:1, 3:2, 4:2, 5:3}\r\n \r\n self.assertEqual(expDist, dist)", "def nearest_neighbor_tsp(shortest_paths, starting_point=0):\n number_of_nodes = len(shortest_paths)\n unvisited_nodes = list(range(number_of_nodes))\n unvisited_nodes.remove(starting_point)\n visited_nodes = [starting_point]\n\n while number_of_nodes > len(visited_nodes):\n neighbor_distances = pd.Series(shortest_paths[visited_nodes[-1]])\n neighbor_distances = neighbor_distances[(neighbor_distances > 0) &\n (neighbor_distances.index\n .isin(set(unvisited_nodes)))]\n next_node = neighbor_distances.idxmin()\n visited_nodes.append(next_node)\n unvisited_nodes.remove(next_node)\n return visited_nodes", "def getNextShortestPath(self):\n\n if self.kPath == None:\n raise UserWarning('Must call findFirstShortestPath before this method or no path exists'\n )\n\n # Iterate over all the nodes in kPath from dNode to the node before the destination\n # and add candidate paths to the path heap.\n\n kNodes = self.kPath.nodeList\n index = kNodes.index(self.kPath.dNode)\n curNode = kNodes[index]\n while curNode != self.dest:\n self._removeEdgesNodes(curNode)\n candidate = self._computeCandidatePath(curNode)\n self._restoreGraph()\n if candidate != None:\n heapq.heappush(self.pathHeap, candidate)\n index = index + 1\n curNode = kNodes[index]\n\n if len(self.pathHeap) == 0:\n return None\n p = heapq.heappop(self.pathHeap) # after iterations contains next shortest path\n self.pathList.append(p)\n self.kPath = p # updates the kth path\n return p", "def shortestpath(graph,start,end,visited=[],distances={},predecessors={}):\r\n # detect if first time through, set current distance to zero\r\n try:\r\n if not visited: \r\n distances[start]=0\r\n # if we've found our end node, find the path to it, and return\r\n if start==end:\r\n path=[]\r\n while end != None:\r\n path.append(end)\r\n end=predecessors.get(end,None)\r\n return distances[start], path[::-1]\r\n # process neighbors as per algorithm, keep track of predecessors\r\n for neighbor in graph[start]:\r\n if neighbor not in visited:\r\n neighbordist = distances.get(neighbor,float(math.inf))\r\n tentativedist = distances[start] + graph[start][neighbor]\r\n if tentativedist < neighbordist:\r\n distances[neighbor] = tentativedist\r\n predecessors[neighbor]=start\r\n # neighbors processed, now mark the current node as visited \r\n visited.append(start)\r\n # finds the closest unvisited node to the start \r\n unvisiteds = dict((k, distances.get(k,float(math.inf))) for k in graph if k not in visited)\r\n closestnode = min(unvisiteds, key=unvisiteds.get)\r\n # now take the closest node and recurse, making it current\r\n except:\r\n pygame.time.delay(700)\r\n SCREEN.fill((0,0,0))\r\n displayText(\"Path Not Found\",400,200)\r\n displayText(\"Try Again....!!\",400,240)\r\n pygame.display.update()\r\n pygame.time.delay(1500)\r\n pygame.quit()\r\n sys.exit() \r\n return shortestpath(graph,closestnode,end,visited,distances,predecessors)", "def find_path(sources, goals, connections):\n visited = set()\n expanded = set()\n queue = deque()\n\n for s in sources:\n queue.appendleft([s])\n\n while queue:\n path = queue.pop()\n head = path[-1]\n visited.add(head)\n\n neighbours = [o for (i, o) in connections if i == head]\n for neighbour in neighbours:\n if 
neighbour in goals:\n return path + [neighbour]\n elif neighbour not in visited:\n queue.appendleft(path + [neighbour])\n\n return []", "def all_shortest_paths(self, start_node, end_node):\n s=self.min_dist(start_node,end_node)\n return self.all_paths(start_node,end_node,s,[])", "def shortest_path_to_root(self):\n paths = self.hypernym_paths()\n shortest = paths.index(min([len(path) for path in paths]))\n return paths[shortest]", "def shortest_path(self, source, target):\r\n key = self.d.keys()\r\n #check that endpoints are in graph\r\n if source not in key or target not in key:\r\n raise KeyError(str(source) + \" and \" + str(target) + \" must be in graph\")\r\n #initialize V,Q and M\r\n V = []\r\n vis = dict()\r\n Q = deque()\r\n Q.append(source)\r\n M = set(source)\r\n #while target has not been visited\r\n while target not in M:\r\n #take first element of Q\r\n current = Q.popleft()\r\n #add element to visited\r\n V.append(current)\r\n neighbors = self.d[current]\r\n #for each neighbor of element\r\n for n in neighbors:\r\n #if element has not been checked, add it to queue\r\n #also save traveled edge in visited\r\n if n not in M:\r\n Q.append(n)\r\n vis.update({n:current})\r\n M.add(n)\r\n L = [target]\r\n #reverse the order of the traveled edges\r\n while L[-1] in vis.keys():\r\n L.append(vis[L[-1]])\r\n return L[::-1]", "def shortest_path(graph, start, end):\n nodes_to_visit = {start}\n visited_nodes = set()\n # Distance from start to start is 0\n distance_from_start = {start: 0}\n predecessors = {} # Store previous node for shortest route for each node\n\n while nodes_to_visit:\n # Get node with smallest weight\n current = min(\n [(distance_from_start[node], node) for node in nodes_to_visit]\n )[1]\n\n # If the end is reached, quit\n if current == end:\n break\n\n nodes_to_visit.discard(current)\n visited_nodes.add(current)\n\n edges = graph[current]\n unvisited_neighbours = set(edges).difference(visited_nodes)\n for neighbour in unvisited_neighbours:\n neighbour_distance = distance_from_start[current] + \\\n edges[neighbour]\n if neighbour_distance < distance_from_start.get(neighbour,\n float('inf')):\n distance_from_start[neighbour] = neighbour_distance\n predecessors[neighbour] = current\n nodes_to_visit.add(neighbour)\n\n return _deconstruct_path(predecessors, end)", "def get_shortest_path(self, r_start, r_goal):\n neighbors = [(0, 1), (0, -1), (1, 0), (-1, 0), (1, 1), (1, -1), (-1, 1), (-1, -1)]\n start = (int(r_start[0] / Map.RESOLUTION), int(r_start[1] / Map.RESOLUTION))\n goal = (int(r_goal[0] / Map.RESOLUTION), int(r_goal[1] / Map.RESOLUTION))\n close_set = set()\n came_from = {}\n gscore = {start: 0}\n fscore = {start: Map._heuristic(start, goal)}\n oheap = []\n\n heappush(oheap, (fscore[start], start))\n\n while oheap:\n current = heappop(oheap)[1]\n\n if current == goal:\n data = []\n while current in came_from:\n data.append((int((current[0] * Map.RESOLUTION) + (Map.RESOLUTION / 2)),\n int((current[1] * Map.RESOLUTION) + (Map.RESOLUTION / 2))))\n current = came_from[current]\n data.reverse()\n return data\n\n close_set.add(current)\n for i, j in neighbors:\n neighbor = current[0] + i, current[1] + j\n tentative_g_score = gscore[current] + Map._heuristic(current, neighbor)\n if 0 <= neighbor[0] < self.col_grid.shape[0]:\n if 0 <= neighbor[1] < self.col_grid.shape[1]:\n if self.col_grid[neighbor[0]][neighbor[1]] == 1:\n continue\n else:\n # array bound y walls\n continue\n else:\n # array bound x walls\n continue\n\n if neighbor in close_set and tentative_g_score >= 
gscore.get(neighbor, 0):\n continue\n if tentative_g_score < gscore.get(neighbor, 0) or neighbor not in [i[1] for i in oheap]:\n came_from[neighbor] = current\n gscore[neighbor] = tentative_g_score\n fscore[neighbor] = tentative_g_score + Map._heuristic(neighbor, goal)\n heappush(oheap, (fscore[neighbor], neighbor))\n\n return []", "def shortestPath(G,start,end):\n\n D,P = Dijkstra(G,start)\n Path = []\n while 1:\n Path.append(end)\n if end == start: break\n end = P[end]\n Path.reverse()\n return Path", "def find_paths(start, current, distance, paths, choices):\r\n # Find all paths resulting in the minimum distance\r\n options = []\r\n min_distance = min(paths[current].values())\r\n for option, distance in paths[current].items():\r\n if distance == min_distance:\r\n\r\n # If we find the beginning, break out\r\n if option == start:\r\n if option not in choices or choices[current] < distance + min_distance:\r\n choices[current] = distance + min_distance\r\n return\r\n\r\n # Add to list of options\r\n options.append(option)\r\n\r\n # For each path, recursively find minimal paths\r\n for option in options:\r\n find_paths(start, option, min_distance, paths, choices)", "def least_cost_path(G, start, dest, cost):\n\n # Create a priority queue\n todo = pqueue.PQueue()\n todo.update(start, 0);\n\n # v in visited when the vertex v's least cost from start has been determined\n visited = set()\n\n # parent[v] is the vertex that just precedes v in the path from start to v\n parent = {}\n\n while todo and (dest not in visited):\n\n # priority queue operation\n # remove smallest estimated cost vertex from todo list\n (cur, c) = todo.pop_smallest()\n\n # it is now visited, and will never have a smaller cost\n visited.add(cur)\n\n for n in G.adj_to(cur):\n if n in visited: continue\n if todo.update(n, c+cost((cur,n))):\n parent[n] = cur\n\n # now, if there is a path, extract it. 
The graph may be disconnected\n # so in that case return None\n if dest not in visited:\n return None\n\n path = [dest]\n cur = dest\n while start not in path:\n cur = parent[cur]\n path.append(cur)\n\n path.reverse()\n return path", "def _find_fastest_path(self):\n from simulator import Robot\n clone_robot = Robot(exploration_status=self._robot.exploration_status,\n facing=self._robot.facing,\n discovered_map=self._robot.discovered_map,\n real_map=[[0] * 15 for _ in range(20)])\n\n fastest_path_start_way_point = get_shortest_path_moves(clone_robot,\n start=(1, 1),\n goal=self._way_point)\n\n if fastest_path_start_way_point:\n for move in fastest_path_start_way_point:\n clone_robot.move_robot(move)\n\n before_way_point = previous_cell(clone_robot.center, clone_robot.facing)\n\n fastest_path_way_point_goal = get_shortest_path_moves(clone_robot,\n start=self._way_point,\n goal=(18, 13),\n before_start_point=before_way_point)\n\n return fastest_path_start_way_point + fastest_path_way_point_goal", "def find_min_cost_path(data, start, is_goal, get_children, cost, heuristic=lambda x: 0):\n paths = Heap('min', heuristic(start), ([start], 0)) # Min heap of paths and their respective costs (sorted by heuristic cost)\n seen = set() # Set of nodes we've already found shorter paths to\n\n # T H E S E A R C H L O O P B E G I N S\n while not paths.empty():\n next_path = paths.next() # get the minimum cost path (heuristic cost, (path, path cost))\n min_cost_path = next_path[1][0]\n min_cost = next_path[1][1]\n terminal_node = min_cost_path[-1]\n\n while terminal_node in seen:\n # If we've already found a path to the same node with a lower cost, we pick a new next_path\n if paths.empty():\n # If we run out of paths to search, we return nothing\n return None\n next_path = paths.next()\n min_cost_path = next_path[1][0]\n min_cost = next_path[1][1]\n terminal_node = min_cost_path[-1]\n\n if is_goal(terminal_node):\n return min_cost_path\n\n seen.add(terminal_node)\n children = get_children(terminal_node)\n for c in children:\n if c not in seen:\n # If this child does not have an existing path to it already, we build a\n # data structure for it and at it to our min heap\n path_to_c = min_cost_path + [c]\n c_cost = min_cost + cost(data, terminal_node, c)\n c_heuristic = c_cost + heuristic(c)\n paths.add(c_heuristic, (path_to_c, c_cost))\n # T H E S E A R C H L O O P E N D S\n\n return None # We failed to find a path to the goal node. Very sad. 
Return nothing :(", "def find_closest_path(self):\n\t\tclosest_distance = sys.maxint\n\t\tclosest_path = 0\n\t\tbike_position = (self.map_model.bike.xB, self.map_model.bike.yB)\n\t\tfor path_index in range(len(self.map_model.paths)):\n\t\t\tnearest_point = geometry.nearest_point_on_path(self.map_model.paths[path_index], bike_position)\n\t\t\tdistance_to_bike = geometry.distance(nearest_point, bike_position)\n\t\t\tif (closest_distance > distance_to_bike):\n\t\t\t\tclosest_distance = distance_to_bike\n\t\t\t\tclosest_path = path_index \n\t\tdisp_next = self.displacement_to_turn(target_path = (closest_path+1)%len(self.map_model.paths))\n\t\ttarget_path = (closest_path+1)%len(self.map_model.paths)\n\t\tdistance_next = geometry.distance_from_path(bike_position, self.map_model.paths[target_path])\n\t\tif disp_next - np.abs(distance_next)>-0.01:\n\t\t\tclosest_path = np.mod(closest_path + 1,len(self.map_model.paths))\n\t\treturn closest_path", "def find_next_step(start, end, paths):\r\n def find_paths(start, current, distance, paths, choices):\r\n \"\"\"\r\n Given the start point, and the current point, builds a dictionary indicating the first step\r\n and the minimum distance to the end using that step. Distance indicates the distance from\r\n current to end.\r\n \"\"\"\r\n # Find all paths resulting in the minimum distance\r\n options = []\r\n min_distance = min(paths[current].values())\r\n for option, distance in paths[current].items():\r\n if distance == min_distance:\r\n\r\n # If we find the beginning, break out\r\n if option == start:\r\n if option not in choices or choices[current] < distance + min_distance:\r\n choices[current] = distance + min_distance\r\n return\r\n\r\n # Add to list of options\r\n options.append(option)\r\n\r\n # For each path, recursively find minimal paths\r\n for option in options:\r\n find_paths(start, option, min_distance, paths, choices)\r\n\r\n choices = {}\r\n find_paths(start, end, 0, paths, choices)\r\n choices = sorted(choices.keys())\r\n return choices[0]", "def path(self, first, second):\r\n if not((0 <= first < self.size) and (0 <= second < self.size)):\r\n raise ValueError(\"Cannot find distances for nodes not in the graph\")\r\n if first == second:\r\n return 0\r\n dist_tracker = self._perform_dijkstra(first, second)\r\n first_dist = dist_tracker.get_min_distance(first)\r\n second_dist = dist_tracker.get_min_distance(second)\r\n if first_dist == float('inf') or second_dist == float('inf'):\r\n return []\r\n furthest = first if first_dist > second_dist else second\r\n potential_path = dist_tracker.get_min_path(furthest)\r\n if first in potential_path and second in potential_path:\r\n return potential_path\r\n return []", "def get_shortest_paths(G):\n \n shortest_paths = {}\n \n for origin in list(G.nodes):\n for destination in list(G.nodes):\n \n if origin != destination:\n shortest_path = nx.shortest_path(G, origin, destination, weight='length')\n shortest_paths[f'{origin}-{destination}'] = shortest_path\n \n return shortest_paths", "def FindShortestPath(PathInfoList):\r\n\tmin_time = 999999999;\r\n\tmin_time_index = None\r\n\r\n\tfor i in range(0, len(PathInfoList)):\r\n\t\tPathInfo = PathInfoList[i]\r\n\t\tarival_time = PathInfo[-1][ConnInfoInd['arrival_hour']]*60 + PathInfo[-1][ConnInfoInd['arrival_min']]\r\n\t\tif arival_time < min_time:\r\n\t\t\tmin_time_index = i\r\n\t\t\tmin_time = arival_time\t\r\n\r\n\t# return path list with the shortest path only\r\n\tShortestPathInfo = PathInfoList[min_time_index]\r\n\treturn [ShortestPathInfo]", "def 
get_shortest_paths(id1, id2):\n syn1 = germanet_data.get_synset_by_id(id1)\n syn2 = germanet_data.get_synset_by_id(id2)\n assert len(syn1.shortest_path(syn2)) == 1, \"do not test for synsets with several shortest paths\"\n return syn1.shortest_path(syn2)[0]", "def DFS1(graph, start, end, path=[], shortest=None):\n path = path + [start]\n print 'Current DFS path:', printPath(path)\n if start == end:\n return path\n for node in graph.childrenOf(start):\n if node not in path: #avoid cycles\n if shortest == None or len(path) < len(shortest):\n newPath = DFS1(graph, node, end, path, shortest)\n if newPath != None:\n shortest = newPath\n return shortest", "def shortestpaths(self, start, end, edgeweight=\"t_0\"):\n graph = self.graph\n shortest_nodepaths = list(\n nx.all_shortest_paths(\n graph, start, end, weight=edgeweight, method=\"dijkstra\"\n )\n )\n shortest_paths = []\n for path in shortest_nodepaths:\n edgepath = []\n for i in range(len(path) - 1):\n edgepath.append((path[i], path[i + 1]))\n shortest_paths.append(edgepath)\n\n return shortest_paths", "def getBestPath(self):\n if self._bestPathVertex.getNextWaypoint() is None:\n numWaypointsCompleted = len(self._waypoints)\n quality = 2\n if self._vertexQueue.isEmpty():\n quality += 1\n else:\n numWaypointsCompleted = self._bestPathVertex.getNextWaypoint().getIndex()\n quality = 1\n if self._vertexQueue.isEmpty():\n quality -= 1\n \n return outputPath.generatePath(self._bestPathVertex, self._params.waypointAcceptanceRadii, quality, numWaypointsCompleted)", "def shortestPath(graph, start, end, maxOutdistance, toPrint = False):\r\n return DFS(graph, start, end, [], None, sys.maxsize, sys.maxsize, 0, maxOutdistance, toPrint)", "def bfs_shortest_path(graph, start, end):\n assert not graph.weighted, 'This method will not work for weighted graphs.'\n\n parents = {}\n distances = {start: 0}\n\n queue = deque([start])\n while queue:\n node = queue.popleft()\n for next_node in (graph.adj[node] - distances.keys()):\n parents[next_node] = node\n distances[next_node] = distances[node] + 1\n if next_node == end:\n return backtrace_path(start, end, parents)\n queue.append(next_node)\n\n return None", "def shortest_route(self, start, finish):\n distances = dict()\n previous = dict()\n nodes = list()\n result = dict()\n best_price = 0\n\n for vertex in self.vertices:\n if vertex == start:\n distances[vertex] = 0\n heapq.heappush(nodes, [0, vertex])\n else:\n distances[vertex] = sys.maxsize\n heapq.heappush(nodes, [sys.maxsize, vertex])\n previous[vertex] = None\n\n while nodes:\n smallest = heapq.heappop(nodes)[1]\n if smallest == finish:\n path = []\n while previous[smallest]:\n path.append(smallest)\n smallest = previous[smallest]\n if len(path) == 1:\n result[\"Cost\"] = distances[finish]\n else:\n result[\"Cost\"] = best_price\n\n if len(path) > 0:\n path.append(start)\n result[\"Path\"] = path[::-1]\n else:\n result = dict()\n\n self.spa_result = result\n return result\n\n if distances[smallest] == sys.maxsize:\n break\n\n for neighbor in self.vertices[smallest]:\n cost = distances[smallest] + self.vertices[smallest][neighbor]\n if cost < distances[neighbor]:\n distances[neighbor] = cost\n previous[neighbor] = smallest\n for n in nodes:\n if n[1] == neighbor:\n n[0] = cost\n best_price = cost\n break\n heapq.heapify(nodes)\n\n return result", "def single_source_shortest_paths(self, source, engine='cython', *args, **kwargs):\n\t\tif not isinstance(source, int):\n\t\t\tsource = self.node_names.index(source)\n\n\t\tif self.local_paths[source] is 
None:\n\t\t\t# Calculates the local paths in case it hasn't been calculated.\n\t\t\traise Exception (\"Shortest distances and local paths must be calculated first. Run `single_source_shortest_distances` or `all_pairs_shortest_distances`.\")\n\t\t\n\t\t# Shortest Paths\n\t\tif engine == 'python':\n\t\t\tshortest_paths = _py_single_source_complete_paths(source, self.N, self.local_paths[source])\n\t\telif engine == 'cython':\n\t\t\tshortest_paths = _cy_single_source_complete_paths(source, self.N, self.local_paths[source])\n\n\t\t# Save to object\n\t\tself.shortest_paths[source] = shortest_paths\n\n\t\treturn shortest_paths", "def shortest_path(self, other):\n shortest_paths = []\n lcs = self.lowest_common_subsumer(other)\n for subsumer in lcs:\n paths_to_lcs1 = self.shortest_path_to_hypernym(subsumer)\n paths_to_lcs2 = other.shortest_path_to_hypernym(subsumer)\n for path_to_lcs1 in paths_to_lcs1:\n for path_to_lcs2 in paths_to_lcs2:\n current_path = path_to_lcs1\n path_to_lcs2 = path_to_lcs2[::-1]\n for el in path_to_lcs2[1:]:\n current_path.append(el)\n shortest_paths.append(current_path)\n return shortest_paths", "def findShortestPath(start, end):\n # Using a queue as the dispenser type will result in a breadth first\n # search\n queue = []\n queue.append(start) # prime the queue with the start vertex\n\n # The predecessor dictionary maps the current Vertex object to its\n # immediate predecessor. This collection serves as both a visited\n # construct, as well as a way to find the path\n predecessors = {}\n predecessors[start] = None # add the start vertex with no predecessor\n\n # Loop until either the queue is empty, or the end vertex is encountered\n while len(queue) > 0:\n current = queue.pop(0)\n if current == end:\n break\n for neighbor in current.getConnections():\n if neighbor not in predecessors: # if neighbor unvisited\n predecessors[neighbor] = current # map neighbor to current\n queue.append(neighbor) # enqueue the neighbor\n\n # If the end vertex is in predecessors a path was found\n if end in predecessors:\n path = []\n current = end\n while current != start: # loop backwards from end to start\n path.insert(0, current) # prepend current to the path list\n current = predecessors[current] # move to the predecessor\n path.insert(0, start)\n return path\n else:\n return None", "def shortest_paths(indicator,\n pairs,\n n_threads = 1):\n\n gridgr = graphs.gridGraph(indicator.shape)\n gridgr_edgeind = graphs.implicitMeanEdgeMap(gridgr, indicator.astype('float32'))\n\n def single_path(pair, instance = None):\n source = pair[0]\n target = pair[1]\n print 'Calculating path from {} to {}'.format(source, target)\n if instance == None:\n instance = graphs.ShortestPathPathDijkstra(gridgr)\n\n targetNode = gridgr.coordinateToNode(target)\n sourceNode = gridgr.coordinateToNode(source)\n\n instance.run(gridgr_edgeind, sourceNode, target=targetNode)\n path = instance.path(pathType='coordinates')\n if path.any():\n return path\n\n if n_threads > 1:\n print \"Multi-threaded w/ n-threads = \", n_threads\n with futures.ThreadPoolExecutor(max_workers = n_threads) as executor:\n tasks = [executor.submit(single_path, pair) for pair in pairs]\n paths = [t.result() for t in tasks]\n else:\n print \"Single threaded\"\n instance = graphs.ShortestPathPathDijkstra(gridgr)\n paths = [single_path(pair, instance) for pair in pairs]\n\n return paths", "def find_fast_path(aux_structures, loc1, loc2):\n node1 = get_closest_node(aux_structures, loc1)\n node2 = get_closest_node(aux_structures, loc2)\n p = 
find_min_cost_path(\n aux_structures,\n node1,\n lambda x: x == node2,\n lambda parent_id: aux_structures[parent_id]['adjacent'],\n get_speed_cost)\n return get_coord_list(aux_structures, p) if p is not None else None", "def Option2_routing(self, S, D, L):\n if self.has_path(S, D): \n Shortest_path = nx.dijkstra_path(self.G, S, D, weight='w') \n Opt_path = Shortest_path\n path_cost_with_weighted_sum = self.calculate_path_cost_with_weighted_sum(Shortest_path, 'c1', 'c2')\n return path_cost_with_weighted_sum, Opt_path\n\n while len(Shortest_path) != 0:\n path_cost = self.additive_path_cost(Shortest_path, 'w') \n #self.logger.info('Path cost - %d', path_cost)\n if path_cost <= L:\n \"\"\"go to path cost with weighted sum\"\"\"\n path_cost_with_weighted_sum = self.calculate_path_cost_with_weighted_sum(Shortest_path, 'c1', 'c2')\n self.G = self.rm_edge_constraint(path_cost_with_weighted_sum) # remove all links where the concave link is greater than PathConcave_cost \n Opt_path = Shortest_path\n if self.has_path(S, D):\n Shortest_path = nx.dijkstra_path(self.G, S, D, weight='w')\n else:\n Shortest_path = [] \n else:\n break \n else:\n self.logger.info('No path from %s to %s', S, D)\n Opt_path = []\n path_cost_with_weighted_sum = 0\n return path_cost_with_weighted_sum, Opt_path", "def update_trip_path(trip_mpois, paths, graph):\n n_nodes = len(trip_mpois)\n # adjacency matrix\n new_paths = np.zeros(shape=(n_nodes, n_nodes))\n\n # iterate through all the nodes and create a list of nodes with sequential id\n for i, node1 in enumerate(trip_mpois):\n for j, node2 in enumerate(trip_mpois):\n new_paths[i, j] = paths[node1, node2]\n\n # new_paths = new_paths/np.max(new_paths[new_paths < _INF])\n # new_paths[np.isinf(new_paths)] = _INF\n\n # create a dummy edge between end and start node with weight 0\n new_paths[1,0] = -_INF\n # new_paths[0,1] = _INF\n\n shortest_path = None\n if n_nodes > 5:\n shortest_path, dist = tsp.solve(n_nodes, new_paths)\n # shortest_path = range(n_nodes)\n else:\n shortest_path = range(n_nodes)\n\n trip_path = np.array(trip_mpois)[shortest_path]\n\n if ___DEBUG:\n fname = 'dump/' + str(n_nodes) + '.dist'\n np.savetxt(fname, new_paths, fmt='%.6f')\n \n mpoi_pos = np.zeros(shape=(n_nodes,2))\n \n for i, node in enumerate(trip_mpois):\n pos_3d = graph.vs[node]['position']\n assert node == graph.vs[node].index\n mpoi_pos[i,:] = pos_3d[:2]\n\n fname = 'dump/' + str(n_nodes) + '.pos'\n np.savetxt(fname, mpoi_pos)\n \n # print trip_mpois, trip_path\n\n return trip_path" ]
[ "0.7273277", "0.70496786", "0.69742703", "0.6943726", "0.68671346", "0.6855859", "0.68451226", "0.68320864", "0.6805442", "0.67743856", "0.6726081", "0.6716487", "0.6714552", "0.670571", "0.669246", "0.66918075", "0.665236", "0.66394556", "0.6638905", "0.6598199", "0.6593292", "0.659285", "0.657804", "0.65729445", "0.65665996", "0.65652406", "0.6552652", "0.65393376", "0.6528136", "0.65234226", "0.65050846", "0.64862645", "0.6473335", "0.6463933", "0.64295983", "0.6423085", "0.6415409", "0.6412756", "0.6411685", "0.64079374", "0.63830066", "0.6379349", "0.6363004", "0.63608855", "0.63498944", "0.6345347", "0.63265294", "0.6314906", "0.6313073", "0.63115776", "0.6294967", "0.6293698", "0.62842447", "0.62840414", "0.62808585", "0.6263742", "0.6253945", "0.62419", "0.6241813", "0.62411743", "0.6226088", "0.6224038", "0.6223343", "0.6212219", "0.620583", "0.61951774", "0.61892843", "0.6182261", "0.6177841", "0.617044", "0.6166451", "0.6160734", "0.6145395", "0.61445355", "0.61341876", "0.61302316", "0.612991", "0.6129858", "0.6129684", "0.61242056", "0.6109383", "0.61059403", "0.6087645", "0.60831386", "0.6082362", "0.6079616", "0.6077534", "0.6077226", "0.6075188", "0.60678655", "0.606196", "0.60514396", "0.605119", "0.6047156", "0.60365826", "0.60281086", "0.6018294", "0.60129684", "0.6005886", "0.60043156" ]
0.81187457
0
Get the path distance from the drone point, through the path points, to the current waypoint. It will find the distance of an n-length path
def get_path_distance(self, path):\n # Leg from the drone's current position to the first point on the path\n distance = VectorMath.get_magnitude(self.drone.get_point(), path[0])\n\n # Add each leg between consecutive path points\n for index in range(len(path[:-1])):\n distance += VectorMath.get_magnitude(path[index], path[index + 1])\n\n # Final leg from the last path point to the current waypoint\n distance += VectorMath.get_magnitude(path[-1], self.drone.get_waypoint_holder().get_current_waypoint())\n\n # Weight the path more heavily when it requires a significant altitude change\n if abs(self.drone.get_point()[2] - path[0][2]) > 1:\n distance *= Constants.VERTICAL_PATH_WEIGHTING_MULTIPLE\n\n return distance
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def distance_to_current_waypoint(self):\n next_waypoint = self.vehicle.commands.next\n if next_waypoint == 1:\n return None\n mission_item = self.vehicle.commands[next_waypoint]\n lat = mission_item.x\n lon = mission_item.y\n alt = mission_item.z\n waypoint_location = Location(lat, lon, alt, is_relative=True)\n distance = get_distance_meters(self.vehicle.location, waypoint_location)\n return distance", "def get_path_distance(self, path):\n length = len(path)\n distance = 0\n for i in range(length):\n distance += self.distances[path[i]][path[(i + 1) % length]]\n return distance", "def distance_to_current_waypoint():\n nextwaypoint = vehicle.commands.next\n if nextwaypoint == 0:\n return None\n missionitem = vehicle.commands[nextwaypoint -\n 1] #commands are zero indexed\n lat = missionitem.x\n lon = missionitem.y\n alt = missionitem.z\n targetWaypointLocation = LocationGlobalRelative(lat, lon, alt)\n distancetopoint = get_distance_metres(vehicle.location.global_frame,\n targetWaypointLocation)\n return distancetopoint", "def GetDistance(self,pnt='',prompt=''):\n\t\tif not pnt:\n\t\t\treturn self.acad.ActiveDocument.Utility.GetDistance(ArrayTransform(self.GetPoint()),prompt)\n\t\telse:\n\t\t\treturn self.acad.ActiveDocument.Utility.GetDistance(pnt,prompt)", "def distance_to_current_waypoint(vehicle):\n nextwaypoint = vehicle.commands.next\n if nextwaypoint==0:\n return None\n missionitem=vehicle.commands[nextwaypoint-1] #commands are zero indexed\n lat = missionitem.x\n lon = missionitem.y\n alt = missionitem.z\n targetWaypointLocation = LocationGlobalRelative(lat,lon,alt)\n distancetopoint = get_distance_metres(vehicle.location.global_frame, targetWaypointLocation)\n return distancetopoint", "def distance(self) -> float:\n return self._dist_two_wire() # at this time we only support 2-wire meausre", "def evaluatePath(self):\n pathLength = 0\n if len(self.path) > 0:\n previousCity = self.path[0]\n for ind in range(1, len(self.path)):\n pathLength += previousCity.distanceWith(self.path[ind].name)\n previousCity = self.path[ind]\n return pathLength", "def length(self) -> float:\n n = self.geodesic.extrinsicDimension()\n third = 1.0/3.0\n def distance(x,y):\n cp0 = x[:n]\n cp1 = self.geodesic.integrate(cp0,vectorops.mul(x[n:],third))\n cp3 = y[:n]\n cp2 = self.geodesic.integrate(cp3,vectorops.mul(y[n:],-third))\n return self.geodesic.distance(cp0,cp1) + self.geodesic.distance(cp1,cp2) + self.geodesic.distance(cp2,cp3)\n return Trajectory.length(self,distance)", "def get_distance(self) -> int:\n return self.get_measurement_data().distance", "def getPathLength(self, starlist):\n sum = 0\n for i in range(len(starlist)-1):\n sum += self.getDistance(starlist[i],starlist[i+1])\n return sum", "def _calculate_distance(self):\n xy = list(zip(self.x, self.y))\n\n dist = [0]\n for i in range(1, len(xy)):\n dist.append(self.distance_between_two_points(xy[i-1], xy[i]))\n\n return np.array(dist).cumsum()", "def get_distance(self):\n print(\"voici la distance à l'obstacle\")", "def get_distance_for_route(self, route):\n cities = route.split('-')\n num_cities_in_route = len(cities)\n total_route_length = 0\n try:\n for i in range(num_cities_in_route - 1):\n start_city = cities[i]\n end_city = cities[i + 1]\n total_route_length += self.graph[start_city][end_city]\n return total_route_length\n except:\n return 'NO SUCH ROUTE'", "def get_distance(self, point):\n if not isinstance(point, Point):\n point = Point(*point)\n\n distances = [(point.distance_to_point(p), p) for p in self.points]\n sortpoints = 
sorted(distances, key=lambda x: x[0])\n closest = sortpoints[0][1]\n\n vc = Vector(*closest)\n d1 = vc.dot(vc)\n\n secondc = sortpoints[1][1]\n vs = Vector(*secondc)\n v1 = Vector(*point) - (vc+vs)/2\n v2 = vs-vc\n v2.unitize()\n d2 = v1.dot(v2)\n\n return abs(min(d1, d2)) - self.thickness/2", "def _distance_next(self):\n\n self.distance = 10\n\n # Here a set index to 0 if the car is finishing a lap\n # Also reset the farthest\n if self.index > (len(self.x_trajectory) - 6) and self.closed:\n self.index = 0\n self.farthest = -1\n self.laps += 1\n\n for w in range(self.index, self.index + 20):\n\n self.dist_point = math.sqrt((self.x_trajectory[w] - self.x)**2\n + (self.y_trajectory[w] - self.y)**2)\n\n if self.dist_point < self.distance:\n self.distance = self.dist_point\n self.index = w\n\n if w >= (len(self.x_trajectory) - 1):\n break\n\n self._calc_side()\n\n self.distance = self.distance * self.sign\n\n return self.distance", "def getPathLength(self, path):\r\n path_nodes = path\r\n # print(path_nodes)\r\n total_length = 0 # km\r\n for i in range(len(path_nodes)-1):\r\n next_edge = None\r\n for edge in self.graph.getAdj(path_nodes[i]):\r\n if edge.getExtremityNode() == path_nodes[i+1]:\r\n next_edge = edge\r\n if next_edge is None: # it means the path is invalid\r\n return None\r\n total_length += next_edge.getWeight()\r\n # print(next_edge.getTravelType(), end=\" \")\r\n return total_length", "def _computeDistance(self, mote, neighbor):\n\n return 1000*math.sqrt((mote.x - neighbor.x)**2 +\n (mote.y - neighbor.y)**2)", "def _computeDistance(self, mote, neighbor):\n\n return 1000*math.sqrt((mote.x - neighbor.x)**2 +\n (mote.y - neighbor.y)**2)", "async def distance(self):\n return round(await self._rpc.distance(), 2)", "def get_distance_over_path(G, path):\r\n node1 = str(path[0])\r\n node2 = str(path[-1])\r\n\r\n pos1 = G.nodes[node1]['pos']\r\n pos2 = G.nodes[node2]['pos']\r\n\r\n return np.sqrt((pos1[0] - pos2[0])**2 + (pos1[1] - pos2[1])**2)", "def getDistance(self):\n return sqrt(self.state[0] * self.state[0] + self.state[2] * self.state[2])", "def distance(self, p=None, l=None):\n if l is None:\n d = p - self.zero\n n = np.zeros(3)\n # try:\n # n = d - np.dot(d, self.direction) * self.direction\n # except RuntimeWarning:\n # print(d, self.direction)\n # return norm(n)\n with warnings.catch_warnings(record=True) as w:\n # Cause all warnings to always be triggered.\n warnings.simplefilter(\"always\")\n n = d - np.dot(d, self.direction) * self.direction\n # print(n, norm(n))\n if len(w) > 0 and issubclass(w[-1].category, RuntimeWarning):\n # Todo: check w/ Ram if this is what he meant to do when catch a warning: n = np.zeros(3)\n # n = np.zeros(3)\n # print(d, self.direction)\n pass\n return norm(n)\n else:\n normal = np.cross(self.direction, l.direction)\n n = norm(normal)\n if n < sys.float_info.min:\n # Lines are parallel.\n return self.distance(p=l.zero)\n offset = np.dot(l.zero - self.zero, normal) / n\n return np.abs(offset)", "def get_closest_distance_to_path(self, path):\n min_distance_to_line = float(\"inf\")\n for p in path:\n game_path = p[:]\n\n game_path.sort(key = lambda coord: calculate_distance(self, coord))\n point_A = game_path[0] # Closest point out of all the points on the path to to the tower\n\n try:\n point_after_A = p[p.index(point_A) + 1]\n point_before_A = p[p.index(point_A) - 1]\n closest_to_A = min(point_after_A, point_before_A, key = lambda point: calculate_distance(point_A, point))\n except:\n if p.index(point_A) == 0:\n closest_to_A = p[p.index(point_A) + 
1]\n \n elif p.index(point_A) == len(p) - 1:\n closest_to_A = p[p.index(point_A) - 1]\n finally:\n if closest_to_A[0] != point_A[0]:\n m = (closest_to_A[1] - point_A[1]) / (closest_to_A[0] - point_A[0])\n else:\n m = 2\n\n b = point_A[1] - m * point_A[0]\n\n closest_distance = abs(-m * self.x + self.y - b) / math.sqrt((-m) ** 2 + 1)\n min_distance_to_line = min(closest_distance, min_distance_to_line)\n \n return min_distance_to_line", "def player_goal_distance(self) -> float:\n route = self.best_route\n return sum(route.values())", "def node_distance(self, node1, node2):\n if node1 == node2:\n return 0.0\n for i, (n1, n2) in enumerate(zip(self.paths[node1], self.paths[node2])):\n if n1 != n2:\n break\n else:\n i = min(len(self.paths[node1]), len(self.paths[node2]))\n return sum(self.path_dists[node1][i:]) + sum(self.path_dists[node2][i:])", "def get_distance(start, end):\n\n\t\tloc_start, loc_end, dst_node = create_distance(start, end)\n\t\tdistance = cmds.getAttr(\"%s.distance\" % dst_node)\n\n\t\tcmds.delete([loc_start, loc_end, dst_node])\n\n\t\treturn distance", "def distance_to(self, n):\n\n d = ( (self.x - n.x) ** 2 + (self.y - n.y) ** 2 + (self.z - n.z) ** 2 ) ** 0.5\n \n return d", "def driving_distance(self, area_graph, startpoint, endpoint):\n\n # Find nodes closest to the specified Coordinates\n node_start = ox.utils.get_nearest_node(area_graph, startpoint)\n node_stop = ox.utils.get_nearest_node(area_graph, endpoint)\n # Calculate the shortest network distance between the nodes via the edges\n # \"length\" attribute\n try:\n distance = nx.shortest_path_length(\n self.area_graph, node_start, node_stop, weight=\"length\")\n except:\n logger.error(str(self.thread_count) + \" Can not calculate path from (\" + str(startpoint[0]) +\n \",\" + str(startpoint[0]) + \")\" + \" to (\" +\n str(endpoint[0]) + \",\" +\n str(endpoint[1]) + \"). 
Using fallback function\")\n distance = self.point_distance(startpoint, endpoint)\n return distance", "def calc_distance(first: Waypoint, second: Waypoint) -> int:\n return int(distance.vincenty(first.coords(), second.coords()).m)", "def directed_distance(trail, point):\n expectedLocation = \\\n (trail[-1][0] + (trail[-1][0] - trail[-2][0]),\n trail[-1][1] + (trail[-1][1] - trail[-2][1]))\n return euclidean_distance(expectedLocation, point)", "def get_distance(self, node):\n return np.sqrt(\n (self.x - node.x) ** 2 +\n (self.y - node.y) ** 2\n )", "def distance( self, source, target ):\n return nx.shortest_path_length(self._G, source, target)", "def total_distance(self) -> float:\n if len(self) < 2:\n raise ValueError(\"Need at least 2 points in the route to calculate a\"\n \"distance.\")\n\n total = 0.0\n other = False\n for point in self:\n\n # starting condition\n if not other:\n other = point\n continue\n\n total += point.distance(other)\n other = point\n\n return total", "def computeDirLength(self):\n self.length , uv = dirAndLength(self.pointN, self.point1)\n self.unitv = uv\n return uv", "def get_line_distance(self, p):\n\n y = 1000 * p.y\n R = 1000 * self.geometry.R\n x = copysign(sqrt(y ** 2 + (R - sqrt(R ** 2 - y ** 2))), y)\n x = 2 * R * asin(x / (2 * R))\n #x=y\n b = -x / sqrt(R ** 2 - x ** 2)\n theta = atan(b) # grating tangent angle\n print b, theta\n d = 0\n for n, a in enumerate(self.an):\n d += a * x ** n\n d *= cos(theta)\n return 1e-3 / d", "def get_nodes_distance(dbpath,node1,node2,inst,stepname,nframe=-1):\n odb = openOdb(path=dbpath)\n _inst = odb.rootAssembly.instances[inst]\n ic = odb.rootAssembly.instances[inst].nodes\n us = odb.steps[stepname].frames[nframe].fieldOutputs['U'].getSubset(region=_inst).values\n xx1 = ic[node1-1].coordinates[0]+us[node1-1].data[0]\n yy1 = ic[node1-1].coordinates[1]+us[node1-1].data[1]\n xx2 = ic[node2-1].coordinates[0]+us[node2-1].data[0]\n yy2 = ic[node2-1].coordinates[1]+us[node2-1].data[1]\n if _inst.embeddedSpace == THREE_D:\n zz1 = ic[node1-1].coordinates[2]+us[node1-1].data[2]\n zz2 = ic[node2-1].coordinates[2]+us[node2-1].data[2]\n d = np.sqrt((xx2-xx1)**2 + (yy2-yy1)**2 + (zz2-zz1)**2)\n else:\n d = np.sqrt((xx2-xx1)**2+(yy2-yy1)**2)\n return d", "def get_maze_distance(self, pos1, pos2):\n d = self.distancer.get_distance(pos1, pos2)\n return d", "async def get_distance() -> int:\n\n _initialize_sensor()\n pulse_start, pulse_end = await _get_echo_time(False), await _get_echo_time(True)\n signal_delay = pulse_end - pulse_start\n distance = _compute_distance(signal_delay)\n return int(distance)", "def get_distance(pt1,pt2):\r\n x1 = pt1[1]\r\n y1 = pt1[0]\r\n x2 = pt2[1]\r\n y2 = pt2[0]\r\n d = np.sqrt((x2-x1)**2 + (y2-y1)**2)\r\n return d", "def get_distance(self):\n\n # Activate trigger\n self.trigger()\n\n # Detect rising edge of echo pin\n channel = GPIO.wait_for_edge(self.echo_pin, GPIO.RISING, timeout=2)\n if channel is None:\n # Timeout on wait of rising interrupt\n return None\n else:\n # Rising edge detected, save pulse start\n pulse_start = time.time()\n\n\n # Detect falling edge of echo pin\n channel = GPIO.wait_for_edge(self.echo_pin, GPIO.FALLING, timeout=2)\n if channel is None:\n # Timeout on wait of falling interrupt\")\n return None\n else:\n # Falling edge detected, save pulse end\n pulse_end = time.time()\n\n # Calculated pulse width in microseconds (x1mln)\n pulse_width = (pulse_end - pulse_start)*1000000\n\n # Return distance in cm\n return pulse_width / 58", "def getDistance(self,p1,p2):\n return 
sum([(p1[i]-p2[i])**2 for i in range(2)])", "def get_distance(self, heuristic=\"\"):\n # If no heuristic is specified, used the default\n if(heuristic == \"\"):\n heuristic = BoardPath._heuristic\n\n if(heuristic == \"manhattan\"):\n return self.calculate_manhattan_dist()\n elif(heuristic == \"euclidean\"):\n return self.calculate_euclidean_dist()\n elif(heuristic == \"made_up\"):\n return self.calculate_made_up_dist()\n else:\n sys.exit()", "def route_distance(self, route):\n\n dist = 0\n src = route[0]\n\n if src not in self.G:\n # don't return two diff types/meanings, throw exception instead. same below\n # TODO best impl?\n raise SGraph.NoSuchRoute('NO SUCH ROUTE')\n\n for city in route[1:]:\n if city not in self.G[src]:\n raise SGraph.NoSuchRoute('NO SUCH ROUTE')\n dist += self.G[src][city]\n src = city\n\n return dist", "def getDistance(self, src, dst):\n pos_src = self.nodePosition[str(src)]\n pos_dst = self.nodePosition[str(dst)]\n if self.plotGraph and self.cancelPlot==False:\n self.plot(src, dst, pos_src, pos_dst)\n points = np.array([(pos_src[0], pos_src[1], pos_src[2]), (pos_dst[0], pos_dst[1], pos_dst[2])])\n dist = distance.pdist(points)\n return dist", "def __get_distance(self, game_object):\n obj_x, obj_y = game_object.get_coordinates()\n self_x, self_y = self._coordinates\n\n inner = (obj_x-self_x)**2 + (obj_y-self_y)**2\n return math.sqrt(inner)", "def get_path_distance(path, graph):\n distance = 0\n for i,w in enumerate(path):\n j=i+1\n if j<len(path):\n distance += round(graph.edge[path[i]][path[j]][\"weight\"], 6)\n return distance", "def degrees_of_separation(self, n1, n2):\n\t\t# Nodes aren't in graph\n\t\tif n1 not in self.nodes or n2 not in self.nodes:\n\t\t\traise ValueError\n\n\t\t# Get node\n\t\ta = self.nodes[n1]\n\t\tb = self.nodes[n2]\n\n\t\t# Get shortest distance using BFS\n\t\tpath = bfs(a, b)\n\n\t\t# Return path length or -1\n\t\tif path is not None:\n\t\t\treturn len(path) - 1\n\t\telse:\n\t\t\treturn -1", "def total_distance(self):\n distance = 0\n\n for segment in self.data:\n segment_distance = 0\n\n last_lon = None\n last_lat = None\n\n for point in segment:\n current_lon = point[\"lon\"]\n current_lat = point[\"lat\"]\n\n # in case data is missing skip point !\n if current_lon is None or current_lat is None:\n continue\n\n # the first valid element is processed, get distance\n if not (last_lon is None or last_lat is None):\n d = gpx_distance(last_lat, last_lon, current_lat, current_lon)\n segment_distance += d\n\n last_lon = current_lon\n last_lat = current_lat\n\n distance += segment_distance\n\n return distance", "def _get_distance(reindeer, race_time):\n interval = reindeer.flight_time + reindeer.rest_time\n cycles = race_time // interval\n flight_time = min(reindeer.flight_time, race_time - interval * cycles)\n total_flying_time = reindeer.flight_time * cycles + flight_time\n return total_flying_time * reindeer.flight_speed", "def dist(pnt1, pnt2):\n return ((pnt2[0] - pnt1[0])**2 + (pnt2[1] - pnt1[1])**2 + (pnt2[2] - pnt1[2])**2)**0.5", "def ab_path_length(trajectory, a, b):\n a_idx = find_closest_index(trajectory, a)\n b_idx = find_closest_index(trajectory, b)\n path = trajectory[a_idx:b_idx]\n step_dists = np.linalg.norm(np.diff(path, axis=0, prepend = np.array([[0,0]])), axis = 1)\n path_length = np.sum(step_dists)\n\n return(path_length)", "def get_distance(self, dim):\n p_1, p_2 = self.points\n nb_dim = len(p_1.values)\n\n other_p = copy.deepcopy(self.points[0])\n for d in range(nb_dim):\n if d == dim:\n continue\n other_p[d] = p_2[d]\n 
return p_1.distance(other_p)", "def diameter(self):\n\n v = self.vertices()\n pairs = [ (v[i],v[j]) for i in range(len(v)-1) for j in range(i+1, len(v))]\n smallest_paths = []\n for (s,e) in pairs:\n paths = self.find_all_path(s,e)\n smallest = sorted(paths, key=len)[0]\n smallest_paths.append(smallest)\n\n smallest_paths.sort(key=len)\n\n # Print the list smallest_paths\n\n # Longest path is at the end of list\n # ie diameter corresponds to length of this path\n\n diameter = len(smallest_paths[-1]) -1\n return diameter", "def getDistance(angle):\n\n panTilt.pan(angle)\n time.sleep(DELAY)\n wallDistance = getWallDistance()\n edgeDistance = getEdgeDistance() if wallDistance is None else None\n\n return wallDistance, edgeDistance", "def find_path(self):\n \n if self.line_num != -1:\n return self.line_num\n\n max_line = self.graph.gps_length - 1\n min_line = 0\n #last_id = dg.normalize(self.graph.lines[-1])[0]\n last_id = normalize_simple(self.graph.lines[-1])[0]\n pivot = int((self.trip_id-1)/float(last_id)*self.graph.gps_length)\n #cur_id = dg.normalize(self.graph.lines[pivot])[0]\n cur_id = normalize_simple(self.graph.lines[pivot])[0]\n while cur_id != self.trip_id:\n if cur_id < self.trip_id:\n min_line = pivot\n else:\n max_line = pivot\n #TODO: could make this run in essentially constant time by hopping predetermined distance\n pivot = (min_line + max_line) / 2\n #cur_id = dg.normalize(self.graph.lines[pivot])[0]\n cur_id = normalize_simple(self.graph.lines[pivot])[0]\n\n #while dg.normalize(self.graph.lines[pivot])[0] == self.trip_id:\n while normalize_simple(self.graph.lines[pivot])[0] == self.trip_id:\n pivot -= 1\n\n pivot += 1\n self.line_num = pivot\n return pivot", "def get_distance(route, dists):\n cost = 0\n if route[0] != route[-1]:\n route.append(route[0])\n\n for i in range(len(route)-1):\n cost += dists[route[i], route[i+1]]\n # cost += dists[route[-1], route[0]]\n return cost", "def __get_distance(point1: np.ndarray, point2: np.ndarray) -> float:\n return np.sqrt(np.sum(np.square(point1 - point2)))", "def getDistance(point1,point2):\n dx = point2[0]-point1[0]\n dy = point2[1]-point1[1]\n return math.sqrt(dy*dy + dx*dx)", "def distance_vehicle(waypoint, vehicle_transform):\n loc = vehicle_transform.location\n x = waypoint.transform.location.x - loc.x\n y = waypoint.transform.location.y - loc.y\n\n return math.sqrt(x * x + y * y)", "def distance_vehicle(waypoint, vehicle_transform):\n loc = vehicle_transform.location\n x = waypoint.transform.location.x - loc.x\n y = waypoint.transform.location.y - loc.y\n\n return math.sqrt(x * x + y * y)", "def find_diameter(self):\n all_ways = []\n for vertex1 in self.graph.keys():\n for vertex2 in self.graph.keys():\n if vertex2 != vertex1:\n result = self.pathFinder(vertex1, vertex2)\n for path in result:\n all_ways.append(len(path) - 1)\n self.diameter = max(all_ways)\n print(f\"Diameter of network is {self.diameter}\")", "def distance(self) -> int:\n return 0", "def GetTotalDurationOfPath(PathInfo):\r\n\tif PathInfo == None: return 0\r\n\tif len(PathInfo) < 2: return 0\r\n\t\r\n\tdeparture_first_station = PathInfo[0][ConnInfoInd['arrival_hour']]*60 + PathInfo[0][ConnInfoInd['arrival_min']]\r\n\t# departure_first_station = PathInfo[1][ConnInfoInd['departure_hour']]*60 + PathInfo[1][ConnInfoInd['departure_min']]\r\n\r\n\tarrival_last_station = PathInfo[-1][ConnInfoInd['arrival_hour']]*60 + PathInfo[-1][ConnInfoInd['arrival_min']]\r\n\treturn (arrival_last_station - departure_first_station)", "def spatial_diameter(self):\n import math\n 
\n Temp = 0\n for i in range(self.nodenum):\n for j in range(self.nodenum):\n pathlist = []\n self.pathij(i, j, pathlist)\n distance = []\n \n for k in range(len(pathlist)):\n Temp2 = 0\n for m in range(len(pathlist[k]) - 1):\n Temp2 += self.Dismatrix[pathlist[k][m], pathlist[k][m+1]]\n distance.append(Temp2)\n \n if(len(distance) == 0):\n continue\n else:\n if(min(distance) >= Temp):\n Temp = min(distance)\n \n self.diameter = Temp", "def getDistance(self, x1, x2, y1, y2):\n return ((x1 - x2)**2 + (y1 - y2)**2)**0.5", "def getDistanceBetweenTwoPoints(self, one, two):\n dx = one.x - two.x\n dy = one.y - two.y\n return math.sqrt(dx * dx + dy * dy)", "def dist(self, point: np.array):\n return np.linalg.norm(\n np.cross(point - self.r_start, self.direction), axis=1) / \\\n np.linalg.norm(self.direction)", "def distance_to(self, p):\n return (self - p).length()", "def distance_to(self, p):\n return (self - p).length()", "def heuristic(self, node):\n res = 0\n np_node = np.array(node)\n for i in range(4):\n for j in range(4):\n tmp = self.goal[i][j]\n if tmp != 0:\n point = np.where(np_node == tmp)\n x = int(point[0])\n y = int(point[1])\n distance = np.sqrt(np.square(x - i) + np.square(y - j))\n res += distance\n return res", "def num_points_in_distance(d):\n return 1 + 3 * d * (d + 1)", "def get_point_line_distance(point, r0, n):\n dr = (point[0] - r0[0], point[1] - r0[1])\n return sc_mul(dr, n) / norm_2d(n)", "def _get_distance_diff(self, input):\n nbatch = input.shape[0]\n in1 = input.unsqueeze(1).expand(\n nbatch, self.nelec, self.nelec, self.ndim)\n in2 = input.unsqueeze(2).expand(\n nbatch, self.nelec, self.nelec, self.ndim)\n dist = torch.pow(in1 - in2, 2).sum(3)\n return dist", "def length(self) -> float:\n n = len(self.milestones[0])//2\n third = 1.0/3.0\n def distance(x,y):\n cp0 = x[:n]\n cp1 = vectorops.madd(cp0,x[n:],third)\n cp3 = y[:n]\n cp2 = vectorops.madd(cp3,y[n:],-third)\n return third*vectorops.norm(x[n:]) + vectorops.distance(cp1,cp2) + third*vectorops.norm(y[n:])\n return Trajectory.length(self,distance)", "def calculate_distance_edge(self):\n if self.mu > 0:\n # right interface is intersected next\n dx = self.cell_xr - self.x\n self.next_cell_index = self.cell_index + 1\n else:\n # left interface is intersected next\n dx = self.cell_xl - self.x\n self.next_cell_index = self.cell_index - 1\n\n return dx / self.mu", "def _get_dist(self, p1, p2): \r\n\r\n distance = np.sqrt(\r\n (p1[0] - p2[0]) ** 2 +\r\n (p1[1] - p2[1]) ** 2 +\r\n (p1[2] - p2[2]) ** 2)\r\n\r\n return distance", "def get_step(self):\n # decide which direction and how far\n direction = choice([1, -1])\n distance = choice([0, 1, 2, 3, 4])\n step = direction * distance\n return step", "def virtual_distance(self):\n conflict_zone_radio = 384.0\n path_width = 172.0\n right_turn_radio = path_width / 4.0\n left_turn_radio = 3 * path_width / 4.0\n initial_straight_section = conflict_zone_radio - path_width / 2.0\n if self.get_intention() == \"s\":\n virtual_distance_value = self.get_virtual_x_position()\n elif self.get_intention() == \"r\":\n # Calculate real virtual distance\n if self.get_virtual_x_position() <= initial_straight_section:\n virtual_distance_value = self.get_virtual_x_position()\n elif self.get_virtual_y_position() > -right_turn_radio:\n virtual_distance_value = (\n initial_straight_section + atan(\n (\n self.get_virtual_x_position() -\n initial_straight_section\n ) / (right_turn_radio + self.get_virtual_y_position())\n ) * right_turn_radio\n )\n else:\n virtual_distance_value = (\n 
initial_straight_section + pi * right_turn_radio / 2.0 -\n self.get_virtual_y_position() - right_turn_radio\n )\n\n a = path_width / 2.0\n b = right_turn_radio + path_width / 4.0\n c = pi * right_turn_radio / 2.0\n # Scale virtual distance\n if virtual_distance_value <= initial_straight_section + c:\n virtual_distance_value *= (\n (initial_straight_section + a + b) /\n (initial_straight_section + c)\n )\n else:\n virtual_distance_value += a + b - c\n\n else:\n # Calculate real virtual distance\n if self.get_virtual_x_position() <= initial_straight_section:\n virtual_distance_value = self.get_virtual_x_position()\n elif self.get_virtual_y_position() < left_turn_radio:\n virtual_distance_value = (\n initial_straight_section + atan(\n (\n self.get_virtual_x_position() -\n initial_straight_section\n ) / (\n left_turn_radio -\n self.get_virtual_y_position()\n )\n ) * left_turn_radio\n )\n else:\n virtual_distance_value = (\n initial_straight_section + pi * left_turn_radio / 2 +\n self.get_virtual_y_position() - left_turn_radio\n )\n\n a = path_width / 2\n b = right_turn_radio + path_width / 4\n c = pi * left_turn_radio / 2\n # Scale virtual distance\n if virtual_distance_value <= initial_straight_section + c:\n virtual_distance_value *= (\n (initial_straight_section + a + b) /\n (initial_straight_section + c)\n )\n else:\n virtual_distance_value += a + b - c\n\n return virtual_distance_value", "def evaluate_distance(self):\n\n fitness = 0\n routes = split_to_routes(self)\n\n for route in routes:\n route = [home] + route + [home]\n for i in range(1,len(route)):\n # Calculates full distance, including from last city\n # to first, to terminate the trip\n pos_from = route[i - 1]\n pos_to = route[i]\n distance = dm[pos_from][pos_to]\n fitness += distance\n\n return int(fitness)", "def _getDistance(self, source, dest):\n\n lat1 = source[0]\n lat2 = dest[0]\n lon1 = source[1]\n lon2 = dest[1]\n\n # Formula from https://www.movable-type.co.uk/scripts/latlong.html\n R = 6370000\n phi1 = math.radians(lat1)\n phi2 = math.radians(lat2)\n deltaPhi = math.radians(lat2-lat1)\n deltalmb = math.radians(lon2-lon1)\n a = math.sin(deltaPhi/2) * math.sin(deltaPhi/2) + \\\n math.cos(phi1) * math.cos(phi2) * \\\n math.sin(deltalmb/2) * math.sin(deltalmb/2)\n c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a));\n d = (R * c)/1000.\n\n return d", "def topo_diameter(self):\n import math\n \n Temp = 0\n for i in range(self.nodenum):\n for j in range(self.nodenum):\n pathlist = []\n self.pathij(i, j, pathlist)\n distance = []\n \n for k in range(len(pathlist)):\n distance.append(len(pathlist[k]) - 1)\n \n if(len(distance) == 0):\n continue\n else:\n if(min(distance) >= Temp):\n Temp = min(distance)\n \n self.topodiameter = Temp", "def _calculate_distance(self, passenger, driver):\n londriver, latdriver = driver['lon'], driver['lat']\n lonpassenger, latpassenger = passenger['lon'], passenger['lat']\n lon_p, lat_p, lon_d, lat_d = map(radians,\n [float(lonpassenger), float(latpassenger), float(londriver), float(latdriver)])\n lon_distance = lon_d - lon_p\n lat_distance = lat_d - lat_p\n a = sin(lat_distance / 2) ** 2 + cos(lat_p) * cos(lat_d) * sin(lon_distance / 2) ** 2\n c = 2 * asin(sqrt(a))\n km = 6367 * c\n return km", "def _calculate_distance(self, passenger, driver):\n londriver, latdriver = driver['lon'], driver['lat']\n lonpassenger, latpassenger = passenger['lon'], passenger['lat']\n lon_p, lat_p, lon_d, lat_d = map(radians,\n [float(lonpassenger), float(latpassenger), float(londriver), float(latdriver)])\n 
lon_distance = lon_d - lon_p\n lat_distance = lat_d - lat_p\n a = sin(lat_distance / 2) ** 2 + cos(lat_p) * cos(lat_d) * sin(lon_distance / 2) ** 2\n c = 2 * asin(sqrt(a))\n km = 6367 * c\n return km", "def calc_walked_distance_with_direction(self, window_size=0):\n walked_distance = []\n walked_distance_window = []\n\n for i in range(len(self)):\n vel = self.get_velocity_with_direction(i)\n if vel is None: vel = 0\n\n walked_distance.append( vel + (walked_distance[i-1] if i>0 else 0) )\n walked_distance_window.append( walked_distance[i] - (walked_distance[i-window_size] if i>window_size else 0) )\n \n return walked_distance, walked_distance_window", "def countTotalDistance(path):\n current = path[0]\n totalDistance = 0\n\n for node in path[1:]:\n totalDistance += distance_func(current, node)\n current = node\n\n return totalDistance", "def shortest_path_distance(self, other):\n if self == other:\n return 0\n\n paths = self.shortest_path(other)\n return None if paths == [] else len(paths[0]) - 1", "def getDistance(p1, p2):\n\tdist = la.norm(p2 - p1)\n\treturn dist", "def totalDist(currPath, distances):\n currCost = 0\n for i in range(1, len(currPath)):\n currCost += distances.get(currPath[i], currPath[i - 1])\n currCost += distances.get(currPath[0], currPath[-1])\n return currCost", "def get_distance(p1, p2):\n return ((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2) ** 0.5", "def distance(self, pt):\n return math.sqrt((self.x - pt.x) ** 2 + (self.y - pt.y) ** 2)", "def calcDistance(self, left, right):\n\n return math.fabs(right-left)", "def calc_distance(self, observation):\n actual_obs = observation[0]\n scrn_player = actual_obs.observation.feature_screen.player_relative\n scrn_select = actual_obs.observation.feature_screen.selected\n scrn_density = actual_obs.observation.feature_screen.unit_density\n\n state_added = scrn_select + scrn_density\n\n marine_center = np.mean(self.xy_locs(scrn_player == 1), axis=0).round()\n\n # first step\n if np.sum(scrn_select) == 0:\n marine_center = np.mean(self.xy_locs(scrn_player == 1), axis=0).round()\n # marine behind beacon\n if isinstance(marine_center, float):\n marine_center = np.mean(self.xy_locs(state_added == 2), axis=0).round()\n else:\n # normal navigation\n marine_center = np.mean(self.xy_locs(state_added == 2), axis=0).round()\n if isinstance(marine_center, float):\n marine_center = np.mean(self.xy_locs(state_added == 3), axis=0).round()\n\n beacon_center = np.mean(self.xy_locs(scrn_player == 3), axis=0).round()\n #\n # print(state_added)\n # print(\"---- Marine {} | {} Beacon ----\".format(marine_center, beacon_center))\n # time.sleep(0.2)\n distance = math.hypot(beacon_center[0] - marine_center[0],\n beacon_center[1] - marine_center[1])\n\n return beacon_center, marine_center, distance", "def get_distance (phi,psi) :\n return abs(get_diffvec (phi,psi))", "def get_distance (phi,psi) :\n return abs(get_diffvec (phi,psi))", "def length(self):\n return self.endpoints[0].distance_to(self.endpoints[1])", "def getDistance(self):\n taBox = (self.thor * self.tvert)/(720*960) #box area as percentage of whole\n if(taBox==None or taBox<=0): return -1\n const = 4 * math.tan(0.471)*math.tan(0.3576)\n return math.sqrt((self.abox)/(const*taBox))", "def _distance(self, new_pt):\n\t\tnew_pt = np.resize(new_point, (self.n_row, new_pt.shape[0]))\n\t\tdist = euclidean_distance(self.data[:,0:-1], new_pt)\n\n\t\treturn dist", "def find_closest_path(self):\n\t\tclosest_distance = sys.maxint\n\t\tclosest_path = 0\n\t\tbike_position = (self.map_model.bike.xB, 
self.map_model.bike.yB)\n\t\tfor path_index in range(len(self.map_model.paths)):\n\t\t\tnearest_point = geometry.nearest_point_on_path(self.map_model.paths[path_index], bike_position)\n\t\t\tdistance_to_bike = geometry.distance(nearest_point, bike_position)\n\t\t\tif (closest_distance > distance_to_bike):\n\t\t\t\tclosest_distance = distance_to_bike\n\t\t\t\tclosest_path = path_index \n\t\tdisp_next = self.displacement_to_turn(target_path = (closest_path+1)%len(self.map_model.paths))\n\t\ttarget_path = (closest_path+1)%len(self.map_model.paths)\n\t\tdistance_next = geometry.distance_from_path(bike_position, self.map_model.paths[target_path])\n\t\tif disp_next - np.abs(distance_next)>-0.01:\n\t\t\tclosest_path = np.mod(closest_path + 1,len(self.map_model.paths))\n\t\treturn closest_path", "def distance(self):\n _, _, costs = self.calculate_costs()\n return np.sum(costs)", "def path_length(self, tool_number=None):\r\n lengths = {}\r\n positions = {}\r\n for hit in self.hits:\r\n tool = hit.tool\r\n num = tool.number\r\n positions[num] = (0, 0) if positions.get(num) is None else positions[num]\r\n lengths[num] = 0.0 if lengths.get(num) is None else lengths[num]\r\n lengths[num] = lengths[num] + math.hypot(*tuple(map(operator.sub, positions[num], hit.position)))\r\n positions[num] = hit.position\r\n\r\n if tool_number is None:\r\n return lengths\r\n else:\r\n return lengths.get(tool_number)" ]
[ "0.7367824", "0.71563923", "0.7043922", "0.6508495", "0.64370704", "0.64057875", "0.63992316", "0.6391346", "0.6376828", "0.6344942", "0.6311526", "0.6288452", "0.625014", "0.6223638", "0.61669296", "0.61589134", "0.6145705", "0.6145705", "0.61372155", "0.613306", "0.6127435", "0.6124742", "0.61128354", "0.6103022", "0.609904", "0.60962397", "0.606177", "0.6054606", "0.60511464", "0.60467494", "0.60383177", "0.60159963", "0.6010136", "0.6002648", "0.59840477", "0.59800315", "0.597971", "0.59713316", "0.59539753", "0.5940367", "0.59336185", "0.5932893", "0.59273887", "0.59192336", "0.59115577", "0.59089124", "0.5900138", "0.5895194", "0.58929676", "0.58827794", "0.5873882", "0.5848122", "0.58462477", "0.58462095", "0.58417845", "0.5839534", "0.58370376", "0.58314717", "0.5826938", "0.5826938", "0.5823736", "0.5814519", "0.5813653", "0.58098537", "0.5809417", "0.58031124", "0.579977", "0.57931405", "0.57931405", "0.5790297", "0.57882386", "0.5775843", "0.5772327", "0.5771614", "0.5759259", "0.5756036", "0.5749718", "0.57411087", "0.5740042", "0.57316136", "0.5730534", "0.57238454", "0.57238454", "0.5711962", "0.5711854", "0.5710875", "0.5710695", "0.5705127", "0.57021", "0.5701907", "0.56980485", "0.56972146", "0.56955653", "0.56955653", "0.5690305", "0.56870115", "0.56804985", "0.56750315", "0.56689566", "0.566687" ]
0.7029971
3
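The negatives in the record above are all variants of the same idea: a Euclidean point-to-point distance, sometimes summed along a path or route. A minimal self-contained sketch of that shared pattern (the function and variable names here are illustrative, not drawn from any snippet above):

```python
import math
from typing import Sequence, Tuple

Point = Tuple[float, float]

def point_distance(p1: Point, p2: Point) -> float:
    """Euclidean distance between two 2-D points."""
    return math.hypot(p2[0] - p1[0], p2[1] - p1[1])

def path_length(path: Sequence[Point]) -> float:
    """Total polyline length: sum of consecutive segment distances."""
    return sum(point_distance(a, b) for a, b in zip(path, path[1:]))

# An L-shaped path made of two unit segments has length 2.0.
assert path_length([(0.0, 0.0), (1.0, 0.0), (1.0, 1.0)]) == 2.0
```

Snippets such as `totalDist` and `evaluate_distance` above follow the same accumulate-over-consecutive-pairs loop, differing only in whether the route is closed back to its starting point.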
Return the obstacles in the map
def get_obstacles(self): return self.obstacles
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getObstacles(self):\r\n ausgabeObstacle = self.globalObstaclesList + self.globalHardObstaclesList\r\n self.globalObstaclesList = []\r\n return(ausgabeObstacle)", "def obstacles(self):\r\n\r\n #Radious arround the head\r\n limit_sight = self.snake_sight\r\n head = self.body[0].position\r\n binary_map_complete = self.complete_mapping()\r\n map_matrix = np.matrix(binary_map_complete)\r\n obstacles = []\r\n\r\n #limits in all directions\r\n left_x = head[0] - limit_sight\r\n right_x = head[0] + limit_sight\r\n up_y = head[1] - limit_sight\r\n down_y = head[1] + limit_sight\r\n\r\n #submatrix with limits size\r\n snake_sight = map_matrix[up_y:down_y+1, left_x:right_x+1]\r\n\r\n #Special cases where the snake approximates to the borders\r\n ##Corners\r\n if left_x < 0 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[temporal, snake_sight] \r\n return snake_sight\r\n \r\n if left_x < 0 and down_y > self.limits[1] - 1:\r\n snake_sight = map_matrix[up_y:self.limits[1], 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[temporal, snake_sight]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n\r\n ##Middle\r\n if left_x < 0:\r\n snake_sight = map_matrix[up_y:down_y+1, 0:right_x+1]\r\n interval_x = 
[self.limits[0] + left_x, self.limits[0]]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n return snake_sight\r\n\r\n if right_x > self.limits[0]-1:\r\n snake_sight = map_matrix[up_y:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n return snake_sight\r\n\r\n if up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, left_x:right_x+1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[interval_y_matrix, snake_sight]\r\n return snake_sight\r\n \r\n if down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:right_x+1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[snake_sight, interval_y_matrix]\r\n return snake_sight\r\n\r\n return snake_sight", "def get_map(self) -> list:\n return self.map_obstacle", "def get_obstacles_map(obstacles, placed_pecies):\n \n #create a mask image to draw the obstacles on\n blocks = np.zeros(ARENA_SIZE[::-1], np.uint8)\n\n #get the grid points where the robot needs to placed\n grid = get_grid(ARENA_SIZE)\n\n #draw the obstacles and their safety region on the map\n for i in obstacles.keys():\n cv2.circle(blocks, i, int(CIRCULAR_SAFETY_FACTOR*BLOCK_SIZE[0]), 129, -1)\n cv2.rectangle(blocks, (i[0]-int(obstacles[i][0]/4), i[1]-int(obstacles[i][1]/4)), (i[0]+int(obstacles[i][0]/4), i[1]+int(obstacles[i][1]/4)), 255, -1)\n\n #draw the obstacles and their safety region on the map\n for i in placed_pecies.keys():\n try:\n if not i == grid[5]:\n cv2.circle(blocks, i, int(CIRCULAR_SAFETY_FACTOR*BLOCK_SIZE[0]), 129, -1)\n else:\n cv2.rectangle(blocks, (int(i[0]-7.4*placed_pecies[i][0]/4), int(i[1]-7.4*placed_pecies[i][1]/4)),\n (int(i[0]+7.4*placed_pecies[i][0]/4), int(i[1]+7.4*placed_pecies[i][1]/4)), 129, -1)\n cv2.rectangle(blocks, (i[0]-int(placed_pecies[i][0]/4), i[1]-int(placed_pecies[i][1]/4)), (i[0]+int(placed_pecies[i][0]/4), i[1]+int(placed_pecies[i][1]/4)), 255, -1)\n except Exception as e:\n print(e)\n\n return cv2.bitwise_not(blocks)", "def get_obstacles(self, map_server):\n\n self.obstacle_list = []\n for index, element in enumerate(map_server):\n if element > 0:\n self.obstacle_list.append(index)\n return(self.obstacle_list)", "def place_obstacles():\n #Randomly generate different sized rectangles\n #Soem may overlap, which gives more variety in shape of obstacles\n xvals = np.random.randint(0,self.map_dimensions[1],size=self.N_obstacles)\n yvals = np.random.randint(0,self.map_dimensions[0],size=self.N_obstacles)\n lower_left = zip(xvals,yvals)\n rects = []\n for LL in lower_left:\n x = LL[0]\n y = LL[1]\n wmax = self.map_dimensions[1] - x\n w = np.random.randint(0,wmax,size=1)[0]\n hmax = self.map_dimensions[0] - y\n h = np.random.randint(0,hmax,size=1)[0]\n rects += [(x,y,w,h)]\n self.coordinates__obstacles = rects", "def generate_obstacles(self):\r\n obstacles = self.get_obstable_metrics\r\n obstacle_arrays = []\r\n\r\n for nb_obstacle in obstacles:\r\n empty_array = np.zeros(shape=(self.WINDOW_HEIGHT,\r\n self.WINDOW_WIDTH))\r\n start_location = 0 if nb_obstacle[2] == 1 else self.WINDOW_HEIGHT\r\n y, x = start_location - 1, nb_obstacle[3]\r\n empty_array[y, 
x] = -1\r\n\r\n for w_value in range(nb_obstacle[0]):\r\n x_updated = x + w_value\r\n\r\n for h_value in range(nb_obstacle[1]):\r\n if nb_obstacle[2] == 1:\r\n y_updated = y + h_value\r\n else:\r\n y_updated = y - h_value\r\n # Replace Value\r\n empty_array[y_updated, x_updated] = -1\r\n\r\n new_array = self.trim_whitespace(empty_array,\r\n nb_obstacle[2],\r\n self.MIN_GAP)\r\n obstacle_arrays.append(new_array)\r\n\r\n return obstacle_arrays", "def get_obstacles(image):\n\n ih, iw = image.shape[:2]\n image_copy = image.copy()\n\n #resize the image to the size of arena\n image = cv2.resize(image, ARENA_SIZE, interpolation=cv2.INTER_CUBIC)\n gray = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)\n\n #replace all black pixels to white pixels\n gray[np.where(gray == 0)]= 255\n\n #get the thresholded binary image\n ret,threshold = cv2.threshold(gray,200,255,cv2.THRESH_BINARY_INV)\n\n #find all the countours in the binary image\n _, contours, heiarchy = cv2.findContours(threshold, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)\n\n cont = []\n\n #create a mask to draw contours on\n blocks = mask = np.zeros(threshold.shape[:2], np.uint8)\n\n #create a dictionary to hold image roi of all puzzle peices\n blocks_roi = {}\n\n #iterate through all contours\n for i, c in enumerate(contours[1:]):\n\n #find the minimum area fitting rectangle of the contour\n rect = cv2.minAreaRect(c)\n box = cv2.boxPoints(rect)\n box = np.int0(box)\n\n #create the copy of the mask\n mask_copy = mask.copy()\n\n #draw the rectangle on the mask\n cv2.drawContours(mask_copy, [box], -1, (255,255,255), 3)\n\n #floodfill the rectangle\n cv2.floodFill(mask_copy, None, (0,0), 255)\n mask_inv = cv2.bitwise_not(mask_copy)\n blocks = cv2.add(blocks, mask_inv)\n\n _, contours, heiarchy = cv2.findContours(blocks, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)\n\n obstacles = {}\n\n for c in contours:\n x,y,w,h = cv2.boundingRect(c)\n obstacles.update({(int(x+w/2), int(y+h/2)): BLOCK_SIZE})\n #obstacles.update({(int(x+w/2), int(y+h/2)): (w, h)}) # for unknown block sizes\n bottom_r = remap((x+w, y+h), ARENA_SIZE, (iw,ih))\n top_l = remap((x, y), ARENA_SIZE, (iw,ih))\n blocks_roi.update({(int(x+w/2), int(y+h/2)): image_copy[top_l[1]:bottom_r[1], top_l[0]:bottom_r[0]]})\n\n return obstacles, blocks_roi", "def obstacles(p):\n c1 = np.array([-0.5,-1.])\n r1 = 1.\n c2 = np.array([0.75,0.5])\n r2 = 0.5\n return [\n (p[0] + 2, np.array([1.,0.])), # left\n (2 - p[0], np.array([-1.,0.])), # right\n (p[1] + 1, np.array([0.,1.])), # bottom\n (1 - p[1], np.array([0.,-1.])), # top\n (norm(p - c1) - r1, (p - c1)/norm(p - c1)), # circle 1\n (norm(p - c2) - r2, (p - c2)/norm(p - c2)) # circle 2\n ]", "def check_for_obstacles(self):\n obs = False\n obs_p = []\n for point in self.obstacles:\n if -0.15 <= point[1] <= 0.15: # robot is 178mm wide\n # Obstacles should be less than or equal to 0.2 m away before being detected\n if 0 <= point[0] <= .2:\n obs_p.append(point)\n obs = True\n if obs:\n pos = self.determine_pos_of_obstacle(obs_p)\n data = Obstacle()\n data.x = pos[0]\n data.y = pos[1]\n data.obstacle = True\n self.obs_pub.publish(data)", "def draw_obstacles(self):\n for obstacle in self.obstacles:\n obstacle.draw(self.window, Colors.BLACK.value)", "def _find_obstacle(self, obstacle_type='*traffic_light*'): \r\n obst = list()\r\n \r\n _actors = self._world.get_actors()\r\n _obstacles = _actors.filter(obstacle_type)\r\n\r\n\r\n for _obstacle in _obstacles:\r\n trigger = _obstacle.trigger_volume\r\n\r\n _obstacle.get_transform().transform(trigger.location)\r\n \r\n 
distance_to_car = trigger.location.distance(self._vehicle.get_location())\r\n\r\n a = np.sqrt(\r\n trigger.extent.x ** 2 +\r\n trigger.extent.y ** 2 +\r\n trigger.extent.z ** 2)\r\n b = np.sqrt(\r\n self._vehicle.bounding_box.extent.x ** 2 +\r\n self._vehicle.bounding_box.extent.y ** 2 +\r\n self._vehicle.bounding_box.extent.z ** 2)\r\n\r\n s = a + b + 10\r\n \r\n if distance_to_car <= s:\r\n # the actor is affected by this obstacle.\r\n obst.append(_obstacle)\r\n\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(_obstacle.get_transform().location, carla.Vector3D(0.5,0.5,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,255,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,10)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(trigger,\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n\r\n return obst", "def obstacle_iterator(self):\n for obstacle in self.tmx_data.get_layer_by_name(\"obstacles\"):\n yield obstacle", "def create_obstacles(self) -> List[Square]:\n obstacles_number = random.randint(1, self.maximum_obstacles_on_board)\n obstacles = list()\n\n while len(obstacles) < obstacles_number:\n\n obstacle_x_pos = random.randint(0, Dimension.board_width() - 1)\n obstacle_y_pos = random.randint(0, Dimension.board_height() - 1)\n obstacle = Square(obstacle_x_pos, obstacle_y_pos)\n if obstacle not in obstacles:\n self.board_matrix[obstacle_y_pos][obstacle_x_pos] = 0\n obstacles.append(obstacle)\n\n return obstacles", "def calcGlobalObstaclePosition(self, obstacles): \r\n global_obstacle_list = []\r\n for obstacle in obstacles: \r\n #Wandeln Winkeldaten für Globalberechnung: -90zu+90 und +90zu-90 0=0\r\n #ScanList[i][0]=degrees(asin(sin(radians(ScanList[i][0])+radians(180))))\r\n\r\n Dx = obstacle[0]\r\n Dy = obstacle[1]\r\n\r\n #Drehmatrix für X, Returns Global Hindernis Position\r\n X=(Dx*cos(radians(self.global_kurs))+Dy*(-sin(radians(self.global_kurs))))+self.RoboPosX\r\n #Drehmatrix für Y, Returns Global Hindernis Position\r\n Y=(Dx*sin(radians(self.global_kurs))+Dy*(cos(radians(self.global_kurs))))+self.RoboPosY\r\n\r\n global_obstacle_list.append([int(X),int(Y)])\r\n return(global_obstacle_list)", "def init_map(self, obstacle_rate=0.9):\n n = self.size()\n\n map_obstacles = [] # np.zeros((n, n)) # 1: obstacle, 0: non-obstacle\n \n for i in range(n):\n # We only need 2 bit to encode 1/0 for each element of NumberArray\n row = NumberArray(2, n)\n for j in range(n):\n if i == j:\n # map_obstacles[i][j] = 0\n row[j] = 0\n elif i > j:\n # map_obstacles[i][j] = map_obstacles[j][i]\n row[j] = map_obstacles[j][i]\n else:\n # map_obstacles[i][j] = 1 if random.random() > 0.9 else 0\n row[j] = 1 if random.random() > obstacle_rate else 0\n map_obstacles.append(row)\n\n self.map_obstacle = map_obstacles", "def get_near(self,map):\n near_cells = []\n for i in range(self.x-1, self.x+2):\n for j in range(self.y-1, self.y+2):\n if(i>=0 and i<map.size and j>=0 and j<map.size): near_cells.append(map.search(i,j))\n return near_cells", "def find_open_tiles(self, arena, units):\r\n tiles = []\r\n for x, y in [(self.x+1, self.y), (self.x, self.y+1), (self.x-1, self.y), (self.x, self.y-1)]:\r\n if 
arena[x][y] == '.':\r\n tiles.append((x, y))\r\n return tiles", "def draw_obstacles():\n for obstacle in obstacles:\n plt.gca().add_patch(obstacle)", "def _detect_obstacles(self):\n def _distance(point, line_point1, line_point2):\n \"\"\"calcuate the distance between a point and a line\"\"\"\n vec1 = line_point1 - point\n vec2 = line_point2 - point\n distance = np.abs(np.cross(vec1,vec2)) / np.linalg.norm(line_point1-line_point2)\n return distance\n\n def _acute_angle(point, line_point1, line_point2):\n \"\"\"detetrmine if the point is whithin the boundary of the line through law of cosines\"\"\"\n base_line = np.linalg.norm(line_point1-line_point2)\n assert base_line > 0, \"check the library useage\"\n line1 = np.linalg.norm(point - line_point1)\n line2 = np.linalg.norm(point - line_point2)\n cos_angle_1 = (base_line**2 + line1**2 - line2**2)/(2*base_line*line1)\n cos_angle_2 = (base_line**2 + line2**2 - line1**2)/(2*base_line*line2)\n if cos_angle_1 * cos_angle_2 > 0:\n return True\n else:\n return False\n\n if self.obstacles != \"None\": # if user assigned some obstacles\n for line in self.env_config: \n line_point1, line_point2 = np.array(line[0]), np.array(line[1])\n point = np.array(self.state[:2])\n distance = _distance(point, line_point1, line_point2)\n acute_angle = _acute_angle(point, line_point1, line_point2)\n if distance <= 0.02 and acute_angle:\n self.adsorption = True\n break\n else:\n self.adsorption = False", "def drawpath(self,obstacles):\n for i in obstacles:\n self.distance_map[i[0],i[1]]=44\n print(\"Distance map\")\n print(self.distance_map)\n for i in self.footprint:\n self.distance_map[i[0],i[1]]=88\n print(\"Evaluated path\")\n print(self.distance_map)", "def find_obstacle_loc(self, obstacle_list):\n\n x_obst = []\n y_obst = []\n #x_obst_append = x_obst.append\n #y_obst_append = y_obst.append\n locs = []\n\n for x in obstacle_list:\n if x < self.width:\n x_obst.append(x*self.resolution + self.resolution/2)\n else:\n x_obst.append((x % self.width)*self.resolution + self.resolution/2)\n\n for y in obstacle_list:\n y_obst.append((y/self.width)*self.resolution + self.resolution/2)\n\n locs = map(lambda x: x, zip(x_obst, y_obst))\n\n return(locs)", "def __generate_octagon_obstacles(self, world):\n obs_radius = self.cfg[\"obstacle\"][\"octagon\"][\"radius\"]\n obs_min_count = self.cfg[\"obstacle\"][\"octagon\"][\"min_count\"]\n obs_max_count = self.cfg[\"obstacle\"][\"octagon\"][\"max_count\"]\n obs_min_dist = self.cfg[\"obstacle\"][\"octagon\"][\"min_distance\"]\n obs_max_dist = self.cfg[\"obstacle\"][\"octagon\"][\"max_distance\"]\n\n # generate the obstacles\n obstacles = []\n obs_dist_range = obs_max_dist - obs_min_dist\n num_obstacles = randrange(obs_min_count, obs_max_count + 1)\n\n test_geometries = [r.global_geometry for r in world.robots]\n while len(obstacles) < num_obstacles:\n\n # generate position\n dist = obs_min_dist + (random() * obs_dist_range)\n phi = -pi + (random() * 2 * pi)\n x = dist * sin(phi)\n y = dist * cos(phi)\n\n # generate orientation\n theta = -pi + (random() * 2 * pi)\n\n # test if the obstacle overlaps the robots or the goal\n obstacle = OctagonObstacle(obs_radius, Pose(x, y, theta))\n intersects = False\n for test_geometry in test_geometries:\n intersects |= geometrics.convex_polygon_intersect_test(test_geometry, obstacle.global_geometry)\n if not intersects:\n obstacles.append(obstacle)\n return obstacles", "def random_map(self, world):\n obstacles = []\n if self.cfg[\"obstacle\"][\"octagon\"][\"enabled\"]:\n obstacles += 
self.__generate_octagon_obstacles(world)\n if self.cfg[\"obstacle\"][\"rectangle\"][\"enabled\"]:\n obstacles += self.__generate_rectangle_obstacles(world)\n\n # update the current obstacles and goal\n self.current_obstacles = obstacles\n self.add_new_goal()\n\n # apply the new obstacles and goal to the world\n self.apply_to_world(world)", "def obstacles_form(self,image):\r\n major_axis=60\r\n minor_axis=30\r\n c_y=246\r\n c_x=145\r\n c_y1=90\r\n c_x1=70\r\n radius=35\r\n for i in range(len(image)):\r\n for j in range(len(image[0])):\r\n\r\n #self.ellipse(image,major_axis,minor_axis,i,j,c_x,c_y)\r\n self.circle(image,100,i,j,200,200)\r\n self.circle(image,100,i,j,800,200)\r\n #self.slanted_rect(image,i,j)\r\n self.boundary(image,i,j)\r\n self.boundary1(image,i,j)\r\n self.boundary2(image,i,j)\r\n self.c_shape(image,i,j)\r\n #exploration.c_shape(image,i,j)\r", "def detect_object(world):\n # create the map with only the obstucale to non-zero\n world_hsv = cv2.cvtColor(world, cv2.COLOR_BGR2HSV)\n mask_red = cv2.inRange(world_hsv, low_red, up_red)\n occupancy_grid = np.array(mask_red)\n world_rows, world_cols, _ = world.shape\n\n # create the mask in order to find the goal\n world_hsv = cv2.cvtColor(world, cv2.COLOR_BGR2HSV)\n mask_goal = cv2.inRange(world_hsv, low_blue, up_blue)\n goal_x, goal_y = (15, 15) # goal by default\n\n # look for the obstacle and increase there size\n for i in range(world_rows):\n for j in range(world_cols):\n occupancy_grid[i][j] = int(occupancy_grid[i][j] / 255)\n if mask_goal[i][j] > 200:\n goal_x, goal_y = (i, j)\n object_grid = [[goal_x, goal_y]]\n return object_grid, occupancy_grid", "def generate_nearby_cells(self):\n for y in range(len(self.island_map)):\n for x in range(len(self.island_map[y])):\n list_of_nearby_cells = []\n\n if y != 0:\n self.generate_cell_above(x, y, list_of_nearby_cells)\n\n if x != 0:\n self.generate_cell_left(x, y, list_of_nearby_cells)\n\n if y != len(self.island_map)-1:\n self.generate_cell_below(x, y, list_of_nearby_cells)\n\n if x != len(self.island_map[y])-1:\n self.generate_cell_right(x, y, list_of_nearby_cells)\n\n self.island_map[y][x].nearby_cells = list_of_nearby_cells", "def updateHardObstacles(self):\r\n global_obs = self.calcGlobalObstaclePosition([[10, 20],[10, 0],[10, -20]])\r\n self.globalHardObstaclesList.extend(global_obs)", "def __generate_rectangle_obstacles(self, world):\n obs_min_dim = self.cfg[\"obstacle\"][\"rectangle\"][\"min_dim\"]\n obs_max_dim = self.cfg[\"obstacle\"][\"rectangle\"][\"max_dim\"]\n obs_max_combined_dim = self.cfg[\"obstacle\"][\"rectangle\"][\"max_combined_dim\"]\n obs_min_count = self.cfg[\"obstacle\"][\"rectangle\"][\"min_count\"]\n obs_max_count = self.cfg[\"obstacle\"][\"rectangle\"][\"max_count\"]\n obs_min_dist = self.cfg[\"obstacle\"][\"rectangle\"][\"min_distance\"]\n obs_max_dist = self.cfg[\"obstacle\"][\"rectangle\"][\"max_distance\"]\n\n # generate the obstacles\n obstacles = []\n obs_dim_range = obs_max_dim - obs_min_dim\n obs_dist_range = obs_max_dist - obs_min_dist\n num_obstacles = randrange(obs_min_count, obs_max_count + 1)\n\n test_geometries = [r.global_geometry for r in world.robots]\n while len(obstacles) < num_obstacles:\n # generate dimensions\n width = obs_min_dim + (random() * obs_dim_range )\n height = obs_min_dim + (random() * obs_dim_range )\n while width + height > obs_max_combined_dim:\n height = obs_min_dim + (random() * obs_dim_range )\n\n # generate position\n dist = obs_min_dist + (random() * obs_dist_range)\n phi = -pi + (random() * 2 * pi)\n x = dist * 
sin(phi)\n y = dist * cos(phi)\n\n # generate orientation\n theta = -pi + (random() * 2 * pi)\n\n # test if the obstacle overlaps the robots or the goal\n obstacle = RectangleObstacle(width, height, Pose(x, y, theta))\n intersects = False\n for test_geometry in test_geometries:\n intersects |= geometrics.convex_polygon_intersect_test(test_geometry, obstacle.global_geometry)\n if not intersects:\n obstacles.append(obstacle)\n return obstacles", "def through_obstacle(line, obstacles):\r\n noofpoints = 20\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def increased_obstacles_map(occupancy_grid):\n\n nb_rows = len(occupancy_grid)\n nb_cols = len(occupancy_grid[0])\n increased_occupancy_grid = np.zeros([nb_rows + 6, nb_cols + 6])\n\n for i in range(nb_rows):\n for j in range(nb_cols):\n\n if occupancy_grid[i, j] == OCCUPIED:\n increased_occupancy_grid[i:i + 7, j:j + 7] = np.ones([7, 7])\n\n final_occupancy_grid = increased_occupancy_grid[3:(LENGTH_case + 3), 3:(WIDTH_case + 3)]\n return final_occupancy_grid", "def containsObstacles(self, obstacleMatrix):\n\n containsGaps = False\n containsObstacles = False\n \n for row in obstacleMatrix:\n for column in row:\n if column > 0:\n containsObstacles = True\n if column < 0:\n containsGaps = True\n\n # stop the loop if already obstacles AND gaps are found\n # no need to look futher\n if containsObstacles == True and containsGaps == True:\n break\n\n return (containsObstacles, containsGaps)", "def get_neighbours(self, pos: tuple):\n x, y = pos[0], pos[1]\n neighbours = [(x + 1, y), (x + 1, y + 1), (x, y + 1), (x - 1, y + 1),\n (x - 1, y), (x - 1, y - 1), (x, y - 1), (x + 1, y - 1)]\n\n return {k: self.move_cost(pos, k) for k in neighbours if self.within_map(k)}", "def __init__(self, map_obstacle, main_graph):\n\n self.map_obstacle = map_obstacle\n self.main_graph = main_graph\n\n self.sight_range = self.calculate_sight_range()\n\n self.top_left_y = None\n self.top_left_x = None\n self.bottom_right_y = None\n self.bottom_right_x = None\n self.height = None\n self.width = None\n self.size = self.calculate_size()\n\n # nodes specific to this threat zone\n self.nodes = []", "def is_obstacle(self, pos: tuple):\n if self.within_map(pos):\n return self.map[round(pos[0]), round(pos[1])] == OBSTACLE\n else:\n return False", "def updateObstacleMap(self):\n\n all_sensor_readings = self.laser_readings + self.sonar_readings\n\n #we remove all the sensor readings that occur inside the robot frame\n restricted_sensor_readings = []\n for pt in all_sensor_readings:\n if not self.obstacle_map.inRobot(pt):\n restricted_sensor_readings.append(pt)\n\n #add the obstacles to the obstacle map\n self.obstacle_map_lock.acquire()\n self.obstacle_map.addObstacles(restricted_sensor_readings)\n self.obstacle_map_lock.release()\n\n return", "def get_neighbour(self, loc):\n y_lim, x_lim = np.shape(self.map)\n y, x = loc\n neighbour_cords = [(y - 1, x), (y + 1, x), (y, x - 1), (y, x + 1)]\n neighbour_cells = []\n for cords in neighbour_cords:\n curr_y, curr_x = cords\n if curr_y < 0 or curr_y >= y_lim:\n pass\n elif curr_x < 0 or curr_x >= x_lim:\n pass\n else:\n neighbour_cells.append(self.map[cords])\n\n return neighbour_cells", "def find_path(self, start_point: Pos, end_point: Pos, obstacles: list) -> list:\n pass", "def through_obstacle(line, obstacles):\r\n noofpoints = 100\r\n for i in range(noofpoints):\r\n if 
inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def test_d2_get_neighborhood_small(self):\n config.NR_COLS = 3\n config.NR_ROWS = 3\n gamefield = [\n [1, 0, 0],\n [1, 0, 0],\n [0, 1, 1],\n ]\n # top left\n nh = logic.get_neighborhood(gamefield, 0, 0)\n self.assertEqual(nh, 3)\n # top right\n nh = logic.get_neighborhood(gamefield, 0, 2)\n self.assertEqual(nh, 4)\n # bottom left\n nh = logic.get_neighborhood(gamefield, 2, 0)\n self.assertEqual(nh, 4)\n # bottom right\n nh = logic.get_neighborhood(gamefield, 2, 2)\n self.assertEqual(nh, 3)\n # center\n nh = logic.get_neighborhood(gamefield, 1, 1)\n self.assertEqual(nh, 4)", "def updateObstacles(self, obstacles):\r\n global_obs = self.calcGlobalObstaclePosition(obstacles)\r\n self.globalObstaclesList.extend(global_obs)", "def neighbors(self):\n \n # find 0 - blank square\n \n x0 = None\n y0 = None\n \n for i in range(4):\n for j in range(4):\n if self.get_tile(i,j) == 0:\n y0 = i\n x0 = j\n\n if x0 == None or y0 == None:\n return []\n \n neighbor_list = []\n \n # move 0 to the right\n if x0 < 3:\n new_position = Position(self.tiles)\n temp = new_position.get_tile(y0,x0+1)\n new_position.set_tile(y0,x0+1,0)\n new_position.set_tile(y0,x0,temp)\n new_position.directiontomoveto = 'r'\n neighbor_list.append(new_position)\n # move 0 to the left\n if x0 > 0:\n new_position = Position(self.tiles)\n temp = new_position.get_tile(y0,x0-1)\n new_position.set_tile(y0,x0-1,0)\n new_position.set_tile(y0,x0,temp)\n new_position.directiontomoveto = 'l'\n neighbor_list.append(new_position)\n # move 0 up\n if y0 > 0:\n new_position = Position(self.tiles)\n temp = new_position.get_tile(y0-1,x0)\n new_position.set_tile(y0-1,x0,0)\n new_position.set_tile(y0,x0,temp)\n new_position.directiontomoveto = 'u'\n neighbor_list.append(new_position)\n # move 0 down\n if y0 < 3:\n new_position = Position(self.tiles)\n temp = new_position.get_tile(y0+1,x0)\n new_position.set_tile(y0+1,x0,0)\n new_position.set_tile(y0,x0,temp)\n new_position.directiontomoveto = 'd'\n neighbor_list.append(new_position)\n \n return neighbor_list", "def _check_sonar_obstacles(self):\n # TODO: what's a good number?\n BLOCKED_THRESHOLD = 0.7\n\n rate = rospy.Rate(10) # 10 hz\n count = 10\n left = 0\n center = 0\n right = 0\n\n for i in range(count):\n obstacle = self.swarmie.get_obstacle_condition()\n\n if obstacle & Obstacle.SONAR_LEFT == Obstacle.SONAR_LEFT:\n left += 1\n if (obstacle & Obstacle.SONAR_CENTER ==\n Obstacle.SONAR_CENTER):\n center += 1\n if obstacle & Obstacle.SONAR_RIGHT == Obstacle.SONAR_RIGHT:\n right += 1\n\n rate.sleep()\n\n left_blocked = left / count > BLOCKED_THRESHOLD\n center_blocked = center / count > BLOCKED_THRESHOLD\n right_blocked = right / count > BLOCKED_THRESHOLD\n\n return left_blocked, center_blocked, right_blocked", "def __init__(self, map_config):\n self.current_obstacles = []\n self.current_goal = None\n self.cfg = map_config", "def getInitialObstacles():\n # hardcode number of blocks\n # will account for movemnet\n from random import choice\n from globals import TILEWIDTH, TILEHEIGHT, WINHEIGHT, TILEFLOORHEIGHT, LEVEL, HALFWINWIDTH\n\n no_of_blocks = 50\n for b in range(no_of_blocks // 2):\n # get image\n # image = globals.IMAGESDICT['rock']\n for y in range(1,5):\n image = globals.IMAGESDICT[choice(['ugly tree', 'rock', 'tall tree'])]\n # make rect\n spaceRect = pygame.Rect((b * TILEWIDTH, y * TILEFLOORHEIGHT, TILEWIDTH, TILEFLOORHEIGHT))\n landscape = 
Landscape(image, spaceRect)\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n image = globals.IMAGESDICT['corner']\n negativeRect = pygame.Rect([-150, WINHEIGHT - TILEHEIGHT, TILEWIDTH, TILEHEIGHT])\n landscape = Landscape(image, negativeRect)\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n image = globals.IMAGESDICT['corner']\n positiveRect = pygame.Rect([LEVEL[0] - TILEWIDTH, WINHEIGHT - TILEHEIGHT, TILEWIDTH, TILEFLOORHEIGHT])\n landscape = Landscape(image, positiveRect)\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n bottomRect = pygame.Rect([HALFWINWIDTH, LEVEL[1] - TILEHEIGHT, TILEWIDTH, TILEFLOORHEIGHT])\n landscape = Landscape(image, bottomRect)\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n for x in range(0, LEVEL[0], 50):\n for y in range(10):\n image = globals.IMAGESDICT[choice(['ugly tree', 'rock', 'tall tree'])]\n spaceRect = pygame.Rect((x, LEVEL[1] - (y * TILEHEIGHT), TILEWIDTH, TILEFLOORHEIGHT))\n landscape = Landscape(image, spaceRect)\n if choice([0,1,0]):\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n\n return", "def avoid_obstacles(self):\n _a = v2d(0, 0)\n _count = 0\n\n # Process all obstacles\n for obs in self.target._obstacles:\n # Vector from target to me\n diff = self._posn - obs._posn\n dist = abs(diff) # Distance\n if 0 < dist < self._sensing_range: # Is it in range?\n # Get force exherted by obstacle\n _f = self.obstacle_force(obs)\n if _f.magnitude() > 1: # Is the force significant?\n _a += _f\n _count += 1\n \n if _count > 0:\n _a /= _count\n _a *= self._speed_cap\n #limit(_a, self._max_f)\n \n return _a", "async def find_nearby_independent_worlds(context: Anacreon) -> List[World]:\n jump_beacon_trait_ids = {\n e.id\n for e in context.game_info.scenario_info\n if e.is_jump_beacon and e.id is not None\n }\n\n jump_beacon_location = [\n world.pos\n for world in context.space_objects.values()\n if isinstance(world, OwnedWorld)\n and any(\n anacreonlib.utils.world_has_trait(\n context.game_info.scenario_info, world, trait_id\n )\n for trait_id in jump_beacon_trait_ids\n )\n ]\n\n return [\n world\n for world in context.space_objects.values()\n if isinstance(world, World)\n and world.sovereign_id == 1 # Is a sovereign world\n and any(\n utils.dist(world.pos, jump_beacon_pos) <= 250\n for jump_beacon_pos in jump_beacon_location\n ) # Is in distance\n ]", "def _trace_route(self, debug=False, time=False):\n self.radius = 2\n self.threshold = 1\n\n obstacles = []\n for vehicle in self._world.get_actors().filter('vehicle.*'):\n #print(vehicle.bounding_box)\n # draw Box\n bb_points = TestAgent._create_bb_points(vehicle)\n global_points= TestAgent._vehicle_to_world(bb_points, vehicle)\n global_points /= global_points[3,:]\n\n my_bb_points = TestAgent._create_bb_points(self._vehicle)\n my_global_points = TestAgent._vehicle_to_world(my_bb_points, self._vehicle)\n\n my_global_points /= my_global_points[3,:]\n dist = np.sqrt((my_global_points[0,2]-global_points[0,2])**2 + (my_global_points[1,2]-global_points[1,2])**2 + (my_global_points[2,2]-global_points[2,2])**2)\n\n if 0<dist:\n vehicle_box = [global_points[0,0],global_points[1,0],global_points[0,1],global_points[1,1]]\n obstacles.append(vehicle_box)\n print(f'vehicle box: {vehicle_box}')\n\n print('number of near obstacles: ', len(obstacles))\n if len(obstacles) == 0:\n self.obstacles = np.array([[-1,-1,-1,-1]]).astype(np.float32)\n self.num_obs = self.num_obs = np.array([0]).astype(np.int32)\n else:\n self.obstacles 
= np.array(obstacles).astype(np.float32)\n self.num_obs = self.num_obs = np.array([self.obstacles.shape[0]]).astype(np.int32)\n\n iter_parameters = {'start':self.start, 'goal':self.goal, 'radius':self.radius, 'threshold':self.threshold, 'obstacles':self.obstacles, 'num_obs':self.num_obs}\n \n start_timer = timer()\n route = self.gmt_planner.run_step(iter_parameters, iter_limit=1000, debug=debug, time=time)\n end_timer = timer()\n print(\"elapsed time: \", end_timer-start_timer) \n\n if time:\n self.time_df = pd.DataFrame(self.gmt_planner.time_data)\n \n\n # trace_route = []\n # for r in route:\n # wp = carla.Transform(carla.Location(self.states[r][0].item(), self.states[r][1].item(), 1.2), carla.Rotation(roll=0,pitch=0, yaw=(self.states[r][2]*180/np.pi).item()))\n # trace_route.append(wp)\n # draw_route(self._vehicle.get_world(), trace_route)\n\n index = len(route)-1\n trace_route = []\n for i in range(len(route)-1):\n wp = self._map.get_waypoint(carla.Location(self.states[route[index]][0].item(), self.states[route[index]][1].item(), 1.2)) # , carla.Rotation(roll=0,pitch=0, yaw=(self.states[r][2]*180/np.pi).item()\n trace_route.append((wp,-1))\n index -= 1\n\n return trace_route", "def build_obstacle_list(self, vehicle_transform, prediction_msg):\n obstacle_list = []\n # look over all predictions\n for prediction in prediction_msg.predictions:\n # use all prediction times as potential obstacles\n previous_origin = None\n for transform in prediction.trajectory:\n global_obstacle = vehicle_transform * transform\n obstacle_origin = np.array(\n [global_obstacle.location.x, global_obstacle.location.y])\n # distance filtering\n if (previous_origin is None\n or np.linalg.norm(previous_origin - obstacle_origin) >\n self._flags.obstacle_filtering_distance):\n previous_origin = obstacle_origin\n dist_to_ego = np.linalg.norm([\n vehicle_transform.location.x - obstacle_origin[0],\n vehicle_transform.location.y - obstacle_origin[1]\n ])\n if dist_to_ego < self._flags.distance_threshold:\n # use 3d bounding boxes if available, otherwise use default\n if isinstance(prediction.bounding_box, BoundingBox3D):\n start_location = \\\n prediction.bounding_box.transform.location - \\\n prediction.bounding_box.extent\n end_location = \\\n prediction.bounding_box.transform.location + \\\n prediction.bounding_box.extent\n start_transform = global_obstacle.transform_locations(\n [start_location])\n end_transform = global_obstacle.transform_locations(\n [end_location])\n else:\n start_transform = [\n pylot.utils.Location(\n obstacle_origin[0] -\n self._flags.obstacle_radius,\n obstacle_origin[1] -\n self._flags.obstacle_radius, 0)\n ]\n end_transform = [\n pylot.utils.Location(\n obstacle_origin[0] +\n self._flags.obstacle_radius,\n obstacle_origin[1] +\n self._flags.obstacle_radius, 0)\n ]\n obstacle_list.append([\n min(start_transform[0].x, end_transform[0].x),\n min(start_transform[0].y, end_transform[0].y),\n max(start_transform[0].x, end_transform[0].x),\n max(start_transform[0].y, end_transform[0].y)\n ])\n if len(obstacle_list) == 0:\n return np.empty((0, 4))\n\n return np.array(obstacle_list)", "def spawn_obstacles(self):\n self.obstacle_sprites.empty()\n number_of_obstacles = random.randint(MIN_OBSTACLES, MAX_OBSTACLES)\n while len(self.obstacle_sprites) < number_of_obstacles:\n obstacle = Obstacle(random.randrange(0, WIDTH), random.randrange(HEIGHT - 500, HEIGHT))\n obstacle_collision = pygame.sprite.spritecollide(obstacle, self.obstacle_sprites, False)\n if not obstacle_collision:\n 
self.obstacle_sprites.add(obstacle)", "def a_star_obs(obs_map):\n world_ndarray = np.copy(obs_map[0])\n\n start = tuple(np.argwhere(world_ndarray == -2)[0])\n goal = tuple(np.argwhere(world_ndarray == -3)[0])\n\n world_ndarray[world_ndarray == -2] = 0\n world_ndarray[world_ndarray == -3] = 0\n\n world_tuple = tuple(map(tuple, world_ndarray))\n\n def h_custom_i(cur, end, obstacle):\n ytop, ybot, minx = obstacle\n cur_y, cur_x = cur\n end_y, end_x = end\n obs_bot = np.where(world_ndarray[ybot] == -1)[0][0]\n mid_y = ybot + (ytop - ybot) // 2\n if cur_y in range(ybot, ytop) and cur_x in range(max(obs_bot, start[1]), end_x):\n return 5000 - abs(minx - cur_x) ** 2 - abs(cur_y - mid_y) ** 2\n return abs(cur_x - end_x) + abs(cur_y - end_y)\n\n pr_queue = [] # Use heapqueue as priority queue\n heappush(pr_queue, (0 + h_custom_i(start, goal, obs_map[1]), 0, \"\", start))\n visited = set() # Each element has to be unique in a set\n graph = get_neighbors(world_tuple)\n route_str = \"\"\n\n while pr_queue:\n _, cost, path, current = heappop(pr_queue)\n if current == goal:\n route_str = path\n break\n if current in visited:\n continue\n visited.add(current)\n for direction, neighbour in graph[current].iteritems():\n heappush(pr_queue, (cost + h_custom_i(neighbour, goal, obs_map[1]), cost + 1, path + direction, neighbour))\n world_ndarray[neighbour] = cost + 1\n\n # print \"Expanded nodes(A*+Custom H): \", len(visited), \" Path length: \", len(route_str)\n # Convert string directions to 2D(x,y) coordinates\n route_coord = [start]\n for p in route_str:\n route_coord.append(graph[route_coord[-1]][p])\n\n world_ndarray[start] = -2 # Mark the start and end coordinates again\n world_ndarray[goal] = -3\n\n return route_coord, world_ndarray, len(visited), len(route_str)", "def banned_places(self):\n ys1 = list(range(20, 1060, 5))\n ys2 = list(range(20, 1060, 5))\n x1, x2 = 220, self.game.arena.size[0]-20\n\n for y in range(20, 1060, 5):\n for ban in self.game.arena.banned:\n if ban[0] < x1 < ban[1] and ban[2] < y < ban[3]:\n ys1.remove(y)\n if ban[0] < x2 < ban[1] and ban[2] < y < ban[3]:\n ys2.remove(y)\n self.pos_dict_bot = {1: (x1, ys1), 2: (x2, ys2)}", "def is_collision_by_map_obstacle(self):\n for content in self.contents:\n if self.content.y == self.y and self.content.x == self.x:\n return True\n else:\n return False", "def findNeighbours(self):\n neighbours = []\n\n for i in range(self.xCoordinate - 1, self.xCoordinate + 2):\n for j in range(self.yCoordinate - 1, self.yCoordinate + 2):\n if (not (i == self.xCoordinate and j == self.yCoordinate)) and (0 <= i <= 394 and 0 <= j <= 499):\n neighbours.append(PixelPosition(i, j))\n\n return neighbours", "def is_map_obstacle_in_screen_range(self):\n raise NotImplementedError", "def world():\n bgcolor('black')\n path.color('blue')\n\n for index in range(len(tiles)):\n tile = tiles[index]\n \"\"\"\n Si estamos en un cuadro valido lo dibujamos en azul \n y ponemos el punto blanco\n \"\"\"\n if tile > 0:\n x = (index % 20) * 20 - 200\n y = 180 - (index // 20) * 20\n square(x, y)\n\n if tile == 1:\n path.up()\n path.goto(x + 10, y + 10)\n path.dot(2, 'white')", "def generate_obstacle_block(center, radius):\n obstacles = []\n\n for x in range(center[0] - radius, center[0] + radius + 1):\n for y in range(center[1] - radius, center[1] + radius + 1):\n for z in range(center[2] - radius, center[2] + radius + 1):\n obstacles.append([x, y, z])\n\n assert len(obstacles) == (radius * 2 + 1) ** 3\n return obstacles", "def get_neighbours(self):\n return []", "def 
__init__(self, costmap):\n # Copy the map metadata\n self.resolution = costmap.info.resolution\n self.min_x = costmap.info.origin.position.x\n self.min_y = costmap.info.origin.position.y\n self.y_width = costmap.info.height\n self.x_width = costmap.info.width\n self.max_x = self.min_x + self.x_width *self.resolution\n self.max_y = self.min_y + self.y_width *self.resolution\n print self.min_x, self.min_y\n print self.max_x, self.max_y\n print \"Resolution: \", self.resolution\n print self.x_width, self.y_width\n \n\n self.motion = self.get_motion_model()\n \n # Copy the actual map data from the map\n x = 0\n y = 0\n ox = list()\n oy = list()\n # obstacle map generation\n self.obstacle_map = [[False for _ in range(self.y_width)]\n for _ in range(self.x_width)]\n obstacles = 0\n for value in costmap.data:\n if value >95:\n obstacles += 1\n self.obstacle_map[x][y] = True\n ox.append(float(x)*self.resolution +self.min_x)\n oy.append(float(y)*self.resolution +self.min_y)\n # Update the iterators\n x += 1\n if x == self.x_width:\n x = 0\n y += 1\n print \"Loaded %d obstacles\"%(obstacles)\n if show_animation: # pragma: no cover\n plt.plot(ox, oy, \".k\")\n plt.grid(True)\n \n # plt.axis(\"equal\")", "def get_collisions(self) -> int:\n c = 0\n for o in self.obstacles:\n if not isinstance(o, Bomb):\n continue # only consider apples\n xy_diff = o.get_position()[:2] - self.agent.get_position()[:2]\n dist = np.linalg.norm(xy_diff)\n # obstacles are only active when they are visible...\n if o.is_visible and dist < self.detection_distance:\n o.update_visuals(make_visible=False)\n c += 1\n return c", "def publishObstacles(self):\n mk = Marker()\n mk.header.stamp = rospy.get_rostime()\n mk.header.frame_id = '/base_link'\n\n mk.ns='basic_shapes'\n mk.id = 0\n mk.type = Marker.POINTS\n mk.scale.x = 0.3\n mk.scale.y = 0.3\n mk.scale.z = 0.3\n mk.color.r = 1.0\n mk.color.a = 1.0\n\n for value in self.obstacle_map.obstacles_in_memory:\n p = Point()\n p.x = value[0]\n p.y = value[1]\n mk.points.append(p)\n\n\n self.obs_pub.publish(mk)", "def update_obstacles(self, new_obs):\n self.obstacles = new_obs", "def generate_possible_paths(self, obstacle):\n if self.does_uav_intersect_obstacle_vertically(obstacle, self.drone.get_point(), self.drone.get_waypoint_holder().get_current_waypoint()):\n if self.does_path_intersect_obstacle_2d(obstacle, self.drone.get_point(), self.drone.get_waypoint_holder().get_current_waypoint()):\n new_attempt_pos_points = [\n [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1] + obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1] - obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1] - obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1] + obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0], obstacle.get_point()[1] + obstacle.get_radius(), obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],\n [obstacle.get_point()[0], obstacle.get_point()[1] - obstacle.get_radius(), obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],\n [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1], obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],\n [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1], obstacle.get_height() + 
(Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)]\n ]\n\n new_paths = []\n for new_pos_point in new_attempt_pos_points:\n if not self.does_path_intersect_obstacle_3d(obstacle, self.drone.get_point(), new_pos_point) and self.flight_boundary.is_point_in_bounds(new_pos_point):\n for recursive_new_pos_point in new_attempt_pos_points:\n if self.flight_boundary.is_point_in_bounds(recursive_new_pos_point) and abs(recursive_new_pos_point[2] - new_pos_point[2]) < 5:\n if recursive_new_pos_point[0] != new_pos_point[0] or recursive_new_pos_point[1] != new_pos_point[1]:\n if not self.does_path_intersect_obstacle_3d(obstacle, new_pos_point, recursive_new_pos_point) and not self.does_path_intersect_obstacle_3d(obstacle, recursive_new_pos_point, self.drone.get_waypoint_holder().get_current_waypoint()):\n new_paths.append([new_pos_point, recursive_new_pos_point])\n\n # Uncomment for DEBUGGING ONLY\n for path in new_paths:\n print(\"Point:\", str(path))\n\n return new_paths\n\n return []", "def _get_neighbours(self, position):\n grid = self._grid\n x, y = position\n neighbours = []\n offsets = [(0,1),(1,0),(0,-1),(-1,0)]\n shuffle(offsets)\n for offset in offsets:\n i, j = offset\n position = (x + i, y + j)\n if grid.valid_position(position) and position not in self.shots:\n neighbours.append(position)\n return neighbours", "def get_obstable_metrics(self):\r\n obstacle_details = []\r\n\r\n for nb_obstacle in range(self.N_OBSTABLE_GEN):\r\n # Width Random Size\r\n width_obs = randint(1, self.MAX_OBS_WIDTH)\r\n # Height Random Size\r\n height_obs = randint(1, self.MAX_OBS_HEIGHT)\r\n # Location Random Selection - 1 if Upper 0 Lower\r\n location_obs = 1 if random() > 0.5 else 0\r\n\r\n # Start Location\r\n location_st_obs = randint(0,\r\n self.WINDOW_WIDTH - width_obs)\r\n\r\n obstacle_details.append((width_obs,\r\n height_obs,\r\n location_obs,\r\n location_st_obs))\r\n\r\n return obstacle_details", "def get_num_obstacles(coord_a, coord_b):\n obstacle_count = 0\n \n for x in range(coord_a.x, coord_b.x + 1):\n for y in range(coord_a.y, coord_b.y + 1):\n coord = Coordinate(x, y)\n if coord in self.wall_coords or coord in state:\n obstacle_count += 1\n \n return obstacle_count", "def obstacles_on_face(face_id,T_matrix,quaternion,paint_points,width=0.10,offset=0.01):\n obstacles = []\n for i in range(len(paint_points)/2):\n stroke_1 = paint_points[i*2]\n stroke_2 = paint_points[i*2+1]\n x_o = (stroke_1[0] + stroke_2[0]) / 2.0\n y_o = (stroke_1[1] + stroke_2[1]) / 2.0 + width / 2.0\n z_o = offset / 2.0\n center = trans_to_coordinates(T_matrix, [[x_o, y_o, z_o]])\n obstacle_name = \"face %d obstacle %d\" % (face_id, i)\n obstacles.append([obstacle_name, center[0][0], center[0][1], center[0][2], quaternion[0], quaternion[1], quaternion[2],quaternion[3],[width, stroke_1[1] - stroke_2[1],offset]])\n # print obstacles\n return obstacles", "def getObstacles(request):\n # Validate user made a GET request\n if request.method != 'GET':\n logger.warning('Invalid request method for obstacle info request.')\n logger.debug(request)\n return HttpResponseBadRequest('Request must be GET request.')\n # Validate user is logged in to make request\n if not request.user.is_authenticated():\n logger.warning('User not authenticated for obstacle info request.')\n logger.debug(request)\n return HttpResponseBadRequest('User not logged in. Login required.')\n\n # Log user access to obstacle info\n logger.info('User downloaded obstacle info: %s.' 
% request.user.username)\n access_log = ObstacleAccessLog()\n access_log.user = request.user\n access_log.save()\n\n # Form JSON response portion for stationary obstacles\n stationary_obstacles_cached = True\n stationary_obstacles_key = '/StationaryObstacle/all'\n stationary_obstacles = cache.get(stationary_obstacles_key)\n if stationary_obstacles is None:\n stationary_obstacles = StationaryObstacle.objects.all()\n stationary_obstacles_cached = False\n stationary_obstacles_json = list()\n for cur_obst in stationary_obstacles:\n # Add current obstacle\n cur_obst_json = cur_obst.toJSON()\n stationary_obstacles_json.append(cur_obst_json)\n\n # Form JSON response portion for moving obstacles\n moving_obstacles_cached = True\n moving_obstacles_key = '/MovingObstacle/all'\n moving_obstacles = cache.get(moving_obstacles_key)\n if moving_obstacles is None:\n moving_obstacles = MovingObstacle.objects.all()\n moving_obstacles_cached = False\n moving_obstacles_json = list()\n for cur_obst in moving_obstacles:\n # Add current obstacle\n cur_obst_json = cur_obst.toJSON()\n moving_obstacles_json.append(cur_obst_json)\n\n # Form final JSON response\n data = {\n 'stationary_obstacles': stationary_obstacles_json,\n 'moving_obstacles': moving_obstacles_json\n }\n\n # Cache obstacles for next request\n if not stationary_obstacles_cached:\n cache.set(stationary_obstacles_key, stationary_obstacles)\n if not moving_obstacles_cached:\n cache.set(moving_obstacles_key, moving_obstacles)\n\n # Return JSON data\n return HttpResponse(json.dumps(data),\n content_type=\"application/json\")", "def get_collisions(self) -> int:\n return 0 # no obstacles are spawned for Circle tasks", "def discover_map(self):\n frontier = Queue()\n cleared = {self.position}\n for pos in self._check_neighbors():\n frontier.put(pos)\n self.add_node(pos, self.position)\n while not frontier.empty():\n next = frontier.get()\n if next not in cleared:\n self.move_to(next)\n for pos in self._check_neighbors():\n self.add_node(pos, self.position)\n frontier.put(pos)\n cleared.add(self.position)\n\n return tuple(self.grid[2])[0]", "def uniquePathsWithObstacles(self, obstacleGrid: List[List[int]]) -> int:\n if obstacleGrid[0][0] == 1:\n return 0\n\n m, n = len(obstacleGrid), len(obstacleGrid[0])\n dp = [[0 for _ in range(n)] for _ in range(m)]\n dp[0][0] = 1\n\n for i in range(1, m):\n if obstacleGrid[i][0] == 1: break\n else: dp[i][0] = dp[i-1][0]\n\n for j in range(1, n):\n if obstacleGrid[0][j] == 1: break\n else: dp[0][j] = dp[0][j-1]\n\n for i in range(1, m):\n for j in range(1, n):\n if obstacleGrid[i][j] == 0:\n dp[i][j] = dp[i-1][j] + dp[i][j-1]\n\n return dp[-1][-1]", "def get_neighbouring_nodes(node) :\r\n\r\n connected_nodes = [] #A list of the connected nodes\r\n\r\n #Checking if the node belongs to the 1st row\r\n if(node.coords[0] != 0) :\r\n connected_node = Node((node.coords[0] - 1, node.coords[1]), goal_pos, node.gn_value - 1)\r\n #Checking if the node is an obstacle\r\n if(not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the last row\r\n if(node.coords[0] != grid_dims[0] - 1) :\r\n connected_node = Node((node.coords[0] + 1, node.coords[1]), goal_pos, node.gn_value - 1)\r\n #Checking if the node is an obstacle\r\n if(not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the 1st column\r\n if(node.coords[1] != 0) :\r\n connected_node = Node((node.coords[0], node.coords[1] - 
1), goal_pos, node.gn_value - 1)\r\n #Checking if the node is an obstacle\r\n if(not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the last column\r\n if(node.coords[1] != grid_dims[1] - 1) :\r\n connected_node = Node((node.coords[0], node.coords[1] + 1), goal_pos, node.gn_value - 1)\r\n #Checking if the node is an obstacle\r\n if(not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n return connected_nodes", "def draw_obstacle(start, end, img):\n # start, end, top_right, top_left = generate_obstacle_point(start, (start[0] + _OBSTACLE_SIZE, start[1] ))\n cv2.fillPoly(img, np.array([[[start[0] - 25, start[1] - 25],\n [start[0] + 25, start[1] - 25],\n [start[0] + 25, start[1] + 25],\n [start[0] - 25, start[1] + 25]]]), _RED)\n # cv2.rectangle(img, (start[0] - 25, start[1] - 25), (start[0] + 25, start[1] + 25), (0, 255, 0), 3)\n return img", "async def show_obstacles(canvas):\n\n while True:\n boxes = []\n\n for obstacle in obstacle_manager:\n boxes.append(obstacle.get_bounding_box())\n\n for x, y, frame in boxes:\n draw_frame(canvas, x, y, frame)\n\n await Sleep(1)\n\n for x, y, frame in boxes:\n draw_frame(canvas, x, y, frame, negative=True)", "def get_neighbours(self, boid: b.Boid) -> List[b.Boid]:\n neighbours = []\n for possible_neighbour in self.boids:\n if (\n possible_neighbour is not boid and\n possible_neighbour.species_name == boid.species_name and\n boid.sees(possible_neighbour.x, possible_neighbour.y)\n ):\n neighbours.append(possible_neighbour)\n return neighbours", "def gObs(p):\n g = np.array([0.,0.])\n for obs in obstacles(p):\n g += gPenalty(obs[0])*obs[1]\n return g", "def obstacle_count(self):\n #scan area in front of robot\n self.scan()\n #Figure out how many obstacles there were\n see_an_object = False\n count = 0", "def empty_spots(self):\n\t\tret = []\n\t\tfor i in range(0, self.size):\n\t\t\tfor j in range(0, self.size):\n\t\t\t\tif(self.grid[i][j] == self.terminal):\n\t\t\t\t\tret.append((i,j))\n\t\treturn ret", "def set_obstacle(self, pos: tuple):\n if self.within_map(pos):\n self.map[round(pos[0]), round(pos[1])] = OBSTACLE\n return True\n else:\n return False", "async def show_obstacles(canvas):\n\n while True:\n boxes = []\n\n for obstacle in OBSTACLES:\n boxes.append(obstacle.dump_bounding_box())\n\n for row, column, frame in boxes:\n draw_frame(canvas, row, column, frame)\n\n await asyncio.sleep(0)\n\n for row, column, frame in boxes:\n draw_frame(canvas, row, column, frame, negative=True)", "def reset_obstacles(self):\n self.obstacles = np.array([])", "def collide_grid(self):\n topleft = self.absolute_collide_topleft\n bottomright = self.absolute_collide_bottomright\n tlx, tly = self.currentLevel.toGridCoord(topleft)\n brx, bry = self.currentLevel.toGridCoord(bottomright)\n collide_grid = []\n for x in range(tlx, brx+1):\n for y in range(tly, bry+1):\n collide_grid.append( (x,y) )\n if not collide_grid:\n collide_grid = [(tlx,tly)]\n return collide_grid", "def get_neighbors(self) -> List['games.saloon.tile.Tile']:\n neighbors = []\n\n for direction in Tile.directions:\n neighbor = getattr(self, \"tile_\" + direction.lower())\n if neighbor:\n neighbors.append(neighbor)\n\n return neighbors", "def obstacles_callback(self, data):\n obs_pos = [(obs.ObsPosition.x, obs.ObsPosition.y, obs.ObsPosition.z)\n for obs in data.obs]\n obs_yaw = np.array([obs.ObsTheta for obs in data.obs])\n if len(obs_pos)==0:\n self.obs_risk = 0.0\n self.min_obs_dist 
= self.detect_obstacle_range + 100.0\n else:\n disp_vec = np.array(obs_pos) - self.car_pos # displacement\n dist_obs = np.linalg.norm(disp_vec, axis=1) # obstacle distance\n # ego heading unit vector\n ego_hdg = (np.cos(self.car_euler[2]), np.sin(self.car_euler[2]), 0)\n # cosine of ego heading and obs displacement\n obs_cosine = np.dot(disp_vec, ego_hdg)/dist_obs\n # angle of obs displacement w.r.t ego heading\n obs_angle = np.arccos(obs_cosine)\n # raised cosine, 1.0 within a narrow angle ahead, quickly rolloff\n # to 0.0 as angle increases \n obs_rcos = self.raised_cosine(obs_angle, np.pi/24, np.pi/48)\n # distance risk is Laplacian normalized by detection range\n risk_dist = np.exp(-0.1*(dist_obs-self.detect_obstacle_range))\n # relative angle between headings of ego car and obs car\n # shifted by pi\n rel_angle = self.car_euler[2] - obs_yaw + np.pi\n rel_angle = (rel_angle + np.pi) % (2*np.pi) - np.pi\n collide_rcos = self.raised_cosine(rel_angle, np.pi/24, np.pi/48)\n # total directional obs risk is distance risk multiplied by\n # raised-cosine directional weight.\n self.obs_risk = np.sum(\n risk_dist * (obs_rcos+0.1) * (collide_rcos+0.1)\n )\n if np.isnan(self.obs_risk):\n self.obs_risk = 0.0\n # idx = np.argsort(dist_obs)[::]\n # minimum obs distance\n self.min_obs_dist = min(dist_obs)\n near_obs = True if self.min_obs_dist<self.detect_obstacle_range else False\n self.pub_obs_risk.publish(self.obs_risk)\n self.pub_nearest_obs.publish(near_obs)", "def get_boundary_points(self, object_handle):\n points = []\n obstacle_position = self.get_object_position(object_handle)\n ret, orient = vrep.simxGetObjectOrientation(self.client_id, object_handle, -1, \\\n vrep.simx_opmode_blocking)\n ret, x_1 = vrep.simxGetObjectFloatParameter(self.client_id, object_handle, 15, \\\n vrep.simx_opmode_blocking)\n ret, y_1 = vrep.simxGetObjectFloatParameter(self.client_id, object_handle, 16, \\\n vrep.simx_opmode_blocking)\n ret, x_2 = vrep.simxGetObjectFloatParameter(self.client_id, object_handle, 18, \\\n vrep.simx_opmode_blocking)\n ret, y_2 = vrep.simxGetObjectFloatParameter(self.client_id, object_handle, 19, \\\n vrep.simx_opmode_blocking)\n angle = orient[2]\n # Extension of boundaries, so that the robot moves without collisions\n x_1 = x_1 - 0.3\n x_2 = x_2 + 0.3\n y_1 = y_1 - 0.3\n y_2 = y_2 + 0.3\n\n\n p_1 = (x_1 * math.cos(angle) - y_1 * math.sin(angle) + obstacle_position.x, y_1 * \\\n math.cos(angle) + x_1 * math.sin(angle) + obstacle_position.y)\n points.append(Point(p_1))\n p_2 = (x_1 * math.cos(angle) - y_2 * math.sin(angle) + obstacle_position.x, y_2 * \\\n math.cos(angle) + x_1 * math.sin(angle) + obstacle_position.y)\n points.append(Point(p_2))\n p_3 = (x_2 * math.cos(angle) - y_2 * math.sin(angle) + obstacle_position.x, y_2 * \\\n math.cos(angle) + x_2 * math.sin(angle) + obstacle_position.y)\n points.append(Point(p_3))\n p_4 = (x_2 * math.cos(angle) - y_1 * math.sin(angle) + obstacle_position.x, y_1 * \\\n math.cos(angle) + x_2 * math.sin(angle) + obstacle_position.y)\n points.append(Point(p_4))\n return points", "def getMovableRange(self, unit):\n CostArr_mod = modifyMovCost(CostArr, ability)\n Obstacles = self.getUnpassable(player) # units that are not passable....\n pos_list, path_list = UCS_solve(unit.pos, CostArr_mod, unit.MovPnt)\n return pos_list, path_list", "def recreate_obstacles(self):\n self.board_matrix = np.full(Dimension.board_size(), 1)\n self.obstacles = self.create_obstacles()", "def open_spots(self):\n ret = []\n for i in range(1,25):\n if self.nodes[i].piece == 
None:\n ret.append(i)\n return ret", "def cell_neighbours(self, x, y):\n if self.maze_map[y][x]:\n return set()\n neighbours = set()\n for (direction, ((i, j), dummy)) in MazeGraph.DIRECTIONS.items():\n xi, yj = (x + i) % self.width, (y + j) % self.height\n if not self.maze_map[yj][xi]:\n neighbours.add((direction, (xi, yj)))\n return neighbours", "def get_neighbors(self):\n return list(map(self.game.square, [self.position - self.game.rules[\"row_len\"], self.position + 1, self.position + self.game.rules[\"row_len\"], self.position - 1]))", "def reachable_province(self, ctx):\n return self.reachable_tiles(ctx)", "def tinyMazeSearch(problem):\r\n from game import Directions\r\n s = Directions.SOUTH\r\n w = Directions.WEST\r\n return [s,s,w,s,w,w,s,w]", "def drawMap(mapObj, gameStateObj, goals, screen):\n \n # mapSurf will be the single Surface object that the tiles are drawn\n # on, so that it is easy to position the entire map on the DISPLAYSURF\n # Surface object. First, the width and height must be calculated.\n # mapWidth = len(mapObj) * TILEWIDTH\n # mapSurfHeight = (len(mapObj[0]) - 1) * TILEFLOORHEIGHT + TILEHEIGHT\n # mapSurf = pygame.Surface((mapSurfWidth, mapSurfHeight))\n # mapSurf.fill(BGCOLOR) # start with a blank color on the surface.\n \n for i in xrange(len(tiles)):\n tiles[i].hideturtle()\n \n debugprint(\"drawing map\")\n \n nxtiles = len(mapObj)\n nytiles = len(mapObj[0])\n \n xoffset = TILEWIDTH/2 + TILEWIDTH\n yoffset = WINHEIGHT - TILEHEIGHT/2 - TILEWIDTH\n \n tileCount = 0;\n \n def updateTile(screen, xpos, ypos, shape):\n global tiles\n \n if tileCount >= len(tiles):\n tiles.append(Tile(screen, xpos, ypos, shape))\n else:\n tiles[tileCount].goto(xpos, ypos)\n tiles[tileCount].shape(shape)\n tiles[tileCount].showturtle()\n\n return tileCount + 1\n \n # screen.tracer(1)\n # # Draw the tile sprites onto this surface.\n for x in range(nxtiles):\n for y in range(nytiles):\n xpos = x*TILEWIDTH + xoffset\n ypos = yoffset - y*40\n \n if mapObj[x][y] in TILEMAPPING:\n baseTile = TILEMAPPING[mapObj[x][y]]\n elif mapObj[x][y] in OUTSIDEDECOMAPPING:\n baseTile = TILEMAPPING[' ']\n\n # First draw the base ground/wall tile.\n tileCount = updateTile(screen, xpos, ypos, baseTile)\n # debugprint(xpos)\n # debugprint(ypos)\n if mapObj[x][y] in OUTSIDEDECOMAPPING:\n # Draw any tree/rock decorations that are on this tile.\n tileCount = updateTile(screen,xpos,ypos,OUTSIDEDECOMAPPING[mapObj[x][y]])\n elif (x, y) in gameStateObj['stars']:\n if (x, y) in goals:\n # A goal AND star are on this space, draw goal first.\n tileCount = updateTile(screen,xpos,ypos,IMAGESDICT['covered goal'])\n # Then draw the star sprite.\n tileCount = updateTile(screen,xpos,ypos,IMAGESDICT['star'])\n elif (x, y) in goals:\n # Draw a goal without a star on it.\n tileCount = updateTile(screen,xpos,ypos,IMAGESDICT['uncovered goal'])\n\n # Last draw the player on the board.\n if (x, y) == gameStateObj['player']:\n # Note: The value \"player_image\" refers\n # to a key in \"PLAYERIMAGES\" which has the\n # specific player image we want to show.\n tileCount = updateTile(screen,xpos,ypos,PLAYERIMAGES[game_state[\"player_image\"]])\n debugprint(PLAYERIMAGES[game_state[\"player_image\"]])", "def load_from_info(self, course_info):\n for item in course_info[\"obstacles\"]:\n klass = self.class_map[item[0].lower()]\n midbottom = item[1]\n obstacle = klass(midbottom, self.obstacles)\n if \"gate\" in item[0].lower():\n self.gates.add(obstacle)", "def test_d1_get_neighborhood(self):\n config.NR_COLS = 10\n config.NR_ROWS = 10\n 
gamefield = [\n [1, 0, 0, 0, 0, 0, 0, 0, 1, 0],\n [1, 0, 0, 0, 0, 0, 0, 0, 1, 1],\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 1, 0, 0, 0],\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1, 1, 0, 0, 0, 0, 0, 0, 0, 0],\n [1, 1, 0, 0, 0, 0, 0, 0, 0, 0],\n ]\n # top left\n nh = logic.get_neighborhood(gamefield, 0, 0)\n self.assertEqual(nh, 4)\n # top right\n nh = logic.get_neighborhood(gamefield, 0, 8)\n self.assertEqual(nh, 2)\n # bottom left\n nh = logic.get_neighborhood(gamefield, 9, 1)\n self.assertEqual(nh, 4)\n # bottom right\n nh = logic.get_neighborhood(gamefield, 9, 9)\n self.assertEqual(nh, 4)\n # center\n nh = logic.get_neighborhood(gamefield, 4, 5)\n self.assertEqual(nh, 3)", "def update_map(self, screenshot=None):\n # Get the visible tiles\n nearby = self.game_map[\n (self.player_position[0] - 10): (self.player_position[0] + 11),\n (self.player_position[1] - 10): (self.player_position[1] + 11)\n ]\n\n # Clear NPCs in the nearby as they may have moved\n nearby[nearby == self.TILES.WEAPON_SHOPKEEPER.value] = self.TILES.UNKNOWN.value\n nearby[nearby == self.TILES.BLACKSMITH.value] = self.TILES.UNKNOWN.value\n\n # Take screenshot and isolate the gameplay region\n if screenshot is None:\n screenshot = utils.take_screenshot()\n play = screenshot[8:344, 8:344]\n\n # Loop through all unknown tiles in the nearby\n for i, j in zip(*np.where(nearby == self.TILES.UNKNOWN.value)):\n # Scale up the dimensions\n tile_x = i * self.TILE_DIM\n tile_y = j * self.TILE_DIM\n\n # The center cell is always the player\n if i == 10 and j == 10:\n tile_x = self.player_position[0] + int(tile_x / 16) - 10\n tile_y = self.player_position[1] + int(tile_y / 16) - 10\n self.game_map[(tile_x, tile_y)] = self.TILES.PLAYER.value\n continue\n\n # Slice the tile from the play region\n tile = play[tile_y:tile_y + self.TILE_DIM,\n tile_x:tile_x + self.TILE_DIM]\n\n tile_x = self.player_position[0] + int(tile_x / 16) - 10\n tile_y = self.player_position[1] + int(tile_y / 16) - 10\n\n # Go through all tile types looking for a high confidence match\n template = None\n for potential_template in self.templates:\n if np.allclose(potential_template[0], tile, 1, 1):\n template = potential_template\n break\n\n # No match, assume it is inaccessible\n if template is None:\n self.game_map[(tile_x, tile_y)] = self.TILES.INACCESSIBLE.value\n continue\n\n # By default, mark tile as inaccessible\n label = None\n\n # Mark as mineable\n if re.search(r'rock', template[1], re.M | re.I):\n label = self.TILES.MOUNTAIN.value\n elif re.search(r'door', template[1], re.M | re.I):\n label = self.TILES.DOOR.value\n elif re.search(r'gravel', template[1], re.M | re.I):\n label = self.TILES.GRAVEL.value\n elif re.search(r'shopkeeper', template[1], re.M | re.I):\n label = self.TILES.WEAPON_SHOPKEEPER.value\n elif re.search(r'blacksmith', template[1], re.M | re.I):\n label = self.TILES.BLACKSMITH.value\n elif re.search(r'guard', template[1], re.M | re.I):\n label = self.TILES.INACCESSIBLE.value\n elif re.search(r'inaccessible', template[1], re.M | re.I):\n label = self.TILES.INACCESSIBLE.value\n elif re.search(r'accessible', template[1], re.M | re.I):\n label = self.TILES.ACCESSIBLE.value\n\n # Calculate coordinates of tile in the map relative to the player\n self.game_map[(tile_x, tile_y)] = label\n\n # Go through all tiles in the gameplay region to find the mountains\n for i, j in zip(*np.where(nearby == self.TILES.MOUNTAIN.value)):\n # Get the tile 
to the left of the mountain\n tile_left = nearby[(i-1, j)]\n\n # Only allow mountains to be mineable if they are beside gravel\n if not tile_left == self.TILES.GRAVEL.value:\n nearby[(i, j)] = self.TILES.INACCESSIBLE.value\n\n # Save the game map to disk\n np.savetxt('map.txt', self.game_map, fmt='%d')", "def tinyMazeSearch(problem):\n from game import Directions\n s = Directions.SOUTH\n w = Directions.WEST\n n = Directions.NORTH\n\n return [s, s, n, s, w, s, w, w, s, w]", "def move_boats():\n hit_manatee = False\n for i in range(len(map)-1, -1, -1):\n for j in range(len(map[i])-1, -1, -1):\n if map[i][j] == \"*\":\n # Only runs if the entity is a boat\n if i + 1 >= len(map):\n continue\n if map[i+1][j] == \" \":\n # Moves boat downward if possible\n if i + 2 < len(map) and map[i+2][j] == \"M\":\n hit_manatee = True\n map[i+2][j] = \"W\"\n map[i+1][j] = \"*\"\n map[i][j] = \" \"\n elif map[i+1][j] == \"*\":\n # Boats colliding with each other\n new_boat_pos = (i, j)\n if j + 1 < len(map[i]) and map[i][j+1] == \" \" \\\n and map[i+1][j+1] == \" \":\n new_boat_pos = (i+1, j+1)\n elif j - 1 >= 0 and map[i][j-1] == \" \" \\\n and map[i+1][j-1] == \" \":\n new_boat_pos = (i+1, j-1)\n else:\n continue\n\n # Moves boat down to new position\n map[i][j] = \" \"\n map[new_boat_pos[0]][new_boat_pos[1]] = \"*\"\n if new_boat_pos[0] + 1 < len(map) and \\\n map[new_boat_pos[0] + 1][new_boat_pos[1]] == \"M\":\n hit_manatee = True\n map[new_boat_pos[0] + 1][new_boat_pos[1]] = \"W\"\n return \"injured\" if hit_manatee else None", "def get_neighbours(lat, long):\n # ns = north-south, ew = east-west (ratio between 1 foot and a degree) \n # it's different at different places on earth (sphere)!!!\n ns = 0.0025\n ew = 0.0025\n walk = []\n for i in range(-2, 3):\n for j in range(-2, 3):\n thiscell = CellId.from_lat_lng(LatLng.from_degrees(lat + ns*i, long + ew*j)).parent(S2_CELL_LEVEL)\n if abs(i * j) < 4:\n walk.append(thiscell.id())\n return sorted(walk)", "def find_neighbors(self):\n x, y = self.position\n\n for i in range(3):\n for j in range(3):\n try:\n self.neighbors.append(self.stitches[(x - 1 + i, y - 1 + j)].position)\n except:\n pass\n\n # this cell will be added by default so we must delete at the end\n self.neighbors.remove(self.position)" ]
[ "0.7732808", "0.76378906", "0.7601713", "0.7597489", "0.74278224", "0.7182941", "0.70999545", "0.7085734", "0.70508057", "0.69266826", "0.68817693", "0.68608207", "0.66608745", "0.66440624", "0.6596019", "0.6556084", "0.6540522", "0.6464882", "0.6458014", "0.6457657", "0.6408675", "0.6387551", "0.63855827", "0.6372275", "0.6287982", "0.62231493", "0.62183666", "0.61687744", "0.6123037", "0.6070905", "0.6047976", "0.6028792", "0.6028389", "0.6025304", "0.60223615", "0.60039157", "0.59976447", "0.59896207", "0.59808445", "0.5978771", "0.5968919", "0.59424686", "0.5932615", "0.5919906", "0.5919204", "0.59044635", "0.59017545", "0.5885275", "0.5875807", "0.5866885", "0.5865806", "0.5862336", "0.58595294", "0.5852855", "0.58453923", "0.58341396", "0.582866", "0.58266795", "0.5819508", "0.5808588", "0.5803957", "0.58039296", "0.574765", "0.5745529", "0.57363296", "0.5728108", "0.57197225", "0.57072526", "0.5704337", "0.56862557", "0.5681902", "0.5680618", "0.56697685", "0.56669396", "0.5664043", "0.56628805", "0.56615543", "0.566137", "0.5654342", "0.5651", "0.56464756", "0.5629145", "0.5618832", "0.56174403", "0.5615722", "0.5615621", "0.56132495", "0.5612729", "0.56113815", "0.5609189", "0.56069446", "0.5601985", "0.5596735", "0.558875", "0.55822915", "0.5573178", "0.55636454", "0.5559826", "0.55536133", "0.5553601" ]
0.83878875
0
Return True if the UAV has reached the current waypoint and False if not
def has_uav_reached_current_waypoint(self): return self.drone.has_reached_waypoint()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_reached_waypoint_goal(self):\n return self.control_instance.check_reached_waypoint_goal()", "def update(self):\n\n # If the agent has already reached the\n # last waypoint it doesn't need to update\n if self.finished:\n return True\n\n # Skip if the proxy don't have any [new] data\n if (self.pp.info.datatime == 0) or \\\n (self.pp.info.datatime == self.last_read):\n return False\n\n self.last_read = self.pp.info.datatime\n\n # If this is the first update then head toward the first waypoint\n if self.first_update:\n self.pp.set_cmd_pose(self.active_waypoint['x'],\n self.active_waypoint['y'],\n self.get_heading({'x': self.pp.px, 'y': self.pp.py}, self.active_waypoint),\n 1)\n self.first_update = False\n return False\n\n # Calculate how far the agent is from its current waypoint\n dist = math.hypot(self.pp.px - self.active_waypoint['x'],\n self.pp.py - self.active_waypoint['y'])\n\n # Has it reached it yet?\n if dist < self.waypoint_distance_tolerance:\n\n # If all waypoints have been reached, stop the agent and return True\n if (self.active_waypoint_index + 1) >= len(self.waypoints):\n self.pp.set_cmd_vel(0.0, 0.0, 0.0, 0)\n self.pp.enable(False) # redundant?\n self.finished = True\n return True\n\n # Otherwise select the next waypoint\n prev_waypoint = self.active_waypoint\n self.active_waypoint_index += 1\n self.active_waypoint = self.waypoints[self.active_waypoint_index]\n\n # ...and drive to it\n self.pp.set_cmd_pose(self.active_waypoint['x'],\n self.active_waypoint['y'],\n self.get_heading(prev_waypoint, self.active_waypoint),\n 1)\n\n # Still have waypoints to visit\n return False", "def goal_reached(self, robot_pose):\n goal = self.global_plan.poses[-1].pose\n return self.calc_distance(robot_pose, goal) < self.goal_dist_threshold", "def if_goal_reached(self, pose):\n dx = self.pos.x - pose.x\n dy = self.pos.y - pose.y\n dist = math.sqrt(dx ** 2 + dy ** 2)\n return dist < self.radiu", "def reached(self) -> bool:\n return (time.time() - self._start) >= self.seconds", "def at_goal(self):\n return self.distance_from_goal < self.robot.wheels.base_length/2", "def at_goal(self):\n return self.distance_from_goal < self.robot.wheels.base_length/2", "def passed_waypoint(self, waypoint_num):\n bools = self.ros_node.get_data('/diff_drive/waypoints_achieved', simple_data = False)\n # Waits for the data\n if bools is not None:\n if len(bools.bools) >= waypoint_num:\n return bools.bools[waypoint_num -1]\n \n rospy.logerr_throttle(15, \"Checking Waypoint Failed. Did not find a waypoint with the number '%s' in the path\" %(waypoint_num))\n return False\n else:\n return False", "def goingToBreak(self):\n \n if (\n (self.current_loc == 0 and not self.direction_forward) or\n (self.current_loc == len(self.destinations)-1 and self.direction_forward)\n ):\n return True\n return False", "def _update_next_waypoint(self):\n if not self.base_waypoints:\n #rospy.logwarn(\"Waypoints not updated: base_waypoints not available yet.\")\n return False\n\n if not self.current_pose:\n #rospy.logwarn(\"Waypoints not updated: current_pose not available yet.\")\n return False\n\n # Get ego car variables\n ego_x = self.current_pose.position.x\n ego_y = self.current_pose.position.y\n ego_theta = math.atan2(self.current_pose.orientation.y, self.current_pose.orientation.x)\n\n # If I do have a next_waypoint, I will start looking from it, and stop looking\n # as soon as get a local minimum. 
Otherwise I will do a full search across the whole track\n t = time.time()\n wp = None\n yaw = 0\n dist = 1000000 # Long number\n if self.next_waypoint:\n idx_offset = self.next_waypoint\n full_search = False\n else:\n idx_offset = 0\n full_search = True\n num_base_wp = len(self.base_waypoints)\n\n for i in range(num_base_wp):\n idx = (i + idx_offset)%(num_base_wp)\n wp_x = self.base_waypoints[idx].pose.pose.position.x\n wp_y = self.base_waypoints[idx].pose.pose.position.y\n wp_d = math.sqrt((ego_x - wp_x)**2 + (ego_y - wp_y)**2)\n\n if wp_d < dist:\n dist = wp_d\n wp = idx\n if debugging:\n # Angle between car heading and waypoint heading\n yaw = math.atan2(wp_y - ego_y, wp_x - ego_x) - ego_theta\n elif not full_search:\n # Local minimum. If the waypoint makes sense, just use it and break\n if dist < max_local_distance:\n break; # Found a point\n else:\n # Seem to have lost track. Do search again\n rospy.logwarn(\"Waypoint updater lost track (local min at %.1f m after %d waypoints). Going back to full search.\", dist, i+1)\n full_search = True\n\n if debugging:\n rospy.loginfo(\"New next wp [%d] -> (%.1f,%.1f) after searching %d points in %fs\", wp, dist * math.cos(yaw), dist * math.sin(yaw), i, time.time()-t)\n\n if wp is None:\n rospy.logwarn(\"Waypoint updater did not find a valid waypoint\")\n return False\n\n self.next_waypoint = wp\n return True", "def iswalking(self):\r\n return self.model.coord!=self.model.targetcoord", "def goal_reached(self, position):\n return position >= self.goal", "def reached_goal(self):\n for i in range(self.simulator_.num_agents):\n if rvo_math.abs_sq(self.simulator_.agents_[i].position_ - self.goals_[i]) > self.simulator_.agents_[i].radius_ * self.simulator_.agents_[i].radius_:\n return False\n\n return True", "def is_at_goal(self):\n return self._current_loc.get_row() == BoardPath._goal_loc.get_row() and \\\n self._current_loc.get_column() == BoardPath._goal_loc.get_column()", "def reached_angle(self, angle):\n if self.ros_node.get_data(\"/auto/hood/current/angle\") == angle:\n return True\n return False", "def is_jumping(self):\n if(self.going_down or self.going_up or self.mid_air):\n return True\n else:\n return False", "def isFinished(self) -> bool:\n\n # Need to convert distance travelled to degrees. The Standard\n # Romi Chassis found here, https://www.pololu.com/category/203/romi-chassis-kits,\n # has a wheel placement diameter (149 mm) - width of the wheel (8 mm) = 141 mm\n # or 5.551 inches. 
We then take into consideration the width of the tires.\n inchPerDegree = math.pi * 5.551 / 360.0\n\n # Compare distance travelled from start to distance based on degree turn\n return self._getAverageTurningDistance() >= inchPerDegree * self.degrees", "def check_waypoint_reached(self, pos_tol=0.3, head_tol=0.01):\n self.local_pos_pub.publish(self.waypoint_g)\n\n dx = abs(\n self.waypoint_g.pose.position.x - self.current_pose_g.pose.pose.position.x\n )\n dy = abs(\n self.waypoint_g.pose.position.y - self.current_pose_g.pose.pose.position.y\n )\n dz = abs(\n self.waypoint_g.pose.position.z - self.current_pose_g.pose.pose.position.z\n )\n\n dMag = sqrt(pow(dx, 2) + pow(dy, 2) + pow(dz, 2))\n\n cosErr = cos(radians(self.current_heading_g)) - cos(\n radians(self.local_desired_heading_g)\n )\n\n sinErr = sin(radians(self.current_heading_g)) - sin(\n radians(self.local_desired_heading_g)\n )\n\n dHead = sqrt(pow(cosErr, 2) + pow(sinErr, 2))\n\n if dMag < pos_tol and dHead < head_tol:\n return 1\n else:\n return 0", "def isFinished(self):\n current = self.robot.drivetrain.get_gyro_angle()\n # If abs(target - current) < threshold then return true\n return math.fabs(self._target_degrees - current) <= self._degree_threshold or self.isTimedOut()", "def reached_dest(self) -> bool:\n return self.base_route[-1] == self.traveled_nodes[-1][self.NODE_INDEX]", "def is_on(self) -> bool:\n return self.event.is_tripped", "def judge_goal(self):\n err_pos = math.sqrt((self.y_des - self.y)**2 +(self.x_des - self.x)**2)\n print(\"t= %s\" % rospy.get_time()+\"-----------\")\n print('destination position=['+str(self.x_des)+','+str(self.y_des)+\"]\")\n print('the current position=['+str(self.x)+','+str(self.y)+\"]\")\n print('the current yaw angle=['+str(self.yaw))\n print('distance to destination='+str(err_pos))\n\n if(err_pos < 0.8):\n print('reach goal!!!!!')\n self.goal_flag=1", "def ismoving(self):\r\n return not self.get_par(\"done_moving\")", "def _has_arrived(self, context) -> bool:\n return self._target[0] == context.x and self._target[1] == context.y", "def is_driving(self, first: Waypoint, second: Waypoint) -> bool:\n dist = self.calc_distance(first, second)\n time_delta = (second.timestamp - first.timestamp).seconds\n if dist > GPS_DISTANCE_ACCURATE_METERS and time_delta < STOP_TIME_SECONDS:\n return True\n elif GPS_DISTANCE_ACCURATE_METERS < dist < CONNECTION_LOST_DISTANCE_THRESHOLD_METERS and \\\n time_delta < CONNECTION_LOST_TIMEOUT_SECONDS:\n return True\n else:\n return False", "def _ismoving(self):\n return self.dp.state()==PyTango.DevState.MOVING", "def is_done(self):\n # Retrieve robot position\n pos = self.robot.getPosition()\n # Check if robot has moved sideways too much\n if abs(pos[0]) > 2.0:\n return True\n # Check if robot has fallen (body too close to the ground)\n elif pos[1] < 0.3:\n return True\n # Check if the robot has reached the end of the track\n elif pos[2] < -20.0:\n return True\n # Check if the robot has walked backwards\n elif pos[2] > 25.0:\n return True\n # No conditions reached, not done yet\n else:\n return False", "def 
# checks to see if the player is still within the bounds\r", "def isCurrentPlayerHome(self):\r\n \r\n #creates corresponding starting and ending points for each player\r\n if self.getTurn() == RED:\r\n start = 0\r\n end = 18\r\n else:\r\n start = 6\r\n end = 24\r\n \r\n #checks whether the current player has checkers on corresponding points\r\n for i in range(start, end):\r\n if self.points[i].getTeam() == self.getTurn():\r\n return False\r\n \r\n return True", "def is_complete(self, vehicle_state, distance_travelled: float) -> bool:\n return self.goal.is_reached(vehicle_state)", "def is_reached(self, vehicle_state) -> bool:\n return False", "def isInGoal(self):\n coordx= self.playerPos.x\n coordy= self.playerPos.y\n target = 0 if self.id_team == 1 else 1\n\n if((((target == 0)and (coordx<=5))|\n ((target == 1) and(coordx>145))) \n and (coordy<=50 and coordy>=40)):\n return True\n else:\n return False", "def is_moving(self):\n return self.steps < self.max_steps", "def __isTileGoalState(self, point):\n return point == self.goalPoint", "def is_moving(self):\n response = self.__send_and_receive(protocol.GET_IS_MOVE)\n value = self.__gen_response_value(response)\n if value:\n # printf(\"\".join(value[1:]))\n if \"\".join(value)[1:] == \"1\":\n return True\n else:\n return False\n else:\n return False", "def on_track(self):\n for goal in self.goals:\n if not goal.on_track:\n return False\n return True", "def goal_test(self):\n if -1 in self.state:\n return False\n else:\n return True", "def check_movement(self):\n is_clear = True # default return value if no obstacles\n # !!! IR_SENSORS DISABLED\n if self.move_state == MOV_FORWARD:\n if self.l.look_for_obstacle(OBST_FRONT) == True:\n is_clear = False\n return is_clear", "def is_moving(self):\n is_moving = self.get_raw_status() & self.STATUS_MOVING\n return bool(is_moving)", "def is_shooting(self):\n if self.gun_interface:\n return self.gun_interface.is_preparing()\n return False", "def done(self):\n return self.goal == (0, 0)", "def _departure_on_duty(self) -> bool:\n return self._get_departure_shift().is_on_duty()", "async def is_target_reached(self) -> bool: # type: ignore\n ...", "def _arrived_at_checkpoint(self, current_time):\n if current_time == self.time_step_to_enqueue:\n self.at_checkpoint = True\n return True\n \n return False", "def is_goal_unreachable(self, x, y, theta):\n self.current_x = x\n self.current_y = y\n self.wp_goal_unreachable = Point(self.current_x,self.current_y)\n self.dist_btw_follow_goal_unreachable = abs(self.wp_goal_unreachable.distance_to(self.wp_follow))\n #print self.is_left_line\n #print self.dist_btw_follow_goal_unreachable\n if self.dist_btw_follow_goal_unreachable < self.TOLERANCE and self.is_left_line == 1:\n print \"goal unreachable\"\n return True\n else:\n return False", "def one_step_forward(self):\n if(self.row+1>=len(self.maze)):\n return False\n elif(self.battery==0):\n return False\n elif(self.maze[self.row+1][self.column]==False):\n return False\n else:\n self.row+=1\n self.battery-=1\n return True", "def quick_check(self):\n # loop three times and move the servo\n for ang in range(self.MIDPOINT - 100, self.MIDPOINT + 101, 100):\n self.servo(ang)\n time.sleep(.01)\n if self.read_distance() < self.SAFE_DISTANCE:\n return False \n # if the three-part check didn't freak out\n return True", "def reached_angle(self, angle, tol):\n if self.ros_node.get_data(\"/auto/turret/current/angle\") is None:\n rospy.logerr(\"The topic /auto/turret/current/angle has not been published yet\")\n else:\n 
neg_angle_diff = self.wrap_angle(self.ros_node.get_data(\"/auto/turret/current/angle\") - angle)\n pos_angle_diff = self.wrap_angle(angle - self.ros_node.get_data(\"/auto/turret/current/angle\"))\n\n if pos_angle_diff <= tol or neg_angle_diff <= tol:\n return True\n return False", "def goal_occupied(self, view):\n for line in view.obstacles:\n if linesegdist2(line.p1, line.p2, self.goal) < self.radius ** 2:\n return True\n\n for p in view.pedestrians:\n if p.velocity.length2() == 0.0:\n if p.position.distance_to2(self.goal) < p.radius:\n return True\n\n return False", "def isCompletedAt(self, location):\n return location is not None and location.isSurface()", "def is_on(self) -> bool:\n return self._current_speed != SPEED_OFF", "def check_reached(self):\n m_x, m_y = self.destination.get_pos()\n m_radius = self.destination.radius\n distance_centre = math.sqrt((m_x - self.x)**2 + (m_y - self.y)**2)\n sum_radii = m_radius + self.radius\n if distance_centre < sum_radii:\n self.color = pygame.colordict.THECOLORS['green']\n self.has_reached = True", "def checkGoal(self):\n # -- It is not included for simplicity --#\n if self.reward_cumulative != None:\n x = round((abs(self.reward_cumulative) - abs(round(self.reward_cumulative))) * 100);\n rem_goal = x % 25\n rem_timeout = x % 20\n if rem_goal == 0 and x != 0:\n self.is_goal = True\n else:\n self.is_goal = False\n\n if rem_timeout == 0 and x != 0:\n self.is_timeout = True\n else:\n self.is_timeout = False", "def is_upcoming(self):\n\n return timezone.now() < self.start < timezone.now() + timedelta(days=1)", "def check_position(self, player):\n\n # Mid point of the segment defining the goal\n mid = Point.mid_point(self.s_pos, self.e_pos)\n\n # Transposition of this point by the direction vector of the goal\n # to get the direction vector with its origin in the center of the goal\n mid_prime = self.dir + mid\n\n # Creating both needed vectors\n v1 = Vector.v_from_pp(mid, player.pos)\n v2 = Vector.v_from_pp(mid, mid_prime)\n\n # Getting the angle and checking if it is a valid one\n angle = v1.angle(v2)\n\n return self.is_in_interval(-math.pi / 2, math.pi / 2, angle)", "def is_at_stop(self, location):\n # TODO(ionel): This method doesn't work yet because the OpenDRIVE does\n # not contain waypoints annotated as stops.\n loc = to_carla_location(location)\n waypoint = self._map.get_waypoint(loc,\n project_to_road=False,\n lane_type=carla.LaneType.Stop)\n return not waypoint", "def is_game_won(self):\n if self.game_is_tied():\n return False\n my_available_steps = self.steps_available(self.loc)\n opp_available_steps = self.steps_available(self.opponent_loc)\n if my_available_steps == 0 or opp_available_steps == 0:\n return True\n else:\n return False", "def goalReached(self, rewards):\n return len(rewards) >= 100 and np.mean(rewards[-100:]) >= 18", "def try_advance(self):\n if not self.step.toclick:\n self.step.finished = True\n return True\n return False", "def is_ahead_of(self, pose, x, y):\n x1 = pose.position.x\n y1 = pose.position.y\n orientation = pose.orientation\n euler = tf.transformations.euler_from_quaternion(\n [orientation.x, orientation.y, orientation.z, orientation.w])\n yaw = euler[2]\n return ((x - x1) * math.cos(yaw) + (y - y1) * math.sin(yaw)) > 0", "def move_to(self, waypoint):\n self.set_final_wp(waypoint)\n self.go()\n currPos = np.asarray(self.rexarm.get_positions())\n while(np.linalg.norm(np.asarray(waypoint) - currPos) > 0.15):\n time.sleep(0.01)", "def check_offset(self):\n\n for d in range(self.n_dmps):\n if abs(self.y0[d] - 
self.goal[d]) < 1e-4:\n self.goal[d] += 1e-4", "def isFinished(self):\n\n currentValue = numpy.power(10, self.idxCurrentF / self.nbPtsF)\n if currentValue == 0:\n return True\n\n # It can be more than one line for the previous alignment value.\n # We iterate until we find a better value or to the end of the lines.\n for i in self:\n while i.nextLine[self.idx] > currentValue and not i.isFinished:\n i.next();\n \n return not any(i.nextLine[self.idx] <= currentValue for i in self)", "def status(self):\r\n return not self.sendQuery(\"isMoving\",\"isMoving\")", "def quick_check(self):\n #loop three times and move the servo \n for ang in range(self.MIDPOINT - 115, self.MIDPOINT+116, 115):\n self.servo(ang)\n time.sleep(.05)\n if self.read_distance() < self.SAFE_DISTANCE:\n return False\n #if the three-part check didn't freak out\n return True", "def isNavComplete(self):\n self.info('result_future from isNavComplete ' + str(self.result_future))\n if not self.result_future:\n # task was cancelled or completed\n return True\n rclpy.spin_until_future_complete(self, self.result_future, timeout_sec=0.10)\n if self.result_future.result():\n self.status = self.result_future.result().status\n if self.status != GoalStatus.STATUS_SUCCEEDED:\n self.info('Goal failed with status code: {0}'.format(self.status))\n return True\n else:\n # Timed out, still processing, not complete yet\n return False\n\n self.debug('Goal succeeded!')\n return True", "def goal_reached(self):\r\n pos_0=self.goal[0]\r\n pos_1=self.goal[1]\r\n #self.start_score=self.string(self.start[0],self.start[1])\r\n #self.data_with_string[self.start_score]=self.start\r\n #self.goal_score=self.string(pos_0,pos_1)\r\n if self.h(self.current_score[0],self.current_score[1],self.current_score[2]) <=10 :\r\n self.goal_score=self.string(self.current_score[0],self.current_score[1],self.current_score[2])\r\n print(\"goal_reached\")\r\n #print(len(self.expanded))\r\n #print(\"self.expanded\",self.expanded)\r\n return True\r\n return False", "def isUp ( self ) :\n return not self.isDown()", "def is_one_turn_ahead(point_a, point_b, distance):\n _, route_initial = interpolate_trajectory(world, [point_a.location, point_b.location])\n if estimate_route_distance(route_initial) < distance or \\\n estimate_route_distance(route_initial) > 3*distance:\n print (\"Rejected because it is too small\")\n return False\n route = clean_route(route_initial)\n\n print ( \" One curve test \")\n if len(route) != 1:\n print (\" reject because of size\")\n return False\n for point in route:\n # Check if there are any curves\n if point[2] == RoadOption.STRAIGHT:\n print (\" reject due to straight\")\n return False\n\n\n return True", "def has_next(self) -> bool:\n return (self._high - self._low) > self._tol", "def is_onhold(self) -> bool:", "def _reached_temperature_end_point(self, T, Tend):\n if Tend is None:\n # End point not given\n return False\n\n if self._integration_direction == \"increasing\":\n if T > Tend:\n return True\n elif self._integration_direction == \"decreasing\":\n if T < Tend:\n return True\n return False", "def is_goal(self, state: Grid2D.State) -> bool:\n return state.agent_position in self.goals", "def is_up(self):\n data = self.vxprint()\n return self.name in data and data[self.name].STATE == \"ACTIVE\"", "def check_player_reached():\n global round_start_timer, round_over\n\n if player1.alive and player1.rect.top < (platform_width // 2):\n add_time_points()\n reset_players()\n player1.wins += 1\n return True\n\n elif player2.alive and 
(player2.rect.top + player2.image.get_height()) > \\\n (SCREEN_HEIGHT - platform_width):\n player2.wins += 1\n round_over = True\n add_time_points()\n reset_players()\n return True", "def _is_goal_achieved(self) -> bool:\n assert self._goal_info_cache\n return self._goal_info_cache[1]", "def is_goal(self):\n if 0 in self.final_values: # Check if any zeroes are in the final states\n return False\n return True", "def quick_check(self):\n for ang in range(self.MIDPOINT-150, self.MIDPOINT+151, 150):\n self.servo(ang)\n if self.read_distance() < self.SAFE_DIST:\n return False\n return True", "def one_step_back(self):\n if (self.row -1<0):\n return False\n elif (self.battery == 0):\n return False\n elif (self.maze[self.row - 1][self.column] == False):\n return False\n else:\n self.row -= 1\n self.battery -= 1\n return True", "def get_is_moving(self):\r\n return self._arm.get_is_moving()", "def is_moving(self):\n return self.gripper_io.get_signal_value(\"is_moving\")", "def can_move(self, next_x, next_y):\n\t\tif self.battery == 0:\n\t\t\tif self.planet.tiles[next_y][next_x].is_shaded():\n\t\t\t\treturn False\n\t\tif self.planet.tiles[next_y][next_x].elevation(self) == \"+\":\n\t\t\treturn False\n\t\tif self.planet.tiles[next_y][next_x].elevation(self) == \"-\":\n\t\t\treturn False\n\t\treturn True", "def check_if_won(self):\n if self.player_points > self.enemy_points:\n self.bHasWon = True\n else:\n self.bHasWon = False", "def at(self) -> bool:\n\n return 'step_active' in self.__get_step_2_div().get_attribute(\"class\")", "def isDone(self):\n if self.current_turn >= self.MAX_TURNS: return True\n if self.last_user_action[\"action\"] == \"END\": return True\n return False", "def hasMovementAngZ(self):\n return self.boolrot[2]", "def __check_direction(self, vector, coordinate):\n inverse_vector = -vector[0], -vector[1]\n # Calculate hits to direction\n hits = self.__direction(vector,1,coordinate)\n if hits == 5:\n return True\n # After reaching the end, add hits towards the opposite direction\n hits = self.__direction(inverse_vector,hits,coordinate)\n if hits == 5:\n return True", "def is_same_waypoint(self, wp1, wp2, max_d=0.5, max_v=0.5):\n dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)\n ddif = dl(wp1.pose.pose.position, wp2.pose.pose.position)\n if ddif < max_d:\n return True\n return False", "def isGoal(self, state):\n self._visitedLocations.add(state[0])\n self._visitHistory.append(state[0])\n\n # Return boolean whether all four corners are discovered and satisfied\n currentState = state[1]\n return currentState[0] and currentState[1] and currentState[2] and currentState[3]", "def near_way(self):\r\n\r\n prey_position = np.array(self.prey.position)\r\n actual_position = np.array(self.previous_data[-1])\r\n previous_position = np.array(self.previous_data[-2])\r\n\r\n difference_actual = np.linalg.norm(prey_position - actual_position)\r\n difference_previous = np.linalg.norm(prey_position - previous_position)\r\n\r\n if difference_actual < difference_previous:\r\n return True\r\n else:\r\n return False", "def is_solved(self):\n return self._start == self._target", "def has_happened(self):\n\n return self.end < timezone.now()", "def isInAera(self):\n opp = self.get_opponent\n for players in opp:\n if (players.distance(self.playerPos)<10):\n return True\n return False", "def is_complete(self, vehicle_state, distance_travelled: float) -> bool:\n return (\n self.goal.is_reached(vehicle_state)\n and distance_travelled > self.route_length * self.num_laps\n )", "def 
checkGoalReached(self):\n if self._after_dead_line():\n if not self._crowdsale_closed.get():\n self._crowdsale_closed.set(True)\n self.CrowdsaleEnded()\n\n if self._amount_raised.get() >= self._funding_goal.get():\n self._funding_goal_reached.set(True)\n self.GoalReached(self._addr_beneficiary.get(), self._amount_raised.get())\n Logger.debug(f'Goal reached!', TAG)", "def onGoal(self):\n return self.index == len(self.path)", "def arrived(self):\n \"\"\" Responsible for transformations \"\"\"\n \n if self.phase == 1:\n if self.closest_i_could_get is not None:\n return array_equal(self.closest_i_could_get, array([0,0]))\n else: \n return array_equal(self.destination, array([0,0]))\n elif self.phase > 1:\n if self.closest_i_could_get is not None:\n return array_equal(self.closest_i_could_get, self.position)\n else: \n return array_equal(self.destination, self.position)", "def is_complete(self):\n return self.winner is not None", "def is_goal(self):\n if self.team1.get_cur_hp() == 0:\n return 1\n elif self.team2.get_cur_hp() == 0:\n return -1\n else:\n return 0", "def goal_test(self, state):\n\n\t\treturn self.goal == state" ]
[ "0.7815707", "0.7180824", "0.70038074", "0.69606185", "0.694313", "0.6907562", "0.6907562", "0.6904402", "0.68095386", "0.6806599", "0.6785689", "0.67554694", "0.6742048", "0.67407054", "0.66793656", "0.66658723", "0.65675145", "0.6564974", "0.6496703", "0.648724", "0.64718044", "0.64579546", "0.64021486", "0.63968456", "0.63868624", "0.6379848", "0.63688874", "0.6362253", "0.6357971", "0.6355502", "0.63550395", "0.63197803", "0.6308656", "0.62830025", "0.6271702", "0.62665874", "0.62614673", "0.6259532", "0.6240852", "0.6240214", "0.62201864", "0.6190889", "0.6179981", "0.6157683", "0.61573523", "0.61545163", "0.61535966", "0.6137169", "0.61218685", "0.61172104", "0.60734826", "0.60705566", "0.60705006", "0.6060253", "0.60515285", "0.60430676", "0.6035128", "0.6021135", "0.6019865", "0.60017", "0.59960175", "0.598313", "0.5982027", "0.59803164", "0.59773", "0.5977217", "0.5962243", "0.59595156", "0.5952443", "0.5947715", "0.5946908", "0.5946455", "0.59307903", "0.5928489", "0.59195316", "0.59167486", "0.5909698", "0.59091985", "0.5896175", "0.5894212", "0.5894009", "0.58906806", "0.588506", "0.58848196", "0.5876064", "0.5876064", "0.58753455", "0.5870494", "0.5866862", "0.58609664", "0.5860517", "0.5859332", "0.5855185", "0.58541256", "0.5837171", "0.58343214", "0.5825371", "0.5824796", "0.58243066", "0.5813454" ]
0.864633
0
Hook to be invoked before the test method has been executed. May perform expensive setup here.
def before_test(self, func, *args, **kwargs): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def before_run_tests(cls):\n pass", "def do_before(self):\r\n pass", "def beforeTest(self, test):\n self.setupLoghandler()", "def before(self) -> None:\n pass", "def startTestHook(self):", "def setUp(self):\n # use self.attribute to keep anything which needs to be accessed later\n print('setUp method\\n')", "def setUp(self):\n super(TestCase, self).setUp()\n self._context = CallContext()", "def setUp(self):\n print('Calling \\'setUp\\'')", "def setUp(self):\r\n # nothing to do, all tests use different things\r\n pass", "def setUp(self) -> None:\n pass", "def setUp(self) -> None:\n pass", "def setUp(self):\n\n return", "def setUp(self):\n pass #because we dont have anything to setup.", "def setUp(self):\n print(\"New test by Nikolay Melnik\")", "def setUp(self):\n\t\tprint(\"\\n-------------------------------------\\nIn Test_RMT_Util:\", self._testMethodName)", "def __call__(self, result=None):\n self._pre_setup()\n super(TestCase, self).__call__(result)\n self._post_tearDown()", "def on_before_execution(self):\n pass", "def setUp(self):\n\n BaseTest.setUp(self)", "def setUp(self):\r\n pass # nothing used by all\r", "def setUp(self) :\n pass", "def setUp(self):\n raise NotImplementedError", "def setUp(self):\n GlusterBaseClass.setUp.im_func(self)\n self.test_method_complete = False", "def setUp_extra(self):\n pass", "def setUp(self):\n\n pass", "def setUp(self):\n\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n pass", "def setUp(self):\n test_env_setup()", "def setUp(self):\r\n pass", "def setUp(self):\n\n self._set_up()", "def setUp(self):\n \n pass", "def setUp(self):\n self", "def setUp(self):\n self", "def setUp(self):\n print(\"\\nIn setUp()...\")", "def setUp(self) -> None:\n return super().setUp()", "def setUp(self):\n super(BasicTestCase, self).setUp()", "def setUp(self):\r\n print('---------------------------------------------\\n')\r\n print('STARTING TEST...')", "def setUp(self):\r\n pass # nothing required by all\r", "def setUp(self):\n setUp()", "def XXsetUp(self):\n print(\"FooTest:setUp_:begin\")\n ## do something...\n print(\"FooTest:setUp_:end\")", "def XXsetUp(self):\n print(\"FooTest:setUp_:begin\")\n ## do something...\n print(\"FooTest:setUp_:end\")", "def setUp(self):\n MainTests.setUp(self)", "def before(self, context):\n raise NotImplementedError", "def __init__(self):\n self.setup_called = False", "def _pre_setup(self):\n apps.clear_cache()\n call_command('migrate', interactive=False, verbosity=0)\n call_command('loaddata', 'initial_data', verbosity=0)\n super(DatatableViewTestCase, self)._pre_setup()", "def setUp(self):\n super(ExtensionTestsMixin, self).setUp()\n\n self.manager = None", "def _set_up():\n repl._setUp = self.setUp", "def setup_method(self) -> None:\n self.client = Mock()", "def _fixture_setup(self):\n pass", "def setUp(self):\n self.Reinitialize()", "def setUp(self):\n 
self._value = True", "def __call__(self, result=None):\n try:\n self._pre_setup()\n super(TestCase, self).__call__(result)\n finally:\n self._post_teardown()", "def pre_setup(self) -> None:\n if self.__setup_done:\n self.base_logger.error(\"pre_setup was erroneously called twice\")\n raise SetupAlreadyDoneError()", "def setUp(self):\n logging.debug('setting up')", "def setUp(self):\n logging.debug('setting up')", "def setUp(self):\n self.dataset = get_test_dataset()", "def _setup(self) -> None:\n\t\treturn", "def pre_execute(self):", "def setUp(self) -> None:\n self.manager = Manager()", "def setUp(self):\n \n \n pass", "def setUp(self):\n \n \n pass", "def setUp(self):\n \n \n pass", "def setUp(self):\n \n \n pass", "def setUp(self):\n \n \n pass", "def setUp(self):\n \n \n pass", "def setUp(self):\n \n \n pass", "def setUp(self):\n \n \n pass", "def setUp(self):\n \n \n pass", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def setUpTestCase(self):\n pass" ]
[ "0.80520755", "0.7866928", "0.76243114", "0.76155716", "0.749539", "0.74885553", "0.7474587", "0.74741113", "0.73961705", "0.7393307", "0.7393307", "0.73867905", "0.7364386", "0.7364033", "0.73517734", "0.7307922", "0.7301422", "0.7300426", "0.7297788", "0.7283322", "0.72643584", "0.7251347", "0.7248478", "0.72049904", "0.72049904", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7200059", "0.7189708", "0.7177124", "0.71765316", "0.71596414", "0.71462536", "0.71462536", "0.7144136", "0.714269", "0.71273685", "0.71228665", "0.7109158", "0.71087927", "0.7092336", "0.7092336", "0.7091749", "0.7053385", "0.7053022", "0.70301384", "0.6995213", "0.69833696", "0.6977646", "0.6973242", "0.6957558", "0.6956185", "0.695215", "0.69354755", "0.69146997", "0.69146997", "0.68912005", "0.6888217", "0.68768626", "0.68446684", "0.683771", "0.683771", "0.683771", "0.683771", "0.683771", "0.683771", "0.683771", "0.683771", "0.683771", "0.6819283", "0.6819283", "0.6819283", "0.6819283", "0.6819283", "0.6819283", "0.68074447" ]
0.8013997
1
Hook invoked after the test method has been executed. Commands may perform any additional cleanup they require here.
def after_test(self, func, *args, **kwargs):
    pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def teardown(self) -> None:", "def teardown(self) -> None:", "def teardown(self) -> None:", "def teardown(self):", "def teardown(self):", "def teardown(self):", "def teardown(self):\n pass", "def teardown(self):\n pass", "def teardown(self):\n pass", "def after_test(self, test_results):\n pass", "def postRunCleanup(self):\n self.logDesc(\"Post Run Cleanup\")", "def teardown(self) -> None:\n pass", "def teardown(self) -> None:\n pass", "def teardown(self,**kwargs):\n pass", "def teardown(self, rc):\n pass", "def teardown(self):\n pass # pylint: disable=unnecessary-pass", "def post_cleanup(self):\n pass", "def _teardown(self):\n # No-op base implementation", "def teardown_method(self):", "def postRunCleanup(self):\n self.logDesc(\"Post Run Cleanup\")\n self.logout()", "def postRunCleanup(self):\n self.logDesc(\"Post Run Cleanup\")\n self.logout()", "def teardown(self):\n raise NotImplementedError", "def teardown(self):\n raise NotImplementedError", "def after(self):\n pass", "def after(self):\n pass", "def tearDown(self):\n\t\tprint(\"end test\")\n\t\tpass", "def teardown_method(self):\n world.clear_paths()\n print(\"\\nEnd of tests in: %s\\n-------------------\\n\" % __name__)\n self.bigml = {}", "def tearDown(self):\n super(TestCase, self).tearDown()\n self._context.check_done()", "def tearDown(self):\n test_env_teardown()", "def tearDown(self):\n pass\n # teardown called after each test\n # e.g. maybe write test results to some text file", "async def _teardown(self, commit: bool = None):", "def postRunCleanup(self):\n self.logDesc(\"Post Run Cleanup\")\n #logout of application\n self.logout()", "def teardown(self):\n self.tcex.log.trace('teardown')", "def after_scenario(context, _):\n context.backup_root_raw.cleanup()", "def after(self) -> None:\n pass", "def _tear_down():\n repl._tearDown = self.tearDown", "def tearDown(self):\n self.tmp.cleanup()", "def tearDown(self):\n print('Calling \\'tearDown\\'')", "def tearDown(self):\n if not self.test_manager.leave_output:\n shutil.rmtree(self.directory)", "def tearDown(self):\n self.popen_patcher.stop()", "def tearDown(self):\n self.popen_patcher.stop()", "def teardown(self):\n del self.testInst, self.dname\n\n return", "def teardown_method(self, test_method):\n self.wo_obj = None\n self.config_data = None", "def clean_up(self) -> None:\n print('Doing some clean-up work...')", "def tearDown(self) -> None:\n pass", "def tearDown(self) -> None:\n pass", "def tearDown(self) -> None:\n pass", "def tearDown(self) -> None:\n self.directory.cleanup()", "def tearDown(self):\n\n self._tear_down()", "def tearDown(self):\n zope.component.testing.tearDown()", "def cleanup(self):\r\n pass", "def __exit__(self, *args):\n if self.teardown:\n super().__exit__(*args)", "def tearDown(self):\r\n testing.tearDown()", "def teardown(self, event):\n pass", "def teardown_method(self, method) -> None:", "def teardown(self):\n del self.testInst, self.dname, self.test_val_length\n\n return", "def teardown(self):\n del self.testInst, self.dname, self.test_val_length\n\n return", "def teardown(self, exception):", "def tearDown(self):\n tests.utils.cleanup_environment()", "def tearDown(self):\n tests.utils.cleanup_environment()", "def tearDown(self):\n \n return", "def tearDown(self):\n self.logger.info(\"tearDown begin\")\n self.logger.info(\"tearDown end\\n\")", "def tearDown(self):\n pass", "def teardown_method(self):\n if self.ae:\n self.ae.shutdown()", "def teardown_method(self):\n if self.ae:\n self.ae.shutdown()", "def teardown_method(self):\n if self.ae:\n 
self.ae.shutdown()", "def teardown_method(self):\n if self.ae:\n self.ae.shutdown()", "def teardown_method(self):\n if self.ae:\n self.ae.shutdown()", "def teardown_method(self):\n if self.ae:\n self.ae.shutdown()", "def on_cleanup(self):\n raise NotImplementedError", "def tearDown(self) :\n pass", "def tearDown(self) :\n pass", "def tearDown(self) :\n pass", "def tearDownClass(cls):\r\n print(\"==========================\")\r\n print(\"Cleaning mess after testing!\")", "def cleanup(self):\r\n logging.info(\"entered the cleanup\")", "def after(self, context):\n raise NotImplementedError", "def tearDown(self):\r\n pass", "def tearDown(self):\r\n pass", "def tearDown(self):\r\n pass", "def tearDown(self):\r\n pass", "def tearDown(self):\r\n pass", "def teardown_provider(self):\n pass", "def teardown_class(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass", "def tearDown(self):\n pass" ]
[ "0.7715716", "0.7715716", "0.7715716", "0.76030385", "0.76030385", "0.76030385", "0.7553543", "0.7553543", "0.7553543", "0.75292295", "0.7514933", "0.7465234", "0.7465234", "0.74545705", "0.7450483", "0.74478006", "0.74094987", "0.7409005", "0.7381039", "0.7332953", "0.7332953", "0.73321885", "0.73321885", "0.728147", "0.728147", "0.72736824", "0.7248816", "0.7197486", "0.71743566", "0.7151883", "0.7140868", "0.7133901", "0.71329606", "0.7119978", "0.7092798", "0.70674264", "0.7057909", "0.7045391", "0.7039782", "0.70380104", "0.70380104", "0.7031549", "0.69824106", "0.6955259", "0.6942646", "0.6942646", "0.6942646", "0.6932865", "0.6914539", "0.68889093", "0.68887085", "0.68762004", "0.6871399", "0.6870096", "0.6857238", "0.68556726", "0.68556726", "0.6852895", "0.6840068", "0.6840068", "0.6831606", "0.6827246", "0.68239504", "0.6821231", "0.6821231", "0.6821231", "0.6821231", "0.6821231", "0.6821231", "0.68208915", "0.6809798", "0.6809798", "0.6809798", "0.6802499", "0.6796444", "0.67822", "0.6773165", "0.6773165", "0.6773165", "0.6773165", "0.6773165", "0.67713976", "0.67708075", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106", "0.67665106" ]
0.74189425
16
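The `after_test` hook in the record above implies a runner that fires it once the test method returns. Below is a minimal sketch of such a runner; the `Command` base class, the `run_test` helper, and the symmetric `before_test` hook are assumptions for illustration, not part of the record:

class Command:
    """Illustrative command with optional test lifecycle hooks."""

    def before_test(self, func, *args, **kwargs):
        pass

    def after_test(self, func, *args, **kwargs):
        # Invoked after the test method has executed; commands may
        # perform any additional cleanup they require here.
        pass


def run_test(test_func, commands, *args, **kwargs):
    # Let every command prepare, run the test, then guarantee the
    # cleanup hooks fire even if the test itself raises.
    for command in commands:
        command.before_test(test_func, *args, **kwargs)
    try:
        return test_func(*args, **kwargs)
    finally:
        for command in commands:
            command.after_test(test_func, *args, **kwargs)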
Executes the command once with the response of the active request.
def execute(self, response):
    raise NotImplementedError()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do(self, line): \n self.interface.onecmd(line)", "def run_cmd(self):\r\n self.run = True", "def _sendingCommand(self): \n\n while True:\n self.tello.send_command('command') \n time.sleep(5)", "def do_command(command):\n send_command(command)\n # time.sleep(0.1) # may be required on slow machines\n response = get_response()\n print(\"Rcvd: <<< \" + response)\n return response", "def complete_cmd(self):\r\n if self.select_cmd is not None:\r\n self.do_cmd()", "def do_command(command):\n send_command(command)\n response = get_response()\n print(\"Rcvd: <<< \\n\" + response)\n return response", "async def execute(self, client, message, arg):\n\t\treturn", "def respond(cmd,t,p):\n\tt.write(cmd)\n\treturn wait(t,p)", "def issue(self, cmd):\n self.send([cmd])\n return self.read_until_prompt()[1:] # drop the echo", "def command():\n pass", "async def execute(self):\n return True", "async def execute(self):\n return True", "async def execute(self):\n return True", "async def execute(self):\n return True", "async def execute(self):\n return True", "async def execute(self):\n return True", "def _do_command(self, cmd, unit, **kwargs):\n self._do_cmd_resp(cmd, unit, write_delay=INTER_CHARACTER_DELAY, **kwargs)", "def shell_success(self, cmd):\n self.shell_cmd = cmd\n return response", "def _execute(self, message):\n logging.info(__name__ + ' : Send the following command to the device: %s' % message)\n self.visa_handle.write('@%s%s' % (self._number, message))\n sleep(70e-3) # wait for the device to be able to respond\n result = self._read()\n if result.find('?') >= 0:\n print(\"Error: Command %s not recognized\" % message)\n else:\n return result", "def _execute(self):\n LOG.info(\"Waiting for a message...\")", "def execute(self, cmd, state):\n state[:] = interface.execute_arm_command(cmd, 0)\n self.env.update() # note that the sim update is called twice, once here and once by the hand's sim_connection", "async def run(self):\n \n return await self.hub.send_command(\"$inm%s-\" % (self.id), \"act00-00-\")", "def Execute(self):\n return True", "def execute_command(self):\n return ''", "async def execute(self):", "async def command(self,ctx):\n await ctx.send(\"Yes this is a command.\")", "def ping(self, cmd):\n\n cmd.finish(\"text='Present and (probably) well'\")", "def execute(self):\n\t\treturn \"done\"", "async def _execute(self):", "def process_cmd(self, cmd):\n\n resp = self.COMMANDS[cmd.cmd](cmd)\n\n logger.debug(\"Resp: %s\" % resp)\n # send to resp_queue\n # if type == G.CTRL_TYPE:\n #\n # response = json.dumps((corr_id, routing_key, resp))\n # logger.debug(\"Sending response: %s\" % response)\n # self.out_queue.put(response)\n\n response = cmd.make_response(resp)\n logger.debug(\"Sending response: %s\" % response)\n self.out_queue.put(str(response))", "def _send_command(self, command):\n command = \"%s\\n\" % (command.strip())\n self.server.write(command)\n self.server.flush()\n\n #read the length of the result\n length = int(self.server.readline())\n output = self.server.read(length)\n\n result = pickle.loads(output)\n if result[0] == 'ok':\n return result[1]\n else:\n raise RobotCommandError(str(result))", "def ping(self, cmd):\n cmd.finish(\"text='Present and (probably) well'\")", "def cmd(self, context, message):\r\n return True", "def send_command(self, cmd, shell=None, silent=False):", "def answer_waiting_call(self) -> None:", "def execute(self, rc):\n pass", "def preloop(self):\n super(CoreCommand, self).preloop() # sets up command completion", "def __call__(self):\n context = 
Context()\n return self.recipe.execute(context, self.cmd, self.cmd_args)", "def performCommand(self, game, command):\r\n game.currentTurn.perform(command)", "def ask(self, command, query_delay=0):\n self.write(command)\n self.wait_for(query_delay)\n return self.read()", "def __call__(self, command):\n return self.execute(command).response.body", "def status(self, cmd):\n\n cmd.inform('text=\"Present!\"')\n cmd.finish()", "def execute(self) -> None:\n self.state()", "def runCommand(self): \\\n # pylint: disable=no-self-use", "async def async_execute_command(self, command, notif):\n if command.startswith('MCU'):\n value = await self.async_call_linkplay_tcpuart(command)\n elif command == 'Reboot':\n value = await self.async_call_linkplay_httpapi(\"getStatus:ip:;reboot;\", None)\n elif command == 'PromptEnable':\n value = await self.async_call_linkplay_httpapi(\"PromptEnable\", None)\n elif command == 'PromptDisable':\n value = await self.async_call_linkplay_httpapi(\"PromptDisable\", None)\n elif command == 'RouterMultiroomEnable':\n value = await self.async_call_linkplay_httpapi(\"setMultiroomLogic:1\", None)\n elif command == 'SetRandomWifiKey':\n from random import choice\n from string import ascii_letters\n newkey = (''.join(choice(ascii_letters) for i in range(16)))\n value = await self.async_call_linkplay_httpapi(\"setNetwork:1:{0}\".format(newkey), None)\n if value == 'OK':\n value = value + \", key: \" + newkey\n else:\n value = \"key: \" + newkey\n elif command.startswith('SetApSSIDName:'):\n ssidnam = command.replace('SetApSSIDName:', '').strip()\n if ssidnam != '':\n value = await self.async_call_linkplay_httpapi(\"setSSID:{0}\".format(ssidnam), None)\n if value == 'OK':\n value = value + \", SoftAP SSID set to: \" + ssidnam\n else:\n value == \"SSID not specified correctly. You need 'SetApSSIDName: NewWifiName'\"\n elif command.startswith('WriteDeviceNameToUnit:'):\n devnam = command.replace('WriteDeviceNameToUnit:', '').strip()\n if devnam != '':\n value = await self.async_call_linkplay_httpapi(\"setDeviceName:{0}\".format(devnam), None)\n if value == 'OK':\n self._name = devnam\n value = value + \", name set to: \" + self._name\n else:\n value == \"Device name not specified correctly. 
You need 'WriteDeviceNameToUnit: My Device Name'\"\n elif command == 'TimeSync':\n import time\n tme = time.strftime('%Y%m%d%H%M%S')\n value = await self.async_call_linkplay_httpapi(\"timeSync:{0}\".format(tme), None)\n if value == 'OK':\n value = value + \", time: \" + tme\n elif command == 'Rescan':\n self._unav_throttle = False\n self._first_update = True\n # await self.async_schedule_update_ha_state(True)\n value = \"Scheduled to Rescan\"\n elif command == 'Update':\n # await self.async_schedule_update_ha_state(True)\n value = \"Scheduled to Update\"\n else:\n value = \"No such command implemented.\"\n _LOGGER.warning(\"Player %s command: %s, result: %s\", self.entity_id, command, value)\n\n _LOGGER.debug(\"Player %s executed command: %s, result: %s\", self.entity_id, command, value)\n\n if notif:\n self.hass.components.persistent_notification.async_create(\"<b>Executed command:</b><br>{0}<br><b>Result:</b><br>{1}\".format(command, value), title=self.entity_id)", "def send_command(self):\n self.connection.sendline(self.command_string)", "def __is_active(self, command):\n return True", "def accept_command():\n # TODO", "def get_result(self, name):\n self.state[name] = not self.state[name]\n if self.command:\n self.command(self.state)", "def run(self):\n self.cmdloop()", "def execute(self, irc_c, msg, cmd):", "def _client_cmd(self, cmd):\n logging.info('Client cmd: [%s]', cmd)\n return self._client.run(cmd)", "def cmd(self, command):\n self.enode.get_shell('bash').send_command(command, matches=self.scapy_prompt)\n response = self.enode.get_shell('bash').get_response()\n return response", "def execute(self):\r\n pass", "def execute_command(self):\n raise Exception(\"Not implemented\")", "def do_a(self, arg):\n self.do_active(arg)", "def usingHandler(self, cmd):\n self.command_handler.handle_command(cmd)\n while msg_queue.empty() is False:\n self.writeresponse(msg_queue.get())", "def handle_execution_response(self, data, *, wait):\n ...", "def execute(self):\n\t\tpass", "def _run_single(self, param: str):\n single_result = CyBldRunnerSingleResult(self.config.command, param)\n\n start = time.time()\n\n success = False\n if self._execute_single_system_command(self.config.command + \" \" + param) == 0:\n success = True\n\n end = time.time()\n\n if success:\n single_result.set_result(CyBldRunnerResultType.success,\n int(end - start))\n else:\n single_result.set_result(CyBldRunnerResultType.fail,\n int(end - start))\n\n self.results.add_result(single_result)\n return success", "async def resp(self, ctx, index, *, response):\n x = self.bot.get_command('question respond')\n if await x.can_run(ctx):\n await ctx.invoke(x, index, response=response)", "def send_output(self):\n self.__status_handler.io.async_refresh()", "def _command(self, *cmd, handler=None):", "def __call__(self):\n hub.sleep(random.randint(1, self.interval))\n while True:\n self.send_req()\n self.reply_pending = True\n hub.sleep(self.interval)\n if self.reply_pending:\n self.no_response()", "def cmd(self):", "def execute(self) -> None:\n self.command(self.target)", "def run_command(self):\r\n self.update_settings()\r\n self.run = True\r\n self.pause = False\r\n if self.run_call is not None:\r\n self.wm.after(1, self.run_call)", "def on_result(self, res):\n command = res.firstChildElement()\n if command[u'status'] == u'executing':\n session_id = command[u'sessionid']\n request = self.create_request('get')\n broad = request.addElement((NS_COMMAND, 'command'))\n broad['sessionid'] = session_id\n broad['node'] = BROADCAST_NODE\n form = 
self.create_form()\n broad.addChild(form.toElement())\n request.send(SERV_JID.full())\n else:\n log.msg('Announce sent.')", "def handle_user_command(self, command):\n\n out, err = pyautogit.commands.handle_custom_command(command)\n self.show_command_result(out, err, command_name=command)\n self.refresh_status()", "def request() -> None:\n\t_flag.set()", "def default(self, command):\n self._run_cmd(command)", "def send_one(self,\n response: bytes\n ) -> None:\n\n self.log_to_debug(\n line=f\"Send_One: {response} \"\n )\n self.sendLine(\n line=response\n )", "def run_once(self):\r\n with self._run_lock:\r\n self.run(self._process_queue,True) # True: override flag for saving\r", "async def _run_command(self, command, *args, **kwargs):\n pass", "def command(self, cmd):\n self.lmp.command(cmd)", "def execute_command(self, command):\n LiteHelper.execute_local_command(self.com, command)", "def runCommand(self, command):\n self._expectingCommand = defer.Deferred()\n self.clearBuffer()\n if isinstance(command, unicode):\n command = command.encode(\"utf-8\")\n self.transport.write(command + b'\\n')\n return self._expectingCommand", "def _run_as_argument(self):\n self.command = self.command_parser.run(self.args)\n if self.command:\n if self.command[0] == \"exit\":\n return\n self.answer = self.communication(self.command)\n self.print_answer(self.answer)\n \n self.command.clear()", "def postloop(self):\n super(CoreCommand, self).postloop() # Clean up command completion", "def run(self):\n while True:\n self.command = input(\"> cmd >>> \")\n self.invoker.run(self.command)", "def do(self):\n\n self.logger.debug(\"Entering GoToIdleCommand()\")\n \n device=self.target\n device._deconfigure()\n\n message = \"GoToIdle command completed OK\"\n self.logger.info(message)\n return (ResultCode.OK, message)", "def command_done(self):\n return self.read(\"*OPC?\") == \"1\"", "def _command(self, command, close_delay=0.0):\n self._open()\n try:\n self.__write_command(self._serial_port, command)\n if command.startswith(\"reboot\"):\n return\n\n response = self.__get_response(self._serial_port)\n if response[0].startswith(\"*E\"):\n raise errors.DeviceError(\"Device {} command failed. 
\"\n \"Unable to write command: {} \"\n \"to serial port: {} Err: {!r}\".format(\n self.name, command, self._serial_port,\n response[0]))\n finally:\n if close_delay > 0.0:\n time.sleep(close_delay)\n self.close()\n\n # Discard the last line which is the prompt\n return response[:-1]", "def run(self):\n import pxp # want to have fresh instance ???!\n ans = {}\n pu.mdbg.log(\"PXPWORKER started ------>cmd:{} cookie:{}\".format(self.cmd, self.cookie))\n if (self.cmd=='tagset'):\n ans = pxp.tagset(self.param)\n elif (self.cmd=='tagmod'): \n ans = pxp.tagmod(self.param)\n elif (self.cmd=='teleset'): \n ans = pxp.teleset(self.param)\n elif (self.cmd=='sumset'): \n ans = pxp.sumset(self.param)\n elif (self.cmd=='sumget'): \n ans = pxp.sumget(self.param)\n elif (self.cmd=='rec_stat'):\n self.rec_stat = {}\n self.rec_stat = self.pxp_rec_stat()\n self.done = True\n ans['cookie'] = self.cookie\n pu.mdbg.log(\"PXPHeler finished ------>cmd:{} param:{}\".format(self.cmd, self.param))\n return\n \n ans['cookie'] = self.cookie\n #resp = pu.disk.sockSendWait(\"AUP|\"+json.dumps(ans), addnewline=True, timeout=1)\n pu.disk.sockSendWait(\"AUP|\"+json.dumps(ans), addnewline=True)\n self.done = True\n pu.mdbg.log(\"PXPHeler finished ------>cmd:{} cookie:{}\".format(self.cmd, self.cookie))", "def send_command_without_response(self, command):\r\n if not self.is_dummy:\r\n self.socket.sendto(command.encode('utf-8'), self.tello_address)", "def response(self, context, message):\r\n return True", "def execute(self):\n pass", "def execute(self):\n pass", "def execute(self):\n pass", "def execute(self):\n pass", "def execute(self):\n pass", "def execute(self):\n pass", "def send_command(self, command):\r\n print (\">> send cmd: {}\".format(command))\r\n self.abort_flag = False\r\n timer = threading.Timer(self.command_timeout, self.set_abort_flag)\r\n\r\n self.socket.sendto(command.encode('utf-8'), self.tello_address)\r\n\r\n timer.start()\r\n while self.response is None:\r\n if self.abort_flag is True:\r\n break\r\n timer.cancel()\r\n \r\n if self.response is None:\r\n response = 'none_response'\r\n else:\r\n response = self.response.decode('utf-8')\r\n\r\n self.response = None\r\n\r\n return response", "def execute():", "def send_command(self, cmd):\n\n\t\tself.eyetribe._connection.request(cmd)", "def ask(self, message):\n\n self.write(message)\n\n if self.multi_command is None:\n return self.read()\n else:\n self.responses_expected += 1", "async def _socket_response(self, msg):\n if msg[\"t\"] != \"INTERACTION_CREATE\":\n return\n data = msg[\"d\"]\n\n if int(data[\"type\"]) not in [1, 2]:\n return\n\n guild = await self._discord.fetch_guild(data[\"guild_id\"])\n user = discord.Member(data=data[\"member\"], guild=guild, state=self._discord._connection)\n channel = await self._discord.fetch_channel(data[\"channel_id\"])\n\n\n options = {}\n x = self.commands.get(data[\"data\"][\"name\"])\n if x:\n if data[\"data\"].get(\"options\") is not None:\n for op in data[\"data\"].get(\"options\"):\n options[op[\"name\"]] = op[\"value\"]\n \n await x.callback(SlashedCommand(self._discord, command=x, data=data, user=user, channel=channel, guild_ids=x.guild_ids), **options)\n return\n \n fixed_options = []\n x_base = self.subcommands.get(data[\"data\"][\"name\"]) or self.subcommand_groups.get(data[\"data\"][\"name\"])\n if x_base:\n x = x_base.get(data[\"data\"][\"options\"][0][\"name\"])\n\n op = data[\"data\"][\"options\"][0]\n while op[\"type\"] != 1:\n op = op[\"options\"][0]\n fixed_options = op.get(\"options\", [])\n \n if 
x is None:\n x = x_base.get(op[\"name\"])\n \n if self.resolve_options:\n if data[\"data\"].get(\"resolved\"):\n resolved = data[\"data\"][\"resolved\"]\n if resolved.get(\"users\"):\n for u in resolved[\"users\"]:\n resolved_user = resolved[\"users\"][u]\n # find the key name\n for op in fixed_options:\n if options[op[\"name\"]] == u:\n options[op[\"name\"]] = discord.User(state=self._discord._connection, data=resolved_user)\n if resolved.get('members'):\n for m in resolved[\"members\"]:\n resolved_member = resolved[\"members\"][m]\n # find the key name\n for op in fixed_options:\n if options[op[\"name\"]] == m:\n options[op[\"name\"]] = discord.Member(state=self._discord._connection, data=resolved_member, guild=self._discord.get_guild(data[\"guild_id\"]))\n else:\n for op in fixed_options:\n if op[\"type\"] == OptionTypes.USER:\n options[op[\"name\"]] = await (await self._discord.fetch_guild(data[\"guild_id\"])).fetch_member(op[\"value\"])\n elif op[\"type\"] == OptionTypes.CHANNEL:\n options[op[\"name\"]] = await self._discord.fetch_channel(op[\"value\"])\n elif op[\"type\"] == OptionTypes.ROLE:\n options[op[\"name\"]] = get(await (await self._discord.fetch_guild(data[\"guild_id\"])).fetch_roles(), op[\"value\"], lambda x: getattr(x, \"id\", None))\n else:\n options[op[\"name\"]] = op[\"value\"]\n if x:\n await x.callback(SlashedSubCommand(self._discord, x, data, user, channel, x.guild_ids), **options)\n return", "async def _wait_response(self, cmd, prompt_re):\n self.logger.debug(\"Waiting for prompt\")\n resp = await self.wait_prompt(prompt_re)\n return resp", "def _response(self, *lines):\n for line in lines:\n self.client.dataReceived(line + b'\\r\\n')\n self.client.dataReceived(\n b'0001 OK [READ-ONLY] ' + self.command + b' completed\\r\\n')", "def execute(command):\n global interpreter\n command = interpreter.precmd(command)\n stop = interpreter.onecmd(command)\n stop = interpreter.postcmd(stop, command)\n return stop" ]
[ "0.6571192", "0.6366622", "0.63558435", "0.63473594", "0.6232847", "0.6113608", "0.60568047", "0.60522336", "0.5991149", "0.59650344", "0.5937013", "0.5937013", "0.5937013", "0.5937013", "0.5937013", "0.5937013", "0.59077984", "0.5870947", "0.58543557", "0.5847925", "0.5816758", "0.5796295", "0.57787544", "0.5770648", "0.5768519", "0.5764999", "0.5762609", "0.5733717", "0.5724145", "0.5721984", "0.5718734", "0.5716633", "0.5714035", "0.5709309", "0.5702119", "0.5697664", "0.5688415", "0.56819266", "0.56690365", "0.5659568", "0.56507534", "0.56325483", "0.5626532", "0.56253105", "0.5623676", "0.5622274", "0.56221384", "0.56171715", "0.5616116", "0.5611962", "0.56113297", "0.56104743", "0.56020266", "0.5595903", "0.5586821", "0.5573768", "0.55701154", "0.55700153", "0.5554336", "0.5551299", "0.5541292", "0.55387366", "0.55368805", "0.5535949", "0.55340874", "0.55269253", "0.5521982", "0.551275", "0.55115527", "0.5511389", "0.5507905", "0.5506393", "0.5503499", "0.54979515", "0.54837644", "0.5479474", "0.54788595", "0.5477269", "0.5477017", "0.5475556", "0.54753065", "0.546783", "0.54659384", "0.5449897", "0.54486775", "0.544678", "0.5445855", "0.5445855", "0.5445855", "0.5445855", "0.5445855", "0.5445855", "0.54424864", "0.5440101", "0.54395515", "0.54386145", "0.5437914", "0.5432781", "0.5429158", "0.5423345" ]
0.578565
22
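A concrete implementation of the abstract `execute(self, response)` above can be as small as the sketch below; the `CollectStatusCommand` name and its behavior are invented for illustration:

class CollectStatusCommand:
    """Illustrative command: remembers the status of each response."""

    def __init__(self):
        self.statuses = []

    def execute(self, response):
        # Executed once with the response of the active request.
        self.statuses.append(getattr(response, "status_code", None))
        return response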
Whether the command is scoped only to the current request or to the whole test method. Request-specific commands will be executed once per request, test-specific commands only once per test.
def has_request_scope(self):
    return self.request_scope
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def class_level_setup(self, request):\n\n if data_reader.get_data(request.function.__name__, \"Runmode\") != \"Y\":\n pytest.skip(\"Excluded from current execution run.\")", "def class_level_setup(self, request):\n test_name = request.function.__name__\n if data_reader.get_data(test_name, \"Runmode\") != \"Y\":\n pytest.skip(\"Excluded from current execution run.\")", "def test_multiple_commands_at_same_time(self):", "def is_cmd(self, name):\n \n return name in self.cmds", "def is_used_request(request):\n return getattr(request, 'param', None) is not NOT_USED", "def check_commands(self):\n pass", "def cmd(self, context, message):\r\n return True", "def test_command():\n\n dispatcher = ntelebot.dispatch.Dispatcher()\n dispatcher.add_command('command', lambda ctx: 'DISPATCHED')\n ctx = MockContext()\n ctx.type = 'message'\n ctx.command = None\n assert dispatcher(ctx) is False\n ctx.command = 'command'\n assert dispatcher(ctx) == 'DISPATCHED'\n ctx.type = 'callback_query'\n assert dispatcher(ctx) == 'DISPATCHED'\n ctx.type = 'inline_query'\n assert dispatcher(ctx) is False", "def responds_to(self, command) -> bool:\n return command == self.command and self.active is True and self.command is not None", "def test_func(self):\n return self.request.user.has_permission(\"core.view_staffer\")", "def check_channel_exec_request(self, channel, command):\n return False", "def __is_active(self, command):\n return True", "def _iscommand(self, key):\r\n\t\tyes = False\r\n\t\tfor i in COMMAND_NAME.keys():\r\n\t\t\tif key == i: \r\n\t\t\t\tyes = True; break\r\n\t\treturn yes", "def is_admin(cls, method_name):\n try:\n getattr(cls, method_name)\n except Exception:\n return False\n return Scenario.meta(cls, \"admin_only\", method_name, default=False)", "def is_configured(command):\n return command in COMMANDS", "def bot_commands_only(cmd):\n @functools.wraps(cmd)\n async def bc_cmd(self, ctx, *args, **kwargs):\n if ctx.guild:\n settings = self.bot.settings[ctx.guild]\n if settings.bot_commands_channels\\\n and ctx.channel.id not in settings.bot_commands_channels\\\n and ctx.author.id not in settings.admin_ids:\n\n for channel_id in settings.bot_commands_channels:\n bc_ch = discord.utils.get(ctx.guild.channels, id=channel_id)\n if bc_ch:\n await ctx.send(f\"Please use {bc_ch.mention} for that command\")\n return\n return await cmd(self, ctx, *args, **kwargs)\n return bc_cmd", "def is_executing(self):\n return self.executing", "def is_command(oin, env, pred_name: YPredName, arg: Any=None):\n return (env.check_predicate(obj, pred_name, arg) for obj in oin)", "def run_command_check(self):\n pass", "def test_get_current_request(self):\n assert get_current_request() is None", "def wrapped(request):\n if request.method in methods:\n return True\n else:\n return False", "def _has_permission(self, user, user_is_mod, command, db_session):\n\n if command[1] == 'for_all':\n return True\n if command[1] == 'for_mods' and user_is_mod:\n return True\n if type(command[1]) == db.Command:\n db_command = command[1]\n if bool(db_command.permissions) is False:\n return True\n elif user in [permission.user_entity for permission in db_command.permissions]:\n return True\n return False", "def test_allowed_if_in_task(self):\n\n @task_or_superuser_only\n def view(request):\n return HttpResponse(\"Hello\")\n\n request = self.factory.get(\"/\")\n request.META[_TASK_NAME_HEADER] = \"test\"\n\n response = view(request)\n self.assertEqual(response.status_code, 200)", "def _dispatching(self):\n return bool(self.generate_config or 
self.subapp or self.subcommand)", "def __commandExists(self, command, cmdtype):\n try:\n # method exists\n if hasattr(self, self.__getFullCommandName(command, cmdtype)):\n # command handler type exists\n if self.__commandHandlerTypeExists(cmdtype):\n return True\n else:\n return False\n else:\n return False\n # any key does not exist\n except KeyError:\n return False", "def matches_command(self, skill_input: SkillInput) -> bool:\n verb = (skill_input.verb or None) and skill_input.verb.lower()\n return verb in self._cmd_list", "def is_valid_command(command):\n return is_get(command) or is_insert(command) or is_update(command) or is_delete(command) or is_showall(command) or is_search(command)", "def _is_command(obj, cli):\n if not inspect.isfunction(obj) or obj.__name__.startswith(\"_\"):\n return False\n return hasattr(obj, \"__module__\") and obj.__module__ == cli.__name__", "def test_command_method_exists(self):\n motor_shield = MotorShield(self.options, self.connection)\n\n for command in motor_shield.commands:\n self.assertIn(command, dir(motor_shield))", "def _DoCommonRequestProcessing(self, request, mr):\n with mr.profiler.Phase('basic processing'):\n self._CheckForMovedProject(mr, request)\n self.AssertBasePermission(mr)", "def has_command_with_name(self, command_name):\n return command_name in self.commands", "def test_get_current_request(self):\r\n assert_is_none(get_current_request())", "async def before_any_command(ctx):\n ctx.timer = time()\n try:\n await ctx.trigger_typing()\n except discord.errors.Forbidden:\n pass", "def is_command_ancillary(args):\n # pylint: disable=bad-continuation\n if (\n # skip the parent check and only\n # determine if the parameter is present\n is_valid_executes(args, skip=True)\n ):\n return True\n return False", "def guild_only():\n\n async def check(ctx):\n if ctx.guild: # In a server\n return True\n await ctx.send('This command is only available in servers!')\n return False\n\n return commands.check(check)", "def known_command(self, command):\n return self._known_command(command, self.do_command)", "def test_dispatch_launch(self):\n @self.skill.launch\n def sample_func():\n \"\"\"Decorated function.\"\"\"\n self.skill.response.sessionAttributes['run'] = True\n self.skill.request.request.type = 'LaunchRequest'\n self.skill.dispatch()\n self.assertTrue(self.skill.response.sessionAttributes['run'])", "async def wiki_data_ai(self, ctx: _Context):\n if ctx.invoked_subcommand is None:\n self._log_command_use(ctx)\n await _wiki.assert_allowed(ctx)", "def is_enabled(command):\n if command not in Controller.commands:\n return False\n return Controller.commands[command][2]", "def test_func(self):\n taxonomy = self.get_taxonomy()\n return self.request.user == taxonomy.author", "def test_dispatch_intent(self):\n @self.skill.intent('test_intent')\n def sample_func():\n \"\"\"Decorated function.\"\"\"\n self.skill.response.sessionAttributes['run'] = True\n self.skill.request.request.type = 'IntentRequest'\n self.skill.request.request.intent = interface.Intent()\n self.skill.request.request.intent.name = 'test_intent'\n self.skill.dispatch()\n self.assertTrue(self.skill.response.sessionAttributes['run'])", "def test_process_invalid1(self):\n self.skill.logic = {}\n self.skill.valid.app_id = '12345'\n @self.skill.launch\n def sample_func():\n \"\"\"Decorated function.\"\"\"\n pass\n self.skill.logic['LaunchRequest']()\n self.assertFalse(self.skill.process(data.SAMPLE_LAUNCH_REQUEST))", "def should_execute(self, message):\n\t\tif self.command_str is not 
None:\n\t\t\treturn message.content.startswith(\"{}{}\".format(cmd_prefix, self.command_str))\n\t\telse:\n\t\t\treturn False", "def test_func(self, user):\n return self.get_object().admin == user", "def has_sub_commands(self) -> bool:\n if self.__dict__.get(\"sub_commands\"):\n return True\n\n return False", "def test_func(self):\n taxonomy = self.get_object()\n return self.request.user == taxonomy.author", "def test_func(self):\n taxonomy = self.get_object()\n return self.request.user == taxonomy.author", "def command_registered(self, command: str) -> bool:\n return command in self._commands", "async def can_run(self, ctx: Context) -> bool:\n\n if not self.enabled:\n raise DisabledCommand(f'{self.name} command is disabled')\n\n original = ctx.command\n ctx.command = self\n\n try:\n if not await ctx.bot.can_run(ctx):\n raise CheckFailure(f'The global check functions for command {self.qualified_name} failed.')\n\n cog = self.cog\n if cog is not None:\n local_check = Cog._get_overridden_method(cog.cog_check)\n if local_check is not None:\n ret = await guilded.utils.maybe_coroutine(local_check, ctx)\n if not ret:\n return False\n\n predicates = self.checks\n if not predicates:\n # since we have no checks, then we just return True.\n return True\n\n return await guilded.utils.async_all(predicate(ctx) for predicate in predicates) # type: ignore\n finally:\n ctx.command = original", "def check_global_request(self, kind, msg):\n return False", "def test_get_commands(self):\n self.installer.os = \"amazonLinux\"\n commands = self.installer._get_commands()\n assert len(commands) > 0\n self.installer.os = \"ubuntu\"\n commands = self.installer._get_commands()\n assert len(commands) > 0\n self.installer.os = \"does_not_exist\"\n commands = self.installer._get_commands()\n self.assertEqual(len(commands), 1)", "def test_func(self):\n return self.request.user.is_active # any active user", "def request_scopefunc(self):\n return REQUEST_ID.get().get(\"request\") or threading.get_ident()", "def not_test_without_user(self):\n # TODO", "async def custom(self, ctx):\n if ctx.invoked_subcommand is None:\n raise commands.CommandNotFound(\"Subcommand '{}' does not exist.\".format(ctx.subcommand_passed))", "def safe_known_command(self, command):\n return self._known_command(command, self.safe_do_command)", "async def wiki_data(self, ctx: _Context):\n if ctx.invoked_subcommand is None:\n self._log_command_use(ctx)\n await _wiki.assert_allowed(ctx)", "def _get_supported_commands(self):\n logger.info(\"Default unconfigured API, not adding any commands!\")\n pass", "def runCommand(self): \\\n # pylint: disable=no-self-use", "def do_known_command(self, cmd):\n if cmd in self.commands:\n return \"true\", True\n else:\n return \"false\", True", "def do_not_limit(command):\n if not isinstance(command, commands.Command):\n raise TypeError(\"one_command.not_limited is a decorator for Commands\")\n\n exempted.append(command)\n return command", "def handle(self, command, context):\n print(\"Simulating \" + str(command))\n cmd = command.getCommandID()\n\n if cmd == \"comment\":\n # Suppress comments that start with \"#\"\n comment = command.getProperty(\"comment\")\n return comment.startswith(\"#\")\n \n if cmd == \"log\":\n # Ignore log commands in simulation\n return True\n \n if cmd == \"set\":\n name = command.getProperty(\"device_name\")\n if name == \"rate\":\n # Changing rate always takes a fixed amount of time\n value = command.getProperty(\"value\")\n context.logExecutionStep(\"Set beam rate to %g Hz\" % value, 
5.0);\n # Do update the simulated device!\n context.getDevice(\"rate\").write(value)\n return True\n \n if cmd == \"wait\":\n name = command.getProperty(\"device_name\")\n if name == \"charge\":\n charge = command.getProperty(\"desired_value\")\n\n # Time spent waiting for beam charge depends on rate\n rate = self.getRate(context)\n time = 60.0*60.0 * charge * 60/rate\n \n context.logExecutionStep(\"Wait for %.2f Coulomb at %.0f Hz\" % (charge, rate), time);\n return True\n\n # For commands not specifically handled, use default simulation\n return False", "async def hockey_commands(self, ctx: commands.Context) -> None:\n pass", "def test_permissions(self):\n taxonomy = self.get_taxonomy()\n return True if self.request.user == taxonomy.author else taxonomy.public", "async def command(self,ctx):\n await ctx.send(\"Yes this is a command.\")", "def test_request_throttling_is_per_user(self):\n self.ensure_is_throttled(MockView, 200)", "def func(self):\n from evennia.utils.utils import string_suggestions, list_to_string\n\n msg = \"Command '%s' is not available.\" % self.raw\n cmdset = self.cmdset\n cmdset.make_unique(self.caller)\n all_cmds = [cmd for cmd in cmdset if cmd.auto_help and cmd.access(self.caller)]\n names = []\n for cmd in all_cmds:\n # noinspection PyProtectedMember\n names.extend(cmd._keyaliases)\n suggestions = string_suggestions(self.raw, set(names), cutoff=0.7)\n if suggestions:\n msg += \" Maybe you meant %s?\" % list_to_string(\n suggestions, \"or\", addquote=True\n )\n else:\n msg += ' Type \"help\" for help.'\n self.msg(msg)", "def isScopeActive(self, name):", "def isOp(self):\n return True", "def __call__(self, target, creds, enforcer):\n\n return creds['is_admin'] == self.expected", "def __call__(self, target, creds, enforcer):\n\n return creds['is_admin'] == self.expected", "def is_command(self, text):\n return text.split(' ', 1)[0].startswith(\"!\")", "def test_allowed_if_in_task(self):\n\n @task_only\n def view(request):\n return HttpResponse(\"Hello\")\n\n request = self.factory.get(\"/\")\n request.META[_TASK_NAME_HEADER] = \"test\"\n with sleuth.fake(\"djangae.environment.is_in_task\", True):\n response = view(request)\n\n self.assertEqual(response.status_code, 200)", "async def cool(ctx):\n if ctx.invoked_subcommand is None:\n await ctx.send('No, {0.subcommand_passed} is not cool'.format(ctx))", "def can_execute(self, msg, command, now):\n if command not in self.user_limit:\n return True, 0\n expiry = self.user_limit[command].get(msg.author.id, 0)\n return now > expiry, expiry-now", "def _override_command_method(self, name):\n method=getattr(self,name,None)\n if method is not None:\n @func_utils.getargsfrom(method)\n def new_method(*args, **kwargs):\n return self._call_command_method(name,method,args,kwargs)\n setattr(self,name,new_method)", "def command():\n pass", "def run(self, cmd):\n if self.env_name == \"local\":\n asyncio.run(self.async_run(cmd))\n else:\n cmd = f\"{self.get_scalingo_run_cmd()} '{cmd}'\"\n asyncio.run(self.async_run(cmd))", "def disable_cmd_restricted(self, cls):\n whitelist = self.get_availables_cmd(cls)\n if not whitelist:\n return True\n acessmethods = AcessMethods(cls, whitelist)\n setattr(self.cls, \"__getattribute__\", acessmethods.disabled_method)", "def command_(self, name):\n def decorator(func):\n func.__name__ = name\n return self.command(func)\n return decorator", "def _verifyCommand(self):\n for i in range(3):\n rc = self.subdevice.command_test() # Verify command is correct\n if rc is None:\n break", "def test_func(self):\n 
return self.request.user.is_superuser", "def _under_coverage_cmd(self, cmd):\n if self.run_under_coverage:\n cmd.append('--cov')\n cmd.append('--cov-report=')\n\n return cmd", "def _under_coverage_cmd(self, cmd):\n if self.run_under_coverage:\n cmd.append('--cov')\n cmd.append('--cov-report=')\n\n return cmd", "def has_permission(self):\n return super().has_permission()", "def in_method_code(self):\n return self.mscope is not None", "def test_send_command(fprime_test_api):\n fprime_test_api.send_and_assert_command(\"cmdDisp.CMD_NO_OP\", max_delay=0.1)\n assert fprime_test_api.get_command_test_history().size() == 1\n fprime_test_api.send_and_assert_command(\"cmdDisp.CMD_NO_OP\", max_delay=0.1)\n assert fprime_test_api.get_command_test_history().size() == 2", "def emulate_request(self, user):\n message = {\n 'action': 'emulate_request',\n 'user_id': user.id,\n 'command_ids': self.ids,\n }\n self.env['telegram.bus'].sendone(message)\n return True", "def _under_coverage_cmd(self, cmd):\n if self.run_under_coverage:\n cmd.append('--cov')\n if self.append_coverage:\n cmd.append('--cov-append')\n cmd.append('--cov-report=')\n\n return cmd", "async def add_global_command(self, base):\n api_command = await self._get_global_api_command(base.name)\n if api_command is None:\n await create_global_command(base.to_dict(), self._discord)\n else:\n if api_command != base:\n if api_command.get(\"guild_id\") is None:\n await edit_global_command(api_command[\"id\"], self._discord, base.to_dict())\n else:\n await delete_guild_command(self._discord, api_command[\"id\"], api_command[\"guild_id\"])\n await self.add_global_command(base)", "def _general_testing(self, context, kind, *args, **kwargs):\r\n if kind == \"fake_next_op\":\r\n self._register_fake_next_op(context.channel, *args, **kwargs)\r\n self._reply(context, proto_success({}, None), None)\r\n return True\r\n self._reply(context, proto_failure({\"Unsupported testing function '{}'\".format(kind)}), None)\r\n return False", "def test_importtleCommandExists(self):\n self.assertIn('importtle', get_commands())", "def validate_command(command):\n return command in list(VALID_COMMANDS.keys())", "def test_base_command(self):\n c = SeqPrep()\n # test base command\n self.assertEqual(c.BaseCommand,\n ''.join(['cd \"', os.getcwd(), '/\"; ', 'SeqPrep']))\n # test turning on parameter\n c.Parameters['-O'].on('15')\n self.assertEqual(c.BaseCommand,\\\n ''.join(['cd \"', os.getcwd(), '/\"; ', 'SeqPrep -O 15']))", "def __call__(self, target, creds):\n\n return creds['is_admin'] == self.expected", "def _implements_test_batch_hooks(self):\n return not is_default(self.on_test_batch_begin) or not is_default(\n self.on_test_batch_end\n )", "def test_passes_check():\n from django.core.management import call_command\n\n call_command('check', 'django_recommend')", "def _is_command(self, ext):\n try:\n return issubclass(ext, CommandExtension)\n except TypeError:\n return False", "def handle_request_command(self, msg):\n\n\t\t# only one command?\n\t\tcommand = None\n\t\tif msg.arguments:\n\t\t\tcommand = msg.arguments[0]\n\n\t\tmodule_name = moduleManager.module_providing(self.__command_list, command)\n\n\t\ttry:\n\t\t\t# check if the module exists in the module manager\n\t\t\tmoduleManager[module_name]\n\t\texcept KeyError:\n\t\t\t# the module has been removed from moduleManager (probably through a reload)\n\t\t\tCORE.warn('Module %r (command=%r, id=%r) does not exists anymore' % (module_name, command, 
msg.id))\n\t\t\tmoduleManager.load()\n\t\t\tself._reload_acls_and_permitted_commands()\n\t\t\tmodule_name = None\n\n\t\tif not module_name:\n\t\t\traise Forbidden()\n\n\t\tif msg.arguments:\n\t\t\tif msg.mimetype == MIMETYPE_JSON:\n\t\t\t\tis_allowed = moduleManager.is_command_allowed(self.acls, msg.arguments[0], options=msg.options, flavor=msg.flavor)\n\t\t\telse:\n\t\t\t\tis_allowed = moduleManager.is_command_allowed(self.acls, msg.arguments[0])\n\t\t\tif not is_allowed:\n\t\t\t\traise Forbidden()\n\t\t\tif module_name not in self.__processes:\n\t\t\t\tCORE.info('Starting new module process and passing new request to module %s: %s' % (module_name, str(msg._id)))\n\t\t\t\ttry:\n\t\t\t\t\tmod_proc = ModuleProcess(module_name, debug=MODULE_DEBUG_LEVEL, locale=self.i18n.locale)\n\t\t\t\texcept EnvironmentError as exc:\n\t\t\t\t\tmessage = self._('Could not open the module. %s Please try again later.') % {\n\t\t\t\t\t\terrno.ENOMEM: self._('There is not enough memory available on the server.'),\n\t\t\t\t\t\terrno.EMFILE: self._('There are too many opened files on the server.'),\n\t\t\t\t\t\terrno.ENFILE: self._('There are too many opened files on the server.'),\n\t\t\t\t\t\terrno.ENOSPC: self._('There is not enough free space on the server.')\n\t\t\t\t\t}.get(exc.errno, self._('An unknown operating system error occurred (%s).' % (exc,)))\n\t\t\t\t\traise ServiceUnavailable(message)\n\t\t\t\tmod_proc.signal_connect('result', self.result)\n\n\t\t\t\tcb = notifier.Callback(self._mod_error, module_name)\n\t\t\t\tmod_proc.signal_connect('error', cb)\n\n\t\t\t\tcb = notifier.Callback(self._socket_died, module_name)\n\t\t\t\tmod_proc.signal_connect('closed', cb)\n\n\t\t\t\tcb = notifier.Callback(self._mod_died, module_name)\n\t\t\t\tmod_proc.signal_connect('finished', cb)\n\n\t\t\t\tself.__processes[module_name] = mod_proc\n\n\t\t\t\tcb = notifier.Callback(self._mod_connect, mod_proc, msg)\n\t\t\t\tnotifier.timer_add(50, cb)\n\t\t\telse:\n\t\t\t\tproc = self.__processes[module_name]\n\t\t\t\tif proc.running:\n\t\t\t\t\tCORE.info('Passing new request to running module %s' % module_name)\n\t\t\t\t\tproc.request(msg)\n\t\t\t\t\tself.reset_inactivity_timer(proc)\n\t\t\t\telse:\n\t\t\t\t\tCORE.info('Queuing incoming request for module %s that is not yet ready to receive' % module_name)\n\t\t\t\t\tproc._queued_requests.append(msg)", "def test_base_command(self):\n c = PandaSeq()\n # test base command\n self.assertEqual(c.BaseCommand,\n ''.join(['cd \"', os.getcwd(), '/\"; ', 'pandaseq']))\n # test turning on parameter\n c.Parameters['-o'].on('15')\n self.assertEqual(c.BaseCommand,\n ''.join(['cd \"', os.getcwd(), '/\"; ', 'pandaseq -o 15']))" ]
[ "0.5562551", "0.5516768", "0.5430572", "0.539855", "0.5351005", "0.5274069", "0.52571267", "0.5246556", "0.52220726", "0.5220322", "0.5207937", "0.52079004", "0.52006286", "0.51645637", "0.51614296", "0.51251787", "0.51241505", "0.51203376", "0.5114063", "0.5096146", "0.50909364", "0.50694185", "0.50355506", "0.50099343", "0.49846688", "0.49681333", "0.49645513", "0.49586082", "0.4956104", "0.49505493", "0.49181402", "0.49153998", "0.49132612", "0.49084872", "0.49063364", "0.49014547", "0.49010968", "0.4894377", "0.4892874", "0.4887316", "0.4868862", "0.4841286", "0.48344573", "0.48339868", "0.48251137", "0.48205292", "0.48205292", "0.48180628", "0.4817588", "0.4816798", "0.4806732", "0.47985336", "0.4796718", "0.4793328", "0.47891676", "0.47886968", "0.4767773", "0.47667348", "0.475794", "0.47575432", "0.4757457", "0.4729139", "0.47279075", "0.47255513", "0.47148764", "0.47147167", "0.47065043", "0.46998724", "0.46989465", "0.46965644", "0.46965644", "0.46963155", "0.46955544", "0.46842968", "0.46828917", "0.46626878", "0.4662497", "0.46575066", "0.46549043", "0.46445283", "0.46431348", "0.4642721", "0.464143", "0.464143", "0.46317136", "0.4629278", "0.46215227", "0.4620273", "0.46180382", "0.4617268", "0.46166238", "0.46148664", "0.4611901", "0.46100003", "0.4607625", "0.4603242", "0.4601986", "0.45957354", "0.45928177", "0.4591681" ]
0.50144213
23
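The `has_request_scope` flag above suggests a dispatcher that runs request-scoped commands on every request but test-scoped commands only once per test method. A hedged sketch, assuming a `CommandRunner` wrapper that is not part of the record:

class CommandRunner:
    """Illustrative dispatcher for request- vs. test-scoped commands."""

    def __init__(self, commands):
        self.commands = commands
        self._ran_this_test = set()

    def on_response(self, response):
        for command in self.commands:
            if command.has_request_scope():
                # Request-specific: execute once per request.
                command.execute(response)
            elif id(command) not in self._ran_this_test:
                # Test-specific: execute only once per test method.
                command.execute(response)
                self._ran_this_test.add(id(command))
        return response

    def on_test_end(self):
        # Reset per-test bookkeeping between test methods.
        self._ran_this_test.clear()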
Generates fixture objects from the given response and stores them in the applicationspecific cache.
def execute(self, response):
    if not has_request_context:
        return self._fallback_fixture_names()
    try:
        app = self.auto_fixture.app
        # Create response fixture
        fixture = Fixture.from_response(response, app, self.response_name)
        self.auto_fixture.add_fixture(fixture)
        # Create request fixture
        if request.data:
            fixture = Fixture.from_request(request, app, self.request_name)
            self.auto_fixture.add_fixture(fixture)
    except TypeError:  # pragma: no cover
        warnings.warn("Could not create fixture for unsupported mime type")
    return response
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def api_response():\n return load_fixture(\"smhi.json\", DOMAIN)", "def fixture_retrieved():\n from aiida.plugins import DataFactory\n from aiida_logger.tests import TEST_DIR\n\n retrieved = DataFactory('folder')()\n retrieved.put_object_from_tree(path=os.path.join(TEST_DIR, 'input_files'))\n\n return retrieved", "def orlov_fixture(request, workspace, minicap):\n logger.info('Orlov Fixture : setup minicap service and other.')\n request.cls.workspace = workspace\n request.cls.minicap = minicap\n request.cls.evidence_dir = request.cls.workspace.mkdir('tmp\\\\evidence')\n request.cls.video_dir = request.cls.workspace.mkdir('tmp\\\\video')\n yield\n logger.info('Olorv Fixture : teardown minicap service and other.')", "def api_response_lack_data():\n return load_fixture(\"smhi_short.json\", DOMAIN)", "def setUp(self):\n cache.clear()\n self.factory = APIRequestFactory()", "def program_response_fixture() -> dict[str, Any]:\n return cast(dict[str, Any], json.loads(load_fixture(\"program_response.json\")))", "def fixtures():", "def store_response_in_cache(responsefile, response):\n global __response_cache\n log.debug(\"Storing data from flats (%s) in cache\" % responsefile)\n __response_cache[responsefile] = {}\n modtime = str(os.path.getmtime(responsefile))\n __response_cache[responsefile][modtime] = response", "def _Dynamic_Fetch(self, request, response):\n print \"Request:\"\n print (\"Request: {}\").format(request)\n response.set_content(self.mock_response_issue)\n response.set_statuscode(200)\n new_header = response.add_header()\n new_header.set_key('Content-type')\n new_header.set_value('application/json')\n\n response.set_finalurl(request.url)\n response.set_contentwastruncated(False)\n\n # allow to query the object after it is used\n # pylint: disable=attribute-defined-outside-init\n self.request = request\n self.response = response", "def fixture_test_store(andy, pandy, candy):\n store_ = InMemoryStore[Person](unique_keys={\"name\"})\n store_.add(andy)\n store_.add(pandy)\n store_.add(candy)\n yield store_", "def mock_api():\n with open(os.path.join(HERE, 'response.json'), 'r') as fp:\n webargs_response = fp.read()\n # A valid package with a proper response\n responses.add(\n responses.GET,\n 'https://pypi.python.org/pypi/webargs/json',\n body=webargs_response,\n content_type='application/json'\n )\n # A valid package with no releases\n with open(os.path.join(HERE, 'response_noreleases.json'), 'r') as fp:\n foo_response = fp.read()\n\n responses.add(\n responses.GET,\n 'https://pypi.python.org/pypi/foo/json',\n body=foo_response,\n content_type='application/json'\n )\n\n # An invalid package name\n responses.add(\n responses.GET,\n 'https://pypi.python.org/pypi/nope/json',\n status=404\n )\n responses.start()\n\n yield responses\n\n responses.stop()", "def fill_from_api_response(self, api_response):\n pass", "def test_dataset_response(dataset_response, client):\n file = glob.glob('./**/test_areas.csv', recursive=True)\n with open(file[0], 'r') as fp:\n resp = client.create_dataset(fp)\n assert resp.keys() == dataset_response.keys()", "def test_api_response_data(self):", "def fixture_andy():\n yield Person(name=\"Andy\", age=12, hobbies=[\"Star Wars\", \"Bicycles\"])", "def fixture_candy():\n yield Person(name=\"Candy\", age=13, hobbies=[\"Gardening\"])", "def fixture_tile_details():\n return {\n \"version\": 1,\n \"revision\": 1,\n \"timestamp\": \"2018-06-19T23:04:39.097Z\",\n \"timestamp_ms\": 1529449479097,\n \"result_code\": 0,\n \"result\": {\n TILE_TILE_UUID: {\n 
\"thumbnailImage\": \"https://local-tile-pub.s3.amazonaws.com/..\",\n \"tileState\": {\n \"ringStateCode\": 0,\n \"connectionStateCode\": 0,\n \"uuid\": TILE_TILE_UUID,\n \"tile_uuid\": TILE_TILE_UUID,\n \"client_uuid\": TILE_CLIENT_UUID,\n \"timestamp\": 1512615215149,\n \"advertised_rssi\": 1.4e-45,\n \"client_rssi\": 1.4e-45,\n \"battery_level\": 1.4e-45,\n \"latitude\": 21.9083423,\n \"longitude\": -72.4982138,\n \"altitude\": 1821.129812,\n \"h_accuracy\": 5.0,\n \"v_accuracy\": 3.0,\n \"speed\": 1.4e-45,\n \"course\": 1.4e-45,\n \"authentication\": None,\n \"owned\": True,\n \"has_authentication\": None,\n \"lost_timestamp\": -1,\n \"connection_client_uuid\": TILE_CLIENT_UUID,\n \"connection_event_timestamp\": 1512615234268,\n \"last_owner_update\": 1512615215149,\n \"connection_state\": \"READY\",\n \"ring_state\": \"STOPPED\",\n \"is_lost\": False,\n \"voip_state\": \"OFFLINE\",\n },\n \"entityName\": \"TILE\",\n \"tile_uuid\": \"19264d2dffdbca32\",\n \"firmware_version\": \"01.12.14.0\",\n \"owner_user_uuid\": \"2ea56f4d-6576-4b4e-af11-3410cc65e373\",\n \"name\": TILE_TILE_NAME,\n \"category\": None,\n \"image_url\": \"https://local-tile-pub.s3.amazonaws.com/...\",\n \"visible\": True,\n \"is_dead\": False,\n \"hw_version\": \"02.09\",\n \"product\": \"DUTCH1\",\n \"archetype\": \"WALLET\",\n \"configuration\": {\"fw10_advertising_interval\": None},\n \"last_tile_state\": {\n \"ringStateCode\": 0,\n \"connectionStateCode\": 0,\n \"uuid\": \"19264d2dffdbca32\",\n \"tile_uuid\": \"19264d2dffdbca32\",\n \"client_uuid\": \"a01bf97a-c89a-40e2-9534-29976010fb03\",\n \"timestamp\": 1512615215149,\n \"advertised_rssi\": 1.4e-45,\n \"client_rssi\": 1.4e-45,\n \"battery_level\": 1.4e-45,\n \"latitude\": 39.797571,\n \"longitude\": -104.887826,\n \"altitude\": 1588.002773,\n \"h_accuracy\": 5.0,\n \"v_accuracy\": 3.0,\n \"speed\": 1.4e-45,\n \"course\": 1.4e-45,\n \"authentication\": None,\n \"owned\": True,\n \"has_authentication\": None,\n \"lost_timestamp\": -1,\n \"connection_client_uuid\": TILE_CLIENT_UUID,\n \"connection_event_timestamp\": 1512615234268,\n \"last_owner_update\": 1512615215149,\n \"connection_state\": \"DISCONNECTED\",\n \"ring_state\": \"STOPPED\",\n \"is_lost\": False,\n \"voip_state\": \"OFFLINE\",\n },\n \"firmware\": {\n \"expected_firmware_version\": \"\",\n \"expected_firmware_imagename\": \"\",\n \"expected_firmware_urlprefix\": \"\",\n \"expected_firmware_publish_date\": 0,\n \"expected_ppm\": None,\n \"expected_advertising_interval\": None,\n \"security_level\": 1,\n \"expiry_timestamp\": 1529471079097,\n \"expected_tdt_cmd_config\": None,\n },\n \"auth_key\": \"aliuUAS7da980asdHJASDQ==\",\n \"renewal_status\": \"LEVEL1\",\n \"metadata\": {},\n \"auto_retile\": False,\n \"status\": \"ACTIVATED\",\n \"tile_type\": \"TILE\",\n \"registration_timestamp\": 1482711833983,\n \"is_lost\": False,\n \"auth_timestamp\": 1512287015405,\n \"activation_timestamp\": 1482711835011,\n \"last_modified_timestamp\": 1514353410254,\n }\n },\n }", "def test_request(self):\n test_sets = [#{'url':\"http://.*:8774/.*/flavors/detail\", 'data':{'candy':'yum!'}},\n {'url':\"http://.*:8774/.*/flavors/detail\", 'data':{'status_code': 90210, '_content': json.dumps({'error':{'message':'Old Gregg did not like you being in his waters!', 'detail':'Mmmm...creamy.'}})}, 'exception':exceptions.ClientException},\n ]\n for test_set in test_sets:\n response = ''\n cm = None\n self.write_inject_file(test_set)\n exp_exception = test_set.get('exception')\n if exp_exception:\n with 
self.assertRaises(exp_exception) as cm:\n print 'Expected exception: %s' %exp_exception\n response = self.client.flavors.list()\n else:\n response = self.client.flavors.list()\n if cm:\n print 'Exception info: %s' % vars(cm.exception)\n self.assertEqual(test_set['data']['status_code'], cm.exception.code)\n print 'Test response: %s' %response\n self.remove_inject_file()\n print '#'*80", "def caches_mock(request):\n\n from unittest import mock\n from contextlib import ExitStack\n from dogpile.cache import make_region\n\n caches_to_mock = []\n expiration_time = 600\n\n params = __get_fixture_param(request)\n if params:\n caches_to_mock = params.get(\"caches_to_mock\", caches_to_mock)\n expiration_time = params.get(\"expiration_time\", expiration_time)\n\n with ExitStack() as stack:\n mocked_caches = []\n for module in caches_to_mock:\n region = make_region().configure('dogpile.cache.memory', expiration_time=expiration_time)\n stack.enter_context(mock.patch(module, new=region))\n mocked_caches.append(region)\n\n yield mocked_caches", "def program_post_response_fixture() -> dict[str, Any]:\n return cast(dict[str, Any], json.loads(load_fixture(\"program_post_response.json\")))", "def test_cache(self):\n response = self.make_call().json[0]\n self.assertFalse(response['cached']) # a call has ben made to Google API\n # each step is saved\n self.assertEqual(len(r.keys(pattern=r'step*')), int(r.get('counter')))\n self.assertEqual(int(r.get('counter')), len(response['steps']))\n pairs = set((i, j) for (i, o), (j, d) in combinations_with_replacement(list(enumerate(response['steps'])), 2) if i <= j)\n self.assertEqual(len(r.keys(pattern=r'origin*')), len(pairs)) # each combination is cached\n for i, j in pairs:\n origin, destination = response['steps'][i], response['steps'][j]\n resp = self.make_call(origin=f\"{origin['start_lat']},{origin['start_lng']}\",\n destination=f\"{destination['end_lat']},{destination['end_lng']}\").json[0]\n # No new API calls are made, cached results are returned for each possible combination of origin/dest\n self.assertEqual(origin['start_lat'], resp['start_lat']) # all coordinates should match\n self.assertEqual(origin['start_lng'], resp['start_lng'])\n self.assertEqual(destination['end_lat'], resp['end_lat'])\n self.assertEqual(destination['end_lng'], resp['end_lng'])\n self.assertTrue(resp['cached'])\n # New API call is made for transit directions. 
We can't recycle driving directions for this one.\n response = self.make_call(mode='transit').json\n self.assertFalse(response[0]['cached'])\n self.assertTrue(len(response) > 1) # when asking for transit directions it should yield multiple alternatives\n # driving directions should be cached already\n response = self.make_call().json[0]\n self.assertTrue(response['cached'])\n # Walking directions should not be cached\n walking = self.make_call(mode='walking').json[0]\n self.assertFalse(walking['cached'])\n # Bicycling should be treated as walking but 3 times as fast\n bicycling = self.make_call(mode='bicycling').json[0]\n self.assertTrue(bicycling['cached'])\n self.assertEqual(walking['duration'], 3 * bicycling['duration'])", "def setUp(self):\n posts = []\n serializers = []\n self.responses = []\n for response in RESPONSES:\n with self.subTest():\n title = response['title']\n url = response['url']\n created = response['created']\n post = Post.objects.create(title=title,\n url=url,\n created=created)\n posts.append(post)\n for post in posts:\n with self.subTest(current_post=post):\n ser = PostSerializer(post)\n serializers.append(ser)\n for ser in serializers:\n with self.subTest(current_serializer=ser):\n response = Response(ser.data)\n self.responses.append(response)", "def create_fake(cls):\n source = pkg_resources.open_text('baseball_id', 'sample.master.csv',\n encoding='iso-8859-1')\n c = lookup.Cache(source)\n return c", "def test_guidanceresponse_1(base_settings):\n filename = base_settings[\"unittest_data_dir\"] / \"guidanceresponse-example.json\"\n inst = guidanceresponse.GuidanceResponse.parse_file(\n filename, content_type=\"application/json\", encoding=\"utf-8\"\n )\n assert \"GuidanceResponse\" == inst.resource_type\n\n impl_guidanceresponse_1(inst)\n\n # testing reverse by generating data from itself and create again.\n data = inst.dict()\n assert \"GuidanceResponse\" == data[\"resourceType\"]\n\n inst2 = guidanceresponse.GuidanceResponse(**data)\n impl_guidanceresponse_1(inst2)", "def process_response(self, response: Dict) -> Iterator[dict]:", "def fixture_chunked_json_data(tmp_path_factory, request):\n # Make root dir\n root = tmp_path_factory.mktemp(\"data\")\n\n # Set params\n num_chunks = request.param.num_chunks\n chunk_size = request.param.chunk_size\n\n # Seed JSON data\n paths = [root / Path(f\"{idx}.json\") for idx in range(num_chunks)]\n for chunk_idx, path in enumerate(paths):\n if not path.parent.exists():\n path.parent.mkdir(parents=True)\n\n content = {str(chunk_idx + idx): chunk_idx + idx for idx in range(chunk_size)}\n with path.open(\"w\") as file:\n json.dump(content, file)\n\n return root", "def create_test_data(self):\n fake = Faker(['en_US', 'ja_JP', 'el_GR', 'de_DE'])\n\n self.actor_request = {\n 'name': fake.name(),\n 'age': random.randint(22, 88),\n 'gender': random.choice(['M', 'F'])\n }\n\n self.movie_request = {\n 'title': fake.color_name() + ' ' + fake.street_suffix(),\n 'releaseDate': str(fake.date_between())\n }\n\n self.actor_update_request = {\n 'name': fake.name(),\n }\n\n self.movie_update_request = {\n 'title': fake.color_name() + ' ' + fake.street_suffix(),\n }\n\n for _ in range(30):\n actor_name = fake.name()\n actor_age = random.randint(22, 88)\n actor_gender = random.choice(['M', 'F'])\n\n movie_title = fake.color_name() + ' ' + fake.street_suffix()\n movie_release_date = str(fake.date_between())\n\n actor = Actor(actor_name, actor_age, actor_gender)\n actor.insert()\n\n movie = Movie(movie_title, movie_release_date)\n 
movie.insert()\n\n for _ in range(20):\n actors = Actor.query.all()\n movies = Movie.query.all()\n\n actor_to_update = random.choice(actors)\n movie_to_update = random.choice(movies)\n actor_to_update.movies.append(movie_to_update)", "def simulate_response(self, documents):", "def set(self, response):\n self.data[response.url] = Page(\n response.json(),\n self._get_expiration(response.headers)\n )", "async def test_api_template_cached(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n hass.states.async_set(\"sensor.temperature\", 30)\n\n resp = await mock_api_client.post(\n const.URL_API_TEMPLATE,\n json={\"template\": \"{{ states.sensor.temperature.state }}\"},\n )\n\n body = await resp.text()\n\n assert body == \"30\"\n\n hass.states.async_set(\"sensor.temperature\", 40)\n resp = await mock_api_client.post(\n const.URL_API_TEMPLATE,\n json={\"template\": \"{{ states.sensor.temperature.state }}\"},\n )\n\n body = await resp.text()\n\n assert body == \"40\"", "def setUp(self):\n self.tool = flow_common_tool()\n self.xml = xml_tool()\n self.ins = route()\n\n self.response = {}\n self.response[\"HA_SINGLE_INSTANCE\"] = \"\"\"\n <instance-information xmlns=\"http://xml.juniper.net/junos/18.1I0/junos-routing\" junos:style=\"terse\">\n <instance-core>\n <instance-name>master</instance-name>\n <instance-type>forwarding</instance-type>\n <instance-rib>\n <irib-name>inet.0</irib-name>\n <irib-active-count>22</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6.0</irib-name>\n <irib-active-count>7</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n </instance-core>\n </instance-information>\n \"\"\"\n\n self.response[\"HA_MULTI_INSTANCE\"] = \"\"\"\n <instance-information xmlns=\"http://xml.juniper.net/junos/18.1I0/junos-routing\" junos:style=\"terse\">\n <instance-core>\n <instance-name>master</instance-name>\n <instance-type>forwarding</instance-type>\n <instance-rib>\n <irib-name>inet.0</irib-name>\n <irib-active-count>22</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6.0</irib-name>\n <irib-active-count>7</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n </instance-core>\n <instance-core>\n <instance-name>__juniper_private1__</instance-name>\n <instance-type>forwarding</instance-type>\n <instance-rib>\n <irib-name>__juniper_private1__.inet.0</irib-name>\n <irib-active-count>12</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n </instance-core>\n <instance-core>\n <instance-name>__juniper_private2__</instance-name>\n <instance-type>forwarding</instance-type>\n <instance-rib>\n <irib-name>__juniper_private2__.inet.0</irib-name>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>1</irib-hidden-count>\n </instance-rib>\n </instance-core>\n <instance-core>\n <instance-name>__juniper_private3__</instance-name>\n <instance-type>forwarding</instance-type>\n </instance-core>\n <instance-core>\n <instance-name>__juniper_private4__</instance-name>\n <instance-type>forwarding</instance-type>\n <instance-rib>\n <irib-name>__juniper_private4__.inet.0</irib-name>\n 
<irib-active-count>2</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n </instance-core>\n <instance-core>\n <instance-name>__master.anon__</instance-name>\n <instance-type>forwarding</instance-type>\n </instance-core>\n <instance-core>\n <instance-name>mgmt_junos</instance-name>\n <instance-type>forwarding</instance-type>\n </instance-core>\n </instance-information>\n \"\"\"\n\n\n self.response[\"HA_SINGLE_INSTANCE_BRIEF\"] = \"\"\"\n <instance-information xmlns=\"http://xml.juniper.net/junos/18.1I0/junos-routing\" junos:style=\"terse\">\n <instance-core>\n <instance-name>master</instance-name>\n <instance-type>forwarding</instance-type>\n <instance-rib>\n <irib-name>inet.0</irib-name>\n <irib-active-count>18</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6.0</irib-name>\n <irib-active-count>1</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n </instance-core>\n </instance-information>\n \"\"\"\n\n self.response[\"HA_SINGLE_INSTANCE_DETAIL\"] = \"\"\"\n <instance-information xmlns=\"http://xml.juniper.net/junos/18.1I0/junos-routing\" junos:style=\"detail\">\n <instance-core>\n <instance-name>master</instance-name>\n <router-id>10.208.133.147</router-id>\n <instance-type>forwarding</instance-type>\n <instance-state>Active</instance-state>\n <instance-rib>\n <irib-name>inet.0</irib-name>\n <irib-route-count>18</irib-route-count>\n <irib-active-count>18</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6.0</irib-name>\n <irib-route-count>1</irib-route-count>\n <irib-active-count>1</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n </instance-core>\n </instance-information>\n \"\"\"\n\n self.response[\"HA_SINGLE_INSTANCE_EXTENSIVE\"] = \"\"\"\n <instance-information xmlns=\"http://xml.juniper.net/junos/18.1I0/junos-routing\" junos:style=\"detail\">\n <instance-core>\n <instance-name>master</instance-name>\n <router-id>10.208.133.147</router-id>\n <instance-type>forwarding</instance-type>\n <instance-state>Active</instance-state>\n <instance-rib>\n <irib-name>inet.0</irib-name>\n <irib-route-count>20</irib-route-count>\n <irib-active-count>20</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet.1</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet.2</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet.3</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>iso.0</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n 
<irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>mpls.0</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>__mpls-oam__.mpls.0</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6.0</irib-name>\n <irib-route-count>5</irib-route-count>\n <irib-active-count>5</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6.1</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6.2</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6.3</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>l2circuit.0</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>mdt.0</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>l2protection.0</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>lsdist.0</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>lsdist.1</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inetcolor.0</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6color.0</irib-name>\n <irib-route-count>0</irib-route-count>\n <irib-active-count>0</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n </instance-core>\n </instance-information>\n \"\"\"\n\n self.response[\"HA_SINGLE_INSTANCE_SUMMARY\"] = \"\"\"\n <instance-information xmlns=\"http://xml.juniper.net/junos/18.1I0/junos-routing\" junos:style=\"terse\">\n <instance-core>\n 
<instance-name>master</instance-name>\n <instance-type>forwarding</instance-type>\n <instance-rib>\n <irib-name>inet.0</irib-name>\n <irib-active-count>22</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n <instance-rib>\n <irib-name>inet6.0</irib-name>\n <irib-active-count>5</irib-active-count>\n <irib-holddown-count>0</irib-holddown-count>\n <irib-hidden-count>0</irib-hidden-count>\n </instance-rib>\n </instance-core>\n </instance-information>\n \"\"\"\n\n self.response[\"SA_INSTANCE_TEXT\"] = \"\"\"\nInstance Type\n Primary RIB Active/holddown/hidden\nmaster forwarding\n inet.0 18/0/0\n\n__juniper_private1__ forwarding\n __juniper_private1__.inet.0 6/0/0\n\n__juniper_private2__ forwarding\n __juniper_private2__.inet.0 0/0/1\n\n__juniper_private3__ forwarding\n\n__juniper_private4__ forwarding\n __juniper_private4__.inet.0 2/0/0\n\n__master.anon__ forwarding\n \"\"\"", "def mock_dataset_with_cache_dir():\n conf_file, working_dir = _create_temp_work_dir()\n with patch.object(Configuration, 'find_default_config', lambda self: conf_file):\n im = InventoryManager(conf_file)\n ds = im.create_dataset(USERNAME, USERNAME, 'dataset-1', description=\"my dataset 1\",\n storage_type=\"gigantum_object_v1\")\n\n yield ds, working_dir, ds.git.repo.head.commit.hexsha\n shutil.rmtree(working_dir)", "def fixture_pandy():\n yield Person(name=\"Pandy\", age=12, hobbies=[\"Fortnite\"])", "def fakedata():\n if User.query.filter_by(email='chair@conferency.com').first():\n print ('fake data already generated')\n else:\n generate_test_confs() # load testing confs and tracks\n generate_fake_tickets() # create fake tickets\n generate_test_users() # create named fake users\n # generate_fake_users(100) # create random users\n # add_self_follows() # create self-follows for all users\n generate_fake_papers(100) # create random papers\n generate_fake_reviews() # create random reviews\n generate_fake_transactions() # create fake tickets\n generate_fake_schedule()\n generate_default_addons()", "def setUp(self):\r\n super(TestDuplicateItem, self).setUp()\r\n # Create a parent chapter (for testing children of children).\r\n resp = self.create_xblock(parent_usage_key=self.usage_key, category='chapter')\r\n self.chapter_usage_key = self.response_usage_key(resp)\r\n\r\n # create a sequential containing a problem and an html component\r\n resp = self.create_xblock(parent_usage_key=self.chapter_usage_key, category='sequential')\r\n self.seq_usage_key = self.response_usage_key(resp)\r\n\r\n # create problem and an html component\r\n resp = self.create_xblock(parent_usage_key=self.seq_usage_key, category='problem', boilerplate='multiplechoice.yaml')\r\n self.problem_usage_key = self.response_usage_key(resp)\r\n\r\n resp = self.create_xblock(parent_usage_key=self.seq_usage_key, category='html')\r\n self.html_usage_key = self.response_usage_key(resp)\r\n\r\n # Create a second sequential just (testing children of children)\r\n self.create_xblock(parent_usage_key=self.chapter_usage_key, category='sequential2')", "def setUp(self):\n\n # Json response\n self.json_pass_times = {\n \"message\": \"success\",\n \"request\": {\n \"altitude\": 100,\n \"datetime\": 1481418788,\n \"latitude\": 15.0,\n \"longitude\": 20.0,\n \"passes\": 5\n },\n \"response\": [\n {\n \"duration\": 348,\n \"risetime\": 1481448840\n },\n {\n \"duration\": 634,\n \"risetime\": 1481454465\n },\n {\n \"duration\": 220,\n \"risetime\": 1481460482\n },\n {\n \"duration\": 224,\n 
\"risetime\": 1481484335\n },\n {\n \"duration\": 640,\n \"risetime\": 1481489937\n }\n ]\n }\n\n self.location = self.json_pass_times['response']\n\n #HTTP Mock\n @all_requests\n def correct_response(url, request):\n headers = {'content-type': 'application/json',\n 'Set-Cookie': 'foo=bar;'}\n return response(200, self.json_pass_times, headers, None, 5,\n request)\n self.http_correct = correct_response\n\n @all_requests\n def wrong_response(url, request):\n headers = {'content-type': 'application/json',\n 'Set-Cookie': 'foo=bar;'}\n return response(403, self.json_pass_times, headers, None, 5,\n request)\n self.http_wrong = wrong_response\n\n self.iss = pyiss.ISS()", "def parse(self, response):\n\n # Create an instance of class 'HouseOfIndyaItem' (located in items.py)\n items = HouseOfIndyaItem()\n\n # The collection of necklace_set available\n necklace_set = response.css('#JsonProductList')\n\n # The total number of necklace_set available in the store\n total_items = int(response.css('.totalRecords::text').extract()[0])\n\n for i in range(total_items):\n # Description of the necklace set\n description = necklace_set.css('p::text')[i].extract()\n\n # Price of the necklace set\n price = necklace_set.css('span:nth-child(1)::text')[i].extract()\n\n # URL of the image of necklace set\n image_url = necklace_set.css(\n '.lazy::attr(data-original)')[i].extract()\n\n # Store in 'items' instance\n items['description'] = description\n items['price'] = price\n items['image_url'] = image_url\n\n yield items", "def CreateResponse(self, name, langName, isValidIntl, resetLangCookie):\n # see if we have the page in the memcache\n logging.info('PROCESSING %s langName [%s] isValidIntl [%s] resetLang [%s]', \n name, langName, isValidIntl, resetLangCookie)\n resp_data = self.GetFromCache(name)\n if resp_data is None:\n logging.info(' Cache miss for %s', name)\n resp_data = self.GetFromNegativeCache(name)\n if resp_data is None:\n resp_data = self.GetFromStore(name)\n\n # IF we have the file, put it in the memcache\n # ELSE put it in the negative cache\n if resp_data is not None:\n self.StoreOrUpdateInCache(name, resp_data)\n elif isValidIntl:\n # couldn't find the intl doc. 
Try to fall through to English.\n #logging.info(' Retrying with base uri...')\n return False\n else:\n logging.info(' Adding %s to negative cache, serving 404', name)\n self.StoreInNegativeCache(name)\n self.Write404Error()\n return True\n else:\n # found it in negative cache\n self.Write404Error()\n return True\n\n # found content from cache or store\n logging.info('FOUND CLEAN')\n if resetLangCookie:\n logging.info(' Resetting android_developer_pref_lang cookie to [%s]',\n langName)\n expireDate = time.mktime(localtime()) + 60 * 60 * 24 * 365 * 10\n self.response.headers.add_header('Set-Cookie', \n 'android_developer_pref_lang=%s; path=/; expires=%s' % \n (langName, strftime(\"%a, %d %b %Y %H:%M:%S\", localtime(expireDate))))\n mustRevalidate = False\n if ('.html' in name):\n # revalidate html files -- workaround for cache inconsistencies for \n # negotiated responses\n mustRevalidate = True\n #logging.info(' Adding [Vary: Cookie] to response...')\n self.response.headers.add_header('Vary', 'Cookie')\n content_type, encoding = mimetypes.guess_type(name)\n if content_type:\n self.response.headers['Content-Type'] = content_type\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n elif (name == 'favicon.ico'):\n self.response.headers['Content-Type'] = 'image/x-icon'\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n elif name.endswith('.psd'):\n self.response.headers['Content-Type'] = 'application/octet-stream'\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n return True", "def test_page_object_caching(self):\r\n settings = get_settings(filenames={})\r\n settings['CACHE_PATH'] = self.temp_cache\r\n settings['CONTENT_CACHING_LAYER'] = 'generator'\r\n settings['READERS'] = {'asc': None}\r\n\r\n generator = PagesGenerator(\r\n context=settings.copy(), settings=settings,\r\n path=CONTENT_DIR, theme=settings['THEME'], output_path=None)\r\n generator.generate_context()\r\n self.assertTrue(hasattr(generator, '_cache'))\r\n\r\n generator = PagesGenerator(\r\n context=settings.copy(), settings=settings,\r\n path=CONTENT_DIR, theme=settings['THEME'], output_path=None)\r\n generator.readers.read_file = MagicMock()\r\n generator.generate_context()\r\n generator.readers.read_file.assert_called_count == 0", "def populate_fixtures():\n languages()\n words()", "def parse(self, response):\r\n recipes = json.loads(response.text)['response']['results']\r\n # test json data\r\n # fp = open(\"./food.json\", \"w\", encoding=\"utf-8\")\r\n # json.dump(recipes, fp=fp, ensure_ascii=False)\r\n for recipe in recipes:\r\n if recipe['record_type'] == 'Recipe':\r\n item = RecipespidersItem()\r\n\r\n self.recipe_count += 1\r\n item['id'] = self.recipe_count\r\n item['name'] = recipe['main_title']\r\n item['description'] = recipe['main_description']\r\n\r\n item['rating_num'] = int(recipe['main_num_ratings'])\r\n item['rating_star'] = int(recipe['main_rating_mapping'])\r\n item['rating_score'] = float(recipe['main_rating'])\r\n\r\n item['total_time'] = int(recipe['recipe_totaltime'])\r\n\r\n if recipe.get('recipe_photo_url') is None:\r\n continue\r\n else:\r\n item['photo_url'] = recipe['recipe_photo_url']\r\n\r\n item['record_url'] = recipe['record_url']\r\n\r\n yield scrapy.Request(url=recipe['record_url'], callback=self.parse_detail, meta={'item': item})\r\n\r\n # process remaining pages\r\n if self.page_num <= 21000:\r\n print(self.page_num)\r\n new_url = format(self.base_url % self.page_num)\r\n self.page_num += 1\r\n\r\n yield 
scrapy.Request(url=new_url, callback=self.parse)", "def construct_fixtures_tweets():\n\n # Declares today's date\n today = str(datetime.date.today())\n\n # Gets today's fixtures data from football-data.org API\n connection = http.client.HTTPConnection('api.football-data.org')\n headers = {'X-Auth-Token': ''}\n connection.request('GET', '/v2/competitions/PL/matches?dateFrom='+today+'&dateTo='+today, None, headers)\n response = json.loads(connection.getresponse().read().decode())\n\n # Initialises fixtures tweet\n tweet1 = \"Today's #PremierLeague matches:\\n\"\n tweet2 = \"\"\n tweet3 = \"\"\n\n # Checks if any fixtures on today\n if response['matches']:\n # For each fixture obtained, appends line to tweet with information\n for i in range(len(response['matches'])):\n time = response['matches'][i]['utcDate']\n utc = datetime.datetime.strptime(time, '%Y-%m-%dT%H:%M:%SZ')\n gmt = pytz.timezone(\"Europe/London\").fromutc(utc)\n ko_time = gmt.strftime(\"%H:%M\")\n tweet_line = response['matches'][i]['homeTeam']['name'] + ' vs ' + response['matches'][i]['awayTeam'][\n 'name'] + ' (' + ko_time + ')' + '\\n'\n # Checks that tweet will not be too long (~ >280 chars), by splitting into separate tweets\n if len(tweet1) >= 220:\n tweet2 += tweet_line\n elif len(tweet2) >= 220:\n tweet3 += tweet_line\n else:\n tweet1 += tweet_line\n return send_fixtures_tweets(tweet1, tweet2, tweet3)\n else:\n return print('No PL fixtures today')", "def yield_item(self, response):\n item = BrobotBotsItem()\n item.update(self.data)\n yield item", "def yield_item(self, response):\n item = BrobotBotsItem()\n item.update(self.data)\n yield item", "def code_builder(request, tmp_path_factory) -> dataset_builder.DatasetBuilder:\n tmp_path = tmp_path_factory.mktemp('tfds_datasets') # Temporary data_dir\n builder_cls = request.param\n # Generate the dataset (only once for all tests as scope == 'module').\n builder = builder_cls(data_dir=tmp_path)\n builder.download_and_prepare()\n\n # Update the default DATA_DIR during the test.\n with mock.patch.object(constants, 'DATA_DIR', str(tmp_path)):\n yield builder", "def discovery_data(request):\n file = request.param\n p = Path(file)\n if not p.is_absolute():\n p = Path(__file__).parent / \"fixtures\" / file\n\n with open(p) as f:\n return json.load(f)", "def fixtures():\n temp_path = os.path.join(os.path.dirname(__file__), 'temp')\n demo_files_path = os.path.join(os.path.dirname(__file__), 'demo_files')\n\n # Create location\n loc = Location(name='local', uri=temp_path, default=True)\n db.session.add(loc)\n db.session.commit()\n\n # Example files from the data folder\n demo_files = (\n 'markdown.md',\n 'csvfile.csv',\n 'zipfile.zip',\n 'jsonfile.json',\n 'xmlfile.xml',\n 'notebook.ipynb',\n 'jpgfile.jpg',\n 'pngfile.png',\n )\n\n rec_uuid = uuid4()\n provider = RecordIdProvider.create(object_type='rec', object_uuid=rec_uuid)\n data = {\n 'pid_value': provider.pid.pid_value,\n }\n\n record = Record.create(data, id_=rec_uuid)\n bucket = Bucket.create()\n RecordsBuckets.create(record=record.model, bucket=bucket)\n\n # Add files to the record\n for f in demo_files:\n with open(os.path.join(demo_files_path, f), 'rb') as fp:\n record.files[f] = fp\n\n record.files.flush()\n record.commit()\n db.session.commit()", "def mocked_requests_scrapping_get(*args, **kwargs):\n class MockResponse:\n def __init__(self, json_data, status_code, url):\n self.content = json_data\n self.status_code = status_code\n self.url = url\n self.cookies = {\"JSESSIONID\": \"jkghhjgjhgfjgfgjg\"}\n self.encoding = 
\"utf-8\"\n\n def json(self):\n return self.json_data\n\n dn = os.path.dirname(os.path.realpath(__file__))\n for url, provider in {f\"{settings.BASE_URL}/eAnnuaire/formulaire?appelRetour=true\": \"form\",\n f\"{settings.BASE_URL}/eAnnuaire/resultat\": \"suivant\",\n f\"{settings.BASE_URL}/eAnnuaire/fiche\": \"detail\"}.items():\n if args[0].startswith(url):\n with open(os.path.join(dn, \"fixtures\", f\"{provider}.html\"), \"rb\") as fp:\n return MockResponse(fp.read(), 200, args[0])", "def setUp(self):\n self.directory = tempfile.TemporaryDirectory()\n self.dataset = self.dataset_cls(cache_root=self.directory.name)", "def get_items_from_response(self, response):\n raise NotImplementedError", "def fixture_example_data():\n import_example_data()", "def _generate_data(self, codec='deflate'):\n _logger.info('generating fake data')\n (desc, path) = mkstemp()\n os.close(desc)\n os.remove(path)\n try:\n call([\n 'node', osp.join(DPATH, os.pardir, os.pardir, 'scripts', 'random'),\n self.path, str(self.n_records), path\n ])\n yield path\n finally:\n if osp.exists(path):\n os.remove(path)", "def receiver():\n def generate(entities_to_proceed):\n \"\"\"Process list of entities populating them with altitude data\"\"\"\n yield \"[\"\n for index, entity in enumerate(entities_to_proceed):\n if logging.getLogger().isEnabledFor(logging.DEBUG):\n logging.debug(\"processing entity : %s\", entity)\n else:\n logging.info(\"processing entity : %s\", entity.get(GUID_STR))\n\n if index > 0:\n yield \",\"\n booking_guid = entity.get(GUID_STR)\n iata = entity.get(IATA_STR)\n api_key = resolve_api_key(API_KEYS, iata)\n\n if not isinstance(api_key, str):\n entity[PROP] = []\n yield json.dumps(entity)\n continue\n url = URL_TEMPLATE.render(entity) + booking_guid + \"?api_key=\" + api_key\n if METHOD == \"get\":\n entity[PROP] = requests.get(url, headers=HEADERS).json()\n else:\n entity[PROP] = requests.request(METHOD, url, data=entity.get(\"payload\"),\n headers=HEADERS).json()\n yield json.dumps(entity)\n yield \"]\"\n\n # get entities from request\n entities = request.get_json()\n\n # create the response\n logging.debug(\"Processing %i entities\", len(entities))\n return Response(generate(entities), mimetype='application/json')", "def simulate_fixtures(x_width = 36, y_height = 18, total = 250, x_offset = 0):\n locations = make_locations(x_width, y_height, total, x_offset)\n fixtures = []\n i, j = 0, 0\n count = 0\n for grid_loc in locations:\n strand = int(count >= total/2)\n address = j if strand else i\n pixels = 1\n data = {\"strand\": strand,\n \"address\": address,\n \"pixels\": pixels,\n \"pos1\": map_loc_to_pixel(grid_loc),\n \"pos2\": map_loc_to_pixel(grid_loc),\n \"grid_loc\": grid_loc}\n fixtures.append(Fixture(data))\n if not strand:\n i += 1\n else:\n j += 1\n count += 1\n return fixtures", "def CreateResponse(self, name, langName, isValidIntl, resetLangCookie):\n # see if we have the page in the memcache\n logging.info('PROCESSING %s langName [%s] isValidIntl [%s] resetLang [%s]',\n name, langName, isValidIntl, resetLangCookie)\n resp_data = self.GetFromCache(name)\n if resp_data is None:\n logging.info(' Cache miss for %s', name)\n resp_data = self.GetFromNegativeCache(name)\n if resp_data is None:\n resp_data = self.GetFromStore(name)\n\n # IF we have the file, put it in the memcache\n # ELSE put it in the negative cache\n if resp_data is not None:\n self.StoreOrUpdateInCache(name, resp_data)\n elif isValidIntl:\n # couldn't find the intl doc. 
Try to fall through to English.\n #logging.info(' Retrying with base uri...')\n return False\n else:\n logging.info(' Adding %s to negative cache, serving 404', name)\n self.StoreInNegativeCache(name)\n self.Write404Error()\n return True\n else:\n # found it in negative cache\n self.Write404Error()\n return True\n\n # found content from cache or store\n logging.info('FOUND CLEAN')\n if resetLangCookie:\n logging.info(' Resetting android_developer_pref_lang cookie to [%s]',\n langName)\n expireDate = time.mktime(localtime()) + 60 * 60 * 24 * 365 * 10\n self.response.headers.add_header('Set-Cookie',\n 'android_developer_pref_lang=%s; path=/; expires=%s' %\n (langName, strftime(\"%a, %d %b %Y %H:%M:%S\", localtime(expireDate))))\n mustRevalidate = False\n if ('.html' in name):\n # revalidate html files -- workaround for cache inconsistencies for\n # negotiated responses\n mustRevalidate = True\n #logging.info(' Adding [Vary: Cookie] to response...')\n self.response.headers.add_header('Vary', 'Cookie')\n content_type, encoding = mimetypes.guess_type(name)\n if content_type:\n self.response.headers['Content-Type'] = content_type\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n elif (name == 'favicon.ico'):\n self.response.headers['Content-Type'] = 'image/x-icon'\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n elif name.endswith('.psd'):\n self.response.headers['Content-Type'] = 'application/octet-stream'\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n elif name.endswith('.svg'):\n self.response.headers['Content-Type'] = 'image/svg+xml'\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n elif name.endswith('.mp4'):\n self.response.headers['Content-Type'] = 'video/mp4'\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n elif name.endswith('.webm'):\n self.response.headers['Content-Type'] = 'video/webm'\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n elif name.endswith('.ogv'):\n self.response.headers['Content-Type'] = 'video/ogg'\n self.SetCachingHeaders(mustRevalidate)\n self.response.out.write(resp_data)\n return True", "def test_task_export_tasks_ckan_without_resources(self, mock1):\r\n mocks = [Mock()]\r\n package = dict(id=3, resources=[])\r\n mocks[0].package_exists.return_value = (package, None)\r\n mocks[0].package_update.return_value = package\r\n mocks[0].resource_create.return_value = dict(result=dict(id=3))\r\n mocks[0].datastore_create.return_value = 'datastore'\r\n mocks[0].datastore_upsert.return_value = 'datastore'\r\n\r\n\r\n mock1.side_effect = mocks\r\n\r\n Fixtures.create()\r\n user = db.session.query(User).filter_by(name=Fixtures.name).first()\r\n app = db.session.query(App).first()\r\n user.ckan_api = 'ckan-api-key'\r\n app.owner_id = user.id\r\n db.session.add(user)\r\n db.session.add(app)\r\n db.session.commit()\r\n\r\n self.signin(email=user.email_addr, password=Fixtures.password)\r\n # First test for a non-existant app\r\n uri = '/app/somethingnotexists/tasks/export'\r\n res = self.app.get(uri, follow_redirects=True)\r\n assert res.status == '404 NOT FOUND', res.status\r\n # Now get the tasks in CKAN format\r\n uri = \"/app/somethingnotexists/tasks/export?type=task&format=ckan\"\r\n res = self.app.get(uri, follow_redirects=True)\r\n assert res.status == '404 NOT FOUND', res.status\r\n\r\n # Now with a real app\r\n uri = '/app/%s/tasks/export' % Fixtures.app_short_name\r\n res = 
self.app.get(uri, follow_redirects=True)\r\n heading = \"<strong>%s</strong>: Export All Tasks and Task Runs\" % Fixtures.app_name\r\n assert heading in res.data, \"Export page should be available\\n %s\" % res.data\r\n # Now get the tasks in CKAN format\r\n uri = \"/app/%s/tasks/export?type=task&format=ckan\" % Fixtures.app_short_name\r\n #res = self.app.get(uri, follow_redirects=True)\r\n with patch.dict(self.flask_app.config, {'CKAN_URL': 'http://ckan.com'}):\r\n # First time exporting the package\r\n res = self.app.get(uri, follow_redirects=True)\r\n msg = 'Data exported to http://ckan.com'\r\n err_msg = \"Tasks should be exported to CKAN\"\r\n assert msg in res.data, err_msg", "def test_response(self):\n for i, response in enumerate(RESPONSES):\n with self.subTest(i=i):\n self.assertDictContainsSubset(response, dict(self.responses[i].data))", "def mock_dataset_with_manifest(mock_dataset_with_cache_dir):\n m = Manifest(mock_dataset_with_cache_dir[0], USERNAME)\n m.link_revision()\n\n # yield dataset, manifest, working_dir\n yield mock_dataset_with_cache_dir[0], m, mock_dataset_with_cache_dir[1]", "def test_cache_cleanup(self):\n page, page_2 = self.get_pages()\n\n # Assign tags to title\n title_tags = models.TitleTags.objects.create(extended_object=page.get_title_obj(\"en\"))\n title_tags.tags.add(*self.tag_strings)\n page_tags = models.PageTags.objects.create(extended_object=page)\n page_tags.tags.add(*self.tag_strings)\n for lang in self.languages:\n page.publish(lang)\n\n site_id = page.node.site_id\n\n # Reload page from request and extract tags from it\n request = self.get_request(page, \"en\")\n title_tags_list = get_title_tags_from_request(request, page.get_public_object().pk, \"en\", site_id)\n page_tags_list = get_page_tags_from_request(request, page.get_public_object().pk, \"en\", site_id)\n\n try:\n site_id = page.get_public_object().node.site_id\n except AttributeError: # CMS_3_4\n site_id = page.get_public_object().site_id\n title_key = get_cache_key(None, page.get_public_object(), \"en\", site_id, True)\n page_key = get_cache_key(None, page.get_public_object(), \"\", site_id, False)\n\n title_cache = cache.get(title_key)\n page_cache = cache.get(page_key)\n\n self.assertEqual(set(title_tags_list), set(title_cache))\n self.assertEqual(set(page_tags_list), set(page_cache))\n\n page.get_public_object().get_title_obj(\"en\").delete()\n self.assertIsNone(cache.get(title_key))\n\n page.get_public_object().delete()\n self.assertIsNone(cache.get(page_key))", "def _generate_examples(self, filepath, split):\r\n if self.config.name == \"trex\":\r\n paths = filepath\r\n relations_path = paths[0]\r\n paths = paths[1:]\r\n all_rels = {}\r\n with open(relations_path, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n all_rels[data[\"relation\"]] = data\r\n id_ = -1\r\n for filepath in paths:\r\n with open(filepath, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n pred = all_rels.get(data[\"predicate_id\"], {})\r\n for evidences in data[\"evidences\"]:\r\n id_ += 1\r\n yield id_, {\r\n \"uuid\": str(data[\"uuid\"]),\r\n \"obj_uri\": str(data[\"obj_uri\"]),\r\n \"obj_label\": str(data[\"obj_label\"]),\r\n \"sub_uri\": str(data[\"sub_uri\"]),\r\n \"sub_label\": str(data[\"sub_label\"]),\r\n \"predicate_id\": str(data[\"predicate_id\"]),\r\n \"sub_surface\": str(evidences[\"sub_surface\"]),\r\n \"obj_surface\": str(evidences[\"obj_surface\"]),\r\n \"masked_sentence\": str(evidences[\"masked_sentence\"]),\r\n \"template\": 
str(pred.get(\"template\", \"\")),\r\n \"template_negated\": str(pred.get(\"template_negated\", \"\")),\r\n \"label\": str(pred.get(\"label\", \"\")),\r\n \"description\": str(pred.get(\"description\", \"\")),\r\n \"type\": str(pred.get(\"type\", \"\")),\r\n }\r\n elif self.config.name == \"conceptnet\":\r\n id_ = -1\r\n with open(filepath, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n if data.get(\"negated\") is not None:\r\n for masked_sentence, negated in zip(data[\"masked_sentences\"], data[\"negated\"]):\r\n id_ += 1\r\n yield id_, {\r\n \"uuid\": str(data[\"uuid\"]),\r\n \"sub\": str(data.get(\"sub\", \"\")),\r\n \"obj\": str(data.get(\"obj\", \"\")),\r\n \"pred\": str(data[\"pred\"]),\r\n \"obj_label\": str(data[\"obj_label\"]),\r\n \"masked_sentence\": str(masked_sentence),\r\n \"negated\": str(negated),\r\n }\r\n else:\r\n for masked_sentence in data[\"masked_sentences\"]:\r\n id_ += 1\r\n yield id_, {\r\n \"uuid\": str(data[\"uuid\"]),\r\n \"sub\": str(data.get(\"sub\", \"\")),\r\n \"obj\": str(data.get(\"obj\", \"\")),\r\n \"pred\": str(data[\"pred\"]),\r\n \"obj_label\": str(data[\"obj_label\"]),\r\n \"masked_sentence\": str(masked_sentence),\r\n \"negated\": str(\"\"),\r\n }\r\n elif self.config.name == \"squad\":\r\n id_ = -1\r\n with open(filepath, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n for masked_sentence in data[\"masked_sentences\"]:\r\n id_ += 1\r\n yield id_, {\r\n \"id\": str(data[\"id\"]),\r\n \"sub_label\": str(data[\"sub_label\"]),\r\n \"obj_label\": str(data[\"obj_label\"]),\r\n \"negated\": str(data.get(\"negated\", \"\")),\r\n \"masked_sentence\": str(masked_sentence),\r\n }\r\n elif self.config.name == \"google_re\":\r\n id_ = -1\r\n paths = filepath\r\n for filepath in paths:\r\n # from https://github.com/facebookresearch/LAMA/blob/master/scripts/run_experiments.py\r\n if \"place_of_birth\" in filepath:\r\n pred = {\r\n \"relation\": \"place_of_birth\",\r\n \"template\": \"[X] was born in [Y] .\",\r\n \"template_negated\": \"[X] was not born in [Y] .\",\r\n }\r\n elif \"date_of_birth\" in filepath:\r\n pred = {\r\n \"relation\": \"date_of_birth\",\r\n \"template\": \"[X] (born [Y]).\",\r\n \"template_negated\": \"[X] (not born [Y]).\",\r\n }\r\n else:\r\n pred = {\r\n \"relation\": \"place_of_death\",\r\n \"template\": \"[X] died in [Y] .\",\r\n \"template_negated\": \"[X] did not die in [Y] .\",\r\n }\r\n with open(filepath, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n for masked_sentence in data[\"masked_sentences\"]:\r\n id_ += 1\r\n yield id_, {\r\n \"pred\": str(data[\"pred\"]),\r\n \"sub\": str(data[\"sub\"]),\r\n \"obj\": str(data[\"obj\"]),\r\n \"evidences\": str(data[\"evidences\"]),\r\n \"judgments\": str(data[\"judgments\"]),\r\n \"sub_w\": str(data[\"sub_w\"]),\r\n \"sub_label\": str(data[\"sub_label\"]),\r\n \"sub_aliases\": str(data[\"sub_aliases\"]),\r\n \"obj_w\": str(data[\"obj_w\"]),\r\n \"obj_label\": str(data[\"obj_label\"]),\r\n \"obj_aliases\": str(data[\"obj_aliases\"]),\r\n \"uuid\": str(data[\"uuid\"]),\r\n \"masked_sentence\": str(masked_sentence),\r\n \"template\": str(pred[\"template\"]),\r\n \"template_negated\": str(pred[\"template_negated\"]),\r\n }", "def load_and_cache_examples(args, tokenizer, split, task_name, model_type, predictions=None):\n processor = MoralStoriesProcessor()\n if task_name != 'consequence|action+context_genref':\n args.data_dir = os.path.join(args.original_data_dir, task_name, args.split_name)\n else:\n 
args.data_dir = os.path.join(args.original_data_dir, 'consequence|action+context_gen', args.split_name)\n\n # Get features\n logger.info('Creating features from dataset file at %s', args.data_dir)\n label_list = processor.get_labels()\n if split == 'train':\n examples = processor.get_train_examples(args.data_dir)\n elif split == 'dev':\n examples = processor.get_dev_examples(args.data_dir)\n elif split == 'test':\n examples = processor.get_test_examples(args.data_dir)\n else:\n raise Exception('split value should be in [train, dev, test]')\n\n # Replace gold sequences with model predictions\n if predictions is not None:\n if type(predictions[0]) != tuple:\n all_predictions = [tuple(predictions)]\n else:\n all_predictions = predictions\n extended_examples = list()\n\n for predictions in all_predictions:\n if predictions[0] == 'consequences':\n if len(all_predictions) == 1:\n # Remove negative examples\n positive_examples = list()\n for ex in examples:\n if ex.label == '1':\n positive_examples.append(ex)\n examples = positive_examples\n\n for pr_id, pr in enumerate(predictions[1]):\n ex = examples[pr_id]\n if ex.moral_consequence is not None:\n if len(all_predictions) == 1:\n ex.moral_consequence = pr\n else:\n ex.moral_consequence_draft = pr\n else:\n if len(all_predictions) == 1:\n ex.immoral_consequence = pr\n else:\n ex.immoral_consequence_draft = pr\n extended_examples.append(ex)\n examples = extended_examples\n extended_examples = list()\n\n if predictions[0] == 'consequence_labels':\n for pr_id, pr in enumerate(predictions[1]):\n ex = examples[pr_id]\n if ex.moral_consequence_draft is not None:\n if pr == 1:\n ex.moral_consequence_draft = ex.moral_consequence_draft + ' ' + '<|CSQ_TRUE|>'\n else:\n ex.moral_consequence_draft = ex.moral_consequence_draft + ' ' + '<|CSQ_FALSE|>'\n else:\n if pr == 0:\n ex.immoral_consequence_draft = ex.immoral_consequence_draft + ' ' + '<|CSQ_TRUE|>'\n else:\n ex.immoral_consequence_draft = ex.immoral_consequence_draft + ' ' + '<|CSQ_FALSE|>'\n extended_examples.append(ex)\n examples = extended_examples\n extended_examples = list()\n\n # Generate features; target task is classification\n pad_token_id = tokenizer.convert_tokens_to_ids([tokenizer.pad_token])[0]\n if pad_token_id is None:\n pad_token_id = tokenizer.convert_tokens_to_ids([tokenizer.eos_token])[0]\n features = convert_examples_to_features(examples,\n label_list,\n args.max_seq_length,\n args.max_gen_length,\n tokenizer,\n task_name,\n model_type,\n TASK_DICT[task_name],\n cls_token_at_end=False,\n cls_token=tokenizer.cls_token,\n sep_token=tokenizer.sep_token,\n sep_token_extra=bool(model_type in ['roberta']),\n cls_token_segment_id=0,\n pad_on_left=False,\n pad_token=pad_token_id,\n pad_token_segment_id=0,\n is_eval=split == 'test',\n fit_to_max_corpus_len=True)\n\n # Make feature tensors\n all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)\n all_input_mask = torch.tensor([f.input_mask for f in features], dtype=torch.long)\n all_segment_ids = torch.tensor([f.segment_ids for f in features], dtype=torch.long)\n all_label_ids = torch.tensor([f.label_ids for f in features], dtype=torch.long)\n if 'gen' in task_name:\n all_label_masks = torch.tensor([f.label_mask for f in features], dtype=torch.long)\n all_gen_prompts = torch.tensor([f.gen_prompt_id for f in features], dtype=torch.long)\n dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids,\n all_label_ids, all_label_masks, all_gen_prompts)\n else:\n dataset = TensorDataset(all_input_ids, 
all_input_mask, all_segment_ids, all_label_ids)\n\n return dataset", "def test_generate_diff_download(self, mock_response, mock_request, mock_test_result_file):\n from mod_test.controllers import generate_diff\n\n mock_request.accept_mimetypes.best = 'application/json'\n\n response = generate_diff(1, 1, 1, to_view=0)\n\n self.assertTrue(response, mock_response())", "def test_cache_datastore_manifests(self, cache_audio: bool):\n # Data setup\n random_seed = 42\n sample_rate = 16000\n num_examples = 10\n num_manifests = 2\n data_duration = 1.0\n\n # Generate random signals\n _rng = np.random.default_rng(seed=random_seed)\n\n # Input and target signals have the same duration\n data_duration_samples = int(data_duration * sample_rate)\n\n with tempfile.TemporaryDirectory() as test_dir:\n test_store_dir = os.path.join(test_dir, 'store')\n os.mkdir(test_store_dir)\n\n # Prepare metadata and audio files\n manifest_filepaths = []\n audio_files = []\n for m in range(num_manifests):\n manifest_dir = os.path.join(test_store_dir, f'manifest_{m}')\n os.mkdir(manifest_dir)\n manifest_filepath = os.path.join(manifest_dir, 'manifest.json')\n\n metadata = []\n data = _rng.uniform(low=-0.5, high=0.5, size=(data_duration_samples, num_examples))\n for n in range(num_examples):\n audio_filepath = f'manifest_{m}_audio_{n:02d}.wav'\n audio_file = os.path.join(manifest_dir, audio_filepath)\n # Write audio file\n sf.write(audio_file, data[:, n], sample_rate, 'float')\n # Update metadata\n metadata.append(\n {\n 'audio_filepath': audio_filepath,\n 'duration': data_duration,\n 'text': f'text for example {n:02d}',\n }\n )\n # Update audio files\n audio_files.append(audio_file)\n\n # Save manifest\n write_manifest(manifest_filepath, metadata)\n manifest_filepaths.append(manifest_filepath)\n\n # Cache location\n test_cache_dir = os.path.join(test_dir, 'cache')\n\n # Instead of using AIS, copy object from store dir to cache dir\n def fake_get(self):\n # Object path relative to store path\n object_path = os.path.relpath(self.store_path, start=test_store_dir)\n # Copy to fake local path\n self._local_path = os.path.join(test_cache_dir, object_path)\n os.makedirs(os.path.dirname(self.local_path), exist_ok=True)\n shutil.copy(self.store_path, self.local_path)\n # Return path as in the original get\n return self.local_path\n\n with mock.patch(\n 'nemo.collections.asr.data.audio_to_text.is_datastore_path', lambda x: True\n ), mock.patch.object(DataStoreObject, 'get', fake_get):\n # Use a single worker for this test to avoid failure with mock & multiprocessing (#5607)\n cache_datastore_manifests(manifest_filepaths, cache_audio=cache_audio, num_workers=1)\n\n # Manifests need to be compared\n store_files_to_compare = manifest_filepaths\n if cache_audio:\n # Audio needs to be compared\n store_files_to_compare += audio_files\n\n # Compare files\n for f_store in store_files_to_compare:\n f_cache = os.path.join(test_cache_dir, os.path.relpath(f_store, test_store_dir))\n assert filecmp.cmp(f_store, f_cache, shallow=False), f'Files {f_store} and {f_cache} do not match.'", "def json_frame_specification_fixture(\n movie_path_list_fixture, tmpdir_factory, frame_list_fixture, random_seed_fixture\n):\n\n params = dict()\n\n for ii, movie_path in enumerate(movie_path_list_fixture):\n this_params = dict()\n this_params[\"path\"] = movie_path\n this_params[\"frames\"] = frame_list_fixture[ii]\n this_params[\"mean\"] = (ii + 1) * 2.1\n this_params[\"std\"] = (ii + 1) * 3.4\n params[str(ii)] = this_params\n\n tmpdir = 
tmpdir_factory.mktemp(\"frame_specification\")\n json_path = tempfile.mkstemp(\n dir=tmpdir, prefix=\"frame_specification_params_\", suffix=\".json\"\n )[1]\n with open(json_path, \"w\") as out_file:\n out_file.write(json.dumps(params))\n\n # now construct the input and output frames that\n # we expect this generator to yield\n expected_output_frames = []\n expected_input_frames = []\n\n path_to_data = dict()\n for movie_path in movie_path_list_fixture:\n with h5py.File(movie_path, \"r\") as in_file:\n data = in_file[\"data\"][()]\n path_to_data[movie_path] = data\n\n # replicate shuffling that happens inside the generator\n rng = np.random.default_rng(random_seed_fixture)\n index_list = list(range(len(movie_path_list_fixture)))\n # rng.shuffle(index_list)\n\n for ii in index_list:\n for i_frame in range(len(frame_list_fixture[ii])):\n this_params = params[str(ii)]\n mu = this_params[\"mean\"]\n std = this_params[\"std\"]\n movie_path = movie_path_list_fixture[ii]\n data = path_to_data[movie_path]\n frame = frame_list_fixture[ii][i_frame]\n output_data = (data[frame, :, :] - mu) / std\n\n input_indexes = np.array([frame - 2, frame - 1, frame + 1, frame + 2])\n input_data = (data[input_indexes, :, :] - mu) / std\n\n expected_output_frames.append(output_data)\n expected_input_frames.append(input_data)\n\n rng = np.random.default_rng(1234)\n rng.shuffle(expected_output_frames)\n rng = np.random.default_rng(1234)\n rng.shuffle(expected_input_frames)\n\n yield {\n \"json_path\": json_path,\n \"expected_input\": expected_input_frames,\n \"expected_output\": expected_output_frames,\n }\n\n json_path = pathlib.Path(json_path)\n if json_path.is_file():\n json_path.unlink()", "def reset_cache():\n setup_cache({})\n yield # test\n setup_cache({})", "def handle_response(self, response):\n\n self._tmp_request_args = {}\n self.cache_response(response)", "def generator_params_fixture(\n tmpdir_factory, json_frame_specification_fixture, random_seed_fixture\n):\n\n params = dict()\n params[\"pre_post_omission\"] = 0\n params[\"total_samples\"] = -1\n params[\"name\"] = \"MovieJSONGenerator\"\n params[\"start_frame\"] = 0\n params[\"end_frame\"] = -1\n params[\"pre_frame\"] = 2\n params[\"post_frame\"] = 2\n params[\"randomize\"] = True\n params[\"data_path\"] = json_frame_specification_fixture[\"json_path\"]\n params[\"steps_per_epoch\"] = -1\n params[\"train_path\"] = json_frame_specification_fixture[\"json_path\"]\n params[\"type\"] = \"generator\"\n params[\"seed\"] = random_seed_fixture\n return params", "def parse(self, response):\n self.driver.get(response.url)\n product_category=response.meta[\"category_text\"]\n products=response.xpath(\"//*[(@class='list-item')]\")\n \n # item containers for storing product\n items = CrawlingECommerceItem()\n \n # iterating over search results\n # for product in products:\n # # Defining the XPaths\n # XPATH_PRODUCT_LINK=\".//*[contains(concat( ' ', @class, ' ' ), concat( ' ', 'goods-tit', ' ' ))]//a\"\n # XPATH_PRODUCT_NAME=\".//div[@class='goods-introudce']//a/@href\"\n # XPATH_PRODUCT_PRICE=\".//div[@class='catalog-detail']//div[@class='detail-right']//p/text()\"\n # XPATH_PRODUCT_IMAGE_LINK=\".//img\"\n\n # raw_product_name=product.xpath(XPATH_PRODUCT_NAME).get()\n # raw_product_price=product.xpath(XPATH_PRODUCT_PRICE).get()\n # raw_product_image_link=product.xpath(XPATH_PRODUCT_IMAGE_LINK).extract()\n # raw_product_link=product.xpath(XPATH_PRODUCT_LINK).get()\n\n # # cleaning the data\n # product_name=''.join(raw_product_name).strip(\n # ) if raw_product_name 
else None\n # product_price=''.join(raw_product_price).strip(\n # ) if raw_product_price else None\n # product_image_link=''.join(raw_product_image_link).strip(\n # ) if raw_product_image_link else None\n # product_link=''.join(raw_product_link).strip(\n # ) if raw_product_link else None\n\n # # storing item\n # yield CrawlingECommerceItem (\n # product_name=product_name,\n # product_price=product_price,\n # product_url=product_link,\n # product_category=product_category,\n # image_urls=raw_product_image_link\n # )\n\n # # yield items\n \n # XPATH_PRAGINATION_LINK=\"//*[(@class='next right')]/a/@href\"\n\n yield response.follow(str(response.request.url), callback = self.parse, meta = {\"category_text\": product_category})", "def _generate_and_load_initial_batch(self, working_directory: Path):\n\n template_dir = Path(working_directory) / \"template_1\"\n template_dir.mkdir()\n # changes here should often be reflected in\n # data_generator_opts and data_loader_opts\n\n channel_decl = self.channel_configs[0]\n\n plugin_options = {\n \"pid\": \"0\",\n \"big_ids\": \"True\",\n }\n # if it's efficient to do the whole load in one go, let's just do that.\n if self.run_until.gap < MIN_PORTION_SIZE:\n num_records = self.run_until.gap\n else:\n num_records = 1 # smallest possible batch to get to parallelizing fast\n results = self._generate_and_load_batch(\n template_dir,\n channel_decl.org_config,\n {\n \"generator_yaml\": self.options.get(\"recipe\"),\n \"num_records\": num_records,\n \"num_records_tablename\": self.run_until.sobject_name or COUNT_REPS,\n \"loading_rules\": self.loading_rules,\n \"vars\": channel_decl.merge_recipe_options(self.recipe_options),\n \"plugin_options\": plugin_options,\n \"bulk_mode\": self.bulk_mode,\n },\n )\n self.update_running_totals_from_load_step_results(results)\n\n # rename directory to reflect real number of sets created.\n wd = SnowfakeryWorkingDirectory(template_dir)\n if self.run_until.sobject_name:\n self.sets_finished_while_generating_template = wd.get_record_counts()[\n self.run_until.sobject_name\n ]\n else:\n self.sets_finished_while_generating_template = num_records\n\n new_template_dir = data_loader_new_directory_name(template_dir, self.run_until)\n shutil.move(template_dir, new_template_dir)\n template_dir = new_template_dir\n\n # don't send data tables to child processes. 
All they\n # care about are ID->OID mappings\n wd = SnowfakeryWorkingDirectory(template_dir)\n self._cleanup_object_tables(*wd.setup_engine())\n\n return template_dir, wd.relevant_sobjects()", "def test_multiple_results(self):\n obj_list = [self.factory.create(name='hello') for i in range(2)]\n response = self._get(get_kwargs={'search': 'ello'})\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertEquals(response.context['object_list'].count(), 2)\n for obj in obj_list:\n self.assertTrue(obj in response.context['object_list'])", "def collect_articles():\n # Create a json fixture for all articles which don't exist in the current\n # fixture file\n # TODO: Parse content before writing content to fixture\n for article in get_new_local_articles():\n with open(article_root + \"/\" + article, 'r') as f:\n #extension = article.split(\".\")[1]\n content = f.read()\n fixture = construct_fixture(pk=None, title=filesystem_to_pretty(article.split(\".\")[0]),\n content=content)\n with open(fixture_dir + \"/{}.json\".format(article.split(\".\")[0]), 'w+') as f:\n f.write(fixture)", "def test_cache_placement_data(self):\n zk_content = {\n 'placement': {\n 'test.xx.com': {\n '.data': \"\"\"\n state: up\n since: 100\n \"\"\",\n 'xxx.app1#1234': {\n '.data': '{identity: 1}\\n',\n },\n }\n },\n 'scheduled': {\n 'xxx.app1#1234': {\n 'affinity': 'app1',\n 'memory': '1G',\n 'disk': '1G',\n 'cpu': '100%',\n 'identity_group': 'xxx.app1',\n },\n }\n }\n self.make_mock_zk(zk_content)\n zkclient = kazoo.client.KazooClient()\n self.evmgr._hostname = 'test.xx.com'\n self.evmgr._cache(zkclient, 'xxx.app1#1234')\n\n appcache = os.path.join(self.cache, 'xxx.app1#1234')\n self.assertTrue(os.path.exists(appcache))\n\n with open(appcache) as f:\n data = yaml.load(f.read())\n self.assertEqual(data['identity'], 1)", "def test_process_response(self):\n t = self.create_request_object()\n response_content = u\"\"\"<ODM FileType=\"Snapshot\" FileOID=\"\" CreationDateTime=\"\" ODMVersion=\"1.3\"\nxmlns:mdsol=\"http://www.mdsol.com/ns/odm/metadata\" xmlns=\"http://www.cdisc.org/ns/odm/v1.3\">\n <Study OID=\"Lab Test\">\n <GlobalVariables>\n <StudyName>Lab Test</StudyName>\n <StudyDescription />\n <ProtocolName>Lab Test</ProtocolName>\n </GlobalVariables>\n </Study>\n <Study OID=\"Mediflex\">\n <GlobalVariables>\n <StudyName>Mediflex</StudyName>\n <StudyDescription />\n <ProtocolName>Mediflex</ProtocolName>\n </GlobalVariables>\n </Study>\n</ODM>\"\"\"\n req = mock.Mock(requests.Request, text=response_content)\n response = t.result(req)\n self.assertTrue(isinstance(response, RWSStudies))\n for study in response:\n self.assertTrue(study.oid in ['Lab Test', 'Mediflex'])", "def SetPersistentCache(ambler, suggestions):\n for suggestion in suggestions:\n suggestion_object = models.CachedPlace()\n suggestion_object.lat = suggestion['lat']\n suggestion_object.lng = suggestion['lng']\n suggestion_object.name = suggestion['name']\n suggestion_object.food_type = suggestion['food_type']\n suggestion_object.cost = suggestion['cost']\n suggestion_object.why_description1 = suggestion['why_description1']\n suggestion_object.why_description2 = suggestion['why_description2']\n suggestion_object.cache_timestamp = suggestion['cache_timestamp']\n suggestion_object.address = suggestion['address']\n ambler.persistent_suggestion_cache.append(suggestion_object)\n ambler.put()", "def load_mock_response(file_name):\n with open('test_data/' + file_name, mode='r') as f:\n return 
json.loads(f.read())", "def fixture_tc_objs(request, reform_xx, puf_subsample, cps_subsample):\n puftest = request.param\n p_xx = Policy()\n p_xx.implement_reform(reform_xx, raise_errors=False)\n if puftest:\n rec_xx = Records(data=puf_subsample)\n else:\n rec_xx = Records.cps_constructor(data=cps_subsample)\n c_xx = Calculator(policy=p_xx, records=rec_xx)\n c_xx.advance_to_year(TEST_YEAR)\n c_xx.calc_all()\n return rec_xx, c_xx, puftest", "def get_cached(factory, cache_file_name, **kwargs):\n if os.path.exists(cache_file_name):\n _logger.info('Loading {}'.format(cache_file_name))\n cached = deserialize(cache_file_name)\n return cached\n\n _logger.info('Creating {}'.format(cache_file_name))\n data = factory()\n serialize(cache_file_name, data, **kwargs)\n return data", "def mock_reddit_search_response(\n ) -> Dict:\n with open(\n \"util/reddit_search_response.json\"\n ) as static_response:\n mock_response = json.load(static_response)\n\n return(mock_response)", "def test_response_reusage(self):\n\n post1 = self._create_db_post(content=\"@test I need a foo.\",\n channel=self.sc.inbound,\n demand_matchables=True,\n user_profile={'screen_name': 'customer'})\n self.assertTrue(self.sc.inbound_channel.is_assigned(post1))\n\n conv1 = self.sc.upsert_conversation(post1)\n post2 = self._create_db_post(content=\"I still need a foo!\",\n channel=self.sc.inbound,\n demand_matchables=True,\n user_profile={'screen_name': 'customer'})\n conv2 = self.sc.upsert_conversation(post2)\n\n resp1 = Response.objects.upsert_from_post(post1)\n resp2 = Response.objects.upsert_from_post(post2)\n self.assertEqual(conv1.id, conv2.id)\n self.assertEqual(resp1.id, resp2.id)\n self.assertTrue(resp2.post_date > resp1.post_date)", "def test_article_object_caching(self):\r\n settings = get_settings(filenames={})\r\n settings['CACHE_PATH'] = self.temp_cache\r\n settings['CONTENT_CACHING_LAYER'] = 'generator'\r\n settings['READERS'] = {'asc': None}\r\n\r\n generator = ArticlesGenerator(\r\n context=settings.copy(), settings=settings,\r\n path=CONTENT_DIR, theme=settings['THEME'], output_path=None)\r\n generator.generate_context()\r\n self.assertTrue(hasattr(generator, '_cache'))\r\n\r\n generator = ArticlesGenerator(\r\n context=settings.copy(), settings=settings,\r\n path=CONTENT_DIR, theme=settings['THEME'], output_path=None)\r\n generator.readers.read_file = MagicMock()\r\n generator.generate_context()\r\n generator.readers.read_file.assert_called_count == 0", "def _load_data(self):\n if self._api_response.status_code == 200:\n self._dataset = self._api_response.json()\n self._fill_day_dicts()", "def get_proxy_recommend_tests():\n sessionObj = dpma.LoginRestServer()\n\n # log in Rest Server \n url = dpma.getURLpath(basePath, 'auth/login')\n response = sessionObj.login(url, headers, vCenterPayload)\n assert_equal(response['statusCode'], 200)\n\n # list recommend\n url = dpma.getURLpath(basePath, 'recommend')\n headers['X-CustomTicket'] = response['sessionTicket']\n response = dpma.sendGetRequest(url, headers)\n assert_equal(response.status_code, 206)\n \n # convert Json object to Python object\n resPython = json.loads(response.content)\n\n recList = []\n\n for rec in resPython:\n recList.append(rec['recommendationId'])\n\n # fileObj = open('/home/kevin/recommendid1', 'w')\n # # fileObj.write(response.content)\n # for rec in resPython:\n # fileObj.write(rec['recommendationId'])\n # fileObj.write(\"\\n\")\n\n # fileObj.close()\n \n \n # get recommend\n i = 1\n for recItem in recList:\n recommendId = 'recommend/' + recItem\n url 
= dpma.getURLpath(basePath, recommendId)\n response = dpma.sendGetRequest(url, headers)\n assert_equal(response.status_code, 200)\n filepath = '/home/kevin/recommendproxy/getrecommendfile' + str(i) + '.json'\n fileObj = open(filepath, 'w')\n fileObj.write(response.content)\n fileObj.close()\n i = i + 1\n \n url = dpma.getURLpath(basePath, 'auth/logout')\n if not 'X-CustomTicket' in headers:\n headers['X-CustomTicket'] = response['sessionTicket']\n response = dpma.logout(url, headers)\n assert_equal(response.status_code, 200)\n\n \n\n\n # return response.headers['Location']", "def precreate_tempfiles(self, count):\n spy_for = getattr(self, 'spy_for', None)\n\n assert spy_for, (\n '%r must mix in kgb.SpyAgency in order to call this method.'\n % self.__class__)\n\n tmpfiles: List[str] = [\n make_tempfile()\n for i in range(count)\n ]\n\n tmpfiles_iter = iter(tmpfiles)\n\n @spy_for(make_tempfile)\n def _return_next_tempfile(*args, **kwargs) -> str:\n try:\n tmpfile = next(tmpfiles_iter)\n except StopIteration:\n self.fail('Too many calls to make_tempfile(). Expected %s, '\n 'got %s.'\n % (count, count + 1))\n\n content = kwargs.get('content')\n\n if content:\n with open(tmpfile, 'wb') as fp:\n fp.write(content)\n\n return tmpfile\n\n return tmpfiles", "def mocked_requests_scrapping_post(*args, **kwargs):\n class MockResponse:\n def __init__(self, json_data, status_code, url):\n self.content = json_data\n self.status_code = status_code\n self.url = url\n self.cookies = {\"JSESSIONID\": \"jkghhjgjhgfjgfgjg\"}\n self.encoding = \"utf-8\"\n\n def json(self):\n return self.json_data\n\n dn = os.path.dirname(os.path.realpath(__file__))\n with open(os.path.join(dn, \"fixtures\", \"result.html\"), \"rb\") as fp:\n return MockResponse(fp.read(), 200, args[0])", "def test_cache_retrieved(self):\n read = self.client.get(\"/read/froLit/jns915/jns1856/ciham-fro1/1\")\n data = read.data.decode()\n self.assertIn(\n '<span class=\"expan\">et </span>', data,\n \"Text content should be transformed\"\n )\n self.assertIn(\n 'Facsimilaire', data,\n \"Other content should be added\"\n )\n\n cached = self.cache.get(\"urn:cts:froLit:jns915.jns1856.ciham-fro1:1\").decode()\n self.assertIn('<aside class=\"text-left\">', cached, \"Assert cache is made\")\n\n with mock.patch(\"nemo_xslttwo_plugin.shell\") as shell:\n read = self.client.get(\"/read/froLit/jns915/jns1856/ciham-fro1/1\")\n cached_response = read.data.decode()\n self.assertEqual(\n cached_response, data,\n \"Text content should the same in cache\"\n )\n self.assertEqual(\n shell.call_count, 0,\n \"Shell should not be called because we use cache\"\n )", "def tool(request, devid, tmp_path_factory):\n\n tmpdir = tmp_path_factory.mktemp(request.fixturename)\n return request.param(devid, tmpdir=tmpdir)", "def _setup_http_mock(self):\n if self.http_mock is not None:\n return\n\n filename = glob(f\"{DATASET_DIR}/{DATASETS[self.dataset]['file_pattern']}\")\n with open(filename[0]) as csv_file:\n csv_reader = csv.DictReader(csv_file, delimiter=DATASETS[self.dataset]['delimiter'])\n http_data = [next(csv_reader) for x in range(self.batch_size)]\n\n http_mock = self.http.mock()\n http_mock.when(f'GET /{self.dataset}').reply(json.dumps(http_data), times=FOREVER)\n http_mock.when('POST /dest').reply('ack', times=FOREVER)\n self.http_mock = http_mock", "def test_local_cache():", "def mock_single_site_api() -> Generator:\n instance = Mock()\n site = Site(\"01FG0AGP818PXK0DWHXJRRT2DH\", \"11111111111\", [])\n instance.get_sites.return_value = [site]\n\n with 
patch(\"amberelectric.api.AmberApi.create\", return_value=instance):\n yield instance", "def parse(self, response):\n json_response = json.loads(response.text)\n fetched_results = len(json_response['results'])\n\n if not hasattr(self, 'processed_results'):\n self.processed_results = 0\n self.processed_results += fetched_results\n if fetched_results == 0:\n logging.info('No more files. Total files queried {}'.format(\n self.processed_results))\n return\n\n for result in json_response['results']:\n document = self._create_document(result['columns'])\n if document['language'] != u'ENG':\n info = 'Case {} {} not in ENG, original language {}'.format(\n document['original_id'], document['name'].encode('utf-8'),\n result['columns']['languageisocode'])\n logging.info(info)\n continue\n doc_url = self.base_document_url.format(document['original_id'])\n new_request = scrapy.Request(doc_url, callback=self.parse_document)\n new_request.meta['document'] = document\n yield new_request\n\n if self.processed_results < int(getattr(self, 'limit', DEFAULT_LIMIT)):\n next_url = self.base_main_url.format(\n start_index=self.processed_results, length=RESULT_PER_PAGE)\n yield scrapy.Request(next_url, callback=self.parse)\n else:\n logging.info('Limit reached. Total files obtained {}'.format(\n self.processed_results))", "async def test_expired_session(aresponses, create_session_response):\n aresponses.add(\n \"production.tile-api.com\",\n f\"/api/v1/clients/{TILE_CLIENT_UUID}\",\n \"put\",\n aresponses.Response(\n text=load_fixture(\"create_client_response.json\"),\n status=200,\n headers={\"Content-Type\": \"application/json\"},\n ),\n )\n aresponses.add(\n \"production.tile-api.com\",\n f\"/api/v1/clients/{TILE_CLIENT_UUID}/sessions\",\n \"post\",\n aresponses.Response(\n text=json.dumps(create_session_response),\n status=200,\n headers={\"Content-Type\": \"application/json\"},\n ),\n )\n aresponses.add(\n \"production.tile-api.com\",\n f\"/api/v1/clients/{TILE_CLIENT_UUID}\",\n \"put\",\n aresponses.Response(\n text=load_fixture(\"create_client_response.json\"),\n status=200,\n headers={\"Content-Type\": \"application/json\"},\n ),\n )\n aresponses.add(\n \"production.tile-api.com\",\n f\"/api/v1/clients/{TILE_CLIENT_UUID}/sessions\",\n \"post\",\n aresponses.Response(\n text=json.dumps(create_session_response),\n status=200,\n headers={\"Content-Type\": \"application/json\"},\n ),\n )\n aresponses.add(\n \"production.tile-api.com\",\n \"/api/v1/tiles/tile_states\",\n \"get\",\n aresponses.Response(\n text=load_fixture(\"tile_states_response.json\"),\n status=200,\n headers={\"Content-Type\": \"application/json\"},\n ),\n )\n aresponses.add(\n \"production.tile-api.com\",\n f\"/api/v1/tiles/{TILE_TILE_UUID}\",\n \"get\",\n aresponses.Response(\n text=load_fixture(\"tile_details_response.json\"),\n status=200,\n headers={\"Content-Type\": \"application/json\"},\n ),\n )\n\n async with aiohttp.ClientSession() as session:\n api = await async_login(\n TILE_EMAIL, TILE_PASSWORD, session, client_uuid=TILE_CLIENT_UUID\n )\n\n # Simulate an expired session:\n api._session_expiry = int(time() * 1000) - 1000000\n await api.async_get_tiles()", "def get_info(self, response):\n try:\n if re.search('artist/\\d+', response.url) or \\\n re.search('i\\.xiami\\.com/[^/]+$', response.url):\n self.get_artist(response)\n elif re.search('album/\\d+', response.url):\n self.get_albums(response)\n elif re.search('song/\\d+', response.url):\n self.get_songs(response)\n elif 'count/getplaycount' in response.url:\n 
self.get_count(response)\n else:\n self.get_pages(response)\n except (AttributeError, TypeError):\n return\n request = self.gen_info(response)\n if not request:\n self.save(response.meta['source_id'],\n response.meta['raw_info'],\n response.meta['result'])\n else:\n yield request", "def test_response(self):\n\n from rubber import settings, resource\n settings.RUBBER_MOCK_HTTP_RESPONSE = \"\"\"{\"took\":2,\"timed_out\":false,\"_shards\":{\"total\":5,\"successful\":5,\"failed\":0},\"hits\":{\"total\":2,\"max_score\":1.0,\"hits\":[{\"_index\":\"auth\",\"_type\":\"user\",\"_id\":\"6\",\"_score\":1.0, \"_source\" : {\"username\": \"guillaume\", \"first_name\": \"\", \"last_name\": \"\", \"is_active\": true, \"is_superuser\": false, \"is_staff\": false, \"last_login\": \"2012-08-02T08:30:11\", \"groups\": [], \"user_permissions\": [], \"password\": \"pbkdf2_sha256$10000$M1nRKJfbvdQf$ouX5u9FOUF/MKhhwuwYbiuoVidFITsBrEstGBB4mzZA=\", \"email\": \"somemail@test.com\", \"date_joined\": \"2012-08-02T08:30:11\"}},{\"_index\":\"auth\",\"_type\":\"user\",\"_id\":\"8\",\"_score\":1.0, \"_source\" : {\"username\": \"stephane\", \"first_name\": \"\", \"last_name\": \"\", \"is_active\": true, \"is_superuser\": false, \"is_staff\": false, \"last_login\": \"2012-08-02T09:14:38\", \"groups\": [], \"user_permissions\": [], \"password\": \"pbkdf2_sha256$10000$ORDHZAnNqTwF$UGmkUCyH0/uh1ruP93ZSTyog9Wi5g2qc+m/fxowigFs=\", \"email\": \"othermail@test.com\", \"date_joined\": \"2012-08-02T09:14:38\"}}]}}\"\"\"\n\n requestmock = RequestMock()\n resource.requests = requestmock\n\n response = self.Article.elasticsearch.search({})\n \n self.assertEquals(2, response.json['took'])\n\n from rubber.response import Response\n self.assertTrue(isinstance(response, Response))", "def request_chunk(self, x, z):\n\n if (x, z) in self.chunk_cache:\n returnValue(self.chunk_cache[x, z])\n elif (x, z) in self.dirty_chunk_cache:\n returnValue(self.dirty_chunk_cache[x, z])\n elif (x, z) in self._pending_chunks:\n # Rig up another Deferred and wrap it up in a to-go box.\n retval = yield self._pending_chunks[x, z].deferred()\n returnValue(retval)\n\n chunk = Chunk(x, z)\n yield maybeDeferred(self.serializer.load_chunk, chunk)\n\n if chunk.populated:\n self.chunk_cache[x, z] = chunk\n self.postprocess_chunk(chunk)\n #self.factory.scan_chunk(chunk)\n returnValue(chunk)\n\n if self.async:\n from ampoule import deferToAMPProcess\n from bravo.remote import MakeChunk\n\n d = deferToAMPProcess(MakeChunk,\n x=x,\n z=z,\n seed=self.seed,\n generators=configuration.getlist(self.config_name, \"generators\")\n )\n\n # Get chunk data into our chunk object.\n def fill_chunk(kwargs):\n chunk.blocks = fromstring(kwargs[\"blocks\"],\n dtype=uint8).reshape(chunk.blocks.shape)\n chunk.heightmap = fromstring(kwargs[\"heightmap\"],\n dtype=uint8).reshape(chunk.heightmap.shape)\n chunk.metadata = fromstring(kwargs[\"metadata\"],\n dtype=uint8).reshape(chunk.metadata.shape)\n chunk.skylight = fromstring(kwargs[\"skylight\"],\n dtype=uint8).reshape(chunk.skylight.shape)\n chunk.blocklight = fromstring(kwargs[\"blocklight\"],\n dtype=uint8).reshape(chunk.blocklight.shape)\n\n return chunk\n d.addCallback(fill_chunk)\n else:\n # Populate the chunk the slow way. :c\n for stage in self.pipeline:\n stage.populate(chunk, self.seed)\n\n chunk.regenerate()\n d = succeed(chunk)\n\n # Set up our event and generate our return-value Deferred. 
It has to\n # be done early becaues PendingEvents only fire exactly once and it\n # might fire immediately in certain cases.\n pe = PendingEvent()\n # This one is for our return value.\n retval = pe.deferred()\n # This one is for scanning the chunk for automatons.\n #pe.deferred().addCallback(self.factory.scan_chunk)\n self._pending_chunks[x, z] = pe\n\n def pp(chunk):\n chunk.populated = True\n chunk.dirty = True\n\n self.postprocess_chunk(chunk)\n\n self.dirty_chunk_cache[x, z] = chunk\n del self._pending_chunks[x, z]\n\n return chunk\n\n # Set up callbacks.\n d.addCallback(pp)\n d.chainDeferred(pe)\n\n # Because multiple people might be attached to this callback, we're\n # going to do something magical here. We will yield a forked version\n # of our Deferred. This means that we will wait right here, for a\n # long, long time, before actually returning with the chunk, *but*,\n # when we actually finish, we'll be ready to return the chunk\n # immediately. Our caller cannot possibly care because they only see a\n # Deferred either way.\n retval = yield retval\n returnValue(retval)", "def retrieve_fixture():\n j = json.load(open(\"./tests/fixtures/crond_event.json\"))\n return j", "def _crawler_result(item, response, spider):\n output_data.clear()\n output_data.append(dict(item))", "def test_task_export_tasks_ckan_first_time(self, mock1):\r\n # Second time exporting the package\r\n mocks = [Mock()]\r\n resource = dict(name='task', id=1)\r\n package = dict(id=3, resources=[resource])\r\n mocks[0].package_exists.return_value = (None, None)\r\n mocks[0].package_create.return_value = package\r\n #mocks[0].datastore_delete.return_value = None\r\n mocks[0].datastore_create.return_value = None\r\n mocks[0].datastore_upsert.return_value = None\r\n mocks[0].resource_create.return_value = dict(result=dict(id=3))\r\n mocks[0].datastore_create.return_value = 'datastore'\r\n mocks[0].datastore_upsert.return_value = 'datastore'\r\n\r\n mock1.side_effect = mocks\r\n\r\n Fixtures.create()\r\n user = db.session.query(User).filter_by(name=Fixtures.name).first()\r\n app = db.session.query(App).first()\r\n user.ckan_api = 'ckan-api-key'\r\n app.owner_id = user.id\r\n db.session.add(user)\r\n db.session.add(app)\r\n db.session.commit()\r\n\r\n self.signin(email=user.email_addr, password=Fixtures.password)\r\n # First test for a non-existant app\r\n uri = '/app/somethingnotexists/tasks/export'\r\n res = self.app.get(uri, follow_redirects=True)\r\n assert res.status == '404 NOT FOUND', res.status\r\n # Now get the tasks in CKAN format\r\n uri = \"/app/somethingnotexists/tasks/export?type=task&format=ckan\"\r\n res = self.app.get(uri, follow_redirects=True)\r\n assert res.status == '404 NOT FOUND', res.status\r\n # Now get the tasks in CKAN format\r\n uri = \"/app/somethingnotexists/tasks/export?type=other&format=ckan\"\r\n res = self.app.get(uri, follow_redirects=True)\r\n assert res.status == '404 NOT FOUND', res.status\r\n\r\n\r\n # Now with a real app\r\n uri = '/app/%s/tasks/export' % Fixtures.app_short_name\r\n res = self.app.get(uri, follow_redirects=True)\r\n heading = \"<strong>%s</strong>: Export All Tasks and Task Runs\" % Fixtures.app_name\r\n assert heading in res.data, \"Export page should be available\\n %s\" % res.data\r\n # Now get the tasks in CKAN format\r\n uri = \"/app/%s/tasks/export?type=task&format=ckan\" % Fixtures.app_short_name\r\n #res = self.app.get(uri, follow_redirects=True)\r\n with patch.dict(self.flask_app.config, {'CKAN_URL': 'http://ckan.com'}):\r\n # First time exporting the 
package\r\n res = self.app.get(uri, follow_redirects=True)\r\n msg = 'Data exported to http://ckan.com'\r\n err_msg = \"Tasks should be exported to CKAN\"\r\n assert msg in res.data, err_msg", "def test_cached(self):\n # Setup the mocked response, refrain from matching the query string\n responses.add(responses.GET, self.api_url, json=self.valid_response,\n status=200, match_querystring=False)\n\n acme = ACMEAccount(client=self.client)\n acme.all(self.org_id)\n data = acme.all(self.org_id)\n\n # Verify all the query information\n # There should only be one call the first time \"all\" is called.\n # Due to pagination, this is only guaranteed as long as the number of\n # entries returned is less than the page size\n self.assertEqual(len(responses.calls), 1)\n self.match_url_with_qs(responses.calls[0].request.url)\n self.assertEqual(data, self.valid_response)", "def parse_gazette(self, response):\n json_response = response.json()\n if not json_response:\n self.logger.warning(f\"Document not found in {response.url}\")\n return\n\n json_dir = json_response[\"dir\"]\n\n date = re.search(self.DATE_REGEX, json_dir).group()\n date = parse(date, settings={\"DATE_ORDER\": \"DMY\"}).date()\n is_extra_edition = self.EXTRA_EDITION_TEXT in json_dir\n path = json_dir.replace(\"/\", \"|\")\n\n json_data = json_response[\"data\"]\n file_urls = [self.PDF_URL.format(path, url.split(\"/\")[-1]) for url in json_data]\n\n yield Gazette(\n date=date,\n file_urls=file_urls,\n is_extra_edition=is_extra_edition,\n power=\"executive_legislative\",\n )", "def testcases(self, request, *args, **kwargs):\n response = self.retrieve(request, *args, **kwargs)\n response.data = response.data['testcases']\n return response" ]
[ "0.6123618", "0.5608524", "0.56062853", "0.5596069", "0.55481315", "0.5504589", "0.5498172", "0.5480052", "0.5423372", "0.5308721", "0.52908236", "0.5271301", "0.52630156", "0.525457", "0.5245118", "0.52290255", "0.52251756", "0.5215446", "0.5191281", "0.51741076", "0.5172798", "0.5164142", "0.5162481", "0.5158709", "0.515818", "0.51415455", "0.5109162", "0.5101342", "0.5092544", "0.50873935", "0.5066958", "0.506427", "0.505932", "0.505258", "0.5050145", "0.5036194", "0.5035904", "0.5034362", "0.5034117", "0.50313276", "0.5026031", "0.50157017", "0.5007646", "0.5007646", "0.4982516", "0.49820423", "0.4981551", "0.49804568", "0.4976686", "0.49582452", "0.49483404", "0.49473238", "0.4940414", "0.49386892", "0.49358878", "0.4932295", "0.49195585", "0.49150392", "0.49062675", "0.4902891", "0.4892497", "0.48887363", "0.48884952", "0.488221", "0.4879644", "0.48766318", "0.48476398", "0.48457617", "0.4844124", "0.48420477", "0.48318362", "0.4827318", "0.4822182", "0.48199433", "0.48190153", "0.4810744", "0.48080125", "0.48060393", "0.47996023", "0.47988033", "0.47951472", "0.47934496", "0.47892475", "0.47849998", "0.47831097", "0.47800967", "0.47750974", "0.47749925", "0.47697297", "0.4769358", "0.47647184", "0.4756736", "0.47530797", "0.47482425", "0.47434974", "0.4742244", "0.47418958", "0.47416994", "0.47400102", "0.4738373" ]
0.6490722
0
Falls back to the default fixture names if no names could be determined up to this point.
def _fallback_fixture_names(self): if not self.request_name or not self.response_name: warnings.warn( "No name was specified for the recorded fixture. Falling " "back to default names.") if not self.request_name: self.request_name = __default_names__[0] if not self.response_name: self.response_name = __default_names__[1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def populate_fixtures():\n languages()\n words()", "def fixtures():", "def _find_fixtures(self, start_dir):\r\n fixtures = []\r\n def _find(arg, dirname, names):\r\n if (dirname.endswith('fixtures')) and (dirname.find('unit_test')==-1):\r\n for name in names:\r\n if (name.endswith(FIXTUERS_EXT)) and (name.find('initial_data')==-1):\r\n fixtures.append(name.replace(FIXTUERS_EXT, ''))\r\n os.path.walk(start_dir, _find, None)\r\n \r\n return fixtures", "def pytest_runtest_setup(item):\n if hasattr(item, 'fixturenames') and LOOP_KEY not in item.fixturenames:\n item.fixturenames.append(LOOP_KEY)", "def _fixture_setup(self):\n pass", "def load_fixtures(self):\n for fixture_dir in settings.FIXTURE_DIRS:\n fixture_dir = os.path.join(fixture_dir, self.filesystem_name)\n for (root, dirs, files) in os.walk(fixture_dir):\n for file in files:\n full_file_path = os.path.join(root, *dirs, file)\n with open(full_file_path, 'rb') as f:\n self.save(os.path.relpath(full_file_path, fixture_dir), f)", "def setUp(self):\n self.fixture_file = r\"v:\\workspace\\FileHandling\\src\\test-read-write.txt\"\n self.fixture_list = [\"my\", \"written\", \"text\"]\n self.fixture_list_empty_strings = [\"my\", \"\", \"\", \"written\", \"text\"]\n self.fixture_list_trailing_empty_strings = [\"my\", \"written\", \"text\", \"\", \"\"]", "def setUp(self):\n\n fq_dataset_name = self.fq_table_names[0].split('.')\n self.fq_dataset_name = '.'.join(fq_dataset_name[:-1])\n\n fq_sandbox_name = self.fq_sandbox_table_names[0].split('.')\n self.fq_sandbox_name = '.'.join(fq_sandbox_name[:-1])\n\n super().setUp()", "def start_fixture(self):\n pass", "def setUp(self):\n self.fixtureFile = r\"v:\\workspace\\FileHandling\\src\\test-read-write.txt\"\n self.fixtureList = [\"my\", \"written\", \"text\"]\n self.fixtureListEmptyStrings = [\"my\", \"\", \"\", \"written\", \"text\"]\n self.fixtureListTrailingEmptyString = [\"my\", \"written\", \"text\", \"\", \"\"]", "def setUpFixture(self):\n pass", "def fixture_name(self):\n return \"coding_dna_substitution\"", "def generate_tests(self, fixture):\n if fixture.startswith(\"splunk_searchtime_fields\"):\n yield from self.dedup_tests(\n self.fieldtest_generator.generate_tests(fixture),\n fixture\n )\n elif fixture.startswith(\"splunk_searchtime_cim\"):\n yield from self.dedup_tests(\n self.cim_test_generator.generate_tests(fixture),\n fixture\n )", "def fixture_microbial_sample_name():\n return \"microbial_name_test\"", "def fixture_other_case() -> str:\n return \"angrybird\"", "def load_initial_fixtures_func(app_name):\n return partial(_load_initial_fixtures_impl, app_name)", "def fixture_name(self):\n return \"coding_dna_insertion\"", "def tearDownFixture(self):\n pass", "def expected_city_names_fixture():\n return {'b', 'a', 'c'}", "def fixture_name(self):\n return \"coding_dna_deletion\"", "def load_test_subjects_names(self):\n files = os.listdir(os.path.join(self.db_path, self.test_batch))\n for f in files:\n if f.startswith('test-volume'):\n s_name = str.split(str.split(f, '.')[0], '-')[-1]\n self.testing_subjects.append(s_name)\n self.n_test = len(self.testing_subjects)", "def test_template_name():\n for t in templates:\n assert len(t.name) > 0", "def test_Defaults(self):\n self._run(self._test_scenarios, \"Defaults\")", "def set_default_fitscenarios(self, default_dict):\n try:\n self.setup.set_defaults(default_dict)\n return 1\n except:\n return 0", "def setFixtureParamNames(request, orderedParamNameList):\n numParams = len(request.param)\n request.keywords.setdefault(\n 
\"fixture_param_names\",\n dict())[request.fixturename] = orderedParamNameList[:numParams]", "def fixture_name(self):\n return \"genomic_silent_mutation\"", "def test_defaults_are_kept_if_not_specified_in_args(\n self, junit4_hooks, full_args\n ):\n args = empty_args(master_repo_names=MASTER_REPO_NAMES)\n expected_ignore_tests = [\"some\", \"tests\"]\n expected_hamcrest_path = HAMCREST_PATH\n expected_junit_path = JUNIT_PATH\n expected_rtd = RTD\n expected_disable_security = False\n\n junit4_hooks._ignore_tests = expected_ignore_tests\n junit4_hooks._hamcrest_path = expected_hamcrest_path\n junit4_hooks._junit_path = expected_junit_path\n junit4_hooks._reference_tests_dir = expected_rtd\n junit4_hooks._disable_security = expected_disable_security\n\n junit4_hooks.parse_args(args)\n\n assert junit4_hooks._ignore_tests == expected_ignore_tests\n assert junit4_hooks._hamcrest_path == expected_hamcrest_path\n assert junit4_hooks._junit_path == expected_junit_path\n assert junit4_hooks._reference_tests_dir == expected_rtd\n assert junit4_hooks._disable_security == expected_disable_security", "def __init_fixture_methods(self):\n # init our self.(class_setup|setup|teardown|class_teardown)_fixtures lists\n for fixture_type in fixture_types:\n setattr(self, \"%s_fixtures\" % fixture_type, [])\n\n # for setup methods, we want oldest class first. for teardowns, we want newest class first\n hierarchy = list(reversed(type(self).mro()))\n for cls in hierarchy[1:]:\n # mixins on TestCase instances that derive from, say, object, won't be set up properly\n if hasattr(cls, '_fixture_methods'):\n # the metaclass stored the class's fixtures in a _fixture_methods instance variable\n for fixture_type, fixture_methods in cls._fixture_methods.iteritems():\n bound_fixture_methods = [instancemethod(func, self, self.__class__) for func in fixture_methods]\n if fixture_type.endswith('setup'):\n # for setup methods, we want methods defined further back in the\n # class hierarchy to execute first\n getattr(self, \"%s_fixtures\" % fixture_type).extend(bound_fixture_methods)\n else:\n # for teardown methods though, we want the opposite\n setattr(self, \"%s_fixtures\" % fixture_type, bound_fixture_methods + getattr(self, \"%s_fixtures\" % fixture_type))", "def _get_fixture(item, arg_name, fixture=None):\n if arg_name == \"request\":\n # Support parameterized fixture\n if fixture:\n try:\n item._request.param = item._pyfuncitem.callspec.params[fixture.argname]\n except (AttributeError, KeyError) :\n pass\n\n return item._request\n\n if arg_name == \"self\":\n raise Ignore\n\n _fixtureinfo = item._fixtureinfo\n fixtures = sorted(\n _fixtureinfo.name2fixturedefs[arg_name], key=lambda x: not x.has_location\n )\n return fixtures[0]", "def setup_suite(dataset):\n for speaker in create_all_speakers(dataset):\n if speaker.is_pickle_saved():\n print('{} already exists.'.format(speaker.output_name))\n else:\n speaker.safe_to_pickle()", "def testTitleTemplateFindNames(self):\n\n\t\ttests = {\n\t\t\t'${abc.def.1}-$abc-${123}': {\n\t\t\t\t'abc.def.1': ['abc', 'def', 1],\n\t\t\t\t'123': [123]\n\t\t\t},\n\t\t\t'${abc..def} $$ ${qwe}': {'qwe': ['qwe']}\n\t\t}\n\n\t\tfor test in tests:\n\t\t\tt = TitleTemplate(test)\n\t\t\tself.assertEqual(t.getFieldNames(), tests[test])", "def _set_base_namelists(self):\n\n # Create namelists\n hydro_namelist = self.model.hydro_namelists\n hrldas_namelist = self.model.hrldas_namelists\n\n self.base_hydro_namelist = hydro_namelist.patch(self.domain.hydro_namelist_patches)\n self.base_hrldas_namelist = 
hrldas_namelist.patch(self.domain.hrldas_namelist_patches)", "def setCaptainNames(self):\n self.captainNames = anwp.func.names.getNames('system_names.txt',self.maxCaptainNames+100, self.rand.randint(1,100))\n self.currentCaptainName = 0", "def finalize(self):\n print(\"%d default backgdrop names found\" % self.total_default)\n for name in self.list_default:\n print name", "def create_default_settings():\n from flaskbb.fixtures.settings import fixture\n create_settings_from_fixture(fixture)", "def testDefaultNamingSucceeds(self):\n cb_name_lib.GetRecoveryName(self.board, self.recovery, 0).AndReturn(\n (self.index_page, self.rec_pat))\n cb_name_lib.DetermineUrl(self.index_page, self.rec_pat).AndReturn(\n self.rec_url)\n self.mox.ReplayAll()\n expected = (self.rec_url, self.index_page)\n actual = cb_name_lib.ResolveRecoveryUrl(self.board, self.recovery)\n self.assertEqual(expected, actual)", "def stop_fixture(self):\n pass", "def wemo_entity_suffix_fixture():\n return \"\"", "def load_season_fixture(self):\n\n print(f'Initializing \\t {self.league} \\t {self.season} fixtures')\n print('Initialization completed')\n return self.fb.leagues[self.league].seasons[self.season].load_played_fixtures()", "def load_label(self, fixture_label):\n show_progress = self.verbosity >= 3\n for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label):\n _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))\n open_method, mode = self.compression_formats[cmp_fmt]\n fixture = open_method(fixture_file, mode)\n try:\n self.fixture_count += 1\n objects_in_fixture = 0\n loaded_objects_in_fixture = 0\n if self.verbosity >= 2:\n self.stdout.write(\"Installing %s fixture '%s' from %s.\" %\n (ser_fmt, fixture_name, humanize(fixture_dir)))\n\n objects = serializers.deserialize(ser_fmt, fixture,\n using=self.using, ignorenonexistent=self.ignore)\n\n create_dict = OrderedDict()\n\n for object in objects:\n obj = object.object\n objects_in_fixture += 1\n model = obj.__class__\n if router.allow_migrate_model(self.using, model):\n self.models.add(model)\n if model in create_dict.keys():\n create_dict[model].append(obj)\n else:\n create_dict[model] = [obj]\n for model in create_dict.keys():\n objs = create_dict[model]\n loaded_objects_in_fixture += len(objs)\n try:\n model.objects.using(self.using).bulk_create(objs)\n if show_progress:\n self.stdout.write(\n '\\rProcessed %i object(s).' % loaded_objects_in_fixture,\n ending=''\n )\n except (DatabaseError, IntegrityError) as e:\n e.args = (\"Could not load %(app_label)s.%(object_name)s: %(error_msg)s\" % {\n 'app_label': model._meta.app_label,\n 'object_name': model._meta.object_name,\n 'error_msg': force_text(e)\n },)\n raise\n if objects and show_progress:\n self.stdout.write('') # add a newline after progress indicator\n self.loaded_object_count += loaded_objects_in_fixture\n self.fixture_object_count += objects_in_fixture\n except Exception as e:\n if not isinstance(e, CommandError):\n e.args = (\"Problem installing fixture '%s': %s\" % (fixture_file, e),)\n raise\n finally:\n fixture.close()\n\n # Warn if the fixture we loaded contains 0 objects.\n if objects_in_fixture == 0:\n warnings.warn(\n \"No fixture data found for '%s'. 
(File format may be \"\n \"invalid.)\" % fixture_name,\n RuntimeWarning\n )", "def fixture_example_data():\n import_example_data()", "def tearDown(self):\n super(TestSelectAPI, self).tearDown()\n self.destroy_fixtures()", "def set_defaults(self, **kw):\n group = kw.pop('group', None)\n for o, v in kw.items():\n self.cfg_fixture.set_default(o, v, group=group)", "def fixture_union(name, # type: str\n fixtures, # type: Iterable[Union[str, Callable]]\n scope=\"function\", # type: str\n idstyle='compact', # type: Optional[Union[str, Callable]]\n ids=None, # type: Union[Callable, Iterable[str]]\n unpack_into=None, # type: Iterable[str]\n autouse=False, # type: bool\n hook=None, # type: Callable[[Callable], Callable]\n **kwargs):\n # grab the caller module, so that we can later create the fixture directly inside it\n caller_module = get_caller_module()\n\n # test the `fixtures` argument to avoid common mistakes\n if not isinstance(fixtures, (tuple, set, list)):\n raise TypeError(\"fixture_union: the `fixtures` argument should be a tuple, set or list\")\n\n # unpack the pytest.param marks\n custom_pids, p_marks, fixtures = extract_parameterset_info((name, ), fixtures)\n\n # get all required fixture names\n f_names = [get_fixture_name(f) for f in fixtures]\n\n # create all alternatives and reapply the marks on them\n fix_alternatives = []\n f_names_args = []\n for _idx, (_name, _id, _mark) in enumerate(zip(f_names, custom_pids, p_marks)):\n # create the alternative object\n alternative = UnionFixtureAlternative(union_name=name, alternative_name=_name, alternative_index=_idx)\n\n # remove duplicates in the fixture arguments: each is required only once by the union fixture to create\n if _name in f_names_args:\n warn(\"Creating a fixture union %r where two alternatives are the same fixture %r.\" % (name, _name))\n else:\n f_names_args.append(_name)\n\n # reapply the marks\n if _id is not None or (_mark or ()) != ():\n alternative = pytest.param(alternative, id=_id, marks=_mark or ())\n fix_alternatives.append(alternative)\n\n union_fix = _fixture_union(caller_module, name,\n fix_alternatives=fix_alternatives, unique_fix_alt_names=f_names_args,\n scope=scope, idstyle=idstyle, ids=ids, autouse=autouse, hook=hook, **kwargs)\n\n # if unpacking is requested, do it here\n if unpack_into is not None:\n # Note: we can't expose the `in_cls` argument as we would not be able to output both the union and the\n # unpacked fixtures. 
However there is a simple workaround for this scenario of unpacking a union inside a class:\n # call unpack_fixture separately.\n _make_unpack_fixture(caller_module, argnames=unpack_into, fixture=name, hook=hook, in_cls=False)\n\n return union_fix", "def simulate_fixtures(x_width = 36, y_height = 18, total = 250, x_offset = 0):\n locations = make_locations(x_width, y_height, total, x_offset)\n fixtures = []\n i, j = 0, 0\n count = 0\n for grid_loc in locations:\n strand = int(count >= total/2)\n address = j if strand else i\n pixels = 1\n data = {\"strand\": strand,\n \"address\": address,\n \"pixels\": pixels,\n \"pos1\": map_loc_to_pixel(grid_loc),\n \"pos2\": map_loc_to_pixel(grid_loc),\n \"grid_loc\": grid_loc}\n fixtures.append(Fixture(data))\n if not strand:\n i += 1\n else:\n j += 1\n count += 1\n return fixtures", "def setUp(self):\n self.fixtures_path = os.path.join(\n os.path.dirname(os.path.abspath(__file__)), \"fixtures/\"\n )", "def pytest_collection_modifyitems(config, items):\n execute_mssql_tests = ensure_mssql_ready_for_tests(config)\n skip_mssql = pytest.mark.skip(reason=\"requires SQL Server\")\n for item in items:\n if \"mssql\" in item.keywords:\n if execute_mssql_tests:\n # Add 'mssql_setup_and_teardown' as FIRST in fixture list\n fixtures = ['mssql_setup_and_teardown'] + item.fixturenames\n item.fixturenames = fixtures\n else:\n item.add_marker(skip_mssql)\n if \"http_server\" in item.keywords:\n item.fixturenames.append('http_server_setup_and_teardown')", "def reset(self):\n import string\n # import random\n from random import SystemRandom\n old_name = self.name\n\n while old_name == self.name:\n self.name = ''\n n = 0\n while n < 2:\n # Standard pseudo-random generators are not\n # suitable for security/cryptographic purposes.\n # self.name += string.ascii_uppercase[random.randint(0, 25)]\n rnd = SystemRandom()\n self.name += string.ascii_uppercase[rnd.randrange(0, 25)]\n n += 1\n\n for n in range(0, 3):\n self.name += str(rnd.randrange(0, 9))", "def finalize(self):\n print(\"%d default sprite names found:\" % self.total_default)\n for name in self.list_default:\n print name", "def fixture(fname, label=\"reference\"):\n return os.path.join(FIXTURES, label, fname)", "def fixture_sets(*args):\n return [os.path.join(*args, dir)\n for dir in os.listdir(os.path.join(FIXTURE_DATA, *args))\n if os.path.isdir(os.path.join(FIXTURE_DATA, *args, dir))\n ]", "def test_xfail_fixture(broken_fixture):\n pass", "def load_and_process_fixtures_data(self) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:\n df = self._dbmanager.query_fixtures_data(self._seasons)\n if df.empty:\n raise ValueError(\"Empty fixtures dataframe.\")\n\n df = self._drop_last_season_championship_matches(df)\n\n self.teams = get_unique_teams(df)\n self.last_season_teams = get_last_season_unique_teams(df)\n # Get fixtures ids for each team\n teams_fixtures_ids = {t: df[(df[\"home\"] == t) | (df[\"away\"] == t)].loc[:, \"id\"].tolist() for t in self.teams}\n\n self._check_missing_columns(df)\n df = self._check_nan_values(df, teams_fixtures_ids)\n teams_fixtures_ids = self._discard_matches(df, teams_fixtures_ids)\n\n if not self._resume:\n # Use last n ids for predictions and last m ids for testing\n for t in self.last_season_teams:\n self.predict_fixtures_ids[t] = teams_fixtures_ids[t][-NPREDICT:]\n teams_fixtures_ids[t] = teams_fixtures_ids[t][:-NPREDICT]\n\n self.test_fixtures_ids[t] = teams_fixtures_ids[t][-self._ntest:]\n teams_fixtures_ids[t] = teams_fixtures_ids[t][:-self._ntest]\n\n # Rest of ids is 
counted as train set\n self.train_fixtures_ids = teams_fixtures_ids\n else:\n if self.teams_names_bitlen != self._model_settings[\"teams_names_bitlen\"]:\n raise ValueError(\"Current bitlength required to encode all teams names is higher than previous one.\")\n\n # Check whether teams has not changed\n if self.teams != self._model_settings[\"teams\"]:\n raise ValueError(\"Teams differ from previous run. \\n\"\n f\"New: {self.teams} \\n\"\n f\"Old: {self._model_settings['teams']}\")\n\n if self.last_season_teams != self._model_settings[\"last_season_teams\"]:\n raise ValueError(\"Last season teams differ from previous run. \\n\"\n f\"New: {self.last_season_teams} \\n\"\n f\"Old: {self._model_settings['last_season_teams']}\")\n\n # Check whether fixtures ids match from previous run\n for t in self.last_season_teams:\n predict_fixtures_ids = teams_fixtures_ids[t][-NPREDICT:]\n teams_fixtures_ids[t] = teams_fixtures_ids[t][:-NPREDICT]\n\n test_fixtures_ids = teams_fixtures_ids[t][-self._ntest:]\n teams_fixtures_ids[t] = teams_fixtures_ids[t][:-self._ntest]\n\n if predict_fixtures_ids != self._model_settings[\"predict_fixtures_ids\"][t]:\n raise ValueError(f\"{t} predict fixtures ids differ from previous run. \\n\"\n f\"New: {predict_fixtures_ids} \\n\"\n f\"Old: {self._model_settings['predict_fixtures_ids'][t]}\")\n if test_fixtures_ids != self._model_settings['test_fixtures_ids'][t]:\n raise ValueError(f\"{t} test fixtures ids differ from previous run. \\n\"\n f\"New: {test_fixtures_ids} \\n\"\n f\"Old: {self._model_settings['test_fixtures_ids'][t]}\")\n if teams_fixtures_ids[t] != self._model_settings['train_fixtures_ids'][t]:\n raise ValueError(f\"{t} train fixtures ids differ from previous run. \\n\"\n f\"New: {teams_fixtures_ids[t]} \\n\"\n f\"Old: {self._model_settings['train_fixtures_ids'][t]}\")\n\n # Checks passed, load previously saved data\n self.teams = self._model_settings[\"teams\"]\n self.last_season_teams = self._model_settings[\"last_season_teams\"]\n self.train_fixtures_ids = self._model_settings[\"train_fixtures_ids\"]\n self.test_fixtures_ids = self._model_settings[\"test_fixtures_ids\"]\n self.predict_fixtures_ids = self._model_settings[\"predict_fixtures_ids\"]\n\n self._check_season_gaps_in_teams_matches(df)\n\n # Split original dataset into train, test, and predict datasets\n df_train, df_test, df_predict = self._mask_out_dataset(df)\n self._get_unique_teams_from_datasets(df_train, df_test, df_predict)\n\n self._check_changes_in_teams()\n self._count_samples(df_train, df_test, df_predict)\n\n return df_train, df_test, df_predict", "def default_variation(random, candidates, args):\r\n return candidates", "def default_variation(random, candidates, args):\r\n return candidates", "def get_fixture(name):\n path = os.path.join(os.environ.get(\"WORKSPACE\", \"./\"), \"fixtures\", name)\n if not os.path.isfile(path):\n raise NotFound(\"File {} not found\".format(path))\n return path", "def test_defaults_are_overwritten(self, junit4_hooks, full_args):\n junit4_hooks._ignore_tests = \"this isn't even a list\"\n junit4_hooks._hamcrest_path = \"wrong/path\"\n junit4_hooks._junit_path = \"also/wrong/path\"\n junit4_hooks._reference_tests_dir = \"some/cray/dir\"\n junit4_hooks._timeout = 9999\n\n junit4_hooks.parse_args(full_args)\n\n assert junit4_hooks._ignore_tests == IGNORE_TESTS\n assert junit4_hooks._hamcrest_path == HAMCREST_PATH\n assert junit4_hooks._junit_path == JUNIT_PATH\n assert junit4_hooks._reference_tests_dir == RTD\n assert junit4_hooks._timeout == TIMEOUT", "def 
_log_fixtures(self, item, item_type, parent_item_id):\n if not item.tags:\n return\n for tag in item.tags:\n if not tag.startswith(\"fixture.\"):\n continue\n msg = f\"Using of '{tag[len('fixture.'):]}' fixture\"\n if self._cfg.log_layout is not LogLayout.SCENARIO:\n self._step_id = self._rp.start_test_item(\n name=msg,\n start_time=timestamp(),\n item_type=item_type,\n parent_item_id=parent_item_id,\n has_stats=False\n if self._cfg.log_layout is LogLayout.NESTED\n else True,\n )\n self._rp.finish_test_item(self._step_id, timestamp(), \"PASSED\")\n continue\n self._rp.log(\n timestamp(),\n msg,\n level=\"INFO\",\n item_id=parent_item_id,\n )", "def setUp(self):\n rebel[\"Name\"] = \"\"", "def setUp(self):\n self.app = load_app(self.application_under_test)\n\n try:\n teardown_db()\n except Exception as e:\n print('-> err ({})'.format(e.__str__()))\n\n setup_app(section_name=self.application_under_test)\n setup_db()\n\n fixtures_loader = FixturesLoader([BaseFixture]) # BaseFixture is already loaded in bootstrap\n fixtures_loader.loads(self.fixtures)", "def test_set_name_through_init(self) -> None:\n\n given = self.test_name\n expected = given\n\n helper = EnvironmentVariableHelper(given)\n actual = helper.name\n\n self.assertEqual(expected, actual)", "def _fixture_union(fixtures_dest,\n name, # type: str\n fix_alternatives, # type: Sequence[UnionFixtureAlternative]\n unique_fix_alt_names, # type: List[str]\n scope=\"function\", # type: str\n idstyle=\"compact\", # type: Optional[Union[str, Callable]]\n ids=None, # type: Union[Callable, Iterable[str]]\n autouse=False, # type: bool\n hook=None, # type: Callable[[Callable], Callable]\n caller=fixture_union, # type: Callable\n **kwargs):\n if len(fix_alternatives) < 1:\n raise ValueError(\"Empty fixture unions are not permitted\")\n\n # then generate the body of our union fixture. It will require all of its dependent fixtures and receive as\n # a parameter the name of the fixture to use\n @with_signature(\"%s(%s, request)\" % (name, ', '.join(unique_fix_alt_names)))\n def _new_fixture(request, **all_fixtures):\n # ignore the \"not used\" marks, like in @ignore_unused\n if not is_used_request(request):\n return NOT_USED\n else:\n _alternative = request.param\n if isinstance(_alternative, UnionFixtureAlternative):\n fixture_to_use = _alternative.alternative_name\n return all_fixtures[fixture_to_use]\n else:\n raise TypeError(\"Union Fixture %s received invalid parameter type: %s. 
Please report this issue.\"\n \"\" % (name, _alternative.__class__))\n\n if ids is None:\n ids = UnionIdMakers.get(idstyle)\n else:\n # resolve possibly infinite generators of ids here\n ids = resolve_ids(ids, fix_alternatives, full_resolve=False)\n\n # finally create the fixture per se.\n _make_fix = pytest_fixture(scope=scope or \"function\", params=fix_alternatives, autouse=autouse,\n ids=ids, hook=hook, **kwargs)\n new_union_fix = _make_fix(_new_fixture)\n\n # Dynamically add fixture to caller's module as explained in https://github.com/pytest-dev/pytest/issues/2424\n check_name_available(fixtures_dest, name, if_name_exists=WARN, caller=caller)\n setattr(fixtures_dest, name, new_union_fix)\n\n return new_union_fix", "def setUp(self):\n super(PlayTests, self).setUp(\n \"tests/data/shakespeare/\", \"structure.json\", \"brief_example.xml\")", "def _get_setup(self, dataset_name):\n for potential_setup in self.setup:\n for dataset in potential_setup[\"datasets\"]:\n if dataset_name in dataset:\n test_setup = potential_setup\n self.io_args.color = os.path.join(self.io_args.input_root, dataset)\n return test_setup", "def generate_name(self):\n name = self._generate_test_name()\n while self.exists(name):\n name = self._generate_test_name()\n return name", "def get_name_for_id(self):\n return self._id if self._id is not None else self.fixture", "def generate_yaml_tests(directory):\n for yml_file in directory.glob(\"*.yml\"):\n data = yaml.safe_load(yml_file.read_text())\n assert \"cases\" in data, \"A fixture needs cases to be used in testing\"\n\n # Strip the parts of the directory to only get a name without\n # extension and resolver directory\n base_name = str(yml_file)[len(str(directory)) + 1:-4]\n\n base = data.get(\"base\", {})\n cases = data[\"cases\"]\n\n for resolver in 'legacy', '2020-resolver':\n for i, case_template in enumerate(cases):\n case = base.copy()\n case.update(case_template)\n\n case[\":name:\"] = base_name\n if len(cases) > 1:\n case[\":name:\"] += \"-\" + str(i)\n case[\":name:\"] += \"*\" + resolver\n case[\":resolver:\"] = resolver\n\n skip = case.pop(\"skip\", False)\n assert skip in [False, True, 'legacy', '2020-resolver']\n if skip is True or skip == resolver:\n case = pytest.param(case, marks=pytest.mark.xfail)\n\n yield case", "def test_0():\n sync.gen_multi_fake_data()#default is only one randomly selected data set\n sync.main(testing=True)", "def _initNames(self):\n self.outselect = os.path.join(self.workpath, 'FT1_selected'+self.suffix+'.fits')\n self.outmktime = os.path.join(self.workpath, 'FT1_filtered'+self.suffix+'.fits')\n self.outltcube = os.path.join(self.workpath, 'LtCube'+self.suffix+'.fits')\n self.outbincub = os.path.join(self.workpath, 'BinCube'+self.suffix+'.fits')\n self.outbinmap = os.path.join(self.workpath, 'CMAP'+self.suffix+'.fits')\n self.outbinexp = os.path.join(self.workpath, 'BinExpMap'+self.suffix+'.fits')\n self.outexpmap = os.path.join(self.workpath, 'ExpMap'+self.suffix+'.fits')\n self.outsrcmap = os.path.join(self.workpath, 'SrcMaps'+self.suffix+'.fits')\n self.outgtlike = os.path.join(self.workpath, 'Results'+self.suffix+'.dat')\n self.outmodel = os.path.join(self.workpath, 'OutModel'+self.suffix+'.xml')\n self.outapert = os.path.join(self.workpath, 'LC_ApPhoto'+self.suffix+'.fits')\n self.outgtmod = os.path.join(self.workpath, 'GtModel'+self.suffix+'.fits')\n self.outresid = os.path.join(self.workpath, 'Resid'+self.suffix+'.fits')\n self.outresig = os.path.join(self.workpath, 'ResSigma'+self.suffix+'.fits')\n self.outtsmap = 
os.path.join(self.workpath, 'TSMmap'+self.suffix+'.fits')\n return\n # self.outfind = self.dir + self.src + '_FindSrc'+self.suffix+'.txt'", "async def _nextfixtures(self, ctx: commands.Context, league_id: str):\n headers = ['ID', 'Home', ' ', 'Away', 'Date']\n data = await self._get_league_fixtures_timeframe(ctx.message.server.id, league_id, 'n7')\n\n await self.bot.say('```diff\\n+ Next fixtures```')\n pretty_data = []\n for fixture in data['fixtures']:\n pretty_data.append([\n fixture['id'],\n '[{}] {}'.format(fixture['homeTeamId'], fixture['homeTeamName']),\n ' - ',\n '[{}] {}'.format(fixture['awayTeamId'], fixture['awayTeamName']),\n fixture['date']\n ])\n\n await self.bot.say(box(tabulate(pretty_data, headers=headers)))", "def generate_artificial_names(seed=\"\", num_names=1):\n generated_names = []\n \n stop = False\n while not stop:\n # generate names more than needed as some names may exist in real life\n num_needed_names = (num_names - len(generated_names)) * 3 // 2\n names = generate_names(seed=seed, num_names=num_needed_names)\n \n # check whether names are in dataset or not\n for name in names:\n if not is_real_name(name):\n generated_names.append(name)\n if len(generated_names) == num_names:\n stop = True\n break\n \n return generated_names", "def generate_artificial_names(seed=\"\", num_names=1):\n generated_names = []\n \n stop = False\n while not stop:\n # generate names more than needed as some names may exist in real life\n num_needed_names = (num_names - len(generated_names)) * 3 // 2\n names = generate_names(seed=seed, num_names=num_needed_names)\n \n # check whether names are in dataset or not\n for name in names:\n if not is_real_name(name):\n generated_names.append(name)\n if len(generated_names) == num_names:\n stop = True\n break\n \n return generated_names", "def generate_yaml_tests(directory):\n for yml_file in directory.glob(\"*/*.yml\"):\n data = yaml.safe_load(yml_file.read_text())\n assert \"cases\" in data, \"A fixture needs cases to be used in testing\"\n\n # Strip the parts of the directory to only get a name without\n # extension and resolver directory\n base_name = str(yml_file)[len(str(directory)) + 1:-4]\n\n base = data.get(\"base\", {})\n cases = data[\"cases\"]\n\n for i, case_template in enumerate(cases):\n case = base.copy()\n case.update(case_template)\n\n case[\":name:\"] = base_name\n if len(cases) > 1:\n case[\":name:\"] += \"-\" + str(i)\n\n if case.pop(\"skip\", False):\n case = pytest.param(case, marks=pytest.mark.xfail)\n\n yield case", "def setUp(self):\n Pet.remove_all()", "def load_fixtures(self, dbname, table, data):\n db = self.databases[dbname]['db']\n db.execute('BEGIN')\n for row in data:\n columns = row.keys()\n q = db.Insert(table, cols=columns)\n db.execute(q, row)\n db.execute('COMMIT')", "def tearDown(self):\n try:\n os.remove(self.fixture_file)\n except OSError:\n pass", "def testInitializeNameMapping(self) -> None:\n self._nameClassifierBuilder._initializeNameMapping()\n self.assertEquals(type(self._nameClassifierBuilder._currentNameMapping), list)\n solution = [0.0,0.0,0.0,0.0,0.0,0.0]\n self.assertEquals(solution, self._nameClassifierBuilder._currentNameMapping)", "def test_name_detection(self):\n self.project.name = ''\n self.project.detect_name()\n self.assertEqual(\"Kobol's Last Gleaming\", self.project.name)", "def tearDown(self):\n try:\n os.remove(self.fixtureFile)\n except OSError:\n pass", "def get_step_fixture_name(name, type_, encoding=None):\n return \"pytestbdd_{type}_{name}\".format(\n type=type_, 
name=force_encode(name, **(dict(encoding=encoding) if encoding else {}))\n )", "def test_step_from_asdf_noname():\n root = 'jwst_generic_pars-makeliststep_0002'\n config_file = t_path(\n Path('steps') / (root + '.asdf')\n )\n step = Step.from_config_file(config_file)\n assert isinstance(step, MakeListStep)\n assert step.name == root\n\n results = step.run()\n assert results == DEFAULT_RESULT", "def setUp(self):\n self.ConFem_ = ConFem.ConFem()\n self.ConSimFem_ = ConSimFem.ConSimFem()\n self.NameLog = \"../_DataShellsSlabs/tmp\"", "def test_get_default(self):\n self.cube_1.var_name = 'fix'\n self.assertIs(self.cube_1, self.fix.get_cube_from_list(self.cubes))", "def fixture_name(self):\n return 'amino_acid_insertion'", "def fixturize(app=\"All\"):\n\n if app == \"All\":\n local('python manage.py dumpdata resources > resources/fixtures/resources.json')\n local('python manage.py dumpdata military > military/fixtures/military.json')\n local('python manage.py dumpdata arenas > arenas/fixtures/arena.json')\n local('python manage.py dumpdata sciences > sciences/fixtures/technologies.json')\n local('python manage.py dumpdata auth.Group > fixtures/groups.json')\n elif app == \"resource\":\n local('python manage.py dumpdata resources > resources/fixtures/resources.json')\n elif app == \"military\":\n local('python manage.py dumpdata military > military/fixtures/military.json')\n elif app == \"arena\":\n local('python manage.py dumpdata arenas > arenas/fixtures/arena.json')\n elif app == \"sciences\":\n local('python manage.py dumpdata sciences > sciences/fixtures/technologies.json')\n elif app == \"groups\":\n local('python manage.py dumpdata auth.Group > fixtures/groups.json')", "def testRecoveryUseDefaultNaming(self):\n expected = (IMAGE_SERVER_PREFIX + '/stable-channel/x86-alex/0.12.433.269',\n ['chromeos', '0.12.433.269', 'x86-alex', 'recovery',\n 'stable-channel', 'mp', '.bin'])\n actual = cb_name_lib.GetRecoveryName(self.board,\n self.version_string,\n 0)\n self.assertEqual(expected, actual)", "def fixture_make_unique_name():\n def _make_unique_name(prefix):\n return f\"{prefix}{time.time_ns()}\"\n return _make_unique_name", "def _get_file_name(name: types.TSeedName) -> str:\n return f\"{name}.yml\"", "def _assert_samples_have_names(self):\n try:\n # before merging, which is requires sample_name attribute to map\n # sample_table rows to subsample_table rows,\n # perform only sample_name attr derivation\n if SAMPLE_NAME_ATTR in self[CONFIG_KEY][SAMPLE_MODS_KEY]\\\n [DERIVED_KEY][DERIVED_ATTRS_KEY]:\n self.attr_derive(attrs=[SAMPLE_NAME_ATTR])\n except KeyError:\n pass\n for sample in self.samples:\n if SAMPLE_NAME_ATTR not in sample:\n msg_base = \"{st} is missing '{sn}' column; \".\\\n format(st=CFG_SAMPLE_TABLE_KEY, sn=SAMPLE_NAME_ATTR)\n msg = msg_base + \\\n \"you must specify {sn}s in {st} or derive them\".\\\n format(st=CFG_SAMPLE_TABLE_KEY, sn=SAMPLE_NAME_ATTR)\n if self.st_index != SAMPLE_NAME_ATTR:\n setattr(sample, SAMPLE_NAME_ATTR,\n getattr(sample, self.st_index))\n _LOGGER.warning(msg_base +\n \"using specified {} index ({}) instead. 
\"\n \"Setting name: {}\".\n format(CFG_SAMPLE_TABLE_KEY, self.st_index,\n getattr(sample, self.st_index)))\n else:\n raise InvalidSampleTableFileException(msg)", "def _prepare_test_list(self, test_name):\n test_yaml_file_name = f'opnfv-{test_name}.yaml'\n scenario_file_name = os.path.join(self.rally_scenario_dir,\n test_yaml_file_name)\n\n if not os.path.exists(scenario_file_name):\n scenario_file_name = os.path.join(self.scenario_dir,\n test_yaml_file_name)\n\n if not os.path.exists(scenario_file_name):\n raise Exception(\n f\"The scenario '{scenario_file_name}' does not exist.\")\n\n LOGGER.debug('Scenario fetched from : %s', scenario_file_name)\n test_file_name = os.path.join(self.temp_dir, test_yaml_file_name)\n\n if not os.path.exists(self.temp_dir):\n os.makedirs(self.temp_dir)\n\n self.apply_blacklist(scenario_file_name, test_file_name)\n return test_file_name", "def __run_class_setup_fixtures(self):\n self.__run_class_fixtures(\n self.STAGE_CLASS_SETUP,\n self.class_setup_fixtures + [ self.classSetUp ],\n self.EVENT_ON_RUN_CLASS_SETUP_METHOD,\n self.EVENT_ON_COMPLETE_CLASS_SETUP_METHOD,\n )", "def setUp(self):\n self.epath = 'flyeye/tests/fixtures'\n self.dpath = join(self.epath, 'disc.silhouette')", "def test_entity_default_name(self):\n self.request.log(\"Hello World\", entities=(Entity(\"entity\")(12),))\n self.request.end()\n entry = self.get_entry()\n assert 'entities' in entry\n assert len(entry['entities']) == 1\n assert entry['entities'][0] == dict(entity=\"entity\", id=12, name=\"12\")", "def create_fixtures(tables_list, project_path, fixtures_path, fixture_name):\n tables = _prepare_tables_string(tables_list)\n _prepare_fixtures_dir(fixtures_path)\n command = _build_command(tables, fixtures_path, fixture_name)\n os.chdir(project_path)\n os.system(command)\n return 0", "async def test_ignore_default_name(hass: HomeAssistant) -> None:\n entry = MockConfigEntry(\n domain=DOMAIN,\n )\n entry.add_to_hass(hass)\n\n assert await hass.config_entries.async_setup(entry.entry_id)\n await hass.async_block_till_done()\n\n before_entity_count = len(hass.states.async_entity_ids())\n inject_bluetooth_service_info(\n hass,\n replace(\n BLUECHARM_BEACON_SERVICE_INFO_DBUS,\n name=BLUECHARM_BEACON_SERVICE_INFO_DBUS.address,\n ),\n )\n await hass.async_block_till_done()\n assert len(hass.states.async_entity_ids()) == before_entity_count", "def test_return_all_names(self):\n test = self.data.return_all(first_name='Nik', last_name='Silver')\n self.assertEqual(test[0].first_name, 'Nik')\n\n test_2 = self.data.return_all(first_name='Trevor', last_name='Harvey')\n self.assertEqual(test_2[0].last_name, 'Harvey')", "def test_init_default(self):\n self._test_init_default()", "def _reset_seeds(self) -> None:\n self._seeds = [None for _ in range(self.num_envs)]", "def test_resetTerrain_default(self):\n check_attr(self.o, 'resetTerrain')\n self.subtest_noAgent(self.o)\n self.assertIsNone(self.o.resetTerrain(), \"no output expected\")\n self.subtest_noAgent(self.o)", "def test_correct_dataset_found_by_name(self):\n dataset_name = 'my_unlikely_dataset_name'\n dataset = factories.SourceDatasetFactory.create(\n dataset_name=dataset_name,\n source_study_version=self.source_study_version\n )\n url = self.get_url(self.study.pk)\n response = self.client.get(url, {'q': dataset_name})\n returned_pks = get_autocomplete_view_ids(response)\n self.assertEqual(returned_pks, [dataset.i_id])" ]
[ "0.62607694", "0.6084847", "0.59035265", "0.58864886", "0.5782445", "0.56454164", "0.5598075", "0.5590924", "0.55757374", "0.55625635", "0.5513001", "0.5497023", "0.54902035", "0.5441633", "0.5436192", "0.5364549", "0.536005", "0.535958", "0.53501177", "0.5234277", "0.521221", "0.5210105", "0.51943034", "0.5190611", "0.51895875", "0.51807374", "0.51774925", "0.5143355", "0.51406276", "0.5123066", "0.5115242", "0.5106963", "0.5097396", "0.5093395", "0.50658506", "0.5038724", "0.5034177", "0.5021871", "0.50148076", "0.5013417", "0.500856", "0.5001186", "0.5001137", "0.49983704", "0.499154", "0.4980997", "0.49725658", "0.49612477", "0.4946997", "0.49267635", "0.49245542", "0.49121368", "0.49053627", "0.4904955", "0.4904955", "0.4900058", "0.48976505", "0.4896951", "0.48834375", "0.48829237", "0.4880667", "0.48729172", "0.4871829", "0.48703876", "0.48449063", "0.48384425", "0.48364282", "0.48342985", "0.48253468", "0.48240256", "0.48205343", "0.48205343", "0.48100188", "0.48034966", "0.48019835", "0.479865", "0.4796033", "0.47905374", "0.4786642", "0.4782725", "0.47753552", "0.47736964", "0.47732362", "0.47728026", "0.47634467", "0.47605687", "0.47565678", "0.47555038", "0.47552696", "0.47515574", "0.4742911", "0.47391486", "0.47334954", "0.47326276", "0.4727682", "0.47260192", "0.4725561", "0.47074375", "0.4705981", "0.4702907" ]
0.7567539
0
Starting a subprocess should be possible.
async def test_subprocess(event_loop): proc = await asyncio.subprocess.create_subprocess_exec( sys.executable, '--version', stdout=asyncio.subprocess.PIPE, loop=event_loop) await proc.communicate()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Start(self):\n\n\n\n assert not self._process, 'Start() can only be called once'\n self._process = subprocess.Popen(self._args)", "def run_subprocess(cmd):\n subprocess.Popen(cmd, stdin =subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdout=subprocess.PIPE,\n shell=True,)", "def start(self):\r\n return self.start_subprocess()", "def start(self):\n self._proc = self._get_subprocess()\n self._pid = self._proc.pid\n self._return_code = None", "def spawn(self):\n self._proc = subprocess.Popen(\n self._args, stdout=subprocess.PIPE, stderr=subprocess.PIPE\n )", "def run(cmd):\n print('running', cmd)\n proc = sp.Popen([cmd], shell=True)\n proc.wait()\n assert proc.poll() == 0", "def open_subprocess(self, args_, subprocess_key=None):\n\n if subprocess_key in self.subprocess and self.subprocess[subprocess_key].poll is not None:\n # TODO better error class\n\n raise AssertionError(\"process '%s'(pid:%s) already exist and still running\" % (\n subprocess_key, self.subprocess[subprocess_key].pid))\n\n child_process = subprocess.Popen(args_)\n if subprocess_key is None:\n subprocess_key = str(child_process.pid)\n self.subprocess[subprocess_key] = child_process\n str_args = \" \".join(map(str, args_))\n self.log(\"open subprocess pid:%s, cmd='%s'\" % (child_process.pid, str_args))\n\n return child_process.pid", "def start_process(cmd, supress_output=False):\n logging.debug(cmd)\n logging.error(\"[tony]cmd:%r\" % (cmd))\n proc = subprocess.Popen(cmd, stdout=None, stderr=subprocess.PIPE)\n out, err = proc.communicate()\n rtn_code = proc.returncode\n\n if supress_output is False:\n if out:\n logging.info(out)\n if err:\n logging.error(err)\n\n if rtn_code == 0 or rtn_code is None:\n logging.info('Success: Process return code %s', str(rtn_code))\n else:\n logging.error('Error: Process return code %s', str(rtn_code))\n sys.exit(1)", "def start(self):\n if self.subcommand:\n os.execv(self.subcommand, [self.subcommand] + self.argv[1:])\n raise NoStart()\n \n if self.subapp:\n self.subapp.start()\n raise NoStart()\n \n if self.generate_config:\n self.write_default_config()\n raise NoStart()", "def run_subprocess(args, work_dir):\n process = subprocess.Popen(args, cwd=work_dir)\n process.communicate()\n assert process.returncode == 0", "def start():\n global running\n # os.system('python3 /Users/bowenwaugh/Documents/GA/GA_Puzzles/simple.py')\n global process\n process = Popen(['python3', '/Users/bowenwaugh/Documents/GA/GA_Puzzles/simple.py'])\n running = True", "def start_process():\n global command, process\n\n def on_data(data):\n data = data.decode().strip()\n print('{}'.format(data))\n\n cmd = command.split(' ')\n\n if process:\n process.terminate()\n\n process = MySubprocess(cmd, -1, functools.partial(on_data), None, None)", "def subprocess_Popen(command, **params):\r\n startupinfo = None\r\n if os.name == 'nt':\r\n startupinfo = subprocess.STARTUPINFO()\r\n try:\r\n startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW\r\n except AttributeError:\r\n startupinfo.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW\r\n\r\n # Anaconda for Windows does not always provide .exe files\r\n # in the PATH, they also have .bat files that call the corresponding\r\n # executable. 
For instance, \"g++.bat\" is in the PATH, not \"g++.exe\"\r\n # Unless \"shell=True\", \"g++.bat\" is not executed when trying to\r\n # execute \"g++\" without extensions.\r\n # (Executing \"g++.bat\" explicitly would also work.)\r\n params['shell'] = True\r\n\r\n # Using the dummy file descriptors below is a workaround for a\r\n # crash experienced in an unusual Python 2.4.4 Windows environment\r\n # with the default None values.\r\n stdin = None\r\n if \"stdin\" not in params:\r\n stdin = open(os.devnull)\r\n params['stdin'] = stdin.fileno()\r\n\r\n try:\r\n proc = subprocess.Popen(command, startupinfo=startupinfo, **params)\r\n finally:\r\n if stdin is not None:\r\n del stdin\r\n return proc", "def spawn_subprocess(args, loop=None):\n if not _IS_XOS_ASYNC:\n return spawn_subprocess_not_xos(args, loop=loop)\n else:\n return spawn_subprocess_xos(args, loop=loop)", "def _start_runner(self, spec):\n pid = os.fork()\n if pid:\n # Parent.\n return pid\n # Child.\n #\n # Set the environment variable which tells the runner that it's\n # running under bin/master control. This subtly changes the error\n # behavior of bin/runner.\n env = {'MAILMAN_UNDER_MASTER_CONTROL': '1'}\n # Craft the command line arguments for the exec() call.\n rswitch = '--runner=' + spec\n # Wherever master lives, so too must live the runner script.\n exe = os.path.join(config.BIN_DIR, 'runner')\n # config.PYTHON, which is the absolute path to the Python interpreter,\n # must be given as argv[0] due to Python's library search algorithm.\n args = [sys.executable, sys.executable, exe, rswitch]\n # Always pass the explicit path to the configuration file to the\n # sub-runners. This avoids any debate about which cfg file is used.\n config_file = (config.filename if self._config_file is None\n else self._config_file)\n args.extend(['-C', config_file])\n log = logging.getLogger('mailman.runner')\n log.debug('starting: %s', args)\n # We must pass this environment variable through if it's set,\n # otherwise runner processes will not have the correct VAR_DIR.\n var_dir = os.environ.get('MAILMAN_VAR_DIR')\n if var_dir is not None:\n env['MAILMAN_VAR_DIR'] = var_dir\n # For the testing framework, if these environment variables are set,\n # pass them on to the subprocess.\n for envvar in PRESERVE_ENVS:\n if envvar in os.environ:\n env[envvar] = os.environ[envvar]\n args.append(env)\n os.execle(*args)\n # We should never get here.\n raise RuntimeError('os.execle() failed')", "def run_subprocess(self, *cmd_and_args):\n\n command_line = \" \".join(cmd_and_args)\n self.logger.debug(\"Running: %s\", command_line)\n\n return subprocess.Popen(command_line, shell=True, close_fds=True)", "def start_subprocess(self):\r\n errmsg = ('\\n\\nPlease install GNU Octave and put it in your path\\n')\r\n ON_POSIX = 'posix' in sys.builtin_module_names\r\n if self.use_pty:\r\n master, slave = pty.openpty()\r\n self.wfid, self.rfid = master, master\r\n rpipe, wpipe = slave, slave\r\n else:\r\n self.rfid, wpipe = os.pipe()\r\n rpipe, self.wfid = os.pipe()\r\n kwargs = dict(close_fds=ON_POSIX, bufsize=0, stdin=rpipe,\r\n stderr=wpipe, stdout=wpipe)\r\n if os.name == 'nt':\r\n startupinfo = subprocess.STARTUPINFO()\r\n startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW\r\n kwargs['startupinfo'] = startupinfo\r\n try:\r\n proc = subprocess.Popen(['octave', '-q', '--braindead'],\r\n **kwargs)\r\n except OSError: # pragma: no cover\r\n raise Oct2PyError(errmsg)\r\n else:\r\n self.reader = _Reader(self.rfid, self.read_queue)\r\n return proc", "def 
run_starter(self, expect_to_fail=False):\n logging.info(\"running starter \" + self.name)\n args = [self.cfg.bin_dir / \"arangodb\"] + self.hotbackup_args + self.default_starter_args + self.arguments\n\n lh.log_cmd(args)\n self.instance = psutil.Popen(args)\n logging.info(\"my starter has PID:\" + str(self.instance.pid))\n if not expect_to_fail:\n self.wait_for_logfile()\n self.wait_for_port_bind()", "def start_comp(command_line, log='', env='', foreground='no', no_stdin = 'yes'):\n proc_title_argv = command_line.split()\n\n if proc_title_argv[0] == 'taskset':\n real_program = proc_title_argv[3]\n else:\n real_program = proc_title_argv[0]\n\n # first test shared library link\n try:\n can_find_all_shared_libs(real_program)\n except IOError, e:\n print e;\n raise\n\n my_stdout = None\n my_stderr = None\n my_stdin = None\n if (no_stdin == 'yes'):\n my_stdin = open('/dev/null', 'r')\n\n if log:\n dir = os.path.dirname(log)\n if dir:\n try:\n exist_ok_makedirs(dir, 0777)\n except OSError, (errno, strerror):\n sys.stderr.write('%s: %s\\n' % (dir, strerror))\n raise\n try:\n log_fd = open(log, \"w\")\n except IOError, (errno, strerror):\n print 'cannot open %s: %s' % (log, strerror)\n raise\n else:\n my_stdout = log_fd\n my_stderr = subprocess.STDOUT\n\n #command = [ path ]\n #if options:\n # command += options\n\n my_env = {}\n if env != '':\n env_list = env.split('\\t')\n env_val = '%s:%s' % (env_list[1], env_list[2])\n my_env[env_list[0]] = env_val\n\n try:\n p = subprocess.Popen(proc_title_argv, shell = False,\n # stdin = subprocess.PIPE,\\\n #stdin = None,\n stdin = my_stdin,\n stdout = my_stdout,\n stderr = my_stderr,#)\n env = my_env)\n except OSError, (errno, strerror):\n #sys.exit('cannot execute %s: %s' % (path, strerror))\n print 'cannot execute %s: %s' % (real_program, strerror)\n raise\n except ValueError, strerror:\n #sys.exit('subprocess.Popen value error: %s' %strerror)\n print 'subprocess.Popen value error: %s' % (strerror)\n raise\n\n #proc_name = os.path.basename(path)\n if proc_title_argv[0] == 'taskset':\n try:\n proc_name = os.path.basename(proc_title_argv[3])\n except IndexError,e:\n print \"path: \", path\n sys.exit(e)\n else:\n proc_name = os.path.basename(proc_title_argv[0])\n\n max_retry = 20\n retry = 0\n while True:\n if retry == max_retry:\n sys.exit('cannot exec. 
%s' % proc_name)\n\n #if kill_proc_exact.lookup_process_exact(proc):\n if get_pids_exact(proc_name):\n break;\n else:\n time.sleep(0.1)\n retry += 1\n\n if foreground == 'yes':\n try:\n p.wait()\n except KeyboardInterrupt, strerror:\n pass", "def _subprocess(cmd):\n\n log.debug('Running: \"%s\"', \" \".join(cmd))\n try:\n proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)\n ret = salt.utils.stringutils.to_unicode(proc.communicate()[0]).strip()\n retcode = proc.wait()\n\n if ret:\n return ret\n elif retcode != 1:\n return True\n else:\n return False\n except OSError as err:\n log.error(err)\n return False", "def StartCmd(args, cwd=None, shell=False, env=None):\n _ValidateAndLogCommand(args, cwd, shell)\n return Popen(\n args,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=shell,\n cwd=cwd,\n env=env)", "def start(self):\n if self._is_launched.is_set():\n self._log(\"warning\", \"try to start an already started process\")\n return False\n\n self._popen = Popen(shlex.split(self.command), bufsize=0, executable=None, stdin=PIPE, stdout=PIPE,\n stderr=self.stderr, close_fds=False, shell=False, cwd=None, env=None,\n universal_newlines=True, startupinfo=None, creationflags=0,\n preexec_fn=lambda: os.nice(self._priority))\n\n self._defunctdog_thread.start()\n self._stdin_thread.start()\n self._stdout_thread.start()\n register_thread(self)\n self._is_launched.set()\n self._is_running.set()", "def start_bot(self):\n self.proc = subprocess.Popen(\"./start\", stdin=subprocess.PIPE,\n\t\t\t\t\t\t\t\t\t stdout=subprocess.PIPE,\n\t\t\t\t\t\t\t\t\t cwd=os.path.abspath(self.path))", "def run(cmd: str) -> None:\n subprocess.run(cmd, shell=True, check=True)", "def _run_command(args):\n subprocess.run(args, check=True)", "def subprocess_run(self, *args):\n return self.testdir.runpytest_subprocess(*args)", "def test_ducts_with_subprocess(self):\n assert_that(SUBPROCESS_TEST_SCRIPT).exists()\n proc = None\n parent = None\n try:\n parent = MessageDuctParent.psuedo_anonymous_parent_duct()\n parent.bind()\n proc = subprocess.Popen(\n [sys.executable, SUBPROCESS_TEST_SCRIPT, parent.listener_address], env={'PYTHONPATH': ROOT_DIR}\n )\n assert_that(parent.listen()).is_true()\n for _ in range(100):\n parent.send(\"pingpong\")\n parent.poll(1)\n assert_that(parent.recv()).is_equal_to(\"pingpong\")\n parent.send(None)\n time.sleep(1)\n finally:\n if parent:\n parent.close()\n if proc:\n proc.terminate()", "def call_subprocess_Popen(command, **params):\r\n if 'stdout' in params or 'stderr' in params:\r\n raise TypeError(\"don't use stderr or stdout with call_subprocess_Popen\")\r\n null = open(os.devnull, 'wb')\r\n # stdin to devnull is a workaround for a crash in a weird Windows\r\n # environement where sys.stdin was None\r\n params.setdefault('stdin', null)\r\n params['stdout'] = null\r\n params['stderr'] = null\r\n p = subprocess_Popen(command, **params)\r\n p.wait()\r\n return p.returncode", "def spawn():\n if platform.system() == \"Windows\":\n # HACK https://github.com/prompt-toolkit/python-prompt-toolkit/issues/1243#issuecomment-706668723\n # FIXME Use pexpect or wexpect somehow to fix this\n pytest.xfail(\n \"pexpect fails on Windows\",\n )\n # Using PopenSpawn, although probably it would be best to use pexpect.spawn\n # instead. 
However, it's working fine and it seems easier to fix in the\n # future to work on Windows (where, this way, spawning actually works; it's just\n # python-prompt-toolkit that rejects displaying a TUI)\n return PopenSpawn", "def fork(self):\n try:\n pid = os.fork()\n if pid > 0:\n sys.exit(0)\n except OSError as e:\n sys.stderr.write(\"Fork failed: %d (%s)\\n\" % (e.errno, e.strerror))\n sys.exit(1)", "async def _run_subprocess(\n cmd: str,\n allow_params: bool,\n params: Dict[str, ParamValueT],\n) -> Dict[str, Any]:\n cmd_str = cmd\n if allow_params:\n if params[\"shell_params\"] == []:\n cmd_str = cmd.format([''])\n else:\n cmd_str = cmd.format(*params.get('shell_params', ['']))\n\n logging.info(\"Running command: %s\", cmd_str)\n\n cmd_list = shlex.split(cmd_str)\n\n process = await asyncio.create_subprocess_exec(\n *cmd_list,\n stdout=asyncio.subprocess.PIPE,\n stderr=asyncio.subprocess.PIPE,\n )\n\n stdout, stderr = await process.communicate()\n\n return {\n \"returncode\": process.returncode,\n \"stdout\": stdout.decode(),\n \"stderr\": stderr.decode(),\n }", "def call(*args, **kwargs):\n return Popen(*args, **kwargs).wait()", "def test_application_start():\n\n process = subprocess.Popen(['python', 'runserver.py'],\n stderr=subprocess.STDOUT,\n stdout=subprocess.PIPE)\n\n assert process.pid\n debug_logging = process.stdout.read(100)\n process.kill()\n assert 'Starting application' in debug_logging", "def start_program(program, program_name, VD):\n \n if not is_running(program):\n VD['terminal_output'] += f\"> Starting {program_name}.\\n\"\n subprocess.Popen([program])\n #subprocess.call([\"xdotool\", \"search\", \"--name\", \"spotify\", \"\")\n else:\n VD['terminal_output'] += f\"> {program_name} is already running.\\n\"", "def subprocess_nowait(cmd, shell=False, cwd=None, env=None):\n # type: (str, bool, str, dict) -> subprocess.Process\n return subprocess.Popen(cmd, shell=shell, cwd=cwd, env=env)", "def Run(self) -> None:\n logging.info(\"Running %s in a subprocess...\", self)\n self.stdout = tempfile.TemporaryFile()\n self.stderr = tempfile.TemporaryFile()\n self.begin_time = time.time()\n env = os.environ.copy()\n # Give each test program a separate test_tmpdir so they don't overwrite\n # each other when running in parallel.\n env[\"TEST_TMPDIR\"] = tempfile.mkdtemp()\n # Bazel's test sharding protocol:\n # https://docs.bazel.build/versions/master/test-encyclopedia.html\n if self.total_shards > 1:\n env[\"TEST_TOTAL_SHARDS\"] = str(self.total_shards)\n env[\"TEST_SHARD_INDEX\"] = str(self.shard_id)\n\n self.subprocess = subprocess.Popen(\n [_GetPython(), self.path], stdout=self.stdout, stderr=self.stderr,\n env=env)", "def run_setup(command):\n print 'Executing command:', command\n print '=========='\n cmd = command.split()\n p1 = subprocess.Popen([cmd[0], cmd[1], cmd[2]])\n p1.wait()\n p2 = subprocess.Popen(cmd[4], stdout=subprocess.PIPE)\n p2.wait()\n p3 = subprocess.Popen(\n cmd[6], stdin=p2.stdout, stdout=subprocess.PIPE, bufsize=1,\n preexec_fn=os.setsid\n )\n return p3", "def test_subprocess_fails_with_no_command(self):\n with self.assertRaises(ValueError):\n LazySubprocessTester([])", "def _spawn(self, protocol, args, env=None):\n return reactor.spawnProcess(protocol, self.cmd, args, env=env)", "def start_server_proc(event, server_cmd, checking_env):\n proc = subprocess.Popen(server_cmd, env=checking_env)\n\n # Blocking termination until event is set.\n event.wait()\n\n # If proc is still running, stop it.\n if proc.poll() is None:\n proc.terminate()", "def 
test_wait_true_shortproc(self):\n child = PtyProcess.spawn(['true'])\n # Wait so we're reasonable sure /bin/true has terminated\n time.sleep(0.2)\n self.assertEqual(child.wait(), 0)", "def Spawn(proc):\n proc.start()\n return proc", "def test_startProcessAlreadyStarted(self):\r\n self.pm.addProcess(\"foo\", [\"foo\"])\r\n self.pm.startProcess(\"foo\")\r\n self.assertIdentical(None, self.pm.startProcess(\"foo\"))", "def start(self):\n cmd = self.doCommand(self.args)\n if cmd is not None:\n cmd.join()\n else:\n self.out = self.error", "def run(self, args=(), with_chroot=False, blocking=True, setsid=False, **kw):\n self.clean_environment()\n\n cmdline = self.cmdline(args)\n TRACER.log('PEX.run invoking %s' % ' '.join(cmdline))\n process = subprocess.Popen(\n cmdline,\n cwd=self._pex if with_chroot else os.getcwd(),\n preexec_fn=os.setsid if setsid else None,\n **kw)\n return process.wait() if blocking else process", "def subprocess_setup():\n import signal\n signal.signal(signal.SIGPIPE, signal.SIG_DFL)", "def run(*args, **kwargs):\n kwargs[\"check\"] = True\n print(\"+\", \" \".join(args[0]))\n return subprocess.run(*args, **kwargs)", "def sanity_check_process(self):\n assert_equals(self.proc.returncode, None)\n time.sleep(1)", "def spawn(self, classpath, main, jvm_options=None, args=None, **subprocess_args):\r\n cmd = self._create_command(*self._scrub_args(classpath, main, jvm_options, args))\r\n return self._spawn(cmd, **subprocess_args)", "def shell(cmd):\n print('Running \"{}\"...'.format(cmd))\n subprocess.check_call(cmd, shell=True)", "def reallyStartProcess(self, name):\n if name in self.protocols:\n return\n p = self.protocols[name] = DelayedStartupLoggingProtocol()\n p.service = self\n p.name = name\n procObj, env, uid, gid = self.processes[name]\n self.timeStarted[name] = time.time()\n\n childFDs = {0: \"w\", 1: \"r\", 2: \"r\"}\n\n childFDs.update(procObj.getFileDescriptors())\n\n procObj.starting()\n\n args = procObj.getCommandLine()\n\n self._reactor.spawnProcess(\n p, args[0], args, uid=uid, gid=gid, env=env,\n childFDs=childFDs\n )", "def spawn(self, arguments=None, environment=None):\n return subprocess.Popen(\n args=[self.executable] + ([] or arguments),\n # do not redirect std streams\n # this fakes the impression of having just one program running\n stdin=None,\n stdout=None,\n stderr=None,\n env=environment,\n )", "def callSubprocess(args, test=False):\n print(Fore.MAGENTA),\n for arg in args: \n print arg,\n print(Fore.WHITE)\n if not test: \n subprocess.call(args)", "def make_subprocess(cmdline, stdout=False, stderr=False, stdin=False,\n universal_newlines=False, close_fds=True, env=None):\n LOG.info(\"Running cmd '%s'\" % \" \".join(cmdline))\n kwargs = {}\n kwargs['stdout'] = stdout and subprocess.PIPE or None\n kwargs['stderr'] = stderr and subprocess.PIPE or None\n kwargs['stdin'] = stdin and subprocess.PIPE or None\n kwargs['universal_newlines'] = universal_newlines\n kwargs['close_fds'] = close_fds\n kwargs['env'] = env\n try:\n proc = subprocess.Popen(cmdline, **kwargs)\n except OSError, e: # noqa\n if e.errno == errno.ENOENT:\n raise CommandNotFound\n else:\n raise\n return proc", "def run(cmd):\n \n proc = subprocess.Popen (cmd, \n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=True\n )\n stdout_value, stderr_value = proc.communicate()\n print stdout_value\n print stderr_value\n\n if proc.poll() > 0:\n sys.stderr.write ( \"\\nError\\n\" )\n print '\\tstderr:', repr(stderr_value.rstrip())\n return False\n else:\n return True", "def 
patch_subprocess(self, monkeypatch):\n monkeypatch.setattr('subprocess.check_call', self.fake_check_call)", "def start(self, stdin=None, stdout=None, stderr=None):\n logging.debug(\"Starting '%s'\", \" \".join(self.cmd_line))\n self.proc = subprocess.Popen(self.cmd_line,\n stdin=stdin,\n stdout=stdout if stdout\n else subprocess.PIPE,\n stderr=stderr,\n env=self.env)\n self.thread = threading.Thread(target=self.tail)\n self.thread.daemon = True\n self.thread.start()\n self.running = True", "def test_subprocess_fork_pid0(self, mocker):\n mocker.stopall()\n\n test_command = [\"who\", \"-b\"]\n test_name = \"test_who\"\n test_fork = True\n pid = 0\n\n # mock\n mock_logging_debug = mocker.MagicMock(name=\"mock_logging_debug\")\n mock_os_fork = mocker.MagicMock(name=\"mock_os_fork\", return_value=pid)\n mock_sys_exit = mocker.MagicMock(name=\"mock_sys_exit\")\n mock_os_chdir = mocker.MagicMock(name=\"mock_os_chdir\")\n mock_os_setsid = mocker.MagicMock(name=\"mock_os_setsid\")\n mock_os_umask = mocker.MagicMock(name=\"mock_os_umask\")\n\n # patch\n mocker.patch.object(\n scarlett_os.subprocess.logging.Logger, \"debug\", mock_logging_debug\n )\n mocker.patch.object(scarlett_os.subprocess.os, \"fork\", mock_os_fork)\n mocker.patch.object(scarlett_os.subprocess.sys, \"exit\", mock_sys_exit)\n mocker.patch.object(scarlett_os.subprocess.os, \"chdir\", mock_os_chdir)\n mocker.patch.object(scarlett_os.subprocess.os, \"setsid\", mock_os_setsid)\n mocker.patch.object(scarlett_os.subprocess.os, \"umask\", mock_os_umask)\n\n scarlett_os.subprocess.Subprocess(test_command, name=test_name, fork=test_fork)\n\n assert mock_sys_exit.call_count == 0\n\n mocker.stopall()", "def LaunchAndWait(cmd):\n call(cmd)", "def test_install_should_call_subprocess_run(self, mock_subprocess):\n manifest = self.generate_mock_manifest(cfg={\n EXTCFG_SECTION.INSTALL: {\n EXTCFG_OPTION.EXEC_EXT_CMD: ['command'],\n }\n })\n ext_manager = PkgInstExtrasManager(manifest)\n ext_manager.handle_install_extras()\n mock_subprocess.assert_called_with(\n 'command',\n check=True,\n stderr=-1,\n stdout=-1,\n timeout=90,\n universal_newlines=True)", "def call_subshell(subshell):\n curses.def_prog_mode()\n #curses.endwin() # Probably causes a memory leak.\n\n rtn = os.system(\"%s\" % (subshell))\n curses.reset_prog_mode()\n if rtn is not 0:\n return False\n else:\n return True", "def shell_cmd(*args):\n proc = subprocess.run(args)\n returncode = proc.returncode\n if returncode != 0:\n raise RuntimeError(\n f\"Command {args} failed with return code {returncode}\")\n return proc", "def launch_shell(*, cwd: Optional[pathlib.Path] = None) -> None:\n with emit.pause():\n subprocess.run([\"bash\"], check=False, cwd=cwd)", "def spawn():\n if platform.system() == \"Windows\":\n # HACK https://github.com/prompt-toolkit/python-prompt-toolkit/issues/1243#issuecomment-706668723\n # FIXME Use pexpect or wexpect somehow to fix this\n pytest.xfail(\n \"pexpect fails on Windows\",\n )\n # Disable subprocess timeout if debugging (except coverage), for commodity\n # See https://stackoverflow.com/a/67065084/1468388\n tracer = getattr(sys, \"gettrace\", lambda: None)()\n if not isinstance(tracer, (CTracer, type(None))):\n return lambda cmd, timeout=None, *args, **kwargs: PopenSpawn(\n cmd, None, *args, **kwargs\n )\n # Using PopenSpawn, although probably it would be best to use pexpect.spawn\n # instead. 
However, it's working fine and it seems easier to fix in the\n # future to work on Windows (where, this way, spawning actually works; it's just\n # python-prompt-toolkit that rejects displaying a TUI)\n return PopenSpawn", "def start_process(self, args):\n try:\n with open(os.devnull, 'w') as devnull:\n popenObj = subprocess.Popen(\n args, stdout=devnull, stderr=subprocess.PIPE, cwd=\"/tmp/\")\n popenObj.name = args\n return popenObj\n except Exception as e:\n self.logger.error(\n \"Cannot start process %s due to reason:%s\", args, e)\n raise e", "def run(self, args=(), with_chroot=False, blocking=True, setsid=False,\r\n stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):\r\n import subprocess\r\n self.clean_environment(forking=True)\r\n\r\n cmdline = self.cmdline(args)\r\n TRACER.log('PEX.run invoking %s' % ' '.join(cmdline))\r\n process = subprocess.Popen(cmdline, cwd=self._pex if with_chroot else os.getcwd(),\r\n preexec_fn=os.setsid if setsid else None,\r\n stdin=stdin, stdout=stdout, stderr=stderr)\r\n return process.wait() if blocking else process", "def _run_subprocess(cmd: List[str], args: List[str], env: Optional[Dict[str, str]] = None):\n async def _read_output(stream, logger_instance):\n \"\"\"Read output from command and print it into the right logger.\"\"\"\n while True:\n line = await stream.readline()\n if line == b'':\n break\n logger_instance(line.decode('utf-8').rstrip())\n\n async def _stream_subprocess(cmd, args, env):\n \"\"\"Run subprocess.\"\"\"\n cmd_ = ' '.join(cmd)\n args_ = ' '.join(args)\n process = await asyncio.create_subprocess_shell(f'{cmd_} {args_}',\n stdout=asyncio.subprocess.PIPE,\n stderr=asyncio.subprocess.PIPE,\n env=env)\n\n await asyncio.wait([\n _read_output(process.stdout, logger.info),\n _read_output(process.stderr, logger.error)\n ])\n await process.wait()\n if process.returncode is None or process.returncode != 0:\n raise ValueError('Task failed!')\n\n loop = asyncio.get_event_loop()\n loop.run_until_complete(_stream_subprocess(cmd, args, env))", "def StartCmd(args, cwd=None, quiet=False, stdout=None, stderr=None, env=None):\n return cmd_util.StartCmd(args, cwd=cwd, quiet=quiet, stdout=stdout,\n stderr=stderr, env=env)", "def _run(self, args, cwd=None, env=None, logmode='wb'):\n args = tuple(str(arg) for arg in args)\n if self.process and self.process.poll() is None: \n raise RuntimeError(\"A process is already running\")\n if self.logfile:\n self.logfile.close()\n self.logfile = open(self.logfilename, logmode, buffering=0)\n if env is not None:\n env = dict(os.environ, **env, \n PYTHONPATH=os.pathsep.join(sys.path))\n \n self.process = subprocess.Popen(args, cwd=cwd, stdout=self.logfile,\n stderr=subprocess.STDOUT, env=env)", "def test_run_subprocess_cmd(self, mock_logging, mock_wait_for_proc):\n # Mock logging\n mock_wait_for_proc.return_value = (\"out\", \"err\")\n\n # Set up parameters\n args = [\"run\", \"unittest\"]\n proc = Mock(spec=Popen)\n\n # Test when return code is 0\n proc.returncode = 0\n run_subprocess_cmd(proc, args)\n expected_logs = [\"Executing bash command: run unittest\", \"out\", \"err\", \"Finished cmd successfully\"]\n self.assertListEqual([call(log) for log in expected_logs], mock_logging.call_args_list)\n\n # Test when return code is 1\n proc.returncode = 1\n with self.assertRaises(AirflowException):\n run_subprocess_cmd(proc, args)", "def start_process(options, args):\n import psutil\n import process_starter\n from synergy.system import process_helper\n\n try:\n pid = process_helper.get_process_pid(options.app)\n 
if pid is not None:\n if psutil.pid_exists(pid):\n message = 'ERROR: Process %r is already running with pid %r\\n' % (options.app, pid)\n sys.stderr.write(message)\n sys.exit(1)\n\n if not options.interactive:\n # this block triggers if the options.interactive is not defined or is False\n process_helper.start_process(options.app, args)\n else:\n process_starter.start_by_process_name(options.app, args)\n except Exception as e:\n sys.stderr.write('Exception on starting %s : %s \\n' % (options.app, str(e)))\n traceback.print_exc(file=sys.stderr)", "def _check_call(argv, **kwargs):\n logging.info('running %r', argv)\n subprocess.check_call(argv, **kwargs)", "def run(self):\n self.process.start()", "def spawn(self):\r\n options = self.config.options\r\n\r\n if self.pid:\r\n msg = 'process %r already running' % self.config.name\r\n options.logger.warn(msg)\r\n return\r\n\r\n self.killing = 0\r\n self.spawnerr = None\r\n self.exitstatus = None\r\n self.system_stop = 0\r\n self.administrative_stop = 0\r\n\r\n self.laststart = time.time()\r\n\r\n self._assertInState(ProcessStates.EXITED, ProcessStates.FATAL,\r\n ProcessStates.BACKOFF, ProcessStates.STOPPED)\r\n\r\n self.change_state(ProcessStates.STARTING)\r\n\r\n try:\r\n filename, argv = self.get_execv_args()\r\n except ProcessException as what:\r\n self.record_spawnerr(what.args[0])\r\n self._assertInState(ProcessStates.STARTING)\r\n self.change_state(ProcessStates.BACKOFF)\r\n return\r\n\r\n try:\r\n self.dispatchers, self.pipes = self.config.make_dispatchers(self)\r\n except OSError as why:\r\n code = why.args[0]\r\n if code == errno.EMFILE:\r\n # too many file descriptors open\r\n msg = 'too many open files to spawn %r' % self.config.name\r\n else:\r\n msg = 'unknown error: %s' % errno.errorcode.get(code, code)\r\n self.record_spawnerr(msg)\r\n self._assertInState(ProcessStates.STARTING)\r\n self.change_state(ProcessStates.BACKOFF)\r\n return\r\n\r\n try:\r\n pid = options.fork()\r\n except OSError as why:\r\n code = why.args[0]\r\n if code == errno.EAGAIN:\r\n # process table full\r\n msg = ('Too many processes in process table to spawn %r' %\r\n self.config.name)\r\n else:\r\n msg = 'unknown error: %s' % errno.errorcode.get(code, code)\r\n\r\n self.record_spawnerr(msg)\r\n self._assertInState(ProcessStates.STARTING)\r\n self.change_state(ProcessStates.BACKOFF)\r\n options.close_parent_pipes(self.pipes)\r\n options.close_child_pipes(self.pipes)\r\n return\r\n\r\n if pid != 0:\r\n return self._spawn_as_parent(pid)\r\n\r\n else:\r\n return self._spawn_as_child(filename, argv)", "def shell(cmd, check=True, stdin=None, stdout=None, stderr=None):\n return subprocess.run(cmd, shell=True, check=check, stdin=stdin, stdout=stdout, stderr=stderr)", "def supercall(command):\n p = subprocess.Popen(command, shell=True, stderr=subprocess.PIPE)\n retval = p.wait()\n \n if retval != 0:\n get_logger().critical('error calling {}'.format(command))\n for line in p.stderr.readlines():\n get_logger().critical(line.decode('utf8').replace('\\n', ''))\n\n return retval", "def start(self):\n if self.running:\n warnings.warn(\"ExifTool already running; doing nothing.\")\n return\n with open(os.devnull, \"w\") as devnull:\n procargs = [self.executable, \"-stay_open\", \"True\", \"-@\", \"-\",\n \"-common_args\", \"-G\", \"-n\"];\n procargs.extend(self.addedargs)\n logging.debug(procargs) \n self._process = subprocess.Popen(\n procargs,\n stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n stderr=devnull)\n self.running = True", "def check_executable(op):\n try:\n proc = 
subprocess.Popen([op], stdout = subprocess.PIPE, stderr = subprocess.PIPE)\n except OSError:\n return False\n try:\n if proc.poll():\n proc.kill()\n except OSError:\n return True\n return True", "def RunProcess(self, command, env=None, stdin=None, stdout=None, stderr=None):\n\n # merge specified env with OS env\n myenv = os.environ.copy()\n if env is not None:\n myenv.update(env)\n\n try:\n process = subprocess.Popen(command, stdin=stdin, stdout=stdout, stderr=stderr, env=myenv, bufsize=0)\n return process\n except:\n print(\"Unexpected error when launching process:\")\n print(\" \", command)\n print(\" \", env)\n raise", "def test_start_process(self, mocked_check):\n from supvisors.rpcinterface import RPCInterface\n # get patches\n mocked_start = self.supervisor.supvisors.starter.start_process\n mocked_progress = self.supervisor.supvisors.starter.in_progress\n # create RPC instance\n rpc = RPCInterface(self.supervisor)\n # patch the instance\n rpc._get_application_process = Mock()\n # test RPC call with unknown strategy\n with self.assertRaises(RPCError) as exc:\n rpc.start_process('strategy', 'appli:proc')\n self.assertEqual(Faults.BAD_STRATEGY, exc.exception.code)\n self.assertEqual('BAD_STRATEGY: strategy', exc.exception.text)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual(0, mocked_start.call_count)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n # test RPC call with running process\n rpc._get_application_process.return_value = (\n None, Mock(**{'running.return_value': True,\n 'namespec.return_value': 'proc1'}))\n with self.assertRaises(RPCError) as exc:\n rpc.start_process(0, 'appli_1')\n self.assertEqual(Faults.ALREADY_STARTED, exc.exception.code)\n self.assertEqual('ALREADY_STARTED: proc1', exc.exception.text)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual(0, mocked_start.call_count)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n # test RPC call with running processes\n rpc._get_application_process.return_value = (\n Mock(**{'processes.values.return_value': [\n Mock(**{'running.return_value': False}),\n Mock(**{'running.return_value': True,\n 'namespec.return_value': 'proc2'})]}), None)\n with self.assertRaises(RPCError) as exc:\n rpc.start_process(0, 'appli_1')\n self.assertEqual(Faults.ALREADY_STARTED, exc.exception.code)\n self.assertEqual('ALREADY_STARTED: proc2', exc.exception.text)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual(0, mocked_start.call_count)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n # test RPC call with stopped processes\n proc_1 = Mock(**{'running.return_value': False,\n 'stopped.return_value': True,\n 'namespec.return_value': 'proc1'})\n proc_2 = Mock(**{'running.return_value': False,\n 'stopped.return_value': False,\n 'namespec.return_value': 'proc2'})\n rpc._get_application_process.return_value = (\n Mock(**{'processes.values.return_value': [\n proc_1, proc_2]}), None)\n # test RPC call with no wait and not done\n mocked_start.return_value = False\n result = rpc.start_process(1, 'appli:*', 'argument list', False)\n self.assertTrue(result)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual([call(1, proc_1, 'argument list'),\n call(1, proc_2, 'argument list')], mocked_start.call_args_list)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n mocked_start.reset_mock()\n # test RPC call no wait and done\n 
mocked_start.return_value = True\n result = rpc.start_process(1, 'appli:*', 'argument list', False)\n self.assertTrue(result)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual([call(1, proc_1, 'argument list'),\n call(1, proc_2, 'argument list')], mocked_start.call_args_list)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n mocked_start.reset_mock()\n # test RPC call with wait and done\n result = rpc.start_process(2, 'appli:*', wait=True)\n self.assertTrue(result)\n self.assertEqual([call(2, proc_1, ''), call(2, proc_2, '')],\n mocked_start.call_args_list)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n mocked_start.reset_mock()\n # test RPC call with wait and not done\n mocked_start.return_value = False\n deferred = rpc.start_process(2, 'appli:*', wait=True)\n # result is a function for deferred result\n self.assertTrue(callable(deferred))\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual([call(2, proc_1, ''), call(2, proc_2, '')],\n mocked_start.call_args_list)\n self.assertEqual(0, mocked_progress.call_count)\n # test returned function: return True when job in progress\n mocked_progress.return_value = True\n self.assertEqual(NOT_DONE_YET, deferred())\n self.assertEqual([call()], mocked_progress.call_args_list)\n mocked_progress.reset_mock()\n # test returned function: raise exception if job not in progress anymore\n # and process still stopped\n mocked_progress.return_value = False\n with self.assertRaises(RPCError) as exc:\n deferred()\n self.assertEqual(Faults.ABNORMAL_TERMINATION, exc.exception.code)\n self.assertEqual('ABNORMAL_TERMINATION: proc1', exc.exception.text)\n self.assertEqual([call()], mocked_progress.call_args_list)\n mocked_progress.reset_mock()\n # test returned function: return True if job not in progress anymore\n # and process running\n proc_1.stopped.return_value = False\n self.assertTrue(deferred())\n self.assertEqual([call()], mocked_progress.call_args_list)", "def start_command(self, wait_for_config=True):\n self.process = subprocess.Popen(shlex.split(self.command))\n LOGGER.info(\n 'Command (%s) started with pid %s', self.command, self.process.pid)", "def startProcess(self, args, workingDir=None, showArgs=True,\n environment=None):\n self.errorGroup.hide()\n self.normal = False\n \n self.__hasAddOrDelete = False\n if (\n args[0] in [\"fetch\", \"qpush\", \"qpop\", \"qgoto\", \"rebase\",\n \"update\", \"import\", \"revert\", \"graft\", \"shelve\",\n \"unshelve\", \"strip\", \"histedit\"] or\n (args[0] in [\"pull\", \"unbundle\"] and\n (\"--update\" in args[1:] or \"--rebase\" in args[1:]))\n ):\n self.__updateCommand = True\n else:\n self.__updateCommand = False\n \n if showArgs:\n self.resultbox.append(' '.join(args))\n self.resultbox.append('')\n \n out, err = self.vcs.getClient().runcommand(\n args, output=self.__showOutput, error=self.__showError)\n \n if err:\n self.__showError(err)\n if out:\n self.__showOutput(out)\n \n self.normal = True\n \n self.__finish()\n \n return True", "async def test_subprocess_forbid(event_loop):\n proc = await asyncio.subprocess.create_subprocess_exec(\n sys.executable, '--version', stdout=asyncio.subprocess.PIPE,\n loop=event_loop)\n await proc.communicate()", "def subproc():\n subprocess.check_call('FaroScanServer.exe', shell=True,\n cwd='C:\\\\Users\\\\yuhhe\\\\source\\\\repos\\\\FaroScanServer\\\\FaroScanServer\\\\bin\\\\x64\\\\Release',\n close_fds=True)", "def run_subprocess(command, environment=None, 
shell=False, raise_on_error=True):\n proc = subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=shell,\n env=environment)\n stdout, stderr = proc.communicate()\n if proc.returncode != 0:\n if raise_on_error:\n raise RuntimeError('{}\\n{}'.format(stderr, stdout))\n return stdout, stderr, proc.returncode", "def _subexec(command):\n lcwd = fabric.state.env.get('lcwd', None) or None #sets lcwd to None if it bools to false as well\n process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=lcwd)\n out, err = process.communicate()\n print \"command : %s \" % command\n print \"out: %s\" % out\n print \"err: %s\" % err", "def Popen(self, *unargs, **kwargs):\r\n cmdline = None\r\n if 'args' in kwargs:\r\n cmdline = kwargs['args']\r\n else:\r\n cmdline = unargs[0]\r\n return PopenWrapper.WaitWrapper(subprocess_.Popen(*unargs, **kwargs), self, cmdline)", "def run(cmd):\n print ' '.join(cmd)\n try:\n check_call(cmd)\n except CalledProcessError as cpe:\n print \"Error: return code: \" + str(cpe.returncode)\n sys.exit(cpe.returncode)", "def __launch__(self,config,command=None,**kwargs):\n if command is None:\n command = ['sleep 30;','qsub']\n return SampleQsubProcess.__launch__(self,config,command=command,**kwargs)", "def start_driver_ctrl(self):\n try:\n args = [\n DRV_CTRL_PROCESS,\n \"-u\",\n self.user,\n \"start\"\n ]\n result = subprocess.run(args, check=True) #pylint: disable=unused-variable\n return True\n except subprocess.CalledProcessError:\n return False", "def runin(cmd, stdin):\n result = subprocess.Popen(cmd,stdin=subprocess.PIPE)\n result.wait()\n return result.returncode", "def __init__(self, senna_path, executable):\n self.senna_path = senna_path\n self.p = sp.Popen(['blabla', '-path', senna_path],\n executable=os.path.join(senna_path, executable),\n stdin=sp.PIPE,\n stdout=sp.PIPE)", "def _run_shell(self, command_string: str, cwd: str = '/', print_command: bool = False) -> subprocess.Popen:\n if print_command:\n self.logger.info(command_string)\n return subprocess.Popen(command_string, shell=True, cwd=cwd)", "def start_shell(self):\n cmd = 'shell'\n end_strs = ['>']\n self.run_with_output(cmd, end_strs)\n return True", "def test_example(self, _, cmd):\n out = subprocess.run(cmd, shell=True)\n self.assertFalse(out.returncode)", "def LaunchFile(*params):\n\n file = subprocess.Popen(params)\n file.communicate()\n return file.returncode", "def run(args, verbose=False, exception=False, **kwargs):\n if verbose:\n print(\"running: \" + ' '.join(args))\n sys.stdout.flush()\n # Use Popen here instead of call() as it apparently allows powershell on\n # Windows to not lock up waiting for input presumably.\n ret = subprocess.Popen(args, **kwargs)\n code = ret.wait()\n if code != 0:\n err = \"failed to run: \" + ' '.join(args)\n if verbose or exception:\n raise RuntimeError(err)\n sys.exit(err)", "def _staf_start_proc(self,\n command,\n working_dir,\n wait,\n params=[],\n env_vars={},\n location='local'):\n\n staf_request = ('START SHELL COMMAND \"{0}\" WORKDIR \"{1}\" WAIT '\n '{2}s STDERRTOSTDOUT RETURNSTDOUT'.format(unix_style_path(command),\n unix_style_path(working_dir),\n str(wait)))\n if len(params) != 0:\n staf_request += ' PARMS {0}'.format(\" \".join(params))\n\n if len(env_vars) != 0:\n for key in env_vars:\n staf_request += ' ENV {0}={1}'.format(key, env_vars[key])\n\n result = self._staf_handle.submit(location, 'process', staf_request)\n\n if result.rc != result.Ok:\n raise CoreError(result.result)\n\n #Return 
the exit code from the executed command and STDOUT.\n return (int(result.resultObj['rc']), result.resultObj['fileList'][0]['data'])", "def start(self):\n if self.process:\n return True\n if self.host:\n cmd = \"ssh -Y root@%s\" % self.host\n else:\n cmd = \"su -c\"\n cmd += \" 'echo _GO_ && larchin-0call'\"\n # Run the command as root with pexpect.\n # Return True if it succeeded, else False.\n p = pexpect.spawn(cmd, timeout=None)\n e = p.expect([\"_GO_.*\\n\", pexpect.TIMEOUT, \"Password:\"], 5)\n while e != 0:\n if e == 2:\n ok, pw = ui.textLineDialog(_(\"Please enter root password\"),\n \"larchin: root pw\", pw=True)\n if not ok:\n run_error( _(\"No root password, cancelling run\"))\n return False\n\n p.sendline(pw.strip())\n e = p.expect([\"_GO_.*\\n\", pexpect.TIMEOUT, pexpect.EOF], 5)\n else:\n run_error(_(\"Couldn't start larchin-0call\"))\n return False\n self.process = p\n p.setecho(False)\n\n # Start a thread to read input from 0call.\n command.simple_thread(self.read0call)\n # Perform initialization of the installation system\n ok, textlines = self.xlist(\"init\")\n if not ok:\n run_error(_(\"Couldn't initialize installation system:\\n\\n%s\")\n % \"\\n\".join(textlines))\n return ok", "def _launch(self):\n annotators = ['tokenize', 'ssplit']\n if 'ner' in self.annotators:\n annotators.extend(['pos', 'lemma', 'ner'])\n elif 'lemma' in self.annotators:\n annotators.extend(['pos', 'lemma'])\n elif 'pos' in self.annotators:\n annotators.extend(['pos'])\n annotators = ','.join(annotators)\n options = ','.join(['untokenizable=noneDelete',\n 'invertible=true'])\n # if you work on English, use this this command\n cmd = ['java', '-mx' + self.mem, '-cp', '\"%s\"' % self.classpath,\n 'edu.stanford.nlp.pipeline.StanfordCoreNLP', '-annotators',\n annotators, '-tokenize.options', options,\n '-outputFormat', 'json', '-prettyPrint', 'false']\n \n # if you work on arabic, use this this command\n \n # cmd = ['java', '-mx' + self.mem, '-cp', '\"%s\"' % self.classpath,\n # # 'edu.stanford.nlp.pipeline.StanfordCoreNLP','-annotators',\n # 'edu.stanford.nlp.pipeline.StanfordCoreNLP', '-props', 'StanfordCoreNLP-arabic.properties','-annotators',\n # annotators, '-tokenize.options', options, #'-tokenize.whitespace', 'true',\n # '-outputFormat', 'json', '-prettyPrint', 'false']\n print(' '.join(cmd))\n\n # We use pexpect to keep the subprocess alive and feed it commands.\n # Because we don't want to get hit by the max terminal buffer size,\n # we turn off canonical input processing to have unlimited bytes.\n self.corenlp = pexpect.spawn('/bin/bash', maxread=100000, timeout=60)\n self.corenlp.setecho(False)\n self.corenlp.sendline('stty -icanon')\n self.corenlp.sendline(' '.join(cmd))\n self.corenlp.delaybeforesend = 0\n self.corenlp.delayafterread = 0\n self.corenlp.expect_exact('NLP>', searchwindowsize=100)" ]
[ "0.7211386", "0.6982913", "0.6917975", "0.6912684", "0.6888676", "0.6845422", "0.65411717", "0.65263665", "0.648128", "0.6467744", "0.6461761", "0.64219886", "0.6418295", "0.64087766", "0.63941157", "0.6381634", "0.63771", "0.6374373", "0.63405687", "0.633077", "0.6330121", "0.6313436", "0.6307213", "0.62764114", "0.62715524", "0.62079394", "0.6202999", "0.61737186", "0.6172968", "0.6144104", "0.6097086", "0.60902745", "0.6075148", "0.60734123", "0.6066617", "0.6054903", "0.60527396", "0.60512024", "0.6035233", "0.60327774", "0.603263", "0.5987704", "0.5978134", "0.596659", "0.5962399", "0.59425056", "0.593343", "0.59263927", "0.59202987", "0.58995014", "0.58894295", "0.5878075", "0.5867449", "0.5850583", "0.5847995", "0.5846843", "0.58363456", "0.5836032", "0.5835018", "0.58278537", "0.58278453", "0.58219206", "0.58206254", "0.5817606", "0.5816305", "0.58055234", "0.5793746", "0.5787712", "0.5785018", "0.578337", "0.57832456", "0.57808185", "0.5779516", "0.5775788", "0.5766704", "0.57630837", "0.57420415", "0.5729717", "0.57245624", "0.57236373", "0.5719256", "0.5715863", "0.57060796", "0.5701466", "0.57005376", "0.5688233", "0.568293", "0.56812173", "0.5679481", "0.5668645", "0.5666666", "0.566471", "0.5663862", "0.5658018", "0.56573904", "0.56564164", "0.5644945", "0.5642663", "0.5630315", "0.56192" ]
0.57605875
76
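The record above and the one that follows pair the same docstring, "Starting a subprocess should be possible.", with pytest-asyncio tests that spawn a child process from inside an event loop. As an illustrative sketch only (not part of the dataset): the same pattern on a modern Python (3.8+) drops the deprecated loop= argument seen in these historical tests and uses the top-level asyncio.create_subprocess_exec instead, roughly:

import asyncio
import sys

async def main():
    # Spawn the current interpreter and capture its version banner.
    proc = await asyncio.create_subprocess_exec(
        sys.executable, "--version",
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await proc.communicate()
    # Recent Pythons print the banner to stdout; very old ones used stderr.
    print((stdout or stderr).decode().strip(), "exit code:", proc.returncode)

asyncio.run(main())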
Starting a subprocess should be possible.
async def test_subprocess_forbid(event_loop): proc = await asyncio.subprocess.create_subprocess_exec( sys.executable, '--version', stdout=asyncio.subprocess.PIPE, loop=event_loop) await proc.communicate()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Start(self):\n\n\n\n assert not self._process, 'Start() can only be called once'\n self._process = subprocess.Popen(self._args)", "def run_subprocess(cmd):\n subprocess.Popen(cmd, stdin =subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdout=subprocess.PIPE,\n shell=True,)", "def start(self):\r\n return self.start_subprocess()", "def start(self):\n self._proc = self._get_subprocess()\n self._pid = self._proc.pid\n self._return_code = None", "def spawn(self):\n self._proc = subprocess.Popen(\n self._args, stdout=subprocess.PIPE, stderr=subprocess.PIPE\n )", "def run(cmd):\n print('running', cmd)\n proc = sp.Popen([cmd], shell=True)\n proc.wait()\n assert proc.poll() == 0", "def open_subprocess(self, args_, subprocess_key=None):\n\n if subprocess_key in self.subprocess and self.subprocess[subprocess_key].poll is not None:\n # TODO better error class\n\n raise AssertionError(\"process '%s'(pid:%s) already exist and still running\" % (\n subprocess_key, self.subprocess[subprocess_key].pid))\n\n child_process = subprocess.Popen(args_)\n if subprocess_key is None:\n subprocess_key = str(child_process.pid)\n self.subprocess[subprocess_key] = child_process\n str_args = \" \".join(map(str, args_))\n self.log(\"open subprocess pid:%s, cmd='%s'\" % (child_process.pid, str_args))\n\n return child_process.pid", "def start_process(cmd, supress_output=False):\n logging.debug(cmd)\n logging.error(\"[tony]cmd:%r\" % (cmd))\n proc = subprocess.Popen(cmd, stdout=None, stderr=subprocess.PIPE)\n out, err = proc.communicate()\n rtn_code = proc.returncode\n\n if supress_output is False:\n if out:\n logging.info(out)\n if err:\n logging.error(err)\n\n if rtn_code == 0 or rtn_code is None:\n logging.info('Success: Process return code %s', str(rtn_code))\n else:\n logging.error('Error: Process return code %s', str(rtn_code))\n sys.exit(1)", "def start(self):\n if self.subcommand:\n os.execv(self.subcommand, [self.subcommand] + self.argv[1:])\n raise NoStart()\n \n if self.subapp:\n self.subapp.start()\n raise NoStart()\n \n if self.generate_config:\n self.write_default_config()\n raise NoStart()", "def run_subprocess(args, work_dir):\n process = subprocess.Popen(args, cwd=work_dir)\n process.communicate()\n assert process.returncode == 0", "def start():\n global running\n # os.system('python3 /Users/bowenwaugh/Documents/GA/GA_Puzzles/simple.py')\n global process\n process = Popen(['python3', '/Users/bowenwaugh/Documents/GA/GA_Puzzles/simple.py'])\n running = True", "def start_process():\n global command, process\n\n def on_data(data):\n data = data.decode().strip()\n print('{}'.format(data))\n\n cmd = command.split(' ')\n\n if process:\n process.terminate()\n\n process = MySubprocess(cmd, -1, functools.partial(on_data), None, None)", "def subprocess_Popen(command, **params):\r\n startupinfo = None\r\n if os.name == 'nt':\r\n startupinfo = subprocess.STARTUPINFO()\r\n try:\r\n startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW\r\n except AttributeError:\r\n startupinfo.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW\r\n\r\n # Anaconda for Windows does not always provide .exe files\r\n # in the PATH, they also have .bat files that call the corresponding\r\n # executable. 
For instance, \"g++.bat\" is in the PATH, not \"g++.exe\"\r\n # Unless \"shell=True\", \"g++.bat\" is not executed when trying to\r\n # execute \"g++\" without extensions.\r\n # (Executing \"g++.bat\" explicitly would also work.)\r\n params['shell'] = True\r\n\r\n # Using the dummy file descriptors below is a workaround for a\r\n # crash experienced in an unusual Python 2.4.4 Windows environment\r\n # with the default None values.\r\n stdin = None\r\n if \"stdin\" not in params:\r\n stdin = open(os.devnull)\r\n params['stdin'] = stdin.fileno()\r\n\r\n try:\r\n proc = subprocess.Popen(command, startupinfo=startupinfo, **params)\r\n finally:\r\n if stdin is not None:\r\n del stdin\r\n return proc", "def spawn_subprocess(args, loop=None):\n if not _IS_XOS_ASYNC:\n return spawn_subprocess_not_xos(args, loop=loop)\n else:\n return spawn_subprocess_xos(args, loop=loop)", "def _start_runner(self, spec):\n pid = os.fork()\n if pid:\n # Parent.\n return pid\n # Child.\n #\n # Set the environment variable which tells the runner that it's\n # running under bin/master control. This subtly changes the error\n # behavior of bin/runner.\n env = {'MAILMAN_UNDER_MASTER_CONTROL': '1'}\n # Craft the command line arguments for the exec() call.\n rswitch = '--runner=' + spec\n # Wherever master lives, so too must live the runner script.\n exe = os.path.join(config.BIN_DIR, 'runner')\n # config.PYTHON, which is the absolute path to the Python interpreter,\n # must be given as argv[0] due to Python's library search algorithm.\n args = [sys.executable, sys.executable, exe, rswitch]\n # Always pass the explicit path to the configuration file to the\n # sub-runners. This avoids any debate about which cfg file is used.\n config_file = (config.filename if self._config_file is None\n else self._config_file)\n args.extend(['-C', config_file])\n log = logging.getLogger('mailman.runner')\n log.debug('starting: %s', args)\n # We must pass this environment variable through if it's set,\n # otherwise runner processes will not have the correct VAR_DIR.\n var_dir = os.environ.get('MAILMAN_VAR_DIR')\n if var_dir is not None:\n env['MAILMAN_VAR_DIR'] = var_dir\n # For the testing framework, if these environment variables are set,\n # pass them on to the subprocess.\n for envvar in PRESERVE_ENVS:\n if envvar in os.environ:\n env[envvar] = os.environ[envvar]\n args.append(env)\n os.execle(*args)\n # We should never get here.\n raise RuntimeError('os.execle() failed')", "def run_subprocess(self, *cmd_and_args):\n\n command_line = \" \".join(cmd_and_args)\n self.logger.debug(\"Running: %s\", command_line)\n\n return subprocess.Popen(command_line, shell=True, close_fds=True)", "def start_subprocess(self):\r\n errmsg = ('\\n\\nPlease install GNU Octave and put it in your path\\n')\r\n ON_POSIX = 'posix' in sys.builtin_module_names\r\n if self.use_pty:\r\n master, slave = pty.openpty()\r\n self.wfid, self.rfid = master, master\r\n rpipe, wpipe = slave, slave\r\n else:\r\n self.rfid, wpipe = os.pipe()\r\n rpipe, self.wfid = os.pipe()\r\n kwargs = dict(close_fds=ON_POSIX, bufsize=0, stdin=rpipe,\r\n stderr=wpipe, stdout=wpipe)\r\n if os.name == 'nt':\r\n startupinfo = subprocess.STARTUPINFO()\r\n startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW\r\n kwargs['startupinfo'] = startupinfo\r\n try:\r\n proc = subprocess.Popen(['octave', '-q', '--braindead'],\r\n **kwargs)\r\n except OSError: # pragma: no cover\r\n raise Oct2PyError(errmsg)\r\n else:\r\n self.reader = _Reader(self.rfid, self.read_queue)\r\n return proc", "def 
run_starter(self, expect_to_fail=False):\n logging.info(\"running starter \" + self.name)\n args = [self.cfg.bin_dir / \"arangodb\"] + self.hotbackup_args + self.default_starter_args + self.arguments\n\n lh.log_cmd(args)\n self.instance = psutil.Popen(args)\n logging.info(\"my starter has PID:\" + str(self.instance.pid))\n if not expect_to_fail:\n self.wait_for_logfile()\n self.wait_for_port_bind()", "def start_comp(command_line, log='', env='', foreground='no', no_stdin = 'yes'):\n proc_title_argv = command_line.split()\n\n if proc_title_argv[0] == 'taskset':\n real_program = proc_title_argv[3]\n else:\n real_program = proc_title_argv[0]\n\n # first test shared library link\n try:\n can_find_all_shared_libs(real_program)\n except IOError, e:\n print e;\n raise\n\n my_stdout = None\n my_stderr = None\n my_stdin = None\n if (no_stdin == 'yes'):\n my_stdin = open('/dev/null', 'r')\n\n if log:\n dir = os.path.dirname(log)\n if dir:\n try:\n exist_ok_makedirs(dir, 0777)\n except OSError, (errno, strerror):\n sys.stderr.write('%s: %s\\n' % (dir, strerror))\n raise\n try:\n log_fd = open(log, \"w\")\n except IOError, (errno, strerror):\n print 'cannot open %s: %s' % (log, strerror)\n raise\n else:\n my_stdout = log_fd\n my_stderr = subprocess.STDOUT\n\n #command = [ path ]\n #if options:\n # command += options\n\n my_env = {}\n if env != '':\n env_list = env.split('\\t')\n env_val = '%s:%s' % (env_list[1], env_list[2])\n my_env[env_list[0]] = env_val\n\n try:\n p = subprocess.Popen(proc_title_argv, shell = False,\n # stdin = subprocess.PIPE,\\\n #stdin = None,\n stdin = my_stdin,\n stdout = my_stdout,\n stderr = my_stderr,#)\n env = my_env)\n except OSError, (errno, strerror):\n #sys.exit('cannot execute %s: %s' % (path, strerror))\n print 'cannot execute %s: %s' % (real_program, strerror)\n raise\n except ValueError, strerror:\n #sys.exit('subprocess.Popen value error: %s' %strerror)\n print 'subprocess.Popen value error: %s' % (strerror)\n raise\n\n #proc_name = os.path.basename(path)\n if proc_title_argv[0] == 'taskset':\n try:\n proc_name = os.path.basename(proc_title_argv[3])\n except IndexError,e:\n print \"path: \", path\n sys.exit(e)\n else:\n proc_name = os.path.basename(proc_title_argv[0])\n\n max_retry = 20\n retry = 0\n while True:\n if retry == max_retry:\n sys.exit('cannot exec. 
%s' % proc_name)\n\n #if kill_proc_exact.lookup_process_exact(proc):\n if get_pids_exact(proc_name):\n break;\n else:\n time.sleep(0.1)\n retry += 1\n\n if foreground == 'yes':\n try:\n p.wait()\n except KeyboardInterrupt, strerror:\n pass", "def _subprocess(cmd):\n\n log.debug('Running: \"%s\"', \" \".join(cmd))\n try:\n proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)\n ret = salt.utils.stringutils.to_unicode(proc.communicate()[0]).strip()\n retcode = proc.wait()\n\n if ret:\n return ret\n elif retcode != 1:\n return True\n else:\n return False\n except OSError as err:\n log.error(err)\n return False", "def StartCmd(args, cwd=None, shell=False, env=None):\n _ValidateAndLogCommand(args, cwd, shell)\n return Popen(\n args,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=shell,\n cwd=cwd,\n env=env)", "def start(self):\n if self._is_launched.is_set():\n self._log(\"warning\", \"try to start an already started process\")\n return False\n\n self._popen = Popen(shlex.split(self.command), bufsize=0, executable=None, stdin=PIPE, stdout=PIPE,\n stderr=self.stderr, close_fds=False, shell=False, cwd=None, env=None,\n universal_newlines=True, startupinfo=None, creationflags=0,\n preexec_fn=lambda: os.nice(self._priority))\n\n self._defunctdog_thread.start()\n self._stdin_thread.start()\n self._stdout_thread.start()\n register_thread(self)\n self._is_launched.set()\n self._is_running.set()", "def start_bot(self):\n self.proc = subprocess.Popen(\"./start\", stdin=subprocess.PIPE,\n\t\t\t\t\t\t\t\t\t stdout=subprocess.PIPE,\n\t\t\t\t\t\t\t\t\t cwd=os.path.abspath(self.path))", "def run(cmd: str) -> None:\n subprocess.run(cmd, shell=True, check=True)", "def _run_command(args):\n subprocess.run(args, check=True)", "def subprocess_run(self, *args):\n return self.testdir.runpytest_subprocess(*args)", "def test_ducts_with_subprocess(self):\n assert_that(SUBPROCESS_TEST_SCRIPT).exists()\n proc = None\n parent = None\n try:\n parent = MessageDuctParent.psuedo_anonymous_parent_duct()\n parent.bind()\n proc = subprocess.Popen(\n [sys.executable, SUBPROCESS_TEST_SCRIPT, parent.listener_address], env={'PYTHONPATH': ROOT_DIR}\n )\n assert_that(parent.listen()).is_true()\n for _ in range(100):\n parent.send(\"pingpong\")\n parent.poll(1)\n assert_that(parent.recv()).is_equal_to(\"pingpong\")\n parent.send(None)\n time.sleep(1)\n finally:\n if parent:\n parent.close()\n if proc:\n proc.terminate()", "def call_subprocess_Popen(command, **params):\r\n if 'stdout' in params or 'stderr' in params:\r\n raise TypeError(\"don't use stderr or stdout with call_subprocess_Popen\")\r\n null = open(os.devnull, 'wb')\r\n # stdin to devnull is a workaround for a crash in a weird Windows\r\n # environement where sys.stdin was None\r\n params.setdefault('stdin', null)\r\n params['stdout'] = null\r\n params['stderr'] = null\r\n p = subprocess_Popen(command, **params)\r\n p.wait()\r\n return p.returncode", "def spawn():\n if platform.system() == \"Windows\":\n # HACK https://github.com/prompt-toolkit/python-prompt-toolkit/issues/1243#issuecomment-706668723\n # FIXME Use pexpect or wexpect somehow to fix this\n pytest.xfail(\n \"pexpect fails on Windows\",\n )\n # Using PopenSpawn, although probably it would be best to use pexpect.spawn\n # instead. 
However, it's working fine and it seems easier to fix in the\n # future to work on Windows (where, this way, spawning actually works; it's just\n # python-prompt-toolkit that rejects displaying a TUI)\n return PopenSpawn", "def fork(self):\n try:\n pid = os.fork()\n if pid > 0:\n sys.exit(0)\n except OSError as e:\n sys.stderr.write(\"Fork failed: %d (%s)\\n\" % (e.errno, e.strerror))\n sys.exit(1)", "async def _run_subprocess(\n cmd: str,\n allow_params: bool,\n params: Dict[str, ParamValueT],\n) -> Dict[str, Any]:\n cmd_str = cmd\n if allow_params:\n if params[\"shell_params\"] == []:\n cmd_str = cmd.format([''])\n else:\n cmd_str = cmd.format(*params.get('shell_params', ['']))\n\n logging.info(\"Running command: %s\", cmd_str)\n\n cmd_list = shlex.split(cmd_str)\n\n process = await asyncio.create_subprocess_exec(\n *cmd_list,\n stdout=asyncio.subprocess.PIPE,\n stderr=asyncio.subprocess.PIPE,\n )\n\n stdout, stderr = await process.communicate()\n\n return {\n \"returncode\": process.returncode,\n \"stdout\": stdout.decode(),\n \"stderr\": stderr.decode(),\n }", "def call(*args, **kwargs):\n return Popen(*args, **kwargs).wait()", "def test_application_start():\n\n process = subprocess.Popen(['python', 'runserver.py'],\n stderr=subprocess.STDOUT,\n stdout=subprocess.PIPE)\n\n assert process.pid\n debug_logging = process.stdout.read(100)\n process.kill()\n assert 'Starting application' in debug_logging", "def start_program(program, program_name, VD):\n \n if not is_running(program):\n VD['terminal_output'] += f\"> Starting {program_name}.\\n\"\n subprocess.Popen([program])\n #subprocess.call([\"xdotool\", \"search\", \"--name\", \"spotify\", \"\")\n else:\n VD['terminal_output'] += f\"> {program_name} is already running.\\n\"", "def subprocess_nowait(cmd, shell=False, cwd=None, env=None):\n # type: (str, bool, str, dict) -> subprocess.Process\n return subprocess.Popen(cmd, shell=shell, cwd=cwd, env=env)", "def Run(self) -> None:\n logging.info(\"Running %s in a subprocess...\", self)\n self.stdout = tempfile.TemporaryFile()\n self.stderr = tempfile.TemporaryFile()\n self.begin_time = time.time()\n env = os.environ.copy()\n # Give each test program a separate test_tmpdir so they don't overwrite\n # each other when running in parallel.\n env[\"TEST_TMPDIR\"] = tempfile.mkdtemp()\n # Bazel's test sharding protocol:\n # https://docs.bazel.build/versions/master/test-encyclopedia.html\n if self.total_shards > 1:\n env[\"TEST_TOTAL_SHARDS\"] = str(self.total_shards)\n env[\"TEST_SHARD_INDEX\"] = str(self.shard_id)\n\n self.subprocess = subprocess.Popen(\n [_GetPython(), self.path], stdout=self.stdout, stderr=self.stderr,\n env=env)", "def run_setup(command):\n print 'Executing command:', command\n print '=========='\n cmd = command.split()\n p1 = subprocess.Popen([cmd[0], cmd[1], cmd[2]])\n p1.wait()\n p2 = subprocess.Popen(cmd[4], stdout=subprocess.PIPE)\n p2.wait()\n p3 = subprocess.Popen(\n cmd[6], stdin=p2.stdout, stdout=subprocess.PIPE, bufsize=1,\n preexec_fn=os.setsid\n )\n return p3", "def test_subprocess_fails_with_no_command(self):\n with self.assertRaises(ValueError):\n LazySubprocessTester([])", "def _spawn(self, protocol, args, env=None):\n return reactor.spawnProcess(protocol, self.cmd, args, env=env)", "def start_server_proc(event, server_cmd, checking_env):\n proc = subprocess.Popen(server_cmd, env=checking_env)\n\n # Blocking termination until event is set.\n event.wait()\n\n # If proc is still running, stop it.\n if proc.poll() is None:\n proc.terminate()", "def 
test_wait_true_shortproc(self):\n child = PtyProcess.spawn(['true'])\n # Wait so we're reasonable sure /bin/true has terminated\n time.sleep(0.2)\n self.assertEqual(child.wait(), 0)", "def Spawn(proc):\n proc.start()\n return proc", "def test_startProcessAlreadyStarted(self):\r\n self.pm.addProcess(\"foo\", [\"foo\"])\r\n self.pm.startProcess(\"foo\")\r\n self.assertIdentical(None, self.pm.startProcess(\"foo\"))", "def start(self):\n cmd = self.doCommand(self.args)\n if cmd is not None:\n cmd.join()\n else:\n self.out = self.error", "def run(self, args=(), with_chroot=False, blocking=True, setsid=False, **kw):\n self.clean_environment()\n\n cmdline = self.cmdline(args)\n TRACER.log('PEX.run invoking %s' % ' '.join(cmdline))\n process = subprocess.Popen(\n cmdline,\n cwd=self._pex if with_chroot else os.getcwd(),\n preexec_fn=os.setsid if setsid else None,\n **kw)\n return process.wait() if blocking else process", "def subprocess_setup():\n import signal\n signal.signal(signal.SIGPIPE, signal.SIG_DFL)", "def run(*args, **kwargs):\n kwargs[\"check\"] = True\n print(\"+\", \" \".join(args[0]))\n return subprocess.run(*args, **kwargs)", "def sanity_check_process(self):\n assert_equals(self.proc.returncode, None)\n time.sleep(1)", "def spawn(self, classpath, main, jvm_options=None, args=None, **subprocess_args):\r\n cmd = self._create_command(*self._scrub_args(classpath, main, jvm_options, args))\r\n return self._spawn(cmd, **subprocess_args)", "def shell(cmd):\n print('Running \"{}\"...'.format(cmd))\n subprocess.check_call(cmd, shell=True)", "def reallyStartProcess(self, name):\n if name in self.protocols:\n return\n p = self.protocols[name] = DelayedStartupLoggingProtocol()\n p.service = self\n p.name = name\n procObj, env, uid, gid = self.processes[name]\n self.timeStarted[name] = time.time()\n\n childFDs = {0: \"w\", 1: \"r\", 2: \"r\"}\n\n childFDs.update(procObj.getFileDescriptors())\n\n procObj.starting()\n\n args = procObj.getCommandLine()\n\n self._reactor.spawnProcess(\n p, args[0], args, uid=uid, gid=gid, env=env,\n childFDs=childFDs\n )", "def spawn(self, arguments=None, environment=None):\n return subprocess.Popen(\n args=[self.executable] + ([] or arguments),\n # do not redirect std streams\n # this fakes the impression of having just one program running\n stdin=None,\n stdout=None,\n stderr=None,\n env=environment,\n )", "def callSubprocess(args, test=False):\n print(Fore.MAGENTA),\n for arg in args: \n print arg,\n print(Fore.WHITE)\n if not test: \n subprocess.call(args)", "def make_subprocess(cmdline, stdout=False, stderr=False, stdin=False,\n universal_newlines=False, close_fds=True, env=None):\n LOG.info(\"Running cmd '%s'\" % \" \".join(cmdline))\n kwargs = {}\n kwargs['stdout'] = stdout and subprocess.PIPE or None\n kwargs['stderr'] = stderr and subprocess.PIPE or None\n kwargs['stdin'] = stdin and subprocess.PIPE or None\n kwargs['universal_newlines'] = universal_newlines\n kwargs['close_fds'] = close_fds\n kwargs['env'] = env\n try:\n proc = subprocess.Popen(cmdline, **kwargs)\n except OSError, e: # noqa\n if e.errno == errno.ENOENT:\n raise CommandNotFound\n else:\n raise\n return proc", "def run(cmd):\n \n proc = subprocess.Popen (cmd, \n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=True\n )\n stdout_value, stderr_value = proc.communicate()\n print stdout_value\n print stderr_value\n\n if proc.poll() > 0:\n sys.stderr.write ( \"\\nError\\n\" )\n print '\\tstderr:', repr(stderr_value.rstrip())\n return False\n else:\n return True", "def 
patch_subprocess(self, monkeypatch):\n monkeypatch.setattr('subprocess.check_call', self.fake_check_call)", "def start(self, stdin=None, stdout=None, stderr=None):\n logging.debug(\"Starting '%s'\", \" \".join(self.cmd_line))\n self.proc = subprocess.Popen(self.cmd_line,\n stdin=stdin,\n stdout=stdout if stdout\n else subprocess.PIPE,\n stderr=stderr,\n env=self.env)\n self.thread = threading.Thread(target=self.tail)\n self.thread.daemon = True\n self.thread.start()\n self.running = True", "def test_subprocess_fork_pid0(self, mocker):\n mocker.stopall()\n\n test_command = [\"who\", \"-b\"]\n test_name = \"test_who\"\n test_fork = True\n pid = 0\n\n # mock\n mock_logging_debug = mocker.MagicMock(name=\"mock_logging_debug\")\n mock_os_fork = mocker.MagicMock(name=\"mock_os_fork\", return_value=pid)\n mock_sys_exit = mocker.MagicMock(name=\"mock_sys_exit\")\n mock_os_chdir = mocker.MagicMock(name=\"mock_os_chdir\")\n mock_os_setsid = mocker.MagicMock(name=\"mock_os_setsid\")\n mock_os_umask = mocker.MagicMock(name=\"mock_os_umask\")\n\n # patch\n mocker.patch.object(\n scarlett_os.subprocess.logging.Logger, \"debug\", mock_logging_debug\n )\n mocker.patch.object(scarlett_os.subprocess.os, \"fork\", mock_os_fork)\n mocker.patch.object(scarlett_os.subprocess.sys, \"exit\", mock_sys_exit)\n mocker.patch.object(scarlett_os.subprocess.os, \"chdir\", mock_os_chdir)\n mocker.patch.object(scarlett_os.subprocess.os, \"setsid\", mock_os_setsid)\n mocker.patch.object(scarlett_os.subprocess.os, \"umask\", mock_os_umask)\n\n scarlett_os.subprocess.Subprocess(test_command, name=test_name, fork=test_fork)\n\n assert mock_sys_exit.call_count == 0\n\n mocker.stopall()", "def LaunchAndWait(cmd):\n call(cmd)", "def test_install_should_call_subprocess_run(self, mock_subprocess):\n manifest = self.generate_mock_manifest(cfg={\n EXTCFG_SECTION.INSTALL: {\n EXTCFG_OPTION.EXEC_EXT_CMD: ['command'],\n }\n })\n ext_manager = PkgInstExtrasManager(manifest)\n ext_manager.handle_install_extras()\n mock_subprocess.assert_called_with(\n 'command',\n check=True,\n stderr=-1,\n stdout=-1,\n timeout=90,\n universal_newlines=True)", "def call_subshell(subshell):\n curses.def_prog_mode()\n #curses.endwin() # Probably causes a memory leak.\n\n rtn = os.system(\"%s\" % (subshell))\n curses.reset_prog_mode()\n if rtn is not 0:\n return False\n else:\n return True", "def shell_cmd(*args):\n proc = subprocess.run(args)\n returncode = proc.returncode\n if returncode != 0:\n raise RuntimeError(\n f\"Command {args} failed with return code {returncode}\")\n return proc", "def launch_shell(*, cwd: Optional[pathlib.Path] = None) -> None:\n with emit.pause():\n subprocess.run([\"bash\"], check=False, cwd=cwd)", "def spawn():\n if platform.system() == \"Windows\":\n # HACK https://github.com/prompt-toolkit/python-prompt-toolkit/issues/1243#issuecomment-706668723\n # FIXME Use pexpect or wexpect somehow to fix this\n pytest.xfail(\n \"pexpect fails on Windows\",\n )\n # Disable subprocess timeout if debugging (except coverage), for commodity\n # See https://stackoverflow.com/a/67065084/1468388\n tracer = getattr(sys, \"gettrace\", lambda: None)()\n if not isinstance(tracer, (CTracer, type(None))):\n return lambda cmd, timeout=None, *args, **kwargs: PopenSpawn(\n cmd, None, *args, **kwargs\n )\n # Using PopenSpawn, although probably it would be best to use pexpect.spawn\n # instead. 
However, it's working fine and it seems easier to fix in the\n # future to work on Windows (where, this way, spawning actually works; it's just\n # python-prompt-toolkit that rejects displaying a TUI)\n return PopenSpawn", "def start_process(self, args):\n try:\n with open(os.devnull, 'w') as devnull:\n popenObj = subprocess.Popen(\n args, stdout=devnull, stderr=subprocess.PIPE, cwd=\"/tmp/\")\n popenObj.name = args\n return popenObj\n except Exception as e:\n self.logger.error(\n \"Cannot start process %s due to reason:%s\", args, e)\n raise e", "def run(self, args=(), with_chroot=False, blocking=True, setsid=False,\r\n stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):\r\n import subprocess\r\n self.clean_environment(forking=True)\r\n\r\n cmdline = self.cmdline(args)\r\n TRACER.log('PEX.run invoking %s' % ' '.join(cmdline))\r\n process = subprocess.Popen(cmdline, cwd=self._pex if with_chroot else os.getcwd(),\r\n preexec_fn=os.setsid if setsid else None,\r\n stdin=stdin, stdout=stdout, stderr=stderr)\r\n return process.wait() if blocking else process", "def _run_subprocess(cmd: List[str], args: List[str], env: Optional[Dict[str, str]] = None):\n async def _read_output(stream, logger_instance):\n \"\"\"Read output from command and print it into the right logger.\"\"\"\n while True:\n line = await stream.readline()\n if line == b'':\n break\n logger_instance(line.decode('utf-8').rstrip())\n\n async def _stream_subprocess(cmd, args, env):\n \"\"\"Run subprocess.\"\"\"\n cmd_ = ' '.join(cmd)\n args_ = ' '.join(args)\n process = await asyncio.create_subprocess_shell(f'{cmd_} {args_}',\n stdout=asyncio.subprocess.PIPE,\n stderr=asyncio.subprocess.PIPE,\n env=env)\n\n await asyncio.wait([\n _read_output(process.stdout, logger.info),\n _read_output(process.stderr, logger.error)\n ])\n await process.wait()\n if process.returncode is None or process.returncode != 0:\n raise ValueError('Task failed!')\n\n loop = asyncio.get_event_loop()\n loop.run_until_complete(_stream_subprocess(cmd, args, env))", "def StartCmd(args, cwd=None, quiet=False, stdout=None, stderr=None, env=None):\n return cmd_util.StartCmd(args, cwd=cwd, quiet=quiet, stdout=stdout,\n stderr=stderr, env=env)", "def _run(self, args, cwd=None, env=None, logmode='wb'):\n args = tuple(str(arg) for arg in args)\n if self.process and self.process.poll() is None: \n raise RuntimeError(\"A process is already running\")\n if self.logfile:\n self.logfile.close()\n self.logfile = open(self.logfilename, logmode, buffering=0)\n if env is not None:\n env = dict(os.environ, **env, \n PYTHONPATH=os.pathsep.join(sys.path))\n \n self.process = subprocess.Popen(args, cwd=cwd, stdout=self.logfile,\n stderr=subprocess.STDOUT, env=env)", "def test_run_subprocess_cmd(self, mock_logging, mock_wait_for_proc):\n # Mock logging\n mock_wait_for_proc.return_value = (\"out\", \"err\")\n\n # Set up parameters\n args = [\"run\", \"unittest\"]\n proc = Mock(spec=Popen)\n\n # Test when return code is 0\n proc.returncode = 0\n run_subprocess_cmd(proc, args)\n expected_logs = [\"Executing bash command: run unittest\", \"out\", \"err\", \"Finished cmd successfully\"]\n self.assertListEqual([call(log) for log in expected_logs], mock_logging.call_args_list)\n\n # Test when return code is 1\n proc.returncode = 1\n with self.assertRaises(AirflowException):\n run_subprocess_cmd(proc, args)", "def start_process(options, args):\n import psutil\n import process_starter\n from synergy.system import process_helper\n\n try:\n pid = process_helper.get_process_pid(options.app)\n 
if pid is not None:\n if psutil.pid_exists(pid):\n message = 'ERROR: Process %r is already running with pid %r\\n' % (options.app, pid)\n sys.stderr.write(message)\n sys.exit(1)\n\n if not options.interactive:\n # this block triggers if the options.interactive is not defined or is False\n process_helper.start_process(options.app, args)\n else:\n process_starter.start_by_process_name(options.app, args)\n except Exception as e:\n sys.stderr.write('Exception on starting %s : %s \\n' % (options.app, str(e)))\n traceback.print_exc(file=sys.stderr)", "def _check_call(argv, **kwargs):\n logging.info('running %r', argv)\n subprocess.check_call(argv, **kwargs)", "def run(self):\n self.process.start()", "def spawn(self):\r\n options = self.config.options\r\n\r\n if self.pid:\r\n msg = 'process %r already running' % self.config.name\r\n options.logger.warn(msg)\r\n return\r\n\r\n self.killing = 0\r\n self.spawnerr = None\r\n self.exitstatus = None\r\n self.system_stop = 0\r\n self.administrative_stop = 0\r\n\r\n self.laststart = time.time()\r\n\r\n self._assertInState(ProcessStates.EXITED, ProcessStates.FATAL,\r\n ProcessStates.BACKOFF, ProcessStates.STOPPED)\r\n\r\n self.change_state(ProcessStates.STARTING)\r\n\r\n try:\r\n filename, argv = self.get_execv_args()\r\n except ProcessException as what:\r\n self.record_spawnerr(what.args[0])\r\n self._assertInState(ProcessStates.STARTING)\r\n self.change_state(ProcessStates.BACKOFF)\r\n return\r\n\r\n try:\r\n self.dispatchers, self.pipes = self.config.make_dispatchers(self)\r\n except OSError as why:\r\n code = why.args[0]\r\n if code == errno.EMFILE:\r\n # too many file descriptors open\r\n msg = 'too many open files to spawn %r' % self.config.name\r\n else:\r\n msg = 'unknown error: %s' % errno.errorcode.get(code, code)\r\n self.record_spawnerr(msg)\r\n self._assertInState(ProcessStates.STARTING)\r\n self.change_state(ProcessStates.BACKOFF)\r\n return\r\n\r\n try:\r\n pid = options.fork()\r\n except OSError as why:\r\n code = why.args[0]\r\n if code == errno.EAGAIN:\r\n # process table full\r\n msg = ('Too many processes in process table to spawn %r' %\r\n self.config.name)\r\n else:\r\n msg = 'unknown error: %s' % errno.errorcode.get(code, code)\r\n\r\n self.record_spawnerr(msg)\r\n self._assertInState(ProcessStates.STARTING)\r\n self.change_state(ProcessStates.BACKOFF)\r\n options.close_parent_pipes(self.pipes)\r\n options.close_child_pipes(self.pipes)\r\n return\r\n\r\n if pid != 0:\r\n return self._spawn_as_parent(pid)\r\n\r\n else:\r\n return self._spawn_as_child(filename, argv)", "def shell(cmd, check=True, stdin=None, stdout=None, stderr=None):\n return subprocess.run(cmd, shell=True, check=check, stdin=stdin, stdout=stdout, stderr=stderr)", "def supercall(command):\n p = subprocess.Popen(command, shell=True, stderr=subprocess.PIPE)\n retval = p.wait()\n \n if retval != 0:\n get_logger().critical('error calling {}'.format(command))\n for line in p.stderr.readlines():\n get_logger().critical(line.decode('utf8').replace('\\n', ''))\n\n return retval", "async def test_subprocess(event_loop):\n proc = await asyncio.subprocess.create_subprocess_exec(\n sys.executable, '--version', stdout=asyncio.subprocess.PIPE,\n loop=event_loop)\n await proc.communicate()", "def start(self):\n if self.running:\n warnings.warn(\"ExifTool already running; doing nothing.\")\n return\n with open(os.devnull, \"w\") as devnull:\n procargs = [self.executable, \"-stay_open\", \"True\", \"-@\", \"-\",\n \"-common_args\", \"-G\", \"-n\"];\n procargs.extend(self.addedargs)\n 
logging.debug(procargs) \n self._process = subprocess.Popen(\n procargs,\n stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n stderr=devnull)\n self.running = True", "def check_executable(op):\n try:\n proc = subprocess.Popen([op], stdout = subprocess.PIPE, stderr = subprocess.PIPE)\n except OSError:\n return False\n try:\n if proc.poll():\n proc.kill()\n except OSError:\n return True\n return True", "def RunProcess(self, command, env=None, stdin=None, stdout=None, stderr=None):\n\n # merge specified env with OS env\n myenv = os.environ.copy()\n if env is not None:\n myenv.update(env)\n\n try:\n process = subprocess.Popen(command, stdin=stdin, stdout=stdout, stderr=stderr, env=myenv, bufsize=0)\n return process\n except:\n print(\"Unexpected error when launching process:\")\n print(\" \", command)\n print(\" \", env)\n raise", "def test_start_process(self, mocked_check):\n from supvisors.rpcinterface import RPCInterface\n # get patches\n mocked_start = self.supervisor.supvisors.starter.start_process\n mocked_progress = self.supervisor.supvisors.starter.in_progress\n # create RPC instance\n rpc = RPCInterface(self.supervisor)\n # patch the instance\n rpc._get_application_process = Mock()\n # test RPC call with unknown strategy\n with self.assertRaises(RPCError) as exc:\n rpc.start_process('strategy', 'appli:proc')\n self.assertEqual(Faults.BAD_STRATEGY, exc.exception.code)\n self.assertEqual('BAD_STRATEGY: strategy', exc.exception.text)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual(0, mocked_start.call_count)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n # test RPC call with running process\n rpc._get_application_process.return_value = (\n None, Mock(**{'running.return_value': True,\n 'namespec.return_value': 'proc1'}))\n with self.assertRaises(RPCError) as exc:\n rpc.start_process(0, 'appli_1')\n self.assertEqual(Faults.ALREADY_STARTED, exc.exception.code)\n self.assertEqual('ALREADY_STARTED: proc1', exc.exception.text)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual(0, mocked_start.call_count)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n # test RPC call with running processes\n rpc._get_application_process.return_value = (\n Mock(**{'processes.values.return_value': [\n Mock(**{'running.return_value': False}),\n Mock(**{'running.return_value': True,\n 'namespec.return_value': 'proc2'})]}), None)\n with self.assertRaises(RPCError) as exc:\n rpc.start_process(0, 'appli_1')\n self.assertEqual(Faults.ALREADY_STARTED, exc.exception.code)\n self.assertEqual('ALREADY_STARTED: proc2', exc.exception.text)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual(0, mocked_start.call_count)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n # test RPC call with stopped processes\n proc_1 = Mock(**{'running.return_value': False,\n 'stopped.return_value': True,\n 'namespec.return_value': 'proc1'})\n proc_2 = Mock(**{'running.return_value': False,\n 'stopped.return_value': False,\n 'namespec.return_value': 'proc2'})\n rpc._get_application_process.return_value = (\n Mock(**{'processes.values.return_value': [\n proc_1, proc_2]}), None)\n # test RPC call with no wait and not done\n mocked_start.return_value = False\n result = rpc.start_process(1, 'appli:*', 'argument list', False)\n self.assertTrue(result)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual([call(1, proc_1, 'argument list'),\n call(1, proc_2, 
'argument list')], mocked_start.call_args_list)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n mocked_start.reset_mock()\n # test RPC call no wait and done\n mocked_start.return_value = True\n result = rpc.start_process(1, 'appli:*', 'argument list', False)\n self.assertTrue(result)\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual([call(1, proc_1, 'argument list'),\n call(1, proc_2, 'argument list')], mocked_start.call_args_list)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n mocked_start.reset_mock()\n # test RPC call with wait and done\n result = rpc.start_process(2, 'appli:*', wait=True)\n self.assertTrue(result)\n self.assertEqual([call(2, proc_1, ''), call(2, proc_2, '')],\n mocked_start.call_args_list)\n self.assertEqual(0, mocked_progress.call_count)\n mocked_check.reset_mock()\n mocked_start.reset_mock()\n # test RPC call with wait and not done\n mocked_start.return_value = False\n deferred = rpc.start_process(2, 'appli:*', wait=True)\n # result is a function for deferred result\n self.assertTrue(callable(deferred))\n self.assertEqual([call()], mocked_check.call_args_list)\n self.assertEqual([call(2, proc_1, ''), call(2, proc_2, '')],\n mocked_start.call_args_list)\n self.assertEqual(0, mocked_progress.call_count)\n # test returned function: return True when job in progress\n mocked_progress.return_value = True\n self.assertEqual(NOT_DONE_YET, deferred())\n self.assertEqual([call()], mocked_progress.call_args_list)\n mocked_progress.reset_mock()\n # test returned function: raise exception if job not in progress anymore\n # and process still stopped\n mocked_progress.return_value = False\n with self.assertRaises(RPCError) as exc:\n deferred()\n self.assertEqual(Faults.ABNORMAL_TERMINATION, exc.exception.code)\n self.assertEqual('ABNORMAL_TERMINATION: proc1', exc.exception.text)\n self.assertEqual([call()], mocked_progress.call_args_list)\n mocked_progress.reset_mock()\n # test returned function: return True if job not in progress anymore\n # and process running\n proc_1.stopped.return_value = False\n self.assertTrue(deferred())\n self.assertEqual([call()], mocked_progress.call_args_list)", "def start_command(self, wait_for_config=True):\n self.process = subprocess.Popen(shlex.split(self.command))\n LOGGER.info(\n 'Command (%s) started with pid %s', self.command, self.process.pid)", "def startProcess(self, args, workingDir=None, showArgs=True,\n environment=None):\n self.errorGroup.hide()\n self.normal = False\n \n self.__hasAddOrDelete = False\n if (\n args[0] in [\"fetch\", \"qpush\", \"qpop\", \"qgoto\", \"rebase\",\n \"update\", \"import\", \"revert\", \"graft\", \"shelve\",\n \"unshelve\", \"strip\", \"histedit\"] or\n (args[0] in [\"pull\", \"unbundle\"] and\n (\"--update\" in args[1:] or \"--rebase\" in args[1:]))\n ):\n self.__updateCommand = True\n else:\n self.__updateCommand = False\n \n if showArgs:\n self.resultbox.append(' '.join(args))\n self.resultbox.append('')\n \n out, err = self.vcs.getClient().runcommand(\n args, output=self.__showOutput, error=self.__showError)\n \n if err:\n self.__showError(err)\n if out:\n self.__showOutput(out)\n \n self.normal = True\n \n self.__finish()\n \n return True", "def subproc():\n subprocess.check_call('FaroScanServer.exe', shell=True,\n cwd='C:\\\\Users\\\\yuhhe\\\\source\\\\repos\\\\FaroScanServer\\\\FaroScanServer\\\\bin\\\\x64\\\\Release',\n close_fds=True)", "def run_subprocess(command, environment=None, shell=False, 
raise_on_error=True):\n proc = subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=shell,\n env=environment)\n stdout, stderr = proc.communicate()\n if proc.returncode != 0:\n if raise_on_error:\n raise RuntimeError('{}\\n{}'.format(stderr, stdout))\n return stdout, stderr, proc.returncode", "def _subexec(command):\n lcwd = fabric.state.env.get('lcwd', None) or None #sets lcwd to None if it bools to false as well\n process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=lcwd)\n out, err = process.communicate()\n print \"command : %s \" % command\n print \"out: %s\" % out\n print \"err: %s\" % err", "def Popen(self, *unargs, **kwargs):\r\n cmdline = None\r\n if 'args' in kwargs:\r\n cmdline = kwargs['args']\r\n else:\r\n cmdline = unargs[0]\r\n return PopenWrapper.WaitWrapper(subprocess_.Popen(*unargs, **kwargs), self, cmdline)", "def run(cmd):\n print ' '.join(cmd)\n try:\n check_call(cmd)\n except CalledProcessError as cpe:\n print \"Error: return code: \" + str(cpe.returncode)\n sys.exit(cpe.returncode)", "def __launch__(self,config,command=None,**kwargs):\n if command is None:\n command = ['sleep 30;','qsub']\n return SampleQsubProcess.__launch__(self,config,command=command,**kwargs)", "def start_driver_ctrl(self):\n try:\n args = [\n DRV_CTRL_PROCESS,\n \"-u\",\n self.user,\n \"start\"\n ]\n result = subprocess.run(args, check=True) #pylint: disable=unused-variable\n return True\n except subprocess.CalledProcessError:\n return False", "def runin(cmd, stdin):\n result = subprocess.Popen(cmd,stdin=subprocess.PIPE)\n result.wait()\n return result.returncode", "def __init__(self, senna_path, executable):\n self.senna_path = senna_path\n self.p = sp.Popen(['blabla', '-path', senna_path],\n executable=os.path.join(senna_path, executable),\n stdin=sp.PIPE,\n stdout=sp.PIPE)", "def _run_shell(self, command_string: str, cwd: str = '/', print_command: bool = False) -> subprocess.Popen:\n if print_command:\n self.logger.info(command_string)\n return subprocess.Popen(command_string, shell=True, cwd=cwd)", "def start_shell(self):\n cmd = 'shell'\n end_strs = ['>']\n self.run_with_output(cmd, end_strs)\n return True", "def test_example(self, _, cmd):\n out = subprocess.run(cmd, shell=True)\n self.assertFalse(out.returncode)", "def LaunchFile(*params):\n\n file = subprocess.Popen(params)\n file.communicate()\n return file.returncode", "def run(args, verbose=False, exception=False, **kwargs):\n if verbose:\n print(\"running: \" + ' '.join(args))\n sys.stdout.flush()\n # Use Popen here instead of call() as it apparently allows powershell on\n # Windows to not lock up waiting for input presumably.\n ret = subprocess.Popen(args, **kwargs)\n code = ret.wait()\n if code != 0:\n err = \"failed to run: \" + ' '.join(args)\n if verbose or exception:\n raise RuntimeError(err)\n sys.exit(err)", "def _staf_start_proc(self,\n command,\n working_dir,\n wait,\n params=[],\n env_vars={},\n location='local'):\n\n staf_request = ('START SHELL COMMAND \"{0}\" WORKDIR \"{1}\" WAIT '\n '{2}s STDERRTOSTDOUT RETURNSTDOUT'.format(unix_style_path(command),\n unix_style_path(working_dir),\n str(wait)))\n if len(params) != 0:\n staf_request += ' PARMS {0}'.format(\" \".join(params))\n\n if len(env_vars) != 0:\n for key in env_vars:\n staf_request += ' ENV {0}={1}'.format(key, env_vars[key])\n\n result = self._staf_handle.submit(location, 'process', staf_request)\n\n if result.rc != result.Ok:\n raise CoreError(result.result)\n\n #Return the exit code 
from the executed command and STDOUT.\n return (int(result.resultObj['rc']), result.resultObj['fileList'][0]['data'])", "def start(self):\n if self.process:\n return True\n if self.host:\n cmd = \"ssh -Y root@%s\" % self.host\n else:\n cmd = \"su -c\"\n cmd += \" 'echo _GO_ && larchin-0call'\"\n # Run the command as root with pexpect.\n # Return True if it succeeded, else False.\n p = pexpect.spawn(cmd, timeout=None)\n e = p.expect([\"_GO_.*\\n\", pexpect.TIMEOUT, \"Password:\"], 5)\n while e != 0:\n if e == 2:\n ok, pw = ui.textLineDialog(_(\"Please enter root password\"),\n \"larchin: root pw\", pw=True)\n if not ok:\n run_error( _(\"No root password, cancelling run\"))\n return False\n\n p.sendline(pw.strip())\n e = p.expect([\"_GO_.*\\n\", pexpect.TIMEOUT, pexpect.EOF], 5)\n else:\n run_error(_(\"Couldn't start larchin-0call\"))\n return False\n self.process = p\n p.setecho(False)\n\n # Start a thread to read input from 0call.\n command.simple_thread(self.read0call)\n # Perform initialization of the installation system\n ok, textlines = self.xlist(\"init\")\n if not ok:\n run_error(_(\"Couldn't initialize installation system:\\n\\n%s\")\n % \"\\n\".join(textlines))\n return ok", "def _launch(self):\n annotators = ['tokenize', 'ssplit']\n if 'ner' in self.annotators:\n annotators.extend(['pos', 'lemma', 'ner'])\n elif 'lemma' in self.annotators:\n annotators.extend(['pos', 'lemma'])\n elif 'pos' in self.annotators:\n annotators.extend(['pos'])\n annotators = ','.join(annotators)\n options = ','.join(['untokenizable=noneDelete',\n 'invertible=true'])\n # if you work on English, use this this command\n cmd = ['java', '-mx' + self.mem, '-cp', '\"%s\"' % self.classpath,\n 'edu.stanford.nlp.pipeline.StanfordCoreNLP', '-annotators',\n annotators, '-tokenize.options', options,\n '-outputFormat', 'json', '-prettyPrint', 'false']\n \n # if you work on arabic, use this this command\n \n # cmd = ['java', '-mx' + self.mem, '-cp', '\"%s\"' % self.classpath,\n # # 'edu.stanford.nlp.pipeline.StanfordCoreNLP','-annotators',\n # 'edu.stanford.nlp.pipeline.StanfordCoreNLP', '-props', 'StanfordCoreNLP-arabic.properties','-annotators',\n # annotators, '-tokenize.options', options, #'-tokenize.whitespace', 'true',\n # '-outputFormat', 'json', '-prettyPrint', 'false']\n print(' '.join(cmd))\n\n # We use pexpect to keep the subprocess alive and feed it commands.\n # Because we don't want to get hit by the max terminal buffer size,\n # we turn off canonical input processing to have unlimited bytes.\n self.corenlp = pexpect.spawn('/bin/bash', maxread=100000, timeout=60)\n self.corenlp.setecho(False)\n self.corenlp.sendline('stty -icanon')\n self.corenlp.sendline(' '.join(cmd))\n self.corenlp.delaybeforesend = 0\n self.corenlp.delayafterread = 0\n self.corenlp.expect_exact('NLP>', searchwindowsize=100)" ]
[ "0.7211386", "0.6982913", "0.6917975", "0.6912684", "0.6888676", "0.6845422", "0.65411717", "0.65263665", "0.648128", "0.6467744", "0.6461761", "0.64219886", "0.6418295", "0.64087766", "0.63941157", "0.6381634", "0.63771", "0.6374373", "0.63405687", "0.633077", "0.6330121", "0.6313436", "0.6307213", "0.62764114", "0.62715524", "0.62079394", "0.6202999", "0.61737186", "0.6172968", "0.6144104", "0.6097086", "0.60902745", "0.6075148", "0.60734123", "0.6066617", "0.6054903", "0.60527396", "0.60512024", "0.6035233", "0.60327774", "0.603263", "0.5987704", "0.5978134", "0.596659", "0.5962399", "0.59425056", "0.593343", "0.59263927", "0.59202987", "0.58995014", "0.58894295", "0.5878075", "0.5867449", "0.5850583", "0.5847995", "0.5846843", "0.58363456", "0.5836032", "0.5835018", "0.58278537", "0.58278453", "0.58219206", "0.58206254", "0.5817606", "0.5816305", "0.58055234", "0.5793746", "0.5787712", "0.5785018", "0.578337", "0.57832456", "0.57808185", "0.5779516", "0.5775788", "0.5766704", "0.57630837", "0.57605875", "0.57420415", "0.5729717", "0.57245624", "0.57236373", "0.5719256", "0.5715863", "0.5701466", "0.57005376", "0.5688233", "0.568293", "0.56812173", "0.5679481", "0.5668645", "0.5666666", "0.566471", "0.5663862", "0.5658018", "0.56573904", "0.56564164", "0.5644945", "0.5642663", "0.5630315", "0.56192" ]
0.57060796
83
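The negatives above mostly orbit one idiom: build an argument list, hand it to subprocess.Popen, then wait and check the exit status. A minimal sketch of that launch-and-wait pattern, assuming Python 3 — every name below is illustrative and taken from none of the snippets above:

import subprocess

def launch_and_wait(args, timeout=30):
    # Spawn the child and capture both streams as text.
    proc = subprocess.Popen(
        args,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )
    try:
        # communicate() both drains the pipes and reaps the child,
        # which avoids the classic PIPE deadlock on chatty processes.
        out, err = proc.communicate(timeout=timeout)
    except subprocess.TimeoutExpired:
        proc.kill()  # reap the child rather than leaking it
        raise
    if proc.returncode != 0:
        raise RuntimeError("%r failed (%d): %s" % (args, proc.returncode, err))
    return out

# e.g. print(launch_and_wait(["echo", "hello"]))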
Apply the orthonormal explicit topic analysis (ONETA) method
import math
import sys

from nltk.tokenize import word_tokenize
from numpy import linalg, zeros
from scipy.sparse import lil_matrix


def main():
    if len(sys.argv) != 5:
        sys.exit("Usage: python oneta.py train-corpus test-corpus kernel-size output")

    w1Words = dict()
    w2Words = dict()
    W1 = 0
    W2 = 0
    D1 = int(sys.argv[3])

    sys.stderr.write("First scan of training data\n")

    J = 0
    # Read through the corpus to decide which words are in the dense set and which in the sparse set
    corpus = open(sys.argv[1], "r")
    for line in corpus:
        tokens = word_tokenize(line)
        for token in tokens:
            tk_decoded = token.decode("utf-8")  # Python 2: tokens are byte strings
            if J < D1 and tk_decoded not in w1Words:
                w1Words[tk_decoded] = W1
                W1 += 1
            elif J >= D1 and tk_decoded not in w2Words:
                w2Words[tk_decoded] = W2
                W2 += 1
        J += 1
    corpus.close()

    D2 = J - D1

    # Partition the corpus into an L-shaped matrix
    sys.stderr.write("Building matrices")
    At = lil_matrix((D1, W1))
    B = lil_matrix((W1, D2))
    Ct = lil_matrix((D2, W2))
    corpus = open(sys.argv[1], "r")
    j = 0
    for line in corpus:
        sys.stderr.write(".")
        tokens = word_tokenize(line)
        docsq = 0.
        for token in tokens:
            tk_decoded = token.decode("utf-8")
            if j < D1:  # tk_decoded is in w1Words by construction
                tkId = w1Words[tk_decoded]
                docsq += (At[j, tkId] + 1) ** 2 - (At[j, tkId]) ** 2
                At[j, tkId] += 1.
            elif tk_decoded in w1Words:
                tkId = w1Words[tk_decoded]
                docsq += (B[tkId, j - D1] + 1) ** 2 - (B[tkId, j - D1]) ** 2
                B[tkId, j - D1] += 1.
            else:
                tkId = w2Words[tk_decoded]
                docsq += (Ct[j - D1, tkId] + 1) ** 2 - (Ct[j - D1, tkId]) ** 2
                Ct[j - D1, tkId] += 1.
        if j < D1:
            At[j, :] /= math.sqrt(docsq)
        else:
            for w in range(0, W1):
                B[w, j - D1] /= math.sqrt(docsq)
            Ct[j - D1, :] /= math.sqrt(docsq)
        j += 1
    corpus.close()

    sys.stderr.write("\nBuild Cn\n")
    Cn = zeros((D2, 1))
    Ct = Ct.tocsr()
    for i in range(0, D2):
        v = (Ct[i, :] * Ct[i, :].transpose())[0, 0]
        if v == 0:
            Cn[i, 0] = 1.
        else:
            Cn[i, 0] = v

    # Building the dense matrices
    sys.stderr.write("Calculating ATA\n")
    ATA = (At * At.transpose()).todense()  # D1 x D1
    At = At.tocsr()
    B = B.tocsc()

    sys.stderr.write("Solve inverse\n")
    ATAi = linalg.inv(ATA)

    # For an input vector [ d1 d2 ]^T we yield
    # [ (A^T * A)^-1 * A^T * (d1 - B * (C^T * d2 / Cn))    (C^T * d2 / Cn) ]^T
    sys.stderr.write("Calculating projected vectors\n")
    out = open(sys.argv[4], "w")
    testDocs = open(sys.argv[2], "r")
    for testDoc in testDocs:
        sys.stderr.write(".")
        d1 = zeros((W1, 1))
        d2 = zeros((W2, 1))
        tokens = word_tokenize(testDoc)
        for token in tokens:
            tk_decoded = token.decode("utf-8")
            if tk_decoded in w1Words:
                d1[w1Words[tk_decoded], 0] += 1
            elif tk_decoded in w2Words:
                d2[w2Words[tk_decoded], 0] += 1
        norm = math.sqrt((d1 ** 2).sum() + (d2 ** 2).sum())
        d1 /= norm
        d2 /= norm
        v2 = (Ct * d2) / Cn
        v1 = ATAi * (At * (d1 - B * v2))
        for j in range(0, D1 + D2):
            out.write(str(j) + " ")
        out.write("||| ")
        for j in range(0, D1):
            out.write(str(v1[j, 0]) + " ")
        for j in range(0, D2):
            out.write(str(v2[j, 0]) + " ")
        out.write("\n")
        out.flush()
    testDocs.close()
    out.close()
    sys.stderr.write("\n")


if __name__ == "__main__":
    main()
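Read as one formula, the script projects a normalized test document [d1 d2]^T to [v1 v2]^T with v2 = C^T d2 / Cn and v1 = (A^T A)^-1 A^T (d1 - B v2). A self-contained dense-matrix sketch of just that projection — toy shapes and random data, purely illustrative, using numpy in place of the script's sparse matrices:

import numpy as np

D1, D2 = 3, 2  # "kernel" (dense) documents and remaining (sparse) documents
W1, W2 = 5, 4  # dense-set and sparse-set vocabulary sizes

rng = np.random.default_rng(0)
At = rng.random((D1, W1))  # A^T: dense docs x dense words
B = rng.random((W1, D2))   # dense words x sparse docs
Ct = rng.random((D2, W2))  # C^T: sparse docs x sparse words

# Squared row norms of C^T, as in the "Build Cn" pass
# (the script guards against zero rows; random data here is safely nonzero).
Cn = (Ct * Ct).sum(axis=1, keepdims=True)

d1 = rng.random((W1, 1))   # test-doc counts over the dense vocabulary
d2 = rng.random((W2, 1))   # test-doc counts over the sparse vocabulary
norm = np.sqrt((d1 ** 2).sum() + (d2 ** 2).sum())
d1, d2 = d1 / norm, d2 / norm

v2 = (Ct @ d2) / Cn                                    # sparse-block topic weights
v1 = np.linalg.inv(At @ At.T) @ (At @ (d1 - B @ v2))   # dense-block topic weights
print(np.vstack([v1, v2]).ravel())                     # the D1 + D2 topic weights

The least-squares solve through (A^T A)^-1 is what orthonormalizes the dense topic block; the sparse block is only norm-scaled, which is what keeps the method tractable on the long tail of the vocabulary.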
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transform(self, corpus: Corpus):\n assert 'stem_tokens' in next(corpus.iter_utterances()).meta\n counter = 1\n for utt in corpus.iter_utterances():\n if utt.meta['valid']:\n utt.meta['analysis'] = lexicon.analyze(utt.text,categories=self.categories)\n for k in utt.meta['analysis'].keys():\n if utt.meta['analysis'][k] != 0.0:\n utt.meta['analysis'][k] = 1\n else:\n utt.meta['analysis'] = None\n\n counter = counter + 1\n if counter % 10000 == 0:\n print(\"processed \", counter, \"utterances \")\n return corpus", "def fit_transform(self, X):\n self.fit(X)\n return self.doc_topic_distr, self.xai", "def _apply_transform(self):\n pass", "def _unnormalized_transform(self):\n return self.n_ds + self.doc_sentiment_prior_", "def inference(self):\n for m, doc in enumerate(self.docs):\n # Be careful followings are views\n # So self.hoge will be change, when changing variant\n zs_j = self.zs_m_j[m]\n zk_j = self.zk_m_j[m]\n n_m_zs = self.n_m_zs[m]\n n_m_zk = self.n_m_zk[m]\n for j, t in enumerate(doc):\n # discount for n-th word t with topic z\n zs = zs_j[j]\n zk = zk_j[j]\n n_m_zs[zs] -= 1\n n_m_zk[zs, zk] -= 1\n self.n_zk_t[zk, t] -= 1\n self.n_zk[zk] -= 1\n\n # sampling topic new_z for t\n \"\"\"\n n_s = n_m_zs + self.alphas # mth doc, S vec\n p_s = n_s / np.sum(n_s)\n n_k = n_m_zk + self.alphask # mth doc, SxK matrix\n p_k = n_k / n_s.reshape(len(n_s), 1)\n n_v = self.n_zk_t[:, t] + self.beta\n p_v = n_v / (self.n_zk + self.beta)\n\n p_zsk = p_s.reshape(len(p_s), 1) * p_k * p_v # SxK matrix\n \"\"\"\n\n p_zsk = (n_m_zk + self.alphask) * self.n_zk_t[:, t] \\\n / (np.sum(n_m_zs + self.alphas) * self.n_zk)\n\n p_zs = np.sum(p_zsk, axis=1) / np.sum(p_zsk)\n p_zk = np.sum(p_zsk, axis=0) / np.sum(p_zsk)\n\n new_zs = np.random.multinomial(1, p_zs).argmax()\n new_zk = np.random.multinomial(1, p_zk).argmax()\n\n # print(\"arg\", np.argmax(p_s), np.argmax(p_k, axis=1),\n # np.argmax(p_k, axis=0), np.argmax(p_zk))\n # print('probs', p_s, p_zs)\n # print('probk', p_k, p_zk)\n # print('old', zs, zk)\n # print('new', new_zs, new_zk)\n\n # set z the new topic and increment counters\n zs_j[j] = new_zs\n zk_j[j] = new_zk\n n_m_zs[new_zs] += 1\n n_m_zk[new_zs, new_zk] += 1\n self.n_zk_t[new_zk, t] += 1\n self.n_zk[new_zk] += 1", "def model(self, doc_list=None):\r\n\r\n # eta => prior for the per-topic word distribution\r\n eta = torch.ones(self.V)\r\n\r\n with pyro.plate(\"topics\", self.K):\r\n\r\n # Beta => per topic word distribution\r\n Beta = pyro.sample(f\"beta\", dist.Dirichlet(eta))\r\n\r\n # alpha => prior for the per-doc topic vector\r\n alpha = torch.ones(self.K) / self.K\r\n\r\n X_List, Theta = [], []\r\n for d in pyro.plate(\"documents\", self.D, subsample_size=self.S):\r\n\r\n # theta => per-doc topic vector\r\n theta = pyro.sample(f\"theta_{d}\", dist.Dirichlet(alpha))\r\n\r\n doc = None if doc_list is None else doc_list[d]\r\n\r\n with pyro.plate(f\"words_{d}\", self.N[d]):\r\n\r\n # assign a topic\r\n z_assignment = pyro.sample(\r\n f\"z_assignment_{d}\",\r\n dist.Categorical(theta)\r\n )\r\n\r\n # from that topic vec, select a word\r\n X = pyro.sample(\r\n f\"w_{d}\",\r\n dist.Categorical(Beta[z_assignment]),\r\n obs=doc\r\n )\r\n\r\n X_List.append(X)\r\n Theta.append(theta)\r\n\r\n Theta = torch.stack(Theta)\r\n\r\n return X_List, Beta, Theta", "def _initialize(self):\n self.VT = len(self.corpus.topicDictionary)\n self.VO = len(self.corpus.opinionDictionary)\n self.DT = len(self.corpus)\n self.DO = np.array([len(p.opinionCorpus)\n for p in self.corpus.perspectives], dtype=np.int)\n 
self.maxDocLengthT = max([p.topicCorpus.maxDocLength\n for p in self.corpus.perspectives])\n self.maxDocLengthO = np.array([p.opinionCorpus.maxDocLength\n for p in self.corpus.perspectives],\n dtype=np.int)\n\n # topics\n self.z = np.zeros((self.DT, self.maxDocLengthT), dtype=np.int)\n self.ndk = np.zeros((self.DT, self.nTopics), dtype=np.int)\n self.nkw = np.zeros((self.nTopics, self.VT), dtype=np.int)\n self.nk = np.zeros(self.nTopics, dtype=np.int)\n self.ntd = np.zeros(self.DT, dtype=np.float)\n\n # opinions\n self.x = np.array([np.zeros((self.DO[i], self.maxDocLengthO[i]),\n dtype=np.int)\n for i, p in enumerate(self.corpus.perspectives)])\n self.nrs = np.zeros((self.nPerspectives, self.nTopics, self.VO),\n dtype=np.int)\n self.ns = np.zeros((self.nPerspectives, self.nTopics), dtype=np.int)\n\n # loop over the words in the corpus\n for d, persp, d_p, doc in self.corpus:\n for w_id, i in self.corpus.words_in_document(doc, 'topic'):\n topic = np.random.randint(0, self.nTopics)\n self.z[d, i] = topic\n self.ndk[d, topic] += 1\n self.nkw[topic, w_id] += 1\n self.nk[topic] += 1\n self.ntd[d] += 1\n\n for w_id, i in self.corpus.words_in_document(doc, 'opinion'):\n opinion = np.random.randint(0, self.nTopics)\n self.x[persp][d_p, i] = opinion\n self.nrs[persp, opinion, w_id] += 1\n self.ns[persp, opinion] += 1\n logger.debug('Finished initialization.')", "def preprocess(self, topic_name, corpus):\n\n return None", "def summarize_corpus():\n\t\n\t# get metadata\n\t#get_metadata.from_TEIP5(wdir, corpus_inpath, \"metadata\", md_mode)\n\t\n\t# visualize some metadata\n\t#visualize_metadata.describe_corpus(wdir, md_csv, \"author-continent\")\n\tvisualize_metadata.describe_corpus(wdir, md_csv, \"author-country\")\n\t#visualize_metadata.describe_corpus(wdir, md_csv, \"language\")\n\t#visualize_metadata.describe_corpus(wdir, md_csv, \"subgenre_hist\")\n\t#visualize_metadata.describe_corpus(wdir, md_csv, \"subgenre_x\")\n\tvisualize_metadata.plot_pie(wdir, md_csv, \"subgenre\")\n\n\tvisualize_metadata.describe_corpus(wdir, md_csv, \"subgenre\")\n\t#visualize_metadata.describe_corpus(wdir, md_csv, \"gender\")\n\t\n\t# make some counts\n\tmd_table = pd.DataFrame.from_csv(os.path.join(wdir, md_csv), header=0)\n\tnum_texts = len(md_table)\n\t#num_language = len(md_table.groupby([\"language\"]))\n\t#num_continent = len(md_table.groupby([\"author-continent\"]))\n\t#num_countries = len(md_table.groupby([\"author-country\"]))\n\t#num_authors = len(md_table.groupby([\"author-name\"]))\n\tnum_authors = len(md_table.groupby([\"author-name\"]))\n\tnum_subgenre = len(md_table.groupby([\"subgenre\"]))\n\t#num_subgenre_x = len(md_table.groupby([\"subgenre_x\"]))\n\t#fr_subgenre_hist = md_table.groupby([\"subgenre_hist\"]).count()\n\t#num_historical = fr_subgenre_hist[\"idno\"][\"historical\"]\n\t#num_not_historical = fr_subgenre_hist[\"idno\"][\"not_historical\"]\n\t\n\t\n\td = {\"texts\":[num_texts], \n\t#\"languages\":[num_language],\n\t#\"continents\":[num_continent],\n\t#\"countries\":[num_countries],\n\t\"authors\":[num_authors],\n\t#\"subgenre_x\":[num_subgenre_x],\n\t\"subgenre\":[num_subgenre]}\n\t#\"num_historical\":[num_historical],\n\t#\"num_not_historical\":[num_not_historical]}\n\t\n\t\n\t\n\tcount_fr = pd.DataFrame(d)\n\tcount_fr.to_csv(os.path.join(wdir, \"corpus-description.csv\"), sep=\",\", header=True)\n\tprint(\"Done: summarize corpus\")", "def fit(self, raw_documents, y=None):\n\n X = super().fit_transform(raw_documents) #now pandas return matrix \n self._tfidf.fit(X)\n ##arqui\n return self", 
"def _apply_transform(self, w2w_transform):\n raise NotImplementedError", "def text_to_corpus(text, accented_chars=True,\n convert_num=True, extra_whitespace=True, \n lemmatization=True, lowercase=True, punctuations=True,\n remove_html=True, remove_num=True, special_chars=True, \n stop_words=True): \n \n\n \"\"\"preprocess text with default option set to true for all steps\"\"\"\n if remove_html == True: #remove html tags\n text = strip_html_tags(text)\n if extra_whitespace == True: #remove extra whitespaces\n text = remove_whitespace(text)\n if accented_chars == True: #remove accented characters\n text = remove_accented_chars(text)\n if lowercase == True: #convert all characters to lowercase\n text = text.lower()\n \n # add a period to the end of the text:\n if len(text) > 0 and text[-1] != '.':\n text += '.'\n \n doc = nlp(text) #tokenise text \n clean_text = []\n \n for token in doc:\n \n flag = True\n edit = token.text\n # print(\"Word: \", edit, \" Type: \", token.pos_)\n \n # remove stop words\n if stop_words == True and token.is_stop and token.pos_ != 'NUM': \n flag = False\n \n # remove punctuations\n if punctuations == True and (token.pos_ == 'PUNCT' and not token.tag_ == '.') and flag == True: \n flag = False\n \n # remove 'X' characters:\n if token.pos_ == 'X':\n flag = False\n \n # remove special characters\n if special_chars == True and token.pos_ == 'SYM' and flag == True: \n flag = False\n \n # remove numbers\n if remove_num == True and (token.pos_ == 'NUM' or token.text.isnumeric()) \\\n and flag == True:\n flag = False\n \n # convert number words to numeric numbers\n if convert_num == True and token.pos_ == 'NUM' and flag == True:\n edit = w2n.word_to_num(token.text)\n \n # convert tokens to base form\n elif lemmatization == True and token.lemma_ != \"-PRON-\" and flag == True:\n edit = token.lemma_\n \n # convert all closing punctuation ('.', '!', '?', '...' to periods)\n if token.tag_ == '.' 
and flag == True:\n clean_text.append('.')\n \n # add text lemmas to the clean text:\n elif edit != \"\" and flag == True:\n clean_text.append(edit)\n \n return ' '.join(clean_text)", "def model_topics(df):\n\n data = df.text.values.tolist()\n data_words = list(sent_to_words(data))\n\n # Build the bigram and trigram models\n bigram = gensim.models.Phrases(data_words, min_count=5, threshold=100)\n trigram = gensim.models.Phrases(bigram[data_words], threshold=100) \n\n # Faster way to get a sentence clubbed as a trigram/bigram\n bigram_mod = gensim.models.phrases.Phraser(bigram)\n trigram_mod = gensim.models.phrases.Phraser(trigram)\n\n # Remove Stop Words\n data_words_nostops = remove_stopwords(data_words)\n\n # Form Bigrams\n data_words_bigrams = make_bigrams(data_words_nostops,bigram_mod)\n\n # Initialize spacy 'en' model, keeping only tagger component (for efficiency)\n nlp = spacy.load('en', disable=['parser', 'ner'])\n\n # Do lemmatization keeping only noun, adj, vb, adv\n data_lemmatized = lemmatization(data_words_bigrams, allowed_postags=['NOUN', 'ADJ', 'VERB', 'ADV'])\n\n # Create Dictionary\n id2word = corpora.Dictionary(data_lemmatized)\n\n # Create Corpus\n texts = data_lemmatized\n\n # Term Document Frequency\n corpus = [id2word.doc2bow(text) for text in texts]\n\n # Perform Topic Modeling for number of topics ranging from 5 to 50 in steps of 5\n model_list, coherence_values = compute_coherence_values(dictionary=id2word, corpus=corpus, texts=data_lemmatized, start=5, limit=50, step=5)\n\n return model_list,coherence_values,corpus,id2word", "def post_process_result_of_lda_topic_model(lda_model, gensim_corpus,\n document_collection,\n document_collection_filtered,\n n_closest=25):\n # Prepare containers to store results\n # Container to keep the document topic matrix\n n_closest = - n_closest\n document_topic_matrix = []\n # Container to keep topics and the closest texts to each topic\n topic_closest_doc_with_topics_words = []\n # Container to keep topics\n all_topics = lda_model.show_topics(50)\n\n # Create an LDA corpus from the original gensim corpus\n lda_corpus = lda_model[gensim_corpus]\n\n # Iterate through the lda corpus and create the document topic matrix\n for i, documents in enumerate(lda_corpus):\n # Data returned is not proper numpy matrix\n document_topic_matrix.append(\n np.array([elements[1]for elements in documents]))\n\n # Create the proper numpy matrix\n document_topic_matrix = np.vstack(document_topic_matrix)\n\n # Find the closest texts to a given topic\n # Iterate through the transpose of the document topic matrix\n for i, element in enumerate(document_topic_matrix.T):\n # Identify the id of 15 closest texts of each topic\n closest = element.argsort(axis=0)[n_closest:][::-1]\n # Create a container to keep each text with the id above\n texts = []\n for element in closest:\n texts.append({'matched_text':\n document_collection_filtered[element],\n 'matched_text_words':\n document_collection[element]['match_word'],\n 'testimony_id': document_collection[element]\n ['testimony_id']})\n\n # Append them to container\n topic_closest_doc_with_topics_words.append({'texts': texts,\n 'topic_words':\n all_topics[i]})\n\n return {'topic_documents': topic_closest_doc_with_topics_words,\n 'document_topic_matrix': document_topic_matrix}", "def fit(self, corpus, **kwargs):\n if not len(corpus.dictionary):\n return None\n self.reset_model(corpus)\n self.running = True\n self.update(corpus.ngrams_corpus, **kwargs)\n self.topic_names = ['Topic{} ({})'.format(i, ', 
'.join(words))\n for i, words in enumerate(self._topics_words(3), 1)]\n self.running = False", "def topic_extraction(df, col_name):\n tfidf_vectorizer = TfidfVectorizer(max_df=0.95, min_df=2,\n max_features=200,\n stop_words='english')\n tfidf = tfidf_vectorizer.fit_transform(df[col_name])\n\n tf_vectorizer = CountVectorizer(max_df=0.95, min_df=2,\n max_features=200,\n stop_words='english')\n tf = tf_vectorizer.fit_transform(df[col_name])\n nmf = NMF(n_components=20, random_state=1,\n alpha=.1, l1_ratio=.5)\n tfidf_feature_names = tfidf_vectorizer.get_feature_names()\n nmf_w = nmf.fit_transform(tfidf)\n nmf_h = nmf.components_\n df['labels'] = nmf_w.argmax(axis=1) # this was the right code to get labels/clusters\n\n\n print(\"\\nTopics in NMF model:\")\n print_top_words(nmf, tfidf_feature_names)\n\n\n lda = LatentDirichletAllocation(n_topics=20, max_iter=5,\n learning_method='online',\n learning_offset=50.,\n random_state=0,\n n_jobs=-1)\n lda.fit(tf)\n doc_topic_distrib = lda.transform(tf)\n lda_labels = doc_topic_distrib.argmax(axis=1)\n print lda_labels[:100]\n df['lda_labels'] = lda_labels\n print(\"\\nTopics in LDA model:\")\n tf_feature_names = tf_vectorizer.get_feature_names()\n print_top_words(lda, tf_feature_names)\n return df", "def build_analyzer(self):\n\t\tanalyser = super(TfidfVectorizer, self).build_analyzer()\n\t\treturn lambda doc: (lemmatizer.lemmatize(w) for w in analyser(doc))", "def performLexiconBasedSentimentAnalysis(data):\n opinions = data[0]\n taggedTweets = data[3]\n sentiments_mapping = lexiconBasedSentimentPrediction(\n taggedTweets) # identify the sentiment orientation of each tweet\n for key in sentiments_mapping:\n opinions[key].setSO(sentiments_mapping[key]) # set the sentiment orientation for each tweet\n return opinions", "def forward(self, doc):\n out = torch.tensor([]).float().to(self.device)\n\n for i in range(len(doc)):\n sentences_raw = sentencesplit(cleantxt(doc[i]))\n sentences_ready = torch.tensor([]).float().to(self.device)\n for sentence in sentences_raw:\n sentence = sentence.split()\n if sentence == []:\n continue\n lookup_tensor = torch.tensor([]).long().to(self.device)\n for word in sentence:\n if word in self.embedd_dict:\n lookup_tensor = torch.cat((lookup_tensor,\n torch.LongTensor([self.embedd_dict[word]])), 0)\n else:\n lookup_tensor = torch.cat((lookup_tensor, torch.LongTensor([0])), 0)\n # Word embedding\n xw = self.word_embedding(lookup_tensor).view(1, -1, self.embedding_dim).to(self.device)\n # Word GRU\n self.hidden_gru_words = self.init_hidden_words()\n hw, self.hidden_gru_words = self.gru_word(xw, self.hidden_gru_words)\n # Word MLP\n uw = nn.Tanh()(self.MLP_word(hw)).to(self.device)\n # Word attention\n attention_score = torch.matmul(uw, self.attention_word).squeeze().to(self.device)\n attention_score = F.softmax(attention_score, dim=0).view(uw.size(0), uw.size(1), 1).to(self.device)\n scored_x = (hw * attention_score).to(self.device)\n s = torch.sum(scored_x, dim=1).to(self.device)\n #collecting sentences\n sentences_ready = torch.cat((sentences_ready, s), 0)\n # Sentence GRU\n if len(sentences_ready) == 0:\n out = torch.cat((out,\n torch.randn(1, self.number_cat).to(self.device)), 0).to(self.device)\n continue\n sentences_ready_gru = sentences_ready.view(1, -1, self.embedding_dim).to(self.device)\n self.hidden_gru_sentences = self.init_hidden_sentences()\n hs, self.hidden_gru_sentences = self.gru_sentence(torch.tensor(sentences_ready_gru), self.hidden_gru_sentences)\n # SENTENCE MLP\n us = 
nn.Tanh()(self.MLP_sentence(hs)).to(self.device)\n # Sentence attention\n attention_score = torch.matmul(us, self.attention_sentence).squeeze().to(self.device)\n attention_score = F.softmax(attention_score, dim=0).view(us.size(0), us.size(1), 1).to(self.device)\n scored_x = (hs * attention_score).to(self.device)\n v = torch.sum(scored_x, dim=1).to(self.device)\n # classification\n p = self.MLP_classification(v).to(self.device)\n out = torch.cat((out, p.float()), 0).float().to(self.device)\n return out", "def transform(self, corpus):\n topics = self.model[corpus.ngrams_corpus]\n matrix = matutils.corpus2dense(topics, num_docs=len(corpus),\n num_terms=self.num_topics).T\n\n corpus.extend_attributes(matrix[:, :len(self.topic_names)], self.topic_names)\n return corpus", "def transform(self):\n result = []\n for item in self.doc_topic_matrix:\n result.append(item / np.sum(item))\n result = np.array(result)\n return result", "def get_representative_words(self, phi=None):\n phi = phi if phi is not None else self.phi\n for i in range(self.n_components):\n print(\"Topic\", i)\n c = np.argsort(self.phi[i, :])\n for j in c[-1:-11:-1]:\n print(self.list_ind2word[j], phi[i, j])", "def test_normalization(self):\n\n \"\"\"\n Create the test data.\n \"\"\"\n tokenizer = Tokenizer(stem=False)\n posts = [\n \"Erdogan with threats to attack regime forces 'everywhere' in Syria\",\n \"After Erdogan's statement, Damascus says Erdogan 'disconnected from reality' after threats\",\n ]\n\n corpus = [ Document(post, tokenizer.tokenize(post)) for post in posts ]\n\n extractor = TokenExtractor(tokenizer=tokenizer)\n scorer = TFIDFScorer({ 'erdogan': 1, 'threats': 2 }, 10)\n candidates = extractor.extract(corpus)\n scores = scorer.score(candidates, normalize_scores=True)\n self.assertEqual(1, scores.get('erdogan'))", "def _proc(dat):\n def lemma(text):\n lemmatizer = WordNetLemmatizer()\n w_tokenizer = WhitespaceTokenizer()\n return [lemmatizer.lemmatize(w) for w in w_tokenizer.tokenize(text)]\n\n dat['text_lemmatized'] = dat['clean_comments'].apply(lemma)\n dat['text_lemmatized'] = dat['text_lemmatized'].apply(' '.join)", "def process_corpus(args):\n\n fs = open(args.input,'r')\n out = list()\n for line in fs:\n blob = TextBlob(line.strip())\n result_info = dict()\n result_info\n result_info['correct'] = str(blob.correct())\n if args.parse :\n result_info['parse'] = get_parsed_text(blob)\n if args.tokenize:\n result_info['tokenize'] = get_tokenizer_result(blob)\n if args.sentiment:\n result_info['sentiment'] = analyze_sentiment(blob)\n if args.sentence_sentiment:\n result_info['sentence_sentiment'] = analyze_sentence_sentiment(blob)\n if args.noun_phrase:\n result_info['noun_phrase'] = get_noun_phrases(blob)\n if args.pos:\n result_info['pos'] = get_pos_tags(blob)\n\n out.append(result_info)\n print out\n json.dump(out,open('out.json','w'))\n fs.close()\n print '******************************* Execution completed *********************************'", "def _lda(self):\n self.ldamodel = gensim.models.ldamodel.LdaModel(self.gensim_corpus, \n num_topics=self.n_topics, \n id2word=self.id_map, \n passes=self.n_passes,\n random_state=42)\n \n self.topic_matrix = self.ldamodel.print_topics(num_topics=self.n_topics, \n num_words=self.n_words)", "def _preprocess(self):\n self.data['sentences'] = self.data['text'].apply(self._tokenize_sent)\n self.data['nouns'] = self.data['sentences'].apply(self._get_nouns)\n # self._get_frequent_features()\n # self._compactness_pruning()\n # self._redundancy_pruning()\n # self._get_features()\n 
self._extract_opinions()", "def prepare(self,docs,topics):\n \n self.docs, self.dictionary, self.corpus = self.clean_docs(docs)\n \n # Create keyword map\n self.set_keyword_map()\n \n # Create keyword map with their relatives\n self.set_keyword_map_rel()\n \n self.topic_map = {topic: set(self.get_related_keywords(topic,self.keyword_map_rel,_score=False)) \n for topic in topics}", "def _transform(self, document):\n pass", "def main(self, words_docs, cleaned_sentences, lang, model_dir, number_of_clusters, embedding_model, model_id):\n\t\ttry:\n\t\t\tif embedding_model == \"tfidf\": text_vector = self.create_tfidf_vectors(cleaned_sentences)\n\t\t\telif embedding_model == \"word2vec\": text_vector = self.create_w2v_vectors(words_docs)\n\t\t\tmodel, pred_dict = self.train_model(cleaned_sentences, text_vector, number_of_clusters, lang, model_id, model_dir)\n\t\t\tdf_dominant_topic = self.evaulate_clusters(pred_dict, model_dir)\n\n\t\texcept Exception as e:\n\t\t\tprint(\"\\n Error in main : \",e)\n\t\t\tprint(\"\\n Error details : \", traceback.format_exc())\n\n\t\treturn df_dominant_topic", "def apply(self):", "def main_topic_doc(ldamodel, corpus=corpus): \n \n doc_topics = pd.DataFrame()\n\n for i, row in enumerate(ldamodel[corpus]):\n row = sorted(row, key=lambda x: (x[1]), reverse=True)\n\n for j, (topic_num, prop_topic) in enumerate(row):\n if j == 0:\n wp = ldamodel.show_topic(topic_num)\n topic_keywords = \"' \".join([word for word, prop in wp])\n doc_topics = doc_topics.append(pd.Series([int(topic_num), round(prop_topic,4), topic_keywords]), ignore_index=True)\n else:\n break\n doc_topics.columns = ['Dominant_Topic', 'Percent_Contrib', 'Topic_keywords']\n return doc_topics", "def test_topic_reduction_edge_cases():\n model = BERTopic()\n nr_topics = 5\n model.nr_topics = 100\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n model._update_topic_size(old_documents)\n model._extract_topics(old_documents)\n old_freq = model.get_topic_freq()\n\n new_documents = model._reduce_topics(old_documents)\n new_freq = model.get_topic_freq()\n\n assert not set(old_documents.Topic).difference(set(new_documents.Topic))\n pd.testing.assert_frame_equal(old_documents, new_documents)\n pd.testing.assert_frame_equal(old_freq, new_freq)", "def intent_of_text_LnDOR(ChapterTextS, TargetQuestionsD, TestS, StopWords):\n \n # Chapter Text - stokenize\n StokensCT = stokenize(ChapterTextS, StopWords) \n\n # Test question - stokenize\n StokensTest = stokenize(TestS, StopWords)\n\n # Knowledge Base Dict - stokenize\n KBD_structure = stokenizeKBD(TargetQuestionsD, StopWords)\n\n # List (because list is mutable, set is not) of all stokens in document\n StokensDoc = StokensCT[:] # from chapter text\n StokensDoc.extend(StokensTest[:]) # += Test string\n\n # extend list of stokens in Doc\n for i in TargetQuestionsD:\n StokensDoc.extend(TargetQuestionsD[i][\"mq stokens\"][:]) # += KB target [matched Q]s\n StokensDoc.extend(TargetQuestionsD[i][\"ans stokens\"][:]) # += KB answers\n \n StokensTestV = set(StokensTest)\n StokensDocV = set(StokensDoc)\n StokensAntiTgtV = StokensDocV\n \n # Complement of all targets\n for i in TargetQuestionsD:\n StokensAntiTgtV = StokensAntiTgtV.difference(set(TargetQuestionsD[i][\"mq stokens\"]))\n \n # calculate confusion matrix and DOR etc.\n LnDORD = {}\n # Anti Target\n TP, FP, FN, TN = confusion_matrix(StokensDocV, StokensAntiTgtV, StokensTestV) \n LnDOR = lndor(TP, FP, 
FN, TN) \n someAngle = angleDOR(TP, FP, FN, TN) \n \n LnDORD[\"AntiTgt\"] = {'lndor': LnDOR, 'theta': someAngle}\n\n # total occurences\n total_occ = 0\n for i in TargetQuestionsD:\n total_occ += TargetQuestionsD[i]['count']\n\n for i in TargetQuestionsD:\n StokensTgtV = set(TargetQuestionsD[i][\"mq stokens\"][:])\n\n TP, FP, FN, TN = confusion_matrix(StokensDocV, StokensTgtV, StokensTestV) \n priorOR = TargetQuestionsD[i]['count'] / total_occ\n\n LnDOR = lndor(TP, FP, FN, TN) \n someAngle = angleDOR(TP, FP, FN, TN, priorOR) \n \n LnDORD[i] = {'lndor': LnDOR, 'theta': someAngle}\n # LnDORD = {i: {'lndor': , 'theta': }}, KB indices + \"AntiTgt\"\n\n return LnDORD", "def preprocess(docs):\r\n # stop = set(stopwords.words('english'))\r\n tags = {'NN', 'NNS', 'NNP', 'NNP', 'NNPS', 'JJ', 'JJR', 'JJS'}\r\n for i in range(len(docs)):\r\n docs[i] = [(word.lower(), convert(tag)) for (word, tag) in nltk.pos_tag(nltk.word_tokenize(docs[i])) if tag in tags]\r\n return lemmatize_docs(docs)", "def train(self, corpus):\n for sentence in corpus.corpus:\n for datum in sentence.data: \n self.unigramCounts[datum.word] += 1\n self.totalCount += 1", "def __getitem__(self, doc):\n lda_model = ldamodel.LdaModel(\n num_topics=self.num_topics, alpha=self.alphas, id2word=self.id2word, dtype=np.float64)\n lda_model.topics = np.zeros((self.vocab_len, self.num_topics))\n ldapost = LdaPost(num_topics=self.num_topics, max_doc_len=len(doc), lda=lda_model, doc=doc)\n\n time_lhoods = []\n for time in range(self.num_time_slices):\n lda_model = self.make_lda_seq_slice(lda_model, time) # create lda_seq slice\n lhood = LdaPost.fit_lda_post(ldapost, 0, time, self)\n time_lhoods.append(lhood)\n\n doc_topic = ldapost.gamma / ldapost.gamma.sum()\n # should even the likelihoods be returned?\n return doc_topic", "def apply(self, data):\n print(\"this is morphism '{}'\".format(self.name))\n data = np.array(data)\n transformed_data = self.transf(data)\n return pd.DataFrame.from_dict({\"transf\": transformed_data.flatten()})", "def learn_topic_model_activities(self):\n print \"\\nLearning a topic model with LDA:\"\n\n doc_topic, topic_word = tm.run_topic_model(self.accu_path, self.config['lda'])\n\n tm.dump_lda_output(self.lda_path, doc_topic, topic_word)\n print \"Topic Modelling - done.\\n\"\n return True", "def qualify_words():\n config = get_config()\n\n all_feature_matrices = []\n all_opinion_matrices = []\n\n # first 5 parts are labeled, thus are useful\n all_feature_label_vectors = []\n all_opinion_label_vectors = []\n\n for fname in config.file_names:\n feature_X, feature_dims = load_feature_matrices(fname)\n opinion_X, opinion_dims = load_opinion_matrices(fname)\n feature_y = load_feature_labels(fname)\n opinion_y = load_opinion_labels(fname)\n\n # append to all collector\n all_feature_matrices.append(feature_X)\n all_feature_label_vectors.append(feature_y)\n all_opinion_matrices.append(opinion_X)\n all_opinion_label_vectors.append(opinion_y)\n # use first 5 for training\n # stack first 5\n feature_training_X = []\n feature_training_y = []\n opinion_training_X = []\n opinion_training_y = []\n for i in range(5):\n feature_training_X.append(all_feature_matrices[i])\n feature_training_y.append(all_feature_label_vectors[i])\n opinion_training_X.append(all_opinion_matrices[i])\n opinion_training_y.append(all_opinion_label_vectors[i])\n\n feature_training_X = np.hstack(feature_training_X)\n feature_training_y = np.hstack(feature_training_y)\n opinion_training_X = np.hstack(opinion_training_X)\n opinion_training_y = 
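# --- Added illustration of the ranking step in qualify_words: fit a
# MultinomialNB on labeled feature matrices and use P(label=1) as the score.
# Matrices are stored feature-by-sample, hence the transposes; data is made up.
import numpy as np
from sklearn.naive_bayes import MultinomialNB

rng = np.random.default_rng(0)
train_X = rng.integers(0, 5, size=(20, 100))   # 20 features x 100 samples
train_y = np.tile([0, 1], 50)                  # alternating toy labels
cand_X = rng.integers(0, 5, size=(20, 30))     # 30 unlabeled candidates

model = MultinomialNB()
model.fit(np.transpose(train_X), train_y)
scores = model.predict_proba(np.transpose(cand_X))[:, 1]  # P(label == 1)
print(scores[:5])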
np.hstack(opinion_training_y)\n\n # using combination of rule and ranking score as features\n feature_model = MultinomialNB()\n opinion_model = MultinomialNB()\n\n # training\n feature_model.fit(np.transpose(feature_training_X), feature_training_y.ravel())\n opinion_model.fit(np.transpose(opinion_training_X), opinion_training_y.ravel())\n\n # predicting on candidate aspects and opinions, extracted from amazon reviews\n for i in range(5, len(config.file_names)):\n fname = config.file_names[i]\n feature_pred = feature_model.predict_proba(\n np.transpose(all_feature_matrices[i]))[:,1]\n opinion_pred = opinion_model.predict_proba(\n np.transpose(all_opinion_matrices[i]))[:,1]\n # pickle the prediction results\n with open('../results/' + fname + '_feature_pred_score.pickle', 'wb') as f:\n pickle.dump(feature_pred, f)\n with open('../results/' + fname + '_opinion_pred_score.pickle', 'wb') as f:\n pickle.dump(opinion_pred, f)", "def preprocess(docs, nlp, min_length, min_counts, max_counts):\n\n def clean_and_tokenize(doc):\n text = ' '.join(doc.split()) # remove excessive spaces\n text = nlp(text, tag=True, parse=False, entity=False)\n #return [t.lemma_ for t in text if t.is_alpha and len(t) > 2 and not t.is_stop]\n return [t.lower_ for t in text if t.is_alpha and len(t) > 2 and not t.is_stop] # remove .lemma and add lower case operation\n\n tokenized_docs = [(i, clean_and_tokenize(doc)) for i, doc in tqdm(docs)]\n\n # remove short documents\n n_short_docs = sum(1 for i, doc in tokenized_docs if len(doc) < min_length)\n tokenized_docs = [(i, doc) for i, doc in tokenized_docs if len(doc) >= min_length]\n print('number of removed short documents:', n_short_docs)\n\n # remove some tokens\n counts = _count_unique_tokens(tokenized_docs)\n tokenized_docs = _remove_tokens(tokenized_docs, counts, min_counts, max_counts)\n n_short_docs = sum(1 for i, doc in tokenized_docs if len(doc) < min_length)\n tokenized_docs = [(i, doc) for i, doc in tokenized_docs if len(doc) >= min_length]\n print('number of additionally removed short documents:', n_short_docs)\n\n counts = _count_unique_tokens(tokenized_docs)\n encoder, decoder, word_counts = _create_token_encoder(counts)\n\n print('\\nminimum word count number:', word_counts[-1])\n print('this number can be less than MIN_COUNTS because of document removal')\n\n encoded_docs = _encode(tokenized_docs, encoder) # all the doc is encoded as indexes instead of words using ix2word\n return encoded_docs, decoder, word_counts", "def theta_topic(self):\n f1 = self.ndk+self.alpha\n f2 = np.sum(self.ndk, axis=1, keepdims=True)+self.nTopics*self.alpha\n return f1/f2", "def _transform(self, dataset):\n raise NotImplementedError()", "def aprioriOutput(rules, dataSet, minimumSupport, minimumConfidence):", "def compute_topic_model(year_from=1900, year_to=2020, venues_filter=None, n_topics=100, use_lemmer=True,\n min_df=2, max_df=0.8):\n start = time.time()\n out_fileprefix = get_output_fileprefix(year_from, year_to, venues_filter, n_topics)\n\n corpus, tf_features_names = get_corpus_gensim_for_learning(year_from, year_to, venues_filter, use_lemmer, min_df, max_df)\n execute_lda_gensim(corpus, tf_features_names, n_topics, out_fileprefix)\n\n end = time.time()\n return year_from, year_to, n_topics, (end - start)", "def normalize_dataset(self):", "def test_extract_topics_custom_cv():\n nr_topics = 5\n documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n\n cv = 
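# --- Added illustration of the smoothed document-topic estimate computed by
# theta_topic above: theta[d, k] = (n_dk + alpha) / (sum_k n_dk + K * alpha).
# The counts are made up.
import numpy as np

ndk = np.array([[3, 0, 1],   # topic-assignment counts per document
                [0, 5, 2]])
alpha = 0.1
K = ndk.shape[1]
theta = (ndk + alpha) / (ndk.sum(axis=1, keepdims=True) + K * alpha)
print(theta, theta.sum(axis=1))  # each row sums to 1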
CountVectorizer(ngram_range=(1, 2))\n model = BERTopic(vectorizer=cv)\n model._update_topic_size(documents)\n model._extract_topics(documents)\n freq = model.get_topic_freq()\n\n assert model.c_tf_idf.shape[0] == 5\n assert model.c_tf_idf.shape[1] > 100\n assert isinstance(freq, pd.DataFrame)\n assert nr_topics == len(freq.Topic.unique())\n assert freq.Count.sum() == len(documents)\n assert len(freq.Topic.unique()) == len(freq)", "def trainInternal():\n\n con_counts = Counter()\n deflike = Counter()\n\n for record in records:\n data = [re.split(\"\\t\", d) for d in re.split(\"\\n\", record)]\n tokens, tags = zip(*data)\n\n for i, token in enumerate(tokens):\n denom = len(token)\n for indices, f in fqs(token, 0.5): #perform analysis on one word at a time\n context, numer = internalContext(indices, token)\n if tags[i] != \"O\": #only want the named entities\n deflike[context] += f * numer/denom #need to normalize by word length\n con_counts[context] += f * numer/denom\n\n deflike = Counter({context: deflike[context]/con_counts[context] for context in deflike}) #perform division on each entry\n\n return deflike", "def test_model(docs, labels,model, log_writer:LogWriter,test_name):\n stats = []\n topic_indexes, topics_of_index = connect_topic_id_to_topics(model,prep_docs_for_assesment(docs,labels),log_writer)\n distribution = []\n for index, article in enumerate(docs):\n analysis_res = model.analyse_text(article)\n if len(analysis_res) == 0:\n print(\"nothing found\")\n continue\n res = max(analysis_res, key=lambda item: item[1])\n if res[0] not in topics_of_index:\n topics_of_index[res[0]] = [labels[index]]\n topic_indexes[labels[index]] = res[0]\n print(\"continuing\")\n continue\n distribution.append(res[0])\n stats.append(1 if labels[index] in topics_of_index[res[0]] else 0)\n # self.log_writer.add_log(\"Article with topic {} was assigned {} with {} certainty.\".format(article[0], \"correctly\" if res[0] == self.topic_positions[article[0]] else \"wrong\", res[1]))\n accuracy = sum(stats) / len(stats)\n log_writer.add_log(\"{} got accuracy {}\".format(test_name,accuracy))\n log_writer.add_log(\"Real distribution was {}\".format(dict(Counter(labels))))\n log_writer.add_log(\"Predicted distribution was {}\".format(dict(Counter(distribution))))\n return accuracy", "def transform():", "def test_topic_reduction(reduced_topics):\n model = BERTopic()\n nr_topics = reduced_topics + 2\n model.nr_topics = reduced_topics\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n model._update_topic_size(old_documents)\n model._extract_topics(old_documents.copy())\n old_freq = model.get_topic_freq()\n\n new_documents = model._reduce_topics(old_documents.copy())\n new_freq = model.get_topic_freq()\n\n assert old_freq.Count.sum() == new_freq.Count.sum()\n assert len(old_freq.Topic.unique()) == len(old_freq)\n assert len(new_freq.Topic.unique()) == len(new_freq)\n assert isinstance(model.mapped_topics, dict)\n assert not set(model.get_topic_freq().Topic).difference(set(new_documents.Topic))\n assert model.mapped_topics", "def apply_isomapEmbedding(self, X_train, X_test):\r\n embedding = Isomap(n_components =2, n_jobs=-1)\r\n X_train = embedding.fit_transform(X_train)\r\n X_test = embedding.transform(X_test)\r\n return X_train, X_test", "def process_sample(\n sample: Dict[str, Any],\n relation_vocab: Dict[str, int],\n spacy_model: Any,\n tokenizer: Any,\n) -> Tuple[Optional[Dict[str, Any]], 
Dict[str, int]]:\n\n processed_sample = {}\n\n if sample['num_pos_raters'] < 2:\n relation = NO_RELATION\n else:\n relation = sample['relation']\n if relation not in relation_vocab:\n relation_vocab[relation] = len(relation_vocab)\n label = relation_vocab[relation]\n processed_sample['target'] = [label]\n\n text = sample['annotated_text']\n\n # Remove subj and obj annotations from text and store position\n def find_span(input_text: str, pattern: Any,\n prefix_len: int) -> Tuple[int, int]:\n \"\"\"Find span corresponding to actual subj or obj strings.\"\"\"\n match = pattern.search(input_text)\n span_start = match.start() + prefix_len + 1\n # We want inclusive spans, hence -2 instead of -1\n span_end = match.end() - 2\n return (span_start, span_end)\n\n def replace_and_adjust(\n input_text: str, match: Any, prefix_len: int,\n inverted_mapping: np.ndarray) -> Tuple[str, np.ndarray]:\n \"\"\"Remove subj/obj annotations and adjust token mapping accordingly.\"\"\"\n\n original_span_start = match.start() + prefix_len + 1\n original_span_end = match.end() - 1\n actual_string = input_text[original_span_start:original_span_end]\n new_text = input_text[:match.start()] + actual_string + input_text[match.end():]\n\n # Inverted mapping maps from remaining tokens to positions in original text\n new_inverted_mapping = np.zeros(len(new_text), dtype=np.int32)\n new_inverted_mapping[:match.start()] = inverted_mapping[:match.start()]\n\n new_span_start = match.start()\n new_span_end = match.start() + len(actual_string)\n new_inverted_mapping[new_span_start:new_span_end] = inverted_mapping[original_span_start:original_span_end]\n new_inverted_mapping[new_span_end:] = inverted_mapping[original_span_end + 1:]\n\n return new_text, new_inverted_mapping\n\n inverted_mapping = np.arange(len(text))\n subj_pattern = re.compile('SUBJ{[^}]+}')\n subj_span = find_span(text, subj_pattern, len('SUBJ'))\n obj_pattern = re.compile('OBJ{[^}]+}')\n obj_span = find_span(text, obj_pattern, len('OBJ'))\n\n # Remove subj/obj annotations from text\n while True:\n subj_match = subj_pattern.search(text)\n if subj_match is None:\n break\n text, inverted_mapping = replace_and_adjust(text, subj_match, len('SUBJ'),\n inverted_mapping)\n\n while True:\n obj_match = obj_pattern.search(text)\n if obj_match is None:\n break\n text, inverted_mapping = replace_and_adjust(text, obj_match, len('OBJ'),\n inverted_mapping)\n\n # Adjust spans for removed tokens\n mapping = np.zeros(len(sample['annotated_text']), dtype=np.int32) - 1\n mapping[inverted_mapping] = np.arange(len(inverted_mapping))\n subj_span = (mapping[subj_span[0]], mapping[subj_span[1]])\n assert subj_span[0] != -1 and subj_span[1] != -1\n obj_span = (mapping[obj_span[0]], mapping[obj_span[1]])\n assert obj_span[0] != -1 and obj_span[1] != -1\n\n parsed_text = spacy_model(text)\n\n # We use spacy to parse text and identify noun chunks\n mention_char_spans = []\n mention_char_spans.append(subj_span)\n mention_char_spans.append(obj_span)\n\n def overlaps(first_span: Tuple[int, int], second_span: Tuple[int, int]) -> bool:\n\n # Inclusive containment: span[0] <= point <= span[1]\n def point_inside_span(point: int, span: Tuple[int, int]) -> bool:\n return span[0] <= point <= span[1]\n\n spans_overlap = (\n point_inside_span(first_span[0], second_span) or\n point_inside_span(first_span[1], second_span) or\n point_inside_span(second_span[0], first_span) or\n point_inside_span(second_span[1], first_span))\n\n return spans_overlap\n\n for chunk in parsed_text.noun_chunks:\n span_start_char = parsed_text[chunk.start].idx\n 
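# --- Added quick check of the inclusive span-overlap logic process_sample
# relies on (standalone restatement for illustration).
def point_inside_span(point, span):
    return span[0] <= point <= span[1]

def overlaps(a, b):
    return (point_inside_span(a[0], b) or point_inside_span(a[1], b) or
            point_inside_span(b[0], a) or point_inside_span(b[1], a))

assert overlaps((0, 4), (4, 9))      # shared endpoint counts as overlap
assert overlaps((2, 10), (4, 6))     # containment
assert not overlaps((0, 3), (5, 9))  # disjoint spans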
span_last_token = parsed_text[chunk.end - 1]\n span_end_char = span_last_token.idx + len(span_last_token.text) - 1\n char_span = (span_start_char, span_end_char)\n # Append only if does not overlap with subj or obj spans. In case spacy\n # mention annotation disagrees with tacred annotation, we want to favor\n # tacred.\n\n if not overlaps(char_span, subj_span) and not overlaps(char_span, obj_span):\n mention_char_spans.append(char_span)\n\n # Sort spans by start char\n start_chars = np.array([span[0] for span in mention_char_spans])\n sorted_indices = np.argsort(start_chars)\n sorted_positions = np.zeros_like(start_chars)\n sorted_positions[sorted_indices] = np.arange(len(sorted_positions))\n sorted_spans = [mention_char_spans[idx] for idx in sorted_indices]\n\n # Tokenize and get aligned mention positions\n _, text_ids, text_mask, mention_spans, span_indices = tokenization_utils.tokenize_with_mention_spans(\n tokenizer=tokenizer,\n sentence=text,\n spans=sorted_spans,\n max_length=FLAGS.max_length,\n add_bert_tokens=True,\n allow_truncated_spans=True,\n )\n\n processed_sample['text_ids'] = text_ids\n processed_sample['text_mask'] = text_mask\n\n # Subj and obj are the first elements of mention spans.\n subj_index = sorted_positions[0]\n obj_index = sorted_positions[1]\n\n # Some spans may be dropped by the BERT tokenizer. Here we map indices in the\n # original list of spans to the one returned by the tokenizer.\n reverse_span_indices = {\n original_idx: tokenized_idx\n for tokenized_idx, original_idx in enumerate(span_indices)\n }\n\n # Skip if subj or obj dropped.\n if (subj_index not in reverse_span_indices or\n obj_index not in reverse_span_indices):\n return None, relation_vocab\n\n subj_index = reverse_span_indices[subj_index]\n obj_index = reverse_span_indices[obj_index]\n\n # Make sure we don't discard subj or obj\n assert max(subj_index, obj_index) < FLAGS.max_mentions\n\n processed_sample['subject_mention_indices'] = [subj_index]\n processed_sample['object_mention_indices'] = [obj_index]\n\n mention_spans = np.array(mention_spans)\n mention_start_positions = mention_spans[:, 0]\n mention_end_positions = mention_spans[:, 1]\n\n mention_start_positions = mention_start_positions[:FLAGS.max_mentions]\n mention_end_positions = mention_end_positions[:FLAGS.max_mentions]\n\n mention_pad_shape = (0, FLAGS.max_mentions - len(mention_start_positions))\n\n mention_mask = np.ones(len(mention_start_positions), dtype=np.int64)\n mention_mask = np.pad(mention_mask, mention_pad_shape, mode='constant')\n mention_start_positions = np.pad(\n mention_start_positions, mention_pad_shape, mode='constant')\n mention_end_positions = np.pad(\n mention_end_positions, mention_pad_shape, mode='constant')\n\n processed_sample['mention_start_positions'] = mention_start_positions\n processed_sample['mention_end_positions'] = mention_end_positions\n processed_sample['mention_mask'] = mention_mask\n\n return processed_sample, relation_vocab", "def enhance_metadata(metadata, features='all'):\n\n # available options\n ortographic_features = ['w_length','n_vowels','n_consonants']\n lexical_features = ['uni_freq', 'bi_freq', 'func_word','count']\n position_features = ['position','position_end','is_first_word','is_last_word']\n\n # make list of features\n if features == 'all': features = ortographic_features +lexical_features + position_features \n\n # use ws clean to lower case\n words = [word.lower() for word in metadata['word'].values]\n\n # itereate features and fill metadata\n for feature in features:\n # 
ORTHOGRAPHIC ##############################\n if feature == 'w_length': \n metadata[feature] = w_length(words)\n if feature == 'n_consonants':\n metadata[feature] = n_consonants(words)\n if feature == 'n_vowels':\n metadata[feature] = n_vowels(words)\n\n # LEXICAL ###################################\n if feature == 'uni_freq':\n metadata[feature] = unigram(words)\n if feature == 'bi_freq':\n metadata[feature] = bigram(words)\n if feature == 'func_word':\n metadata[feature] = function_word(words)\n if feature == 'count':\n metadata[feature] = count(words)\n\n # POSITION ###################################\n if feature == 'position':\n metadata[feature] = position(words)\n if feature == 'position_end':\n metadata[feature] = position_end(words)\n if feature == 'is_first_word':\n metadata[feature] = first_word(words)\n if feature == 'is_last_word':\n metadata[feature] = last_word(words)\n\n return metadata", "def tag_pred(model, vectorized_input, feature_names, top_t, nb_tag_pred=10,\n threshold=0.15):\n\n # Topic df -----------------------------------------------------------------\n topic_df = display_topics2(model, feature_names)\n # associate each topic with a list of tags\n topics_kwords_df = topic_df.T #(topic_df.isin(top_t)*topic_df).T\n topic2tags_d = {}\n # tags_per_topic = []\n for topic in topics_kwords_df:\n tag_list = []\n for e in topics_kwords_df.loc[:, topic]:\n if e != \"\":\n tag_list.append(e)\n topic2tags_d[topic] = tag_list\n\n # Create Document Vs Topic df ----------------------------------------------\n import numpy as npy\n model_output = model.transform(vectorized_input)\n topicnames = [\"Topic\" + str(i) for i in range(model.components_.shape[0])]\n docnames = [\"Post\" + str(i) for i in range(vectorized_input.shape[0])]\n df_document_topic = pd.DataFrame(npy.round(model_output, 2),\n columns=topicnames,\n index=docnames)\n\n # Tag predictions ----------------------------------------------------------\n tag_pred_l = []\n for post in df_document_topic.index:\n tags_post = []\n topics_proba = df_document_topic.loc[post, :]\n mask = topics_proba >= threshold\n topic_pred = list(df_document_topic.loc[post, mask].index)\n tot_proba = topics_proba[topic_pred].sum()\n\n # if no major topic in this post, propose just the top tags\n if len(topic_pred) == 0:\n tags_post = tags_post + top_t[0:nb_tag_pred].copy()\n else:\n for topic in topic_pred:\n # pick a number of top elements proportional to the topic's probability\n nb_elements = int(round(topics_proba[topic]*10/tot_proba,0))\n tags_post = tags_post + topic2tags_d[topic][0:nb_elements].copy()\n tag_pred_l.append(tags_post)\n\n return tag_pred_l", "def calculate_result(self, reviewer_data, article_data, people_data,\n coi_data,\n min_rev_art, max_rev_art, min_art_rev, max_art_rev):\n\n cur_progress = 0\n max_progress = 100\n\n article_data = pd.DataFrame(article_data)\n people_data = pd.DataFrame(people_data)\n coauthors_df = pd.DataFrame([[r.PaperID, co_author]\n for _, r in article_data.iterrows()\n for co_author in r.PersonIDList.split(';')],\n columns = ['PaperID', 'PersonID'])\n\n if reviewer_data is None:\n # extract reviewer data from articles\n coauthor_articles = coauthors_df.merge(article_data)[['PersonID', 'Abstract']]\n coauthor_abstracts = coauthor_articles.groupby('PersonID').\\\n agg({'Abstract': lambda x: ''.join(x)})\n reviewer_data = pd.DataFrame(zip(coauthor_abstracts.index,\n coauthor_abstracts.Abstract),\n columns=['PersonID', 'Abstract'])\n else:\n reviewer_data = pd.DataFrame(reviewer_data)\n reviewer_data.PersonID = 
reviewer_data.PersonID.apply(str)\n\n if coi_data is not None:\n coi_data = pd.DataFrame(coi_data)\n\n update_frequency = 1\n cur_progress += int(max_progress/6.)\n self.update_progress(\n cur_progress,\n max_progress,\n update_frequency=update_frequency,\n )\n\n\n # this performs the topic modeling (LSA)\n a = prm.compute_affinity(reviewer_data.Abstract, article_data.Abstract)\n cur_progress += int(max_progress/6.)\n self.update_progress(\n cur_progress,\n max_progress,\n update_frequency=update_frequency,\n )\n\n # if coi_data available, then add as if they were co-authors\n if coi_data is not None:\n coi_data.PersonID = coi_data.PersonID.apply(str)\n coauthors_df = pd.concat((coauthors_df, coi_data))\n\n\n # articles\n article_data2 = article_data.copy()\n article_data2.index = article_data2.PaperID\n article_data2['id'] = range(article_data2.shape[0])\n coi_row = np.array(article_data2.loc[coauthors_df.PaperID].id.tolist())\n\n # persons\n reviewer_data2 = reviewer_data.copy()\n reviewer_data2.index = reviewer_data2.PersonID\n reviewer_data2['id'] = range(reviewer_data2.shape[0])\n coi_column = np.array(reviewer_data2.loc[coauthors_df.PersonID].id.tolist())\n\n for i, j in zip(coi_row, coi_column):\n a[i, j] = -1000.#np.inf\n\n v, A, d = prm.create_lp_matrices(a, min_rev_art, max_rev_art,\n min_art_rev, max_art_rev)\n v = v.flatten()\n d = d.flatten()\n\n cur_progress += int(max_progress/6.)\n self.update_progress(\n cur_progress,\n max_progress,\n update_frequency=update_frequency,\n )\n\n solver = pywraplp.Solver('SolveReviewerAssignment',\n pywraplp.Solver.GLOP_LINEAR_PROGRAMMING)\n infinity = solver.Infinity()\n n, m = A.shape\n x = [[]]*m\n c = [0]*n\n\n for j in range(m):\n x[j] = solver.NumVar(-infinity, infinity, 'x_%u' % j)\n\n # state objective function\n objective = solver.Objective()\n for j in range(m):\n objective.SetCoefficient(x[j], v[j])\n objective.SetMaximization()\n\n # state the constraints\n for i in range(n):\n c[i] = solver.Constraint(-infinity, d[i])\n\n # update status bar\n if np.mod(i, int(n/10)) == 0:\n cur_progress += 3\n self.update_progress(\n cur_progress,\n max_progress,\n update_frequency=update_frequency,\n )\n\n for j in A.col[A.row == i]:\n c[i].SetCoefficient(x[j], A.data[np.logical_and(A.row == i, A.col == j)][0])\n\n result_status = solver.Solve()\n if result_status != 0:\n print \"The final solution might not converged\"\n\n x_sol = np.array([x_tmp.SolutionValue() for x_tmp in x])\n\n #x = prm.linprog_solve(v, ne, d)\n x_sol = (x_sol > 0.5)\n\n cur_progress += int(max_progress/6.)\n self.update_progress(\n 4*int(max_progress/6.),\n max_progress,\n update_frequency=update_frequency,\n )\n\n b = prm.create_assignment(x_sol, a)\n self.update_progress(\n 5*int(max_progress/6.),\n max_progress,\n update_frequency=update_frequency,\n )\n\n assignment_df = article_data[['PaperID', 'Title']]\n assignment_df['Reviewers'] = ''\n assignment_df['ReviewerIDs'] = ''\n for i in range(b.shape[0]):\n paper_reviewers = np.where(b[i, :])[0]\n assignment_df.Reviewers.iloc[i] = ', '.join(list(people_data.FullName.iloc[paper_reviewers].copy()))\n # assignment_df.ReviewerIDs.iloc[i] = ', '.join(list(people_data.PersonID.iloc[paper_reviewers].copy()))\n self.update_progress(\n 6*int(max_progress/6.),\n max_progress,\n update_frequency=update_frequency,\n )\n\n # transform to ascii\n assignment_df.Title.apply(lambda x: unicode(x))\n assignment_df.Reviewers.apply(lambda x: unicode(x))\n\n # , 'result': assignment_df.to_csv(None, na_rep='', index=False)\n # return 
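# --- Added illustration: a minimal GLOP linear program with OR-Tools, the
# same building blocks (NumVar, constraints, objective, Solve) that
# calculate_result assembles at scale. Uses the modern CreateSolver entry
# point; the toy model is made up.
from ortools.linear_solver import pywraplp

solver = pywraplp.Solver.CreateSolver('GLOP')
x = solver.NumVar(0, 10, 'x')
y = solver.NumVar(0, 10, 'y')
solver.Add(x + 2 * y <= 14)
solver.Add(3 * x - y >= 0)
solver.Maximize(3 * x + 4 * y)

if solver.Solve() == pywraplp.Solver.OPTIMAL:
    print(x.solution_value(), y.solution_value())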
{'task': {'status': 'SUCCESS'}}\n return assignment_df.to_csv(None, na_rep='', index=False, encoding='utf-8')", "def query_preprocess(input_pack: DataPack):\n sentence = input_pack.get_single(Sentence)\n\n relations = defaultdict(dict)\n text_mention_mapping = {}\n\n # get all srl relations\n for link in input_pack.get(PredicateLink, sentence):\n verb = link.get_parent()\n verb_text = verb.text\n argument = link.get_child()\n argument_text = argument.text\n\n text_mention_mapping[verb_text] = verb\n text_mention_mapping[argument_text] = argument\n relations[verb_text][link.arg_type] = argument_text\n\n arg0, arg1, predicate = None, None, None\n for verb_text, entity in relations.items():\n arg0, arg1, predicate = collect_mentions(text_mention_mapping, entity, verb_text)\n if not arg0 and not arg1:\n continue\n else:\n break\n\n if not arg0 and not arg1:\n raise Exception('AllenNLP SRL cannot extract the two arguments or the '\n 'predicate in your query, please check our examples '\n 'or rephrase your question')\n\n verb_lemma, is_answer_arg0 = None, None\n\n # check pos tag and lemma for tokens\n for j, token in enumerate(input_pack.get(entry_type=Token,\n range_annotation=sentence,\n components=['forte_wrapper.nltk.nltk_processors.NLTKWordTokenizer']\n )):\n # find WH words\n if token.pos in {\"WP\", \"WP$\", \"WRB\", \"WDT\"}:\n if arg0.begin <= token.begin and arg0.end >= token.end:\n is_answer_arg0 = True\n elif arg1.begin <= token.begin and arg1.end >= token.end:\n is_answer_arg0 = False\n\n # find verb lemma\n if token.text == predicate.text:\n verb_lemma = token.lemma\n\n return sentence, arg0.text if arg0 else '', arg1.text if arg1 else '', \\\n predicate.text, verb_lemma, is_answer_arg0", "def normalize(self):\n for key in self.corpus.keys():\n sum_count = 0\n words = []\n counts = []\n for k, v in self.corpus[key].items():\n sum_count += v\n words.append(k)\n counts.append(v)\n prob = [float(count)/sum_count for count in counts]\n\n self.corpus[key] = [words, prob]", "def preprocess(self, documents):\n\n # Store the total number of documents\n num_docs = np.float(len(documents))\n\n # A dict storing the frequency of each word across all documents\n total_word_freq = {}\n\n # A dict storing the number of documents that word appears in\n doc_word_freq = {}\n\n # Iterate over all documents\n for doc in documents:\n # Split the string into a list of words\n words = extract_words(doc)\n\n # Update the 'total_word_freq' dict using all words in 'words'\n for w in words:\n ''' YOUR CODE HERE '''\n if w not in total_word_freq.keys():\n total_word_freq[w] = 1\n else:\n total_word_freq[w] += 1\n\n ''' END CODE FOR THIS LOOP '''\n\n # Update the 'doc_word_freq' dict. Remember to only add '1' corresponding to\n # each word in a document. In case a word appears twice in a document, then\n # it should be ignored. 
We use the set() data structure to achieve this.\n for w in set(words):\n ''' YOUR CODE HERE '''\n if w not in doc_word_freq:\n doc_word_freq[w] = 1\n else:\n doc_word_freq[w] += 1\n\n ''' END CODE FOR THIS LOOP '''\n\n # A set of words with total frequency less than 'self.min_freq'\n remove_words = set()\n\n ''' YOUR CODE HERE '''\n\n # Check frequency of each word and add to 'remove_words'\n for w in total_word_freq.keys():\n if total_word_freq[w] < self.min_freq:\n remove_words.add(w)\n\n # Delete the words in 'remove_words' from 'total_word_freq' and\n # 'doc_word_freq'.\n for w in remove_words:\n del total_word_freq[w]\n del doc_word_freq[w]\n\n # Create a numpy array to store frequencies from which\n # we can create the 'self.idf' preprocessed numpy array.\n word_freq_tensor = np.zeros(len(doc_word_freq))\n\n # For each word in 'doc_word_freq' dict, update\n # 'self.word_to_idx' and 'self.idx_to_word' and\n # 'word_freq_tensor'.\n i = 0\n for w in doc_word_freq.keys():\n self.word_to_idx[w] = i \n self.idx_to_word[i] = w\n word_freq_tensor[i] = doc_word_freq[w]\n i+=1\n \n #print(word_freq_tensor.shape)\n #print(word_freq_tensor)\n # Calculate 'self.idf' (see hint.pdf for formula)\n self.idf = -1*np.log(word_freq_tensor/(len(documents)))\n ''' END YOUR CODE HERE '''", "def train(self, documents):\n ###DONE\n\n #entire vocab in document set D\n vocab_sod = set()\n vocab_pop = set()\n \n #Calcuates prior probabilities\n priorSOD = 0 #how many docs are spam\n priorPOP = 0 #how many docs are ham\n \n #Cacluates Tct\n term_freq_sod = {} #{term:occur, term:occur}\n term_freq_pop = {}\n \n #Tct'\n Tct_sod = 0 #Tct' = sum of (every term occurence in class c + 1)\n Tct_pop = 0\n \n for doc in documents: \n if 'sod' in doc.label:\n priorSOD += 1\n for token in doc.tokens:\n Tct_sod += 1\n if token in term_freq_sod.keys():\n term_freq_sod[token] = term_freq_sod[token] + 1\n else:\n term_freq_sod[token] = 1\n vocab_sod.add(token) \n else:\n priorPOP += 1\n for token in doc.tokens:\n Tct_pop += 1\n if token in term_freq_pop.keys():\n term_freq_pop[token] = term_freq_pop[token] + 1\n else:\n term_freq_pop[token] = 1\n vocab_pop.add(token)\n \n \n #endfor\n # | is for set join\n self.vocab = vocab_sod | vocab_pop #gets rid of duplicate words (those in both 'ham' and 'spam') \n \n #Tct Primes\n #tct' = term freq of all terms in class c + 1*(total terms)\n Tct_sod = Tct_sod + len(self.vocab) \n Tct_pop = Tct_pop + len(self.vocab) \n \n \n print(\"PriorSod: \" + str(priorSOD))\n print(\"PriorPop: \" + str(priorPOP))\n print(\"LEN Docum: \" + str(len(documents)))\n \n self.priorSOD = priorSOD / len(documents)\n self.priorPOP = priorPOP / len(documents)\n \n for term in self.vocab:\n if term in term_freq_pop.keys():\n self.cond_prob_pop[term] = (term_freq_pop[term] + 1) / Tct_pop\n else:\n self.cond_prob_pop[term] = 1 / Tct_pop\n \n if term in term_freq_sod.keys():\n self.cond_prob_sod[term] = (term_freq_sod[term] + 1) / Tct_sod\n else:\n self.cond_prob_sod[term] = 1 / Tct_sod\n \n \n pass", "def set_up_data():\r\n \r\n X, Y = pretreatment.import_dataset()\r\n \r\n print('Applying cleansing...')\r\n X = pretreatment.pretreatment(X)\r\n Y = pretreatment.pretreatment(Y)\r\n \r\n indice = [i for i in range(len(X)) if (len(X[i]) > SENTENCE_LENGTH-2 and len(X[i]) < SENTENCE_LENGTH+1 and len(Y[i]) > SENTENCE_LENGTH-2 and len(Y[i]) < SENTENCE_LENGTH+1)]#(len(X[i]) > SENTENCE_LENGTH and len(X[i]) < 2 * SENTENCE_LENGTH and len(Y[i]) > SENTENCE_LENGTH and len(Y[i]) < 2 * SENTENCE_LENGTH)]\r\n X = [X[i] for i in 
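# --- Added illustration of the add-one (Laplace) smoothing used by train()
# above: P(term | class) = (count(term, class) + 1) / (tokens_in_class + |V|).
# Toy counts.
vocab = {"cheap", "meds", "hello", "meeting"}
spam_counts = {"cheap": 3, "meds": 2}
denom = sum(spam_counts.values()) + len(vocab)
cond_prob_spam = {t: (spam_counts.get(t, 0) + 1) / denom for t in vocab}
print(cond_prob_spam)  # every vocab term gets non-zero probability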
indice]\r\n Y = [Y[i] for i in indice]\r\n \r\n X = pretreatment.standardize_sentence_length(X)\r\n Y = pretreatment.standardize_sentence_length(Y)\r\n \r\n print('Computing the corpus sizes...')\r\n compute_T(X, 'english')\r\n compute_T(Y, 'french')\r\n compute_S(X, 'english')\r\n compute_S(Y, 'french')\r\n compute_N(X, 'french')\r\n compute_N(Y, 'english')\r\n \r\n print('English corpus: %d tokens' % T_ENGLISH)\r\n print('French corpus: %d tokens' % T_FRENCH)\r\n print('English sentence length: %d' % S_ENGLISH)\r\n print('French sentence length: %d' % S_FRENCH)\r\n print('Number of sentences (both english and french): %d / %d' % (N_ENGLISH, N_FRENCH))\r\n \r\n print('Converting in one hot vectors')\r\n global CORPUS_ENGLISH, CORPUS_FRENCH\r\n params_ENGLISH = (N_ENGLISH, S_ENGLISH, T_ENGLISH)\r\n params_FRENCH = (N_FRENCH, S_FRENCH, T_FRENCH)\r\n X, CORPUS_ENGLISH= treatment.convert_to_one_hot(X, params_ENGLISH)\r\n Y, CORPUS_FRENCH= treatment.convert_to_one_hot(Y, params_FRENCH)\r\n \r\n return (X, Y)", "def _initialize(self):\n for doc_index, doc in enumerate(self.document):\n temp_word_topic_matrix = []\n for word in doc:\n if word in self.word2id.keys():\n start_topic_index = np.random.randint(0, self.K)\n temp_word_topic_matrix.append(start_topic_index)\n self.doc_topic_matrix[doc_index, start_topic_index] += 1\n self.topic_word_matrix[start_topic_index, self.word2id[word]] += 1\n self.topic_matrix[start_topic_index] += 1\n self.current_word_topic_matrix.append(temp_word_topic_matrix)", "def f(DATA_LINK, DATA_COLUMN_NAME, STOPWORD_CHOICE, STOPWORD_LINK, NGRAM_CHOICE,NGRAM_NUM, TestData,topic_number_user,fetchArray):\r\n data = pd.read_csv(DATA_LINK)\r\n df=data[DATA_COLUMN_NAME]\r\n ######################################################################\r\n if (STOPWORD_CHOICE):\r\n stopwords=prepare_stopwords(STOPWORD_LINK)\r\n else:\r\n stopwords=prepare_stopwords(link='stopwords.csv')\r\n ######################################################################\r\n\r\n df=clean(df)\r\n\r\n processed_docs = []\r\n\r\n for doc in df:\r\n processed_docs.append(preprocess(doc,stopwords))\r\n ############################################################################\r\n if NGRAM_CHOICE:\r\n ngram=[]\r\n ngram_mod=[]\r\n for i in range(NGRAM_NUM):\r\n if(i==0):\r\n ngram.append(gensim.models.Phrases(processed_docs[0:10000], min_count=5, threshold=100)) # higher threshold fewer phrases\r\n else:\r\n ngram.append(gensim.models.Phrases(ngram[i-1][processed_docs[0:10000]], min_count=5, threshold=100)) # higher threshold fewer phrases\r\n ngram_mod.append(gensim.models.phrases.Phraser(ngram[i]))\r\n \r\n ###########################################################################\r\n\r\n ################################################################################\r\n if NGRAM_CHOICE:\r\n # Form Ngrams\r\n data_words_ngrams = make_ngrams(processed_docs,NGRAM_NUM,ngram_mod)\r\n\r\n # Do lemmatization keeping only noun, adj, vb, adv\r\n data_lemmatized=[]\r\n for i in range(len(data_words_ngrams)):\r\n data_lemmatized.append(lemmatization(data_words_ngrams[i]))\r\n else:\r\n data_lemmatized=processed_docs\r\n ################################################################################\r\n \r\n\r\n dictionary = gensim.corpora.Dictionary(data_lemmatized)\r\n\r\n dictionary.filter_extremes(no_below=15, no_above=0.1, keep_n= 100000)\r\n\r\n bow_corpus = [dictionary.doc2bow(doc) for doc in data_lemmatized]\r\n\r\n lda_model = gensim.models.LdaMulticore(bow_corpus, \r\n num_topics = 
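# --- Added illustration of the random topic initialisation in _initialize:
# each word draws a random topic and the three count tables are incremented.
# Toy corpus; K and V are made up.
import numpy as np

rng = np.random.default_rng(0)
docs = [[0, 2, 2, 1], [3, 3, 0]]  # documents as word-id lists
K, V = 2, 4
doc_topic = np.zeros((len(docs), K), dtype=int)
topic_word = np.zeros((K, V), dtype=int)
topic_total = np.zeros(K, dtype=int)
assignments = []
for d, doc in enumerate(docs):
    z_doc = []
    for w in doc:
        z = rng.integers(K)
        doc_topic[d, z] += 1
        topic_word[z, w] += 1
        topic_total[z] += 1
        z_doc.append(z)
    assignments.append(z_doc)
print(doc_topic, topic_total)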
topic_number_user, \r\n id2word = dictionary, \r\n passes = 10, workers = 2)\r\n\r\n for idx, topic in lda_model.print_topics(-1):\r\n print(\"Topic: {} \\nWords: {}\".format(idx, topic ))\r\n print(\"\\n\")\r\n lda_model.save('turk_lda.gensim')\r\n\r\n unseen_document = TestData\r\n\r\n rx = re.compile('\\W+')\r\n unseen_document = rx.sub(' ', unseen_document).strip()\r\n\r\n\r\n # Data preprocessing step for the unseen document\r\n bow_vector = dictionary.doc2bow(preprocess(unseen_document,stopwords))\r\n\r\n topics = []\r\n for index, score in sorted(lda_model[bow_vector], key=lambda tup: -1*tup[1]):\r\n print(\"Score: {}\\t Topic: {}\".format(score, lda_model.print_topic(index, 5)))\r\n # rslt = result(str(score), str(lda.print_topic(index,5)))\r\n rslt = result(str(score), str(re.findall('\"([^\"]*)\"', str(lda_model.print_topic(index,5)))))\r\n topics.append(rslt)\r\n\r\n fetchArray.put(topics)", "def predict(self, documents):\n raise NotImplementedError()", "def __init__(self,corpus,topic_number=10,iteration_number=1000,burn_in=500,update_cycle=100,alpha=None,beta=None):\n # documents, key: id of document, value: list of word in an specific document.\n self.documents = corpus.documents\n # number of iteration when using Gibbs Sampling.\n self.iteration_number = iteration_number\n self.topic_number = topic_number\n self.burn_in = burn_in\n self.update_cycle = update_cycle\n # number of terms.\n self.term_number = len(corpus.word_id)\n # number of documents.\n self.document_number = len(self.documents)\n # if alpha and beta is None, then assign values to them.\n if alpha == None:\n self.alpha = [2.0] * self.topic_number\n else:\n self.alpha = alpha\n if beta == None:\n self.beta = [0.5] * self.term_number\n else:\n self.beta = beta\n # The sum of elements in beta.\n self.sum_beta = sum(self.beta)\n # The sum of elements in alpha.\n self.sum_alpha = sum(self.alpha)\n # counter, [m][k] refers to the number of times that topic k has been observed with a word in document m.\n self.document_topic_count_matrix = {}\n # counter, [k][t] refers to the number of times that term t has been observed with topic k.\n self.topic_term_count_matrix = {}\n # distribution matrix, [m][k] refers the probability that assigning topic k to document m.\n self.document_distribution_over_topic = {}\n # distribution matrix, [k][t] refers the probability that assigning topic k to term t.\n self.topic_distribution_over_term = {}\n # counter, [m] refers the number of times that all topics have been observed with a word in document m.\n # also, [m] equals to the number of words in document m.\n self.sum_document_by_topic_count = {}\n # counter, [k] refers the number of times that all terms have been observed with topic k.\n self.sum_topic_by_term_count = {}\n # topic assigned to an word in a document. 
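# --- Added illustration: the collapsed-Gibbs full conditional that a sampler
# built on the counters in __init__ above would evaluate for one word,
# p(z = k) proportional to (n_dk + alpha_k) * (n_kt + beta_t) / (n_k + sum(beta)).
import numpy as np

def topic_posterior(d, t, doc_topic, topic_word, topic_total, alpha, beta):
    p = (doc_topic[d] + alpha) * (topic_word[:, t] + beta[t]) / (topic_total + beta.sum())
    return p / p.sum()

doc_topic = np.array([[2.0, 1.0]])
topic_word = np.array([[3.0, 1.0], [0.0, 2.0]])
topic_total = topic_word.sum(axis=1)
print(topic_posterior(0, 1, doc_topic, topic_word, topic_total, 2.0, np.array([0.5, 0.5])))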
[m][n] refers to the topic that assigned to the n th word in document\n # m.\n self.word_topic_assignment = {}\n # the number of times that the distribution has been updated.\n self.update_number = 0.0", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def measureAll(authors_texts,sectorialized_agents):\n authors_texts=P.text.aux.textFromAuthors(authors_texts,self.topm_dict[\"sectorialized_agents\"])\n authors_measures={}\n # análise de cada mensagem e de cada autor\n for author in authors_texts:\n authors_measures[author]={}\n texts=authors_texts[author]\n authors_measures[author][\"raw_strings\"]=P.text.raw.analyseAll(texts)\n authors_measures[author][\"pos\"]= P.text.pos.analyseAll(authors_analysis[author][\"raw_analysis\"])\n authors_measures[author][ \"wordnet\" ]=P.text.wordnet.analyseAll(authors_analysis[author][\"pos_analysis\"])\n authors_measures[author][\"tfIdf\"]=P.text.tfIdf.analyseAll(texts) # tfIdf de cada texto e do autor, numeric: mean e std das distancias\n # análise de cada setor e da estrutura toda\n# sectors_texts=P.text.aux.textFromSectors(authors_text,sectorialized_agents)\n sectors_measures={}\n for sector in sectorialized_agents:\n sectors_measures[sector][\"raw_strings\"]=P.text.raw.sectorsAnalyseAll(authors_analysis,sectorialized_agents[sector])\n sectors_measures[sector][\"pos\"]= P.text.pos.sectorsAnalyseAll(authors_analysis,sectorialized_agents[sector])\n sectors_measures[sector][\"wordnet\"]= P.text.wordnet.sectorsAnalyseAll(authors_analysis,sectorialized_agents[sector])\n # tfIdf de cada texto e de cada autor, numeric: mean e std das distancias por texto e por autor, e media e etd dos autores\n sectors_measures[sector][\"tfIdf\"]= P.text.tfIdf.sectorsAnalyseAll(authors_analysis,sectorialized_agents[sector])\n\n# texts=[sectors_texts[i] for i in (\"peripherals\",\"intermediaries\",\"hubs\")]\n# sectors_analysis[\"raw_strings\"]=P.text.raw.analyseAll(texts)\n# sectors_analysis[\"pos\"]= P.text.pos.analyseAll(sectors_analysis[\"raw_analysis\"])\n# sectors_analysis[ \"wordnet\" ]=P.text.wordnet.analyseAll(sectors_analysis[\"pos_analysis\"])\n# sectors_analysis[\"tfIdf\"]=P.text.tfIdf.tfIdf(texts)\n\n overall_measures[\"raw_strings\"]=P.text.raw.systemAnalysis(sectors_analysis) # medias de toda a rede por mensagem, por autor e por setor\n overall_measures[\"pos\"]=P.text.raw.systemAnalysis(sectors_analysis) # medias de toda a rede por mensagem, por autor e por setor\n overall_measures[\"wordnet\"]=P.text.raw.systemAnalysis(sectors_analysis) # medias de toda a rede por mensagem, por autor e por setor\n # tfIdf measurespor texto, autor e setor, numeric: media e desvio das distancias por cada grupo, media e desvio dos setores e dos autores\n overall_measures[\"tfIdf\"]=P.text.tfIdf.systemAnalysis(sectors_analysis) # medias de toda a rede por mensagem, por autor e por setor\n\n del authors_texts,sectorialized_agents,author, sector\n return locals()", "def word_to_perplexity(model, timeseries, indices, words, args):\n accum = 0\n words_len = len(words)-args.window_size\n batches = math.floor(words_len / args.batch_size)\n print(batches)\n for start in range(0, batches):\n idx = start*args.batch_size\n inp = np.array([timeseries[i:i+args.window_size] for i in range(idx, idx+args.batch_size)])\n label = np.asarray([indices[x] for x in 
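# --- Added note on word_to_perplexity: perplexity is the exponential of the
# mean negative log-likelihood, exp(-(1/N) * sum(log p(w_i))). Minimal
# computation over made-up probabilities:
import numpy as np

log_probs = np.log(np.array([0.10, 0.05, 0.20, 0.08]))  # p(w_i | history)
perplexity = np.exp(-log_probs.mean())
print(perplexity)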
words[idx+args.window_size:idx+args.window_size+args.batch_size]]) \n \n pred = model.predict(inp, batch_size=128)\n lp = np.log(pred)\n for i, ent in enumerate(lp):\n accum += ent[label[i]]\n if start % 5 == 0:\n print(\"{} / {}. Perplexity so far: {}\".format(start, batches, np.exp(-accum / (start*args.batch_size+1))))\n accum = -accum\n print(accum)\n avg = accum / words_len \n print(avg)\n perplex = np.power(avg, 2)\n print(perplex)", "def transform(docs: Any) -> Any:\n return docs", "def vectorize(ex, model, single_answer=False):\r\n word_dict = model.word_dict\r\n tgt_dict = model.tgt_dict\r\n\r\n if model.args.use_elmo:\r\n # Index words\r\n document = torch.from_numpy(np.asarray(elmo_sent_mapper(ex['document']), dtype=np.int))\r\n if(model.args.reader_type!='summarizer'):\r\n question = torch.from_numpy(np.asarray(elmo_sent_mapper(ex['question']), dtype=np.int))\r\n\r\n # When using elmo, char representations are not required\r\n document_chars, question_chars = -1, -1\r\n else:\r\n # Index words\r\n document = torch.LongTensor([word_dict[w] for w in ex['document']])\r\n if(model.args.reader_type!='summarizer'):\r\n question = torch.LongTensor([word_dict[w] for w in ex['question']])\r\n\r\n # Index chars\r\n if model.args.reader_type == 'mlstm' or \\\r\n model.args.reader_type == 'areader':\r\n document_chars, question_chars = -2, -2\r\n else:\r\n document_chars = torch.LongTensor([word_dict.word_to_char_ids(w).tolist() for w in ex['document']])\r\n if(model.args.reader_type!='summarizer'):\r\n question_chars = torch.LongTensor([word_dict.word_to_char_ids(w).tolist() for w in ex['question']])\r\n\r\n # ...or with target(s) (might still be empty if answers is empty)\r\n # handle if questions are unanswerable [SQuAD v2.0]\r\n if(model.args.reader_type!='summarizer'):\r\n \r\n if single_answer:\r\n assert (len(ex['answers']) > 0)\r\n answer = torch.LongTensor([tgt_dict[w] for w in ex['answers'][0]])\r\n else:\r\n # FIXME: multiple answers are possible, fix batchify also.\r\n # answer = [torch.LongTensor([word_dict[w] for w in ans]) for ans in ex['answers']]\r\n answer = torch.LongTensor([tgt_dict[w] for w in ex['answers'][0]])\r\n else:\r\n summary = torch.LongTensor([tgt_dict[w] for w in ex['summary']])\r\n\r\n\r\n if(model.args.reader_type!='summarizer'):\r\n return document, document_chars, question, question_chars, answer, \\\r\n ex['id'], ex['document'], ex['question'], ex['answers'], \\\r\n ex['src_vocab']\r\n else:\r\n return document, document_chars, summary, ex['id'], ex['document'], ex['summary'], ex['src_vocab']", "def __init__(self, corpus):\n self.train(corpus)", "def test_extract_topics():\n nr_topics = 5\n documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n model = BERTopic()\n model._update_topic_size(documents)\n model._extract_topics(documents)\n freq = model.get_topic_freq()\n\n assert model.c_tf_idf.shape[0] == 5\n assert model.c_tf_idf.shape[1] > 100\n assert isinstance(freq, pd.DataFrame)\n assert nr_topics == len(freq.Topic.unique())\n assert freq.Count.sum() == len(documents)\n assert len(freq.Topic.unique()) == len(freq)", "def doc_analyzer(self, doc):\n\n if self.lowercase is None or self.lowercase == 'none':\n lowercase = set()\n elif self.lowercase in {'both', 'all'}:\n lowercase = {'char', 'word'}\n else: lowercase = {self.lowercase}\n\n # character n-grams\n if 'char' in lowercase:\n docfeat = self.get_ngrams(list(doc.lower()),\n 
self.c_ngmin, self.c_ngmax)\n else:\n docfeat = self.get_ngrams(list(doc),\n self.c_ngmin, self.c_ngmax)\n # word n-grams\n if 'word' in lowercase:\n docfeat.extend(self.get_ngrams(self.tokenizer(doc.lower()),\n self.w_ngmin, self.w_ngmax,\n suffix=\"⅏\", separator=\" \"))\n else:\n docfeat.extend(self.get_ngrams(self.tokenizer(doc),\n self.w_ngmin, self.w_ngmax,\n suffix=\"⅏\", separator=\" \"))\n return docfeat", "def analyse(self):\n logging.info(\"transferring text to CorpusCook...\")\n\n paragraphs = self.text.split('\\n\\n')\n print(\"mean length of splitted lines\", (mean([len(p) for p in paragraphs])))\n\n # If TIKA resolved '\\n'\n if (mean([len(p) for p in paragraphs])) > 80:\n paragraphs = [re.sub(r\"- *\\n\", '', p) for p in paragraphs]\n paragraphs = [p.replace('\\n', \" \") for p in paragraphs]\n paragraphs = [p.replace(';', \" \") for p in paragraphs]\n joiner = \" \"\n else:\n # If TIKA did not\n joiner = \" \"\n\n processed_text = joiner.join([p\n for p in paragraphs\n if\n p and\n ks_2samp(self.normal_data, list(p)).pvalue > self.threshold\n ]\n )\n\n return processed_text.strip()[:self.length_limit]", "def __rank_topics(self, found_topics, explanation):\n max_value = 0\n scores = []\n for _,topic in found_topics.items():\n topic[\"score\"] = topic[\"times\"] * len(topic['grams'].keys())\n scores.append(topic[\"score\"])\n if topic[\"score\"] > max_value:\n max_value = topic[\"score\"]\n\n for _,topic in found_topics.items():\n if \"syntactic\" in topic:\n topic[\"score\"] = max_value\n\n\n\n\n # Selection of unique topics\n unique_topics = {}\n for t_p,topic in found_topics.items():\n prim_label = self.cso.get_primary_label_wu(t_p)\n if prim_label in unique_topics:\n if unique_topics[prim_label] < topic[\"score\"]:\n unique_topics[prim_label] = topic[\"score\"]\n else:\n unique_topics[prim_label] = topic[\"score\"]\n\n # ranking topics by their score. 
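# --- Added illustration of the knee detection __rank_topics depends on
# (the kneed library); the descending score curve is made up.
from kneed import KneeLocator

scores = [9, 8, 7, 3, 2, 1, 1, 1]  # topic scores, high to low
x = list(range(1, len(scores) + 1))
kn = KneeLocator(x, scores, curve='convex', direction='decreasing')
print(kn.knee)  # how many topics to keep; can be None if no knee is found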
High-scored topics go on top\n sort_t = sorted(unique_topics.items(), key=lambda v: v[1], reverse=True)\n #sort_t = sorted(found_topics.items(), key=lambda k: k[1]['score'], reverse=True)\n\n\n # perform\n vals = []\n for t_p in sort_t:\n vals.append(t_p[1]) #in 0, there is the topic, in 1 there is the info\n\n\n #### suppressing some warnings that can be raised by the kneed library\n warnings.filterwarnings(\"ignore\")\n try:\n x_vals = range(1,len(vals)+1)\n t_kn = KneeLocator(x_vals, vals, direction='decreasing')\n if t_kn.knee is None:\n #print(\"I performed a different identification of knee\")\n t_kn = KneeLocator(x_vals, vals, curve='convex', direction='decreasing')\n except ValueError:\n pass\n\n ##################### Pruning\n\n try:\n knee = int(t_kn.knee)\n except TypeError:\n knee = 0\n except UnboundLocalError:\n knee = 0\n\n if knee > 5:\n try:\n knee += 0\n except TypeError:\n print(\"ERROR: \",t_kn.knee,\" \",knee, \" \", len(sort_t))\n\n else:\n try:\n if sort_t[0][1] == sort_t[4][1]:\n top = sort_t[0][1]\n test_topics = [item[1] for item in sort_t if item[1]==top]\n knee = len(test_topics)\n\n else:\n knee = 5\n except IndexError:\n knee = len(sort_t)\n\n final_topics = []\n final_topics = [self.cso.get_topic_wu(sort_t[i][0]) for i in range(0,knee)]\n self.reset_explanation()\n self.explanation = {self.cso.topics_wu[sort_t[i][0]]: explanation[sort_t[i][0]] for i in range(0,knee)}\n\n return final_topics", "def normQnA(dfin):\n\n update_log(er='norming Q',upload=False)\n dfin['normQ']= dfin.query_text_raw.apply(normalize_text, args=(True,True))\n dfin = dfin[dfin.normQ != 'dud_drop_me']\n\n #update_log(er='norming A',upload=True)\n #dfin['normA']= dfin.kcc_answer_raw.apply(normalize_text , args=(False,False))\n #dfin = dfin[dfin.normA != 'dud_drop_me']\n return(dfin)", "def train_lda_topic_model_with_mallet(texts, path_mallet,\n terms_to_remove=[], num_topics=50,\n no_below=10, no_above=0.9,\n scoring=False, start=2, step=3):\n preprocessed_corpus = []\n print ('training of gensim corpus began')\n for i, text in enumerate(texts):\n if i == 0:\n # todo filter here\n text = text.split()\n\n # Additional filtering steps #\n \"\"\"\n filtered_text = [word for word in text if (word[0] in\n string.ascii_uppercase + string.ascii_lowercase)]\n\n filtered_text = [word for word in filtered_text if\n (word not in set(stopwords.words('english')))]\n preprocessed_corpus.append(filtered_text)\n \"\"\"\n\n dct = initialize_gensim_dictionary([text])\n else:\n text = text.split()\n # Additional filtering steps\n\n \"\"\"\n filtered_text = [word for word in text if (word[0] in\n string.ascii_uppercase + string.ascii_lowercase)]\n\n filtered_text = [word for word in filtered_text if\n (word not in set(stopwords.words('english')))]\n preprocessed_corpus.append(filtered_text)\n \"\"\"\n add_documents_to_gensim_dictionary(dct, [text])\n # todo:this is to be integrated to the building process\n\n if len(terms_to_remove) > 0:\n for term in terms_to_remove:\n dct.filter_tokens(bad_ids=[dct.token2id[term]])\n\n dct.filter_extremes(no_below=no_below, no_above=no_above)\n\n gensim_corpus = [dct.doc2bow(bag_of_word.split()) for bag_of_word in texts]\n print ('gensim corpus done')\n if scoring:\n\n coherence_values = []\n\n for n in range(start, num_topics, step):\n\n lda = LdaMallet(constants.PATH_TO_MALLET,\n gensim_corpus, id2word=dct,\n num_topics=n)\n coherencemodel = CoherenceModel(model=lda,\n texts=preprocessed_corpus,\n dictionary=dct, coherence='c_v')\n 
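# --- Added illustration of the c_v coherence scoring used in the Mallet
# training loop, on a made-up toy corpus (real corpora give stabler scores).
from gensim.corpora import Dictionary
from gensim.models import CoherenceModel, LdaModel

texts = [["cat", "dog", "pet"], ["stock", "market", "trade"], ["dog", "pet", "food"]]
dct = Dictionary(texts)
corpus = [dct.doc2bow(t) for t in texts]
lda = LdaModel(corpus=corpus, id2word=dct, num_topics=2, random_state=0)
cm = CoherenceModel(model=lda, texts=texts, dictionary=dct, coherence='c_v')
print(cm.get_coherence())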
coherence_values.append(coherencemodel.get_coherence())\n\n return coherence_values\n\n else:\n lda = LdaMallet(constants.PATH_TO_MALLET, gensim_corpus,\n id2word=dct, num_topics=num_topics)\n # Visualize LDA results, poor results obtained.\n # from gensim.models.wrappers import ldamallet\n # lda_model = ldamallet.malletmodel2ldamodel(lda)\n # vis = pyLDAvis.gensim.prepare(lda_model, gensim_corpus, dct)\n # pyLDAvis.save_html(vis , 'test.html')\n return {'model': lda, 'corpus': gensim_corpus}", "def preprocess_corpus(corpus): \n \n # print 'preprocessing words'\n # remove space\n # text = re.findall(r'\\w+', corpus) # for [a-zA-Z0-9_]\n text = re.findall(r'[a-zA-Z]+', corpus) # for [a-zA-Z] keep words only no numbers and '_' \n words = [w.lower() for w in text]\n # print words \n \n # stemmer based on existing ones in the current list\n lemma = nltk.WordNetLemmatizer()\t\t\t#extract the original word pattern\n lemmed_words = [lemma.lemmatize(w) for w in words]\n \n # tag lemmed_words\n tagged_words = nltk.pos_tag(lemmed_words)\n # print tagged_words \n \n processed_words = []\n tag_list = ['CC', 'DT', 'EX', 'IN', 'MD', \n 'PDT', 'POS', 'PRP', 'PRP$', 'TO', \n 'WDT', 'WP', 'WRB']\n for word, tag in tagged_words:\n if tag in tag_list:\n pass \n else: \n processed_words.append(word)\n \n return processed_words", "def lda_predict_df(df, col_name, lda_model, dictionary, lda_topic_name_dict=None, only_best_prediction=True):\n# for index, score in sorted(LDAmodel_lang[bow_vector], key=lambda tup: -1*tup[1]):\n# print(\"Score: {}\\t Topic: {}\".format(score, lda_model.print_topic(index, 5)))\n cols = list(df.columns)\n df['bow'] = list(map(lambda doc: dictionary.doc2bow(doc), df[col_name]))\n if only_best_prediction:\n if lda_topic_name_dict is None:\n df['prediction'] = df['bow'].apply(PredictTopicFromBOW,lda_model=lda_model)\n df[['pred_probability','pred_index']] = pd.DataFrame(df.prediction.values.tolist(), index= df.index)\n else:\n df['prediction'] = df['bow'].apply(PredictTopicFromBOW,lda_model=lda_model, lda_topic_name_dict=lda_topic_name_dict)\n df[['pred_probability','pred_index','pred_label']] = pd.DataFrame(df.prediction.values.tolist(), index= df.index)\n df.drop(['prediction'], axis=1)\n else:\n num_topics = len(lda_model.get_topics())\n for i in range(num_topics):\n df[i] = df['bow'].apply(PredictTopicFromBOW,lda_model=lda_model, prediction_index=i)\n# df[['pred_probability','pred_index']] = pd.DataFrame(df.prediction.values.tolist(), index= df.index)\n\n # Unpivot values, and split predictions\n values = [i for i in range(num_topics)]\n df = pd.melt(df, id_vars=cols, value_vars=values)\n df = df[df['value'].isnull()==False].sort_values(by=[col_name])\n df.rename(columns={'variable':'index','value':'prediction'}, inplace=True)\n df[['pred_probability','pred_index']] = pd.DataFrame(df.prediction.values.tolist(), index=df.index)\n \n return df", "def data_cleaner(doc):\n \n sw = stopwords.words('english')\n regex_token = RegexpTokenizer(r\"([a-zA-Z]+(?:’[a-z]+)?)\")\n doc = regex_token.tokenize(doc)\n doc = [word.lower() for word in doc]\n doc = [word for word in doc if word not in sw]\n #print(doc)\n doc = pos_tag(doc)\n doc = [(word[0], get_wordnet_pos(word[1])) for word in doc]\n #print(doc)\n lemmatizer = WordNetLemmatizer() \n doc = [lemmatizer.lemmatize(word[0], word[1]) for word in doc]\n #print(' '.join(doc))\n return ' '.join(doc)", "def __init__(words, pred_index):", "def lemmitization_on_corpus(text_corpus):\n\n text_corpus[text_column_name] = text_corpus[\n 
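# --- Added illustration of POS-aware WordNet lemmatisation, the pattern
# behind data_cleaner above; this get_wordnet_pos is an assumed stand-in for
# the helper those snippets import. Assumes the usual NLTK data packages
# (punkt, tagger, wordnet) are downloaded.
from nltk import pos_tag, word_tokenize
from nltk.corpus import wordnet
from nltk.stem import WordNetLemmatizer

def get_wordnet_pos(treebank_tag):
    return {"J": wordnet.ADJ, "V": wordnet.VERB, "R": wordnet.ADV}.get(
        treebank_tag[0], wordnet.NOUN)

lemmatizer = WordNetLemmatizer()
tagged = pos_tag(word_tokenize("the striped bats were hanging"))
print([lemmatizer.lemmatize(w, get_wordnet_pos(t)) for w, t in tagged])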
text_column_name].apply(lemitization)\n return text_corpus", "def main():\n\n global text_column_name\n # set to false\n # TODO: add RC corpus to non RC corpus\n text_corpus, already_processed_article_list = import_data_and_subset(\n path_metadata, path_rc_metadata, sepr, path_preprocessed_article_list, article_id, text_column_name,\n rc_content_column_name, reset_already_preprocessed_article_list=False)\n\n print 'raw corpus loaded'\n\n # text_corpus = remove_tags_from_corpus(\n # text_corpus, text_column_name)\n\n not_preprocessed_article_list = find_new_articles(\n text_corpus, index_column_name, already_processed_article_list)\n print 'new_article list is ' + str(len(not_preprocessed_article_list))\n\n text_corpus = subset_corpus(\n text_corpus, index_column_name, not_preprocessed_article_list)\n print 'subsetting of unprocessed articles from corpus done'\n print 'articles left after subsetting are ' + str(text_corpus[index_column_name].nunique())\n\n text_corpus = parallelize(text_corpus, remove_tags_from_corpus)\n print 'html tags removed from corpus'\n # text_corpus = convert_corpus_to_list(text_corpus)\n\n # text_corpus = contraction_expansion_on_corpus(\n # text_corpus, text_column_name)\n text_corpus = parallelize(text_corpus, contraction_expansion_on_corpus)\n print 'contractions expansion completed'\n\n # text_corpus = tokenization_on_corpus(text_corpus, text_column_name)\n text_corpus = parallelize(text_corpus, tokenization_on_corpus)\n print 'tokenization completed'\n # text_corpus = punctuation_removal_on_corpus(text_corpus, text_column_name)\n text_corpus = parallelize(text_corpus, punctuation_removal_on_corpus)\n print 'punctuation removal completed'\n # text_corpus = lowercase_on_corpus(text_corpus, text_column_name)\n text_corpus = parallelize(text_corpus, lowercase_on_corpus)\n print 'lower casing done'\n # text_corpus = lemmitization_on_corpus(text_corpus, text_column_name)\n text_corpus = parallelize(text_corpus, lemmitization_on_corpus)\n print 'lemmatization done'\n # text_corpus = stopword_removal_on_corpus(text_corpus, text_column_name)\n text_corpus = parallelize(text_corpus, stopword_removal_on_corpus)\n print 'stopwords removed'\n text_corpus = remove_article_with_little_or_no_content(text_corpus)\n print 'articles with less or no corpus removed'\n store_to_disk(text_corpus, path_preprocessed_files, append_mode=True)\n print 'per-processing done and saved at mentioned path'", "def df_lda_preprocessing(df, col_name, remove_stopwords=True, add_features=False):\n df['text'] = df[col_name] # Create a copy of the input col_name: text\n \n # df_clean_sting(df, 'text') # Clean the text from col_name # TEST FJERN RENGØRING\n\n # Test other way of handling strings\n df_simple_clean_string(df, 'text')\n\n if add_features:\n df_make_features_from_string(df, 'text') # Add features\n\n # This is a hack soly for the scope of this project to concat ThreadSubject\n # When the message is initiated by the Member\n if col_name == 'SignalMessageBodyClean':\n df_aka = df.copy(deep=True)\n # df_aka['text_1'] = df_aka['ThreadSubject']\n # df_clean_sting(df_aka, 'ThreadTopic')\n df_simple_clean_string(df_aka, 'ThreadTopic')\n\n df['text'] = (df['text'] +' '+df_aka['ThreadTopic']).where(df['IsFirstMessageInthread']==1,df['text'])\n\n df_get_tokens(df, 'text') # Returns col: tokenized_text\n\n # df_stem_words(df, 'tokenized_text') # Returns col: stemmed_text\n\n df_bigrams(df, 'tokenized_text') # Returns bigrams\n df_trigrams(df, 'tokenized_text') # Returns trigrams\n\n df['ngrams'] = 
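# --- Added sketch of the parallelize(df, fn) helper the pipeline above leans
# on; this split/Pool/concat shape is an assumption, not the original code.
# fn must be a top-level function so the worker processes can pickle it.
import numpy as np
import pandas as pd
from multiprocessing import Pool, cpu_count

def parallelize(df, fn, n_jobs=None):
    n_jobs = n_jobs or cpu_count()
    chunks = np.array_split(df, n_jobs)
    with Pool(n_jobs) as pool:
        return pd.concat(pool.map(fn, chunks))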
df['tokenized_text'] + df['bigrams'] + df['trigrams']\n\n if remove_stopwords:\n df_remove_stopwords(df, 'ngrams') # returns stopwords_removed", "def _extract_opinions(self):\n self.data['adjectives'] = self.data['sentences'].apply(lambda x: self._extract_pos(x, ADJ))\n self.data['adverbs'] = self.data['sentences'].apply(lambda x: self._extract_pos(x, ADV))\n self.data['verbs'] = self.data['sentences'].apply(lambda x: self._extract_pos(x, VERB))", "def apply(self) -> None:", "def apply(self) -> None:", "def semanticSearch(model, topics, index, idx_to_docid, k=1000):\r\n run = {}\r\n topic_nums = [topic for topic in topics]\r\n queries = [topics[topic]['title'] for topic in topics]\r\n encoded_queries = model.encode(queries)\r\n labels, distances = index.knn_query(encoded_queries, k=k)\r\n for i,topic in enumerate(topic_nums):\r\n run[topic] = []\r\n # considers highest passage match only for a document\r\n added_docids = []\r\n sim = [1-x for x in distances[i]]\r\n scored_run = zip(labels[i], sim)\r\n for i, (passageidx, dist) in enumerate(scored_run):\r\n docid = idx_to_docid[passageidx]\r\n \r\n if docid not in added_docids:\r\n run[topic].append((docid, dist))\r\n added_docids.append(docid)\r\n run[topic] = run[topic][:1000]\r\n return run", "def infertopics(self):\n\n # Iterate over nodes missing topic attribute (only occurs for new nodes)\n for uid in self.scan(attribute=\"updated\"):\n # Remove updated attribute\n self.removeattribute(uid, \"updated\")\n\n # Get list of neighboring nodes\n ids = self.edges(uid)\n\n # Infer topic\n topic = Counter(self.attribute(x, \"topic\") for x in ids).most_common(1)[0][0] if ids else None\n if topic:\n # Add id to topic list and set topic attribute\n self.topics[topic].append(uid)\n self.addattribute(uid, \"topic\", topic)\n\n # Set topic rank\n self.addattribute(uid, \"topicrank\", len(self.topics[topic]) - 1)\n\n # Infer category\n category = Counter(self.attribute(x, \"category\") for x in ids).most_common(1)[0][0]\n self.addattribute(uid, \"category\", category)", "def get_feature_set_SB(tweet):\n #pos-tag frequencies\n# print \"Tagged words in tweet: \", tweet.tagged_words\n pos_tag_freq = {}\n additional_freq = {}\n for phrase in tweet.tagged_words:\n for word in phrase:\n try:\n tag = word['pos']\n pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1\n# if tag=='PRtinf':\n# pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1\n# elif tag=='ADJS':\n# pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1\n# elif tag=='ADJ':\n# pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1\n# elif tag=='NP':\n# pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1\n# elif tag=='DET':\n# pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1\n# elif tag=='P':\n# pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1\n if tag in ADJECTIVES:\n additional_freq['adjectives'] = additional_freq.get(tag, 0) + 1\n elif tag in ADVERBS: \n additional_freq['adverbs'] = additional_freq.get(tag, 0) + 1\n elif tag in PRONOUNS:\n additional_freq['pronoun'] = 1\n except KeyError:\n continue\n# print \"Tag frequencies: \", pos_tag_freq\n for key in pos_tag_freq.keys():\n pos_tag_freq[key] = pos_tag_freq[key]*1.0\n #number of adjectives in sentence, number of adverbs in sentence(except ikke), pronoun in sentence(binary) \n #Number of exclamation marks, number of emoticons,\n emoticons = tweet.nrof_happyemoticons+tweet.nrof_sademoticons\n if emoticons>0:\n additional_freq['emoticons'] = emoticons*1.0\n if tweet.nrof_exclamations>0:\n additional_freq['exclamations'] = tweet.nrof_exclamations*1.0\n \n# 
print \"Additional frequencies: \", additional_freq\n# raw_input(\"Continue?\")\n \n #Concatenate the dicts\n features= dict(pos_tag_freq.items() + additional_freq.items())\n# print \"All features: \", features\n# raw_input(\"Continue?\")\n return features", "def optimize(self):\n scores = []\n n_topics = np.arange(self.topic_range[0], self.topic_range[1]+1)\n print('Running optimization with topic range from {0} to {1}'.format(\n self.topic_range[0],self.topic_range[1]))\n self._preproc()\n\n # Perform LDA for topic_range\n for n in n_topics:\n self.n_topics = n\n self._lda()\n if self.verbose:\n print('LDA completed for {0} topics.'.format(n))\n self._evaluate()\n scores.append(self.score)\n \n # Visualize results\n print('Optimization completed, plotting results...')\n fig1, ax1 = plt.subplots()\n ax1.plot(n_topics, np.asarray(scores))\n ax1.set_title('Coherence for topic range from {0} to {1}'.format(\n self.topic_range[0], self.topic_range[1]), fontsize= 16)\n ax1.set_xlabel('n_topics')\n ax1.set_ylabel('score')\n ax1.set_xticks(n_topics)\n plt.show()", "def normal_subjects_out(sub):\n baseDIR = 'the root directory'\n ## read data (contains training data (all subjects except one) and test data (the rest subject) and corresponding labels) \n matContent = sio.loadmat(baseDIR +'/Data/' +'normal_subject_out'+str(sub+1) +'.mat')\n normal_x_train = matContent['trainingFeatures']\n y_train = np.squeeze(matContent['trainingLabels'])\n x_test = matContent['testFeatures']\n y_test = matContent['testLabels'] \n ## permute data\n rand_idx = np.random.permutation(normal_x_train.shape[0])\n normal_x_train = normal_x_train[rand_idx,:,:]\n \n ## normalize data based on StandardScaler function in Sklearn\n normal_x_train, x_test, scaler = standardization(normal_x_train, x_test)\n\n ## pick 80% of normal training data to train DAE architecture and the rest 20% is used for computing NPM and fitting GEVD\n train_perc = np.int(normal_x_train.shape[0] - np.round(0.2 * normal_x_train.shape[0]))\n normal_x_train1 = normal_x_train[0:train_perc,:]\n normal_x_train2 = normal_x_train[train_perc:,:]\n \n ## training labels are always 0s \n training_labels = y_train[0:train_perc,] \n training_labels1 = y_train[train_perc:,]\n \n ## make the inverse of standardization on both training parts \n normal_x_gevd = scaler.inverse_transform(np.reshape(normal_x_train2,[normal_x_train2.shape[0],\n normal_x_train2.shape[1]*normal_x_train2.shape[2]*normal_x_train2.shape[3]]))\n normal_x_train_inverseScale = scaler.inverse_transform(np.reshape(normal_x_train1,[normal_x_train1.shape[0],\n normal_x_train1.shape[1]*normal_x_train1.shape[2]*normal_x_train1.shape[3]]))\n \n #save data both for before and after standardization\n sio.savemat(baseDIR + 'Dropout/'+ 'normal_train_for_NPM_sub_out' + str(sub+1)+ '.mat', {'normal_train_x':normal_x_train_inverseScale,'normal_train_npm':normal_x_gevd})\n sio.savemat(baseDIR + 'Dropout/' + 'data_after_normalization' + str(sub+1) + '.mat',{'normal_x_train1':normal_x_train1,'normal_x_train2':normal_x_train2,'scaler':scaler,'x_test':x_test})\n return normal_x_train1, normal_x_train2, x_test, training_labels, training_labels1, y_test, scaler", "def lemmatize_fun(self):\n tokens = str(self.doc).split()\n cleaned_tokens = None\n if self.lemmatize_method == 'wordnet':\n cleaned_tokens = [self.lemmatizer.lemmatize(token) for token in tokens]\n else:\n cleaned_tokens = [self.lemmatizer.stem(token) for token in tokens]\n \n self.doc = ' '.join(cleaned_tokens)", "def transform(self):", "def 
show_topic_model_textually(seed_gensim_topic_model, seed_gensim_corpus,\n texts_to_analyze, num_topics):\n print(\"alpha =\", seed_gensim_topic_model.alpha)\n print(seed_gensim_topic_model)\n print(seed_gensim_topic_model.print_topics(num_topics))\n print()" ]
[ "0.55906874", "0.55814046", "0.5555375", "0.55358505", "0.5529263", "0.5498415", "0.54653543", "0.5456407", "0.53456396", "0.5337148", "0.5316981", "0.5269533", "0.52625924", "0.52604556", "0.5249314", "0.5231671", "0.5213493", "0.5205622", "0.519339", "0.51790893", "0.51714104", "0.51679546", "0.5158415", "0.51343614", "0.5122691", "0.51168317", "0.51085293", "0.51030415", "0.5089652", "0.50866264", "0.5081767", "0.507698", "0.5064707", "0.506027", "0.5046986", "0.5046457", "0.5046365", "0.5042969", "0.5034741", "0.5031876", "0.5028686", "0.50281334", "0.50199825", "0.50156516", "0.4987568", "0.49856314", "0.4982271", "0.49805942", "0.49755886", "0.4964731", "0.49628848", "0.49601611", "0.49586007", "0.4951104", "0.4948306", "0.49453774", "0.4944094", "0.49411118", "0.4939873", "0.49290544", "0.4920369", "0.49175444", "0.4914694", "0.49114716", "0.49096456", "0.4908829", "0.4908829", "0.4908829", "0.4908829", "0.4908829", "0.4908829", "0.4908829", "0.49080798", "0.4901994", "0.48992607", "0.48914835", "0.48886725", "0.4880966", "0.48778033", "0.4873373", "0.4868686", "0.4866979", "0.48663005", "0.48647198", "0.48627797", "0.4861863", "0.48547572", "0.48543715", "0.48524314", "0.4850626", "0.48487985", "0.48434848", "0.48434848", "0.4841029", "0.48405972", "0.48308587", "0.4821274", "0.4818176", "0.4818136", "0.48176765", "0.48164088" ]
0.0
-1
Generate ics from days.
def generate_ics(days: Sequence[dict], filename: Text) -> None:
    cal = Calendar()
    # Calendar-level metadata; the Chinese strings name the calendar
    # "Chinese statutory holidays" and describe it as holiday data scraped
    # daily from State Council announcements.
    cal.add("X-WR-CALNAME", "中国法定节假日")
    cal.add("X-WR-CALDESC", "中国法定节假日数据,自动每日抓取国务院公告。")
    cal.add("VERSION", "2.0")
    cal.add("METHOD", "PUBLISH")
    cal.add("CLASS", "PUBLIC")
    cal.add_component(_create_timezone())

    # Collapse consecutive days into ranges and emit one event per range.
    days = sorted(days, key=lambda x: x["date"])
    for fr, to in _iter_date_ranges(days):
        start = _cast_date(fr["date"])
        # iCalendar DTEND is exclusive, so the range is extended by one day.
        end = _cast_date(to["date"]) + datetime.timedelta(days=1)

        name = fr["name"] + "假期"  # "<holiday> vacation"
        if not fr["isOffDay"]:
            name = "上班(补" + name + ")"  # makeup workday for the holiday

        cal.add_component(_create_event(name, start, end))

    with open(filename, "wb") as f:
        f.write(cal.to_ical())
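A minimal usage sketch for the document above, assuming the private helpers it calls (_create_timezone, _iter_date_ranges, _cast_date, _create_event) are defined alongside it, that Calendar comes from the icalendar package (consistent with the add/add_component/to_ical calls), and that each day dict carries the "date", "name", and "isOffDay" keys the function reads; the concrete dates and holiday names below are hypothetical:

import datetime
from typing import Sequence, Text

from icalendar import Calendar  # assumed dependency

# Hypothetical input: one statutory day off and one makeup workday.
days = [
    {"date": "2024-01-01", "name": "元旦", "isOffDay": True},
    {"date": "2024-02-04", "name": "春节", "isOffDay": False},
]
generate_ics(days, "holidays.ics")  # writes the iCalendar bytes to disk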
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_ics(events, config):\n\n # Create the Calendar\n calendar = icalendar.Calendar()\n calendar.add('prodid', config.calendar_prodid)\n calendar.add('version', '2.0')\n calendar.add('method', 'publish')\n\n for event_data in events:\n # Create the event\n event = icalendar.Event()\n\n # Populate the event\n event.add('summary', event_data['title'])\n event.add('description', get_description(event_data))\n event.add('uid', event_data['id'])\n event.add('location', event_data['place'])\n event.add('dtstart', get_datetime(event_data, 'when_start'))\n if event_data['when_end']:\n event.add('dtend', get_datetime(event_data, 'when_end'))\n event.add('dtstamp', datetime.datetime.now())\n\n # Add the event to the calendar\n calendar.add_component(event)\n\n return calendar.to_ical()", "def generate_dates(self):\r\n\r\n numdays = 20\r\n\r\n base = datetime.datetime.today()\r\n\r\n date_list = [base + datetime.timedelta(days=x) for x in range(numdays)]\r\n\r\n date_str = [x.strftime(\"%d-%m-%Y\") for x in date_list]\r\n\r\n return date_str", "def generate_days(ndays, year=2022, month=1, day=1):\n # NOTE: This method is more efficient than the \"string parsing\"\n # method used by generate_months() and generate_years(),\n # but this only matters if generating a lot of entries\n # and it only works if the datetime64-represented\n # distance between units to generate is constant\n day_indexes = np.arange(ndays, dtype=np.int64) # 0, 1, ..., [ndays-1]\n startdate = np.datetime64(f'{year:02d}-{month:02d}-{day:02d}T00:00:00.000000', 'us')\n usec_per_day = int(1e6) * 86400 # 86.4k sec per day = 60*60*24s\n usec_offsets = day_indexes * usec_per_day\n return usec_offsets + startdate", "def as_ical(self):\n if self.date_is_approximate:\n return None\n\n ymd = (self.date.year, self.date.month, self.date.day)\n event_date = date(*ymd)\n event = icalendar.Event()\n event.add(\"dtstart\", event_date)\n event.add(\"dtend\", event_date + timedelta(days=1))\n event.add(\"uid\", self.ical_uid)\n event.add(\"summary\", \"Django Girls %s\" % self.city)\n event.add(\"location\", f\"{self.country}, {self.city}\")\n return event", "def gen_dates(train_per_start, hours_inc=48, n_inc=10, hours1_inc=6, n1_inc=4):\n dates = []\n train_per = train_per_start[:]\n for i_inc in range(n_inc):\n # '2014-06-24 01:00:00','2014-06-30 00:00:00'\n train_per1 = train_per[:]\n for i1_inc in range(n1_inc):\n dates.append(train_per1[:])\n train_per1[0] = add_hour(train_per1[0], hours1_inc)\n train_per1[1] = add_hour(train_per1[1], hours1_inc)\n train_per[0] = add_hour(train_per[0], hours_inc)\n train_per[1] = add_hour(train_per[1], hours_inc)\n return dates", "def generate_days(self, nr_of_days):\n log = []\n names = self.load_names()\n\n for i in range(0, nr_of_days):\n log.extend(self.generate_day_cycle(names))\n\n return log", "def generate_days_list():\n\n seven_days = []\n\n for i in xrange(1, 8):\n seven_days.append([i, 0])\n\n return seven_days", "def generate_day_cycle(self, names):\n day_log = []\n time_delta = timedelta(days=1)\n\n for i in range(0, len(self.HOUR_SHEET)):\n if self.is_time_for_bruteforce(i):\n day_log.extend(self.generate_brute_force_log(i, names))\n\n day_log.extend(self.generate_hour_cycle(i, names))\n\n day_log.sort()\n\n self.date += time_delta\n\n return day_log", "def get_ic(self):\n return self.dt, self.dr, self.dtheta, self.dphi", "def draw_day(day):\n\n day_drawing = \"\"\n for i in day:\n for j in i:\n day_drawing += j\n return day_drawing", "def generate_date_set(self, year, month, days):\n 
dates = set()\n for day in days:\n dates.add(date(year, month, day))\n return dates", "def output(self):\n return self.cal.to_ical()", "def output(self):\n return self.cal.to_ical()", "def ical_string(self) -> str:\n tz = ''\n if self.timezone != '':\n tz = ';TZID=' + self.timezone\n result = ['BEGIN:VCALENDAR',\n 'BEGIN:VEVENT',\n 'CREATED:' + self._created,\n 'DESCRIPTION:' + self.description,\n 'DTEND' + tz + ':' + self._end,\n 'DTSTAMP' + tz + ':' + self._dtstamp,\n 'DTSTART' + tz + ':' + self._start,\n 'LAST-MODIFIED:' + self.lastmodified,\n 'LOCATION:' + self.location,\n 'SEQUENCE:' + str(self._sequence),\n 'SUMMARY:' + self.summary,\n 'UID:' + self._uid,\n 'END:VEVENT',\n 'END:VCALENDAR']\n return '\\n'.join(result)", "def visualize_days():\n\t\n\t#grab our parsed data that we parsed earlier\n\tdata_file = parse(MY_FILE, \",\")\n\t\n\t#make a new variable, counter, from iterating through each line of\n\t#data in the parsed data, and count how many incidents happen on each\n\t#day of the week\n\tcounter = Counter(item[\"DayOfWeek\"] for item in data_file)\n\t\n\t#separate the x-axis data (days of the week) from the counter variable\n\t#from the y-axis (number of incidents each day)\n\tdata_list = [\n\t\t\t\tcounter[\"Monday\"],\n\t\t\t\tcounter[\"Tuesday\"],\n\t\t\t\tcounter[\"Wednesday\"],\n\t\t\t\tcounter[\"Thursday\"],\n\t\t\t\tcounter[\"Friday\"],\n\t\t\t\tcounter[\"Saturday\"],\n\t\t\t\tcounter[\"Sunday\"]\n\t\t\t\t]\n\tday_tuple = tuple([\"Mon\", \"Tues\", \"Wed\", \"Thurs\", \"Fri\", \"Sat\", \"Sun\"])\n\t\n\t#with y-axis data, assign it to a matplotlib plot instance\n\tplt.plot(data_list)\n\t\n\t#create amount of ticks need for x and y axes and assign labels\n\tplt.xticks(range(len(day_tuple)), day_tuple)\n\t\n\t#save the plot\n\tplt.savefig(\"Days.png\")\n\t\n\t#close plot file\n\tplt.clf()", "def hydrate_date(days):\n return Date.from_ordinal(unix_epoch_date_ordinal + days)", "def ical(self) -> Calendar:\n cal = Calendar()\n event = IEvent()\n event.add(\"summary\", \"Video Chat\")\n event.add(\"dtstart\", self.start)\n cal.add_component(event)\n return cal.to_ical()", "def visualize_days():\n\n # grab our parsed data that we parsed earlier\n data_file = parse(MY_FILE, \",\")\n\n counter = Counter(item['DayOfWeek'] for item in data_file)\n\n data_list = [\n counter['Monday'],\n counter['Tuesday'],\n counter['Wednesday'],\n counter['Thursday'],\n counter['Friday'],\n counter['Saturday'],\n counter['Sunday']\n ]\n\n day_tuple = tuple(['Mon','Tues','Wed','Thurs','Fri','Sat','Sun'])\n\n plt.plot(data_list)\n\n # num of ticks needed for our x-axis & assign labels\n plt.xticks(range(len(day_tuple)),day_tuple)\n \n plt.savefig(\"Days.png\")\n plt.clf()", "def day2datetime(scenario,days):\r\n\t\tdate_int = np.empty((len(days)));date_int[:]=np.nan\r\n\t\tstart_year =2000\r\n\t\tstart =(start_year*365)\r\n\t\tith=0\t\r\n\t\tfor iday in days:\r\n\t\t\tmonth_days =np.array([31,28,31,30,31,30,31,31,30,31,30,31])\r\n\t\t\tcalendar_days = np.array([0,31,59,90,120,151,181,212,243,273,304,334,365])\r\n\t\t\ttotal_days = int(iday) + start; \r\n\t\t\tyear = total_days//365; \r\n\t\t\tremainder = total_days%365\r\n\t\t\tif remainder ==0: year=year-1;month=12;day=31\r\n\t\t\telse: \r\n\t\t\t\tmonth = 1+[layer for layer in range(len(calendar_days)) if calendar_days[layer]< remainder and calendar_days[layer+1]>=remainder][0]\r\n\t\t\t\tday = int(remainder - calendar_days[month-1])\r\n\t\t\t\tif day == 0: day = month_days[month-1]\r\n\t\t\tdate_int[ith] = 
year*10000+month*100+day\r\n\t\t\tith=ith+1\r\n\t\treturn date_int.astype(int)", "def getComparableDateValues(self, days):\n dates = []\n for i in days:\n date = i[:10]\n dates.append(date)\n return dates", "def get_days(view, restriction):\n today = datetime.today()\n first_day = today - timedelta(days=today.weekday()) # Current week's Monday\n first_day += timedelta(days=NDAYS*view) # Go back/forward view weeks\n\n days, months = [], set()\n for i in range(0, NDAYS):\n i_day = first_day + timedelta(days=i)\n months.add(i_day.strftime(\"%B\"))\n\n elements = {\n 'deliveries':build_data([\n ud for ud in UDnotDone() if restriction(ud, i_day)\n ]) if i_day >= today else [],\n 'day_name': i_day.strftime(\"%A\") + \" \" + str(i_day.day),\n 'color': day_color(today, i_day),\n 'class': \"hideCalendarMobile\" if i_day < today else \"\",\n 'id': str(i_day.day)+\"-\"+str(i_day.month)}\n days.append(elements)\n return days, \"/\".join(list(months))", "def daily_table(self):\n htable = [0 for i in range(7)]\n for i in range(self.dataset.shape[0]):\n stime = time.localtime(np.int32(self.dataset[i][2]))\n evtime = stime[6]\n htable[evtime] += 1\n return htable", "def _tr_cal_date(self, date):\n items = []\n for code in self._datefmt:\n if code == 'Y':\n items += [date.year_str]\n elif code == 'M':\n if '/' in self._datefmt or '.' in self._datefmt:\n month = date.month_num\n if month is not None:\n month = \"{:02d}\".format(month)\n else:\n month = self._monthName(date.month)\n if month is not None:\n items += [month]\n elif code == 'D':\n day = date.day\n if day is not None and ',' in self._datefmt:\n items += [str(\"{:02d},\".format(day))]\n elif day is not None:\n items += [\"{:02d}\".format(day)]\n if '/' in self._datefmt:\n sep = '/'\n elif '.' in self._datefmt:\n sep = '.'\n elif '-' in self._datefmt:\n sep = '-'\n else:\n sep = ' '\n return sep.join(items)", "def visualize_days(parsed_data, output_dir):\n\n # Returning no. 
of incidents by each day of the week\n counter = fetch_incident_by_days(parsed_data)\n\n # data_list = fetch_incident_by_days.keys()\n\n # Separating the counter to have an ordered list\n y_values = [\n counter[\"Monday\"],\n counter[\"Tuesday\"],\n counter[\"Wednesday\"],\n counter[\"Thursday\"],\n counter[\"Friday\"],\n counter[\"Saturday\"],\n counter[\"Sunday\"]\n ]\n\n # Creating labels for x-axis\n x_labels = tuple([\"Mon\", \"Tues\", \"Wed\", \"Thurs\", \"Fri\", \"Sat\", \"Sun\"])\n\n # Assigning the data to plot\n plt.plot(y_values)\n\n # Assigning xticks on x-axis\n plt.xticks(range(len(x_labels)), x_labels)\n\n # Save the graph and show the figure\n file_name = os.path.join(output_dir, DAYS_PLOT_FILENAME)\n plt.savefig(file_name)\n plt.show()", "def ANdatefixer(years):\n\n\n\t# ========== create the new dates ==========\n\t# year = ds.Year\n\n\t# +++++ set up the list of dates +++++\n\tdates = OrderedDict()\n\ttm = [dt.datetime(int(year) , 6, 30) for year in years]\n\tdates[\"time\"] = pd.to_datetime(tm)\n\n\tdates[\"calendar\"] = 'standard'\n\tdates[\"units\"] = 'days since 1900-01-01 00:00'\n\t\n\tdates[\"CFTime\"] = date2num(\n\t\ttm, calendar=dates[\"calendar\"], units=dates[\"units\"])\n\n\treturn dates", "def create_calendar(actions, location_and_time_axes):\n calendar = ical.Calendar()\n calendar['PRODID'] = '{} {}'.format(ical.__name__, ical.__version__)\n calendar['VERSION'] = 2.0\n calendar['X-WR-CALNAME'] = 'PyCon.DE 2018'\n\n for location, date in actions.keys():\n meta_info = location_and_time_axes[(date.year, date.month, date.day)]\n time_axis = meta_info['time_axis']\n for action in actions[(location, date)]:\n if action['title'] == 'End':\n continue\n\n event = create_event(action, date, location, time_axis)\n\n calendar.add_component(event)\n\n return calendar", "def dayPeriod(lon,lat,n1,n2,day):\n x, y, z = _getXYZ(lon,lat)\n N = range(n1,n2+1)\n D = []\n for n_ in N:\n n = n_ * day\n i = range(0,n)\n j = range(n,n+n)\n d_ = gcDist(x[i],y[i],z[i],\n x[j],y[j],z[j])\n D = D + [d_,]\n print n, d_\n\n return (N,D)", "def generate_sic_monthly():\n # Set variable\n nc_file = '/home/disk/sipn/rclancy/ecmwf/pf/predictability/SIC/SIC.nc'\n var_name = 'SIC'\n var_date_name = 'SIC_dates'\n\n # Get variables\n var, var_dates =read_nc_var(nc_file, var_name, var_date_name)\n # Get associated dates\n var_year, var_month, var_day = get_y_mo_d(var_dates)\n\n SIC_present=np.zeros([12, 20, 240])\n for m in range(1,13):\n mo_ind = np.array(var_month==m)\n SIC_present[m-1,:,:] = np.nanmean(var[:,:,mo_ind],2)\n\n SIC_present[SIC_present>0]=1\n return SIC_present;", "def day2datetime(scenario,days):\r\n\t\tdate_int = np.empty((len(days)));date_int[:]=np.nan\r\n\t\tif scenario =='T1970C': start_year =1970\r\n\t\telse: start_year =2010\r\n\t\tstart =(start_year*365)\r\n\t\tith=0\t\r\n\t\tfor iday in days:\r\n\t\t\tmonth_days =np.array([31,28,31,30,31,30,31,31,30,31,30,31])\r\n\t\t\tcalendar_days = np.array([0,31,59,90,120,151,181,212,243,273,304,334,365])\r\n\t\t\ttotal_days = int(iday) + start; \r\n\t\t\tyear = total_days//365; \r\n\t\t\tremainder = total_days%365\r\n\t\t\tif remainder ==0: year=year-1;month=12;day=31\r\n\t\t\telse: \r\n\t\t\t\tmonth = 1+[layer for layer in range(len(calendar_days)) if calendar_days[layer]< remainder and calendar_days[layer+1]>=remainder][0]\r\n\t\t\t\tday = int(remainder - calendar_days[month-1])\r\n\t\t\t\tif day == 0: day = month_days[month-1]\r\n\t\t\tdate_int[ith] = year*10000+month*100+day\r\n\t\t\tith=ith+1\r\n\t\treturn date_int.astype(int)", 
"def day_range():\n DAYS = range(1, 32)\n days = map(lambda x: (x, x), DAYS)\n return days", "def create_string(iteration, dic):\n return str(iteration) + '|' + dic['Year'] + '/' + \\\n get_month_number(dic['Month']) + '/' + \\\n dic['Day'] + '|' + dic['Hour'] + ':' + \\\n dic['Min'] + ':' + dic['Seg'] + '|' + \\\n dic['Energy']", "def get_metric_for_all_dates(inargs, exp_id):\n date_list = []\n # Loop over dates and collect data\n for date in h.make_timelist(inargs.date_start, inargs.date_stop,\n inargs.hours_inc):\n date_list.append(get_metric_for_one_day(inargs, exp_id, date))\n return np.array(date_list)", "def gen_birthdays(n):\n list = []\n for date in range(n):\n list.append(random.randint(1, 365))\n return list", "def build_date():\n def r(x):\n return tuple(ord(i) for i in x)\n return r", "def _labels_for_historical_data(days: int, lang: str) -> List[str]:\n labels = []\n for days_diff in range(days, -1, -1):\n date_begin = date.today() - timedelta(days=days_diff)\n timestamp = pretty_print_timestamp(date_begin, lang)\n labels.append(timestamp)\n return labels", "def create_ical_file(list_of_events, strasse, hausnummer):\n cal = Calendar()\n\n # Some properties are required to be compliant:\n cal.add('prodid', '-//My calendar product//mxm.dk//')\n cal.add('version', '2.0')\n\n global total_number_of_events\n total_number_of_events = len(list_of_events)\n\n all_ical_events = create_cal_events(list_of_events, strasse, hausnummer)\n for evnt in all_ical_events:\n # Add the event to the calendar:\n cal.add_component(evnt)\n\n cal_as_ical = cal.to_ical()\n create_folder_if_not_exists()\n # Write iCal file to disk\n return save_ical_file(cal_as_ical, get_filename(strasse, hausnummer))", "def to_ical(self, name, value):\n type_class = self.for_property(name)\n return type_class(value).to_ical()", "def customized_retained_list(self, base_day, day_list=[]):\n auBitmap = self.make_bitmap(base_day, 'dau')\n return [(base_day, auBitmap.count())] + \\\n zip(day_list, self._retained_value(base_day, day_list, 'dau'))", "def random_date_generator(start_date, range_in_days):\n days_to_add = np.arange(0, range_in_days)\n random_dates = []\n for i in range(range_in_days):\n random_date = np.datetime64(start_date) + np.random.choice(days_to_add)\n random_dates.append(random_date)\n\n return random_dates", "def export_event(self):\n\n cal = Eve()\n cal.add('summary', str(self.categories))\n cal.add('description', self.label)\n cal.add('dtstart', vDatetime(self.start))\n cal.add('dtend', vDatetime(self.end))\n return cal.to_ical()", "def day_to_day(self):\n while True:\n yield 0", "def writeIcal(calendarItems):\n\n cal = Calendar()\n cal.add('prodid', '-//Gremien Kalender//opendata.stadt-muenster.de//')\n cal.add('version', '2.0')\n\n with open(OUTPUT_FILE_CSV, 'w', newline='') as csvfile:\n csvWriter = csv.writer(csvfile, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n csvWriter.writerow(['MeetingID', 'Start', 'Ende', 'Gremium', 'Veranstaltung', 'Ort', 'Weitere Information'])\n\n for key, session in sorted(calendarItems.items()):\n\n # Prepare event title (and convert datestrings to datetime objects with timezone)\n meetingId = session[5]\n sessionName = session[2]\n committee = session[3]\n location = session[4]\n start = datetime.strptime(session[0], \"%Y-%m-%dT%H:%M:%S%z\")\n end = datetime.strptime(session[1], \"%Y-%m-%dT%H:%M:%S%z\")\n meetingUrl = OPARL_MEETING_URL.format(meetingId)\n logging.info(\"Adding ical: %s %s %s\", start, committee, sessionName)\n\n # Create ical event (and 
convert datetimes to UTC)\n event = Event()\n event.add('summary', '{} - {}'.format(committee, sessionName))\n event.add('dtstart', start.astimezone(pytz.utc))\n event.add('dtend', end.astimezone(pytz.utc))\n event.add('dtstamp', datetime.now())\n event.add('description', meetingUrl)\n event.add('uid', '20220215T101010/{}@ms'.format(meetingId))\n\n organizer = vCalAddress('MAILTO:opendata@citeq.de')\n organizer.params['cn'] = vText('Stadt Münster')\n organizer.params['role'] = vText('Ratsinformationssytem')\n event['organizer'] = organizer\n event['location'] = vText(location)\n\n # Add event to calendar\n cal.add_component(event)\n\n # Add event to CSV\n csvWriter.writerow([meetingId, str(start), str(end), committee, sessionName, location, meetingUrl])\n\n\n # Write ical file\n f = open(OUTPUT_FILE_ICS, 'wb')\n f.write(cal.to_ical())\n f.close()", "def _load_days_lists(self):\n school_year = \\\n SchoolDB.models.get_school_year_for_date(\n self.start_date)\n for i in range(0, self.total_days_count):\n day = self.start_date + timedelta(i)\n if (not school_year or (not school_year.in_block(day))):\n morning_type = afternoon_type = 0\n day_description = \"Not in school year.\"\n elif (i > self.days_count):\n morning_type = afternoon_type = \\\n SchoolDB.models.StudentAttendanceRecord.valid\n day_description = \"In the future.\"\n else:\n morning_type = afternoon_type = \\\n SchoolDB.models.StudentAttendanceRecord.valid\n morning_school_day, afternoon_school_day, day_description = \\\n SchoolDB.models.is_school_day(day,\n self.section)\n if morning_school_day:\n morning_type |= \\\n SchoolDB.models.StudentAttendanceRecord.school_day\n if afternoon_school_day:\n afternoon_type |= \\\n SchoolDB.models.StudentAttendanceRecord.school_day\n self.dayperiod_type.append(morning_type)\n self.dayperiod_type.append(afternoon_type)\n self.day_description.append(day_description)\n self.date_list.append(day.toordinal())", "def generate_day_header():\n cf = config.Config()\n outstr = \"total_rotation,total_acceleration,total_distance,number_missing,\"\n outstr += \"oc1_time,oc2_time,oc3_time,oc4_time,oc5_time,oc6_time,oc7_time,\"\n outstr += \"oc8_time,oc9_time,oc10_time,oc11_time,oc12_time,oc13_time,\"\n outstr += \"oc14_time,oc15_time,oc16_time,oc17_time,oc18_time,oc19_time,\"\n outstr += \"oc20_time,oc21_time,oc22_time,oc23_time,oc24_time,oc25_time,\"\n outstr += \"oc26_time,oc27_time,oc28_time,oc29_time,oc30_time,oc31_time,\"\n outstr += \"oc32_time,oc33_time,\"\n anames = cf.activity_list\n for i in range(len(anames)):\n outstr += anames[i] + \"_time,\"\n outstr += \"oc1_first,oc2_first,\"\n outstr += \"oc3_first,oc4_first,oc5_first,oc6_first,oc7_first,oc8_first,\"\n outstr += \"oc9_first,oc10_first,oc11_first,oc12_first,oc13_first,\"\n outstr += \"oc14_first,oc15_first,oc16_first,oc17_first,oc18_first,\"\n outstr += \"oc19_first,oc20_first,oc21_first,oc22_first,oc23_first,\"\n outstr += \"oc24_first,oc25_first,oc26_first,oc27_first,oc28_first,\"\n outstr += \"oc29_first,oc30_first,oc31_first,oc32_first,oc33_first,\"\n for i in range(len(anames)):\n outstr += anames[i] + \"_first,\"\n outstr += \"attraction_time,house_time,restaurant_time,\"\n outstr += \"road_time,service_time,store_time,work_time,other_time,\"\n outstr += \"attraction_first,house_first,restaurant_first,road_first,\"\n outstr += \"service_first,store_first,work_first,other_first\"\n return outstr", "def iterateList(self, numDays):\n import dateutil as du\n self.daysList = []\n for pull_date in range(numDays):\n 
self.daysList.append(str((self.right_now + du.relativedelta.relativedelta(days=pull_date)).date()))\n return self.daysList", "def plot_all_indicies_sic(resolutions, temporal_resolution, temporal_decomposition, detrend, imagefolder = 'images/timeseries/INDICIES/', indicies = ['SAM', 'IPO', 'DMI', 'ENSO'], seaice_source = 'nsidc'):\n for n, temp_res, temp_decomp, dt, indexname in itertools.product(resolutions, temporal_resolution, temporal_decomposition, detrend, indicies):\n plot_index_sic_timeseries(anomlous = 'anomalous' == temp_decomp, temporal_resolution = temp_res, detrend = dt == 'detrended', indexname = indexname, n = n, seaice_source = seaice_source)", "def get_dates():\n return {\n \"years\": range(datetime.date.today().year, datetime.date.today().year + 5),\n \"months\": range(1, 13),\n \"days\": range(1, 32)\n }", "def stripDataDays(d, t, validDays = \\\n [\"Mon\", \"Tue\", \"Wed\", \"Thu\", \"Fri\", \"Sat\", \"Sun\"]):\n convTimes = []\n for i in range(len(t)):\n if t[i].strftime(\"%a\") in validDays:\n convTimes.append(i)\n\n convData = d[convTimes]\n convTimes = t[convTimes]\n\n return convData, convTimes", "def GetIscEventcatalog(start_date_time, days, pos, catalog_type):\n # Read the isc data. Note that we take any data points within 1000 km, which\n # is a huge distance. We let the polygon distance calculation below pull it\n # in closer.\n data = isc.ReadISCData('gs://clouddfe-cfs/isc', catalog_type, start_date_time,\n days, pos, 1000)\n\n # Munge the data. Brendan's calculations have different field names from\n # what's returned from the ISC reader. Likely we want to remove this step.\n # TODO(jfaller, meadeb): Make field names consistent.\n ret = {}\n ret['yr'] = [x['date_time'].year for x in data]\n ret['mon'] = [x['date_time'].month for x in data]\n ret['day'] = [x['date_time'].day for x in data]\n ret['hr'] = [x['date_time'].hour for x in data]\n ret['min'] = [x['date_time'].minute for x in data]\n ret['sec'] = [x['date_time'].second for x in data]\n ret['latitude'] = [x['lat'] for x in data]\n ret['longitude'] = [x['lon'] for x in data]\n ret['depth'] = [x['depth'] for x in data]\n ret['magnitude'] = [x['magnitude'] for x in data]\n ret['datetime'] = [x['date_time'] for x in data]\n\n return ret", "def generate_seven_days(start_date):\n\n seven_days = []\n\n for i in xrange(7):\n days_to_add = datetime.timedelta(days=i)\n seven_days.append([start_date + days_to_add, 0])\n\n return seven_days", "def create_daily_ticks(days_in_plot, minors_per_day=4, end_date=None):\n\n if not end_date:\n end_date = datetime.now()\n\n date_limits = dict()\n date_limits['right'] = end_date.replace(hour=0, minute=0, second=0, microsecond=0) + dt.timedelta(\n days=1) # end of day\n date_limits['left'] = date_limits['right'] - dt.timedelta(days=days_in_plot)\n\n major_ticks = [date_limits['right'] - dt.timedelta(days=x) for x in range(0, days_in_plot + 1)]\n\n minor_ticks = [date_limits['right'] - dt.timedelta(hours=x * (24 / minors_per_day))\n for x in range(0, days_in_plot * minors_per_day + 1)]\n\n return date_limits, major_ticks, minor_ticks", "def generate_24hr_Xlabels():\n\n X_LABELS = []\n\n for i in xrange(24):\n\n if i == 0:\n X_LABELS.append(\"12\" + \"AM\")\n elif i < 12:\n X_LABELS.append(str(i) + \"AM\")\n elif i == 12:\n X_LABELS.append(\"12\" + \"PM\")\n else:\n X_LABELS.append(str(i - 12) + \"PM\")\n\n return X_LABELS", "def create_date_list(start_date = start_date, end_date = end_date):", "def accumulate(self, days: int, dt: float, plot=True):\r\n self.floatCheck([days, 
dt])\r\n self.negValCheck([days, dt])\r\n t = np.linspace(0, days, int(days / dt) + 1)\r\n S, E, I, R = self._simulate(days, dt)\r\n # create a numpy array that will hold all of the values\r\n cases = np.zeros(len(I))\r\n # add up the total infected and removed at given time to account for everyone with the virus\r\n for i in range(len(I)):\r\n cases[i] = I[i] + R[i]\r\n # create a dictionary that holds the data for easy conversion to dataframe\r\n data1 = {\r\n \"Days\": t,\r\n \"Susceptible\": S,\r\n \"Exposed\": E,\r\n \"Infected\": I,\r\n \"Removed\": R,\r\n \"Total Cases\": cases,\r\n }\r\n # create the column labels\r\n labels = [\r\n \"Days\",\r\n \"Susceptible\",\r\n \"Exposed\",\r\n \"Infected\",\r\n \"Removed\",\r\n \"Total Cases\",\r\n ]\r\n # convert to dataframe\r\n df = pd.DataFrame(data=data1, columns=labels)\r\n if plot:\r\n # do some plotting\r\n df.plot(x=\"Days\", y=[\"Total Cases\"])\r\n plt.xlabel(\"Days\")\r\n plt.ylabel(\"Total Cases\")\r\n plt.show()\r\n # return dataframe\r\n return df", "def createDates(self, data: QDate=None):\n if data is None:\n data = self.oggi\n # print('CREATEDATES DATA', data)\n dateList = MeseGiorniDictGen.bigList(data)\n return dateList", "def getAllDays(self):\n start = str(self.current[0:10])\n end = str(self.dueDate[0:10])\n daysRange = pd.date_range(start = start, end = end).tolist()\n daysRange = daysRange[1:len(daysRange)-1]\n days = []\n for i in daysRange:\n day = str(i)\n day = day[:10] + 'T' + day[11:] + '-05:00'\n days.append(day)\n return days", "def daily(self):\n return c.Daily(self)", "def datefixer(ds):\n\n\n\t# ========== create the new dates ==========\n\tyear = ds.Year\n\n\t# +++++ set up the list of dates +++++\n\tdates = OrderedDict()\n\ttm = [dt.datetime(int(year) , int(np.floor(tm)), int(tm%1*30+1)) for tm in ds.time]\n\tdates[\"time\"] = pd.to_datetime(tm)\n\n\tdates[\"calendar\"] = 'standard'\n\tdates[\"units\"] = 'days since 1900-01-01 00:00'\n\t\n\tdates[\"CFTime\"] = date2num(\n\t\ttm, calendar=dates[\"calendar\"], units=dates[\"units\"])\n\n\treturn dates", "def make_time_series(rics: list, fields: list, start_date: str, end_date: str, name: str) -> None:\n df = get_time_series(rics, fields, start_date, end_date)\n df.dropna(inplace=True)\n to_csv(df, name, time_series=True)", "def read_ics(self, filename, lat_long_data):\n with open(os.path.join(self.zoneinfo_path, filename), \"r\") as zone:\n zoneinfo = zone.readlines()\n\n with open(os.path.join(self.zoneinfo_pure_path, filename), \"r\") as zone:\n zoneinfo_pure = zone.readlines()\n\n ics_data = []\n for i in range(0, len(zoneinfo)):\n line = zoneinfo[i]\n key = line[:line.find(\":\")]\n\n if key == \"BEGIN\":\n if line != \"BEGIN:VCALENDAR\\r\\n\":\n ics_data.append(line)\n elif key == \"END\":\n if line != \"END:VCALENDAR\\r\\n\":\n ics_data.append(line)\n elif key in (\"TZID\", \"TZOFFSETFROM\", \"TZOFFSETTO\", \"TZNAME\", \"DTSTART\"):\n ics_data.append(line)\n elif key == \"RRULE\":\n if line == zoneinfo_pure[i]:\n ics_data.append(line)\n else:\n sys.stderr.write(\"Using pure version of %s\\n\" % filename[:-4])\n ics_data.append(zoneinfo_pure[i])\n\n zone_data = {\n \"ics\": \"\".join(ics_data).rstrip()\n }\n zone_name = filename[:-4]\n if zone_name in lat_long_data:\n zone_data[\"latitude\"] = lat_long_data[zone_name][0]\n zone_data[\"longitude\"] = lat_long_data[zone_name][1]\n\n return zone_data", "def covid_daily():\n #Fetches data from API and creates global varibles\n covid_handle(covid_fetch())\n #Creates a daily breifing using varibles\n 
covid_daily_news = (f\"The number of new cases in {areaName} \\\ntoday is: {newCasesToday}. Bringing the \\\ncumulative number of cases to: {cumCasesToday}. \\nThe number of deaths yesterday were {newDeathsYesterday}, \\\nleaving the cummulative number of deaths in {areaName} at {cumDeathsYesterday}.\")\n return covid_daily_news", "def forecast(days):\n transition = np.array([[.7, .6], [.3, .4]])\n state = 0\n record = []\n for day in xrange(days):\n state = np.random.binomial(1, transition[1, state])\n record.append(state)\n return record", "def comp_day_range(number_of_days=5):\n word_days = _(\"days\")\n word_day = _(\"day\")\n DAYS = range(2, number_of_days + 1)\n days = map(lambda x: (x, \"- %d \" % x + word_days), DAYS)\n COMP_DAY_LIST = [(1, '- 1 ' + word_day)]\n return COMP_DAY_LIST + days", "def generate_dates(self, event):\n dates = []\n dtstart = self.tz_localize(event['dtstart'].dt)\n if 'dtend' in event:\n dtend = self.tz_localize(event['dtend'].dt)\n # DTEND is exclusive, so the real ending date is one day before\n if is_date(dtend):\n dtend -= datetime.timedelta(days=1)\n else:\n dtend = None\n # Normal case: no repetition\n if not 'rrule' in event:\n dates.append(self.format_dateinterval(dtstart, dtend))\n # Handle recurrent events\n else:\n ruleset = rrule.rruleset()\n rule = rrule.rrulestr(event['rrule'].to_ical().decode('utf-8'),\n dtstart=dtstart)\n ruleset.rrule(rule)\n # Parse all types of recurrence constraints\n for prop in ['rdate', 'exdate']:\n if not prop in event:\n continue\n # This can return either a single value or a list, so it's\n # a mess...\n prop_dates = event[prop]\n if not isinstance(prop_dates, list):\n prop_dates = [prop_dates]\n for prop_date in prop_dates:\n # This is a vDDDLists\n for vddd in prop_date.dts:\n dt = vddd.dt\n # EXDATE and RDATE are allowed to be dates,\n # convert them to datetime. 
TODO: should the time\n # be midnight, or the time from DTSTART?\n if is_date(dt):\n dt = datetime.datetime.combine(dt, datetime.time())\n dt = self.tz_localize(dt)\n ruleset.__getattribute__(prop)(dt)\n # We now have a ruleset that expands to a list of starting\n # date or datetime, one for each repetition.\n for dtstart_repeat in itertools.islice(ruleset, MAX_RECURRING_EVENTS):\n # Handle case where dtstart is a date, since rrule always\n # returns datetime objects.\n if is_date(dtstart):\n dtstart_repeat = dtstart_repeat.date()\n # Compute matching dtend if applicable\n if dtend == None:\n dtend_repeat = None\n else:\n dtend_repeat = dtend + (dtstart_repeat - dtstart)\n dates.append(self.format_dateinterval(dtstart_repeat, dtend_repeat))\n return dates", "def t_range_days_inv(t_days_lst):\r\n t_train_lst = pd.date_range(\r\n start=np.datetime64(t_days_lst[0]),\r\n end=np.datetime64(t_days_lst[-1]) + np.timedelta64(1, \"D\"),\r\n freq=\"D\",\r\n )\r\n return t_days_lst2range(t_train_lst)", "def dwd_GOES_IR_VIS(self,\n sun_zenith_angle_correction=True,\n alpha_sz_day_limit=None,\n alpha_sz_night_limit=None,\n backup_orig_data=False):\n return self._dwd_create_day_night_image(\n '00_7', '10_7',\n sun_zenith_angle_correction=sun_zenith_angle_correction,\n alpha_sz_day_limit=alpha_sz_day_limit,\n alpha_sz_night_limit=alpha_sz_night_limit,\n backup_orig_data=backup_orig_data)", "def dates(self):\n pass", "def acquisitions_from_ifg_dates(ifg_dates):\n acq_dates = []\n for ifg_date in ifg_dates: # loop through the dates for each ifg\n dates = ifg_date.split('_') # split into two YYYYMMDDs\n for date in dates: # loop through each of these\n if date not in acq_dates: # if it't not already in the list...\n acq_dates.append(date) # add to it\n return acq_dates", "def get_outlook_calendar_entries(days = 1):\r\n outlook = win32.Dispatch('outlook.application')\r\n\r\n ns = outlook.GetNamespace(\"MAPI\")\r\n appointments = ns.GetDefaultFolder(9).Items\r\n appointments.Sort(\"[Start]\")\r\n appointments.IncludeRecurrences = \"True\"\r\n\r\n date_from = datetime.datetime.today()\r\n begin = date_from.date().strftime(\"%x\")\r\n\r\n date_to = datetime.timedelta(days=(days+1)) + date_from\r\n end = date_to.date().strftime(\"%x\")\r\n\r\n date_filter = \"[Start] >= '\" + begin + \"' AND [END] <= '\" + end + \"'\"\r\n\r\n print(date_filter)\r\n\r\n appointments = appointments.Restrict(date_filter)\r\n events_list = []\r\n\r\n for a in appointments:\r\n #print(\"from appointment \" + str(a.Start))\r\n event_date = a.Start.replace(tzinfo=timezone(datetime.timedelta(seconds=time.localtime().tm_gmtoff)))\r\n events_list.append([event_date, a.Subject, a.Duration, a.Location])\r\n\r\n return events_list", "def days(self):\n return self._contents", "def calendar_for_event_description(ed):\n return icemac.ab.calendar.interfaces.ICalendar(ed.context)", "def generate_father_day_planning(days_to_countries=None):\n if days_to_countries is None:\n days_to_countries = get_father_days()\n\n dates = list(days_to_countries.keys())\n\n\n for i,date in enumerate(dates):\n dates[i] = parse(date,default=datetime.datetime(2020,1,1))\n #days_to_countries[date].sort()\n\n\n\n dates.sort()\n\n\n for date in dates:\n date = date.strftime('%B %d')\n date = re.sub(r'0(\\d)',r'\\1',date)\n\n countries = days_to_countries[date]\n\n print(date)\n\n for country in countries:\n print(f'- {country}')\n\n print()\n\n\n # you code", "def make_info(days, secs, nsecs, gtids, followers=None):\n array = []\n if gtids is None:\n array.append(('Not 
Processed', '-'))\n return array\n for t in range(len(days)):\n total_secs = TZERO + days[t]*24*3600 + secs[t] + float(nsecs[t])/1e9\n stime = time.strftime(\"%Y/%m/%d %H:%M:%S \", time.localtime(total_secs))\n if followers:\n array.append((gtids[t], stime, followers[t]))\n continue\n array.append((gtids[t], stime))\n\n return array", "def generate_behavior_header():\n outstr = \"mean_day_1,mean_day_2,mean_day_3,mean_day_4,mean_day_5,\"\n outstr += \"mean_day_6,mean_day_7,mean_day_8,mean_day_9,mean_day_10,\"\n outstr += \"mean_day_11,mean_day_12,mean_day_13,mean_day_14,mean_day_15,\"\n outstr += \"mean_day_16,mean_day_17,mean_day_18,mean_day_19,mean_day_20,\"\n outstr += \"mean_day_21,mean_day_22,mean_day_23,mean_day_24,mean_day_25,\"\n outstr += \"mean_day_26,mean_day_27,mean_day_28,mean_day_29,mean_day_30,\"\n outstr += \"mean_day_31,mean_day_32,mean_day_33,mean_day_34,mean_day_35,\"\n outstr += \"mean_day_36,mean_day_37,mean_day_38,mean_day_39,mean_day_40,\"\n outstr += \"mean_day_41,mean_day_42,mean_day_43,mean_day_44,mean_day_45,\"\n outstr += \"mean_day_46,mean_day_47,mean_day_48,mean_day_49,mean_day_50,\"\n outstr += \"mean_day_51,mean_day_52,mean_day_53,mean_day_54,mean_day_55,\"\n outstr += \"mean_day_56,mean_day_57,mean_day_58,mean_day_59,mean_day_60,\"\n outstr += \"mean_day_61,mean_day_62,mean_day_63,mean_day_64,mean_day_65,\"\n outstr += \"mean_day_66,mean_day_67,mean_day_68,mean_day_69,mean_day_70,\"\n outstr += \"mean_day_71,mean_day_72,mean_day_73,mean_day_74,mean_day_75,\"\n outstr += \"mean_day_76,mean_day_77,mean_day_78,mean_day_79,mean_day_80,\"\n outstr += \"mean_day_81,mean_day_82,mean_day_83,mean_day_84,mean_day_85,\"\n outstr += \"mean_day_86,mean_day_87,mean_day_88,mean_day_89,mean_day_90,\"\n outstr += \"mean_day_91,mean_day_92,mean_day_93,mean_day_94,mean_day_95,\"\n outstr += \"mean_day_96,mean_day_97,mean_day_98,mean_day_99,\"\n outstr += \"mean_day_100,mean_day_101,mean_day_102,\"\n outstr += \"mean_day_103,mean_day_104,mean_day_105,mean_day_106,mean_day_107,\"\n outstr += \"mean_day_108,mean_day_109,mean_day_110,\"\n outstr += \"med_day_1,med_day_2,med_day_3,med_day_4,med_day_5,\"\n outstr += \"med_day_6,med_day_7,med_day_8,med_day_9,med_day_10,\"\n outstr += \"med_day_11,med_day_12,med_day_13,med_day_14,med_day_15,\"\n outstr += \"med_day_16,med_day_17,med_day_18,med_day_19,med_day_20,\"\n outstr += \"med_day_21,med_day_22,med_day_23,med_day_24,med_day_25,\"\n outstr += \"med_day_26,med_day_27,med_day_28,med_day_29,med_day_30,\"\n outstr += \"med_day_31,med_day_32,med_day_33,med_day_34,med_day_35,\"\n outstr += \"med_day_36,med_day_37,med_day_38,med_day_39,med_day_40,\"\n outstr += \"med_day_41,med_day_42,med_day_43,med_day_44,med_day_45,\"\n outstr += \"med_day_46,med_day_47,med_day_48,med_day_49,med_day_50,\"\n outstr += \"med_day_51,med_day_52,med_day_53,med_day_54,med_day_55,\"\n outstr += \"med_day_56,med_day_57,med_day_58,med_day_59,med_day_60,\"\n outstr += \"med_day_61,med_day_62,med_day_63,med_day_64,med_day_65,\"\n outstr += \"med_day_66,med_day_67,med_day_68,med_day_69,med_day_70,\"\n outstr += \"med_day_71,med_day_72,med_day_73,med_day_74,med_day_75,\"\n outstr += \"med_day_76,med_day_77,med_day_78,med_day_79,med_day_80,\"\n outstr += \"med_day_81,med_day_82,med_day_83,med_day_84,med_day_85,\"\n outstr += \"med_day_86,med_day_87,med_day_88,med_day_89,med_day_90,\"\n outstr += \"med_day_91,med_day_92,med_day_93,med_day_94,med_day_95,\"\n outstr += \"med_day_96,med_day_97,med_day_98,med_day_99,\"\n outstr += 
\"med_day_100,med_day_101,med_day_102,\"\n outstr += \"med_day_103,med_day_104,med_day_105,med_day_106,med_day_107,\"\n outstr += \"med_day_108,med_day_109,med_day_110,\"\n outstr += \"std_day_1,std_day_2,std_day_3,std_day_4,std_day_5,\"\n outstr += \"std_day_6,std_day_7,std_day_8,std_day_9,std_day_10,\"\n outstr += \"std_day_11,std_day_12,std_day_13,std_day_14,std_day_15,\"\n outstr += \"std_day_16,std_day_17,std_day_18,std_day_19,std_day_20,\"\n outstr += \"std_day_21,std_day_22,std_day_23,std_day_24,std_day_25,\"\n outstr += \"std_day_26,std_day_27,std_day_28,std_day_29,std_day_30,\"\n outstr += \"std_day_31,std_day_32,std_day_33,std_day_34,std_day_35,\"\n outstr += \"std_day_36,std_day_37,std_day_38,std_day_39,std_day_40,\"\n outstr += \"std_day_41,std_day_42,std_day_43,std_day_44,std_day_45,\"\n outstr += \"std_day_46,std_day_47,std_day_48,std_day_49,std_day_50,\"\n outstr += \"std_day_51,std_day_52,std_day_53,std_day_54,std_day_55,\"\n outstr += \"std_day_56,std_day_57,std_day_58,std_day_59,std_day_60,\"\n outstr += \"std_day_61,std_day_62,std_day_63,std_day_64,std_day_65,\"\n outstr += \"std_day_66,std_day_67,std_day_68,std_day_69,std_day_70,\"\n outstr += \"std_day_71,std_day_72,std_day_73,std_day_74,std_day_75,\"\n outstr += \"std_day_76,std_day_77,std_day_78,std_day_79,std_day_80,\"\n outstr += \"std_day_81,std_day_82,std_day_83,std_day_84,std_day_85,\"\n outstr += \"std_day_86,std_day_87,std_day_88,std_day_89,std_day_90,\"\n outstr += \"std_day_91,std_day_92,std_day_93,std_day_94,std_day_95,\"\n outstr += \"std_day_96,std_day_97,std_day_98,std_day_99,\"\n outstr += \"std_day_100,std_day_101,std_day_102,\"\n outstr += \"std_day_103,std_day_104,std_day_105,std_day_106,std_day_107,\"\n outstr += \"std_day_108,std_day_109,std_day_110,\"\n outstr += \"max_day_1,max_day_2,max_day_3,max_day_4,max_day_5,\"\n outstr += \"max_day_6,max_day_7,max_day_8,max_day_9,max_day_10,\"\n outstr += \"max_day_11,max_day_12,max_day_13,max_day_14,max_day_15,\"\n outstr += \"max_day_16,max_day_17,max_day_18,max_day_19,max_day_20,\"\n outstr += \"max_day_21,max_day_22,max_day_23,max_day_24,max_day_25,\"\n outstr += \"max_day_26,max_day_27,max_day_28,max_day_29,max_day_30,\"\n outstr += \"max_day_31,max_day_32,max_day_33,max_day_34,max_day_35,\"\n outstr += \"max_day_36,max_day_37,max_day_38,max_day_39,max_day_40,\"\n outstr += \"max_day_41,max_day_42,max_day_43,max_day_44,max_day_45,\"\n outstr += \"max_day_46,max_day_47,max_day_48,max_day_49,max_day_50,\"\n outstr += \"max_day_51,max_day_52,max_day_53,max_day_54,max_day_55,\"\n outstr += \"max_day_56,max_day_57,max_day_58,max_day_59,max_day_60,\"\n outstr += \"max_day_61,max_day_62,max_day_63,max_day_64,max_day_65,\"\n outstr += \"max_day_66,max_day_67,max_day_68,max_day_69,max_day_70,\"\n outstr += \"max_day_71,max_day_72,max_day_73,max_day_74,max_day_75,\"\n outstr += \"max_day_76,max_day_77,max_day_78,max_day_79,max_day_80,\"\n outstr += \"max_day_81,max_day_82,max_day_83,max_day_84,max_day_85,\"\n outstr += \"max_day_86,max_day_87,max_day_88,max_day_89,max_day_90,\"\n outstr += \"max_day_91,max_day_92,max_day_93,max_day_94,max_day_95,\"\n outstr += \"max_day_96,max_day_97,max_day_98,max_day_99,\"\n outstr += \"max_day_100,max_day_101,max_day_102,\"\n outstr += \"max_day_103,max_day_104,max_day_105,max_day_106,max_day_107,\"\n outstr += \"max_day_108,max_day_109,max_day_110,\"\n outstr += \"min_day_1,min_day_2,min_day_3,min_day_4,min_day_5,\"\n outstr += \"min_day_6,min_day_7,min_day_8,min_day_9,min_day_10,\"\n outstr += 
\"min_day_11,min_day_12,min_day_13,min_day_14,min_day_15,\"\n outstr += \"min_day_16,min_day_17,min_day_18,min_day_19,min_day_20,\"\n outstr += \"min_day_21,min_day_22,min_day_23,min_day_24,min_day_25,\"\n outstr += \"min_day_26,min_day_27,min_day_28,min_day_29,min_day_30,\"\n outstr += \"min_day_31,min_day_32,min_day_33,min_day_34,min_day_35,\"\n outstr += \"min_day_36,min_day_37,min_day_38,min_day_39,min_day_40,\"\n outstr += \"min_day_41,min_day_42,min_day_43,min_day_44,min_day_45,\"\n outstr += \"min_day_46,min_day_47,min_day_48,min_day_49,min_day_50,\"\n outstr += \"min_day_51,min_day_52,min_day_53,min_day_54,min_day_55,\"\n outstr += \"min_day_56,min_day_57,min_day_58,min_day_59,min_day_60,\"\n outstr += \"min_day_61,min_day_62,min_day_63,min_day_64,min_day_65,\"\n outstr += \"min_day_66,min_day_67,min_day_68,min_day_69,min_day_70,\"\n outstr += \"min_day_71,min_day_72,min_day_73,min_day_74,min_day_75,\"\n outstr += \"min_day_76,min_day_77,min_day_78,min_day_79,min_day_80,\"\n outstr += \"min_day_81,min_day_82,min_day_83,min_day_84,min_day_85,\"\n outstr += \"min_day_86,min_day_87,min_day_88,min_day_89,min_day_90,\"\n outstr += \"min_day_91,min_day_92,min_day_93,min_day_94,min_day_95,\"\n outstr += \"min_day_96,min_day_97,min_day_98,min_day_99,\"\n outstr += \"min_day_100,min_day_101,min_day_102,\"\n outstr += \"min_day_103,min_day_104,min_day_105,min_day_106,min_day_107,\"\n outstr += \"min_day_108,min_day_109,min_day_110,\"\n outstr += \"day_zero_crossings,day_mean_crossings,day_interquartile_range,\"\n outstr += \"day_skewness,day_kurtosis,\"\n outstr += \"sig_day_1,sig_day_2,sig_day_3,sig_day_4,sig_day_5,\"\n outstr += \"sig_day_6,sig_day_7,sig_day_8,sig_day_9,sig_day_10,\"\n outstr += \"sig_day_11,sig_day_12,sig_day_13,sig_day_14,sig_day_15,\"\n outstr += \"sig_day_16,sig_day_17,sig_day_18,sig_day_19,sig_day_20,\"\n outstr += \"sig_day_21,sig_day_22,sig_day_23,sig_day_24,sig_day_25,\"\n outstr += \"sig_day_26,sig_day_27,sig_day_28,sig_day_29,sig_day_30,\"\n outstr += \"sig_day_31,sig_day_32,sig_day_33,sig_day_34,sig_day_35,\"\n outstr += \"sig_day_36,sig_day_37,sig_day_38,sig_day_39,sig_day_40,\"\n outstr += \"sig_day_41,sig_day_42,sig_day_43,sig_day_44,sig_day_45,\"\n outstr += \"sig_day_46,sig_day_47,sig_day_48,sig_day_49,sig_day_50,\"\n outstr += \"sig_day_51,sig_day_52,sig_day_53,sig_day_54,sig_day_55,\"\n outstr += \"sig_day_56,sig_day_57,sig_day_58,sig_day_59,sig_day_60,\"\n outstr += \"sig_day_61,sig_day_62,sig_day_63,sig_day_64,sig_day_65,\"\n outstr += \"sig_day_66,sig_day_67,sig_day_68,sig_day_69,sig_day_70,\"\n outstr += \"sig_day_71,sig_day_72,sig_day_73,sig_day_74,sig_day_75,\"\n outstr += \"sig_day_76,sig_day_77,sig_day_78,sig_day_79,sig_day_80,\"\n outstr += \"sig_day_81,sig_day_82,sig_day_83,sig_day_84,sig_day_85,\"\n outstr += \"sig_day_86,sig_day_87,sig_day_88,sig_day_89,sig_day_90,\"\n outstr += \"sig_day_91,sig_day_92,sig_day_93,sig_day_94,sig_day_95,\"\n outstr += \"sig_day_96,sig_day_97,sig_day_98,sig_day_99,\"\n outstr += \"sig_day_100,sig_day_101,sig_day_102,\"\n outstr += \"sig_day_103,sig_day_104,sig_day_105,sig_day_106,sig_day_107,\"\n outstr += \"sig_day_108,sig_day_109,sig_day_110,\"\n outstr += \"mean_hour_1,mean_hour_2,mean_hour_3,mean_hour_4,mean_hour_5,\"\n outstr += \"mean_hour_6,mean_hour_7,mean_hour_8,mean_hour_9,mean_hour_10,\"\n outstr += \"mean_hour_11,mean_hour_12,mean_hour_13,mean_hour_14,mean_hour_15,\"\n outstr += \"mean_hour_16,mean_hour_17,mean_hour_18,mean_hour_19,mean_hour_20,\"\n outstr += 
\"mean_hour_21,mean_hour_22,mean_hour_23,mean_hour_24,mean_hour_25,\"\n outstr += \"mean_hour_26,mean_hour_27,mean_hour_28,mean_hour_29,mean_hour_30,\"\n outstr += \"mean_hour_31,mean_hour_32,mean_hour_33,mean_hour_34,mean_hour_35,\"\n outstr += \"mean_hour_36,mean_hour_37,mean_hour_38,mean_hour_39,mean_hour_40,\"\n outstr += \"mean_hour_41,mean_hour_42,mean_hour_43,mean_hour_44,mean_hour_45,\"\n outstr += \"mean_hour_46,mean_hour_47,mean_hour_48,mean_hour_49,mean_hour_50,\"\n outstr += \"mean_hour_51,mean_hour_52,mean_hour_53,\"\n outstr += \"mean_hour_54,mean_hour_55,mean_hour_56,mean_hour_57,\"\n outstr += \"med_hour_1,med_hour_2,med_hour_3,med_hour_4,med_hour_5,\"\n outstr += \"med_hour_6,med_hour_7,med_hour_8,med_hour_9,med_hour_10,\"\n outstr += \"med_hour_11,med_hour_12,med_hour_13,med_hour_14,med_hour_15,\"\n outstr += \"med_hour_16,med_hour_17,med_hour_18,med_hour_19,med_hour_20,\"\n outstr += \"med_hour_21,med_hour_22,med_hour_23,med_hour_24,med_hour_25,\"\n outstr += \"med_hour_26,med_hour_27,med_hour_28,med_hour_29,med_hour_30,\"\n outstr += \"med_hour_31,med_hour_32,med_hour_33,med_hour_34,med_hour_35,\"\n outstr += \"med_hour_36,med_hour_37,med_hour_38,med_hour_39,med_hour_40,\"\n outstr += \"med_hour_41,med_hour_42,med_hour_43,med_hour_44,med_hour_45,\"\n outstr += \"med_hour_46,med_hour_47,med_hour_48,med_hour_49,med_hour_50,\"\n outstr += \"med_hour_51,med_hour_52,med_hour_53,\"\n outstr += \"med_hour_54,med_hour_55,med_hour_56,med_hour_57,\"\n outstr += \"std_hour_1,std_hour_2,std_hour_3,std_hour_4,std_hour_5,\"\n outstr += \"std_hour_6,std_hour_7,std_hour_8,std_hour_9,std_hour_10,\"\n outstr += \"std_hour_11,std_hour_12,std_hour_13,std_hour_14,std_hour_15,\"\n outstr += \"std_hour_16,std_hour_17,std_hour_18,std_hour_19,std_hour_20,\"\n outstr += \"std_hour_21,std_hour_22,std_hour_23,std_hour_24,std_hour_25,\"\n outstr += \"std_hour_26,std_hour_27,std_hour_28,std_hour_29,std_hour_30,\"\n outstr += \"std_hour_31,std_hour_32,std_hour_33,std_hour_34,std_hour_35,\"\n outstr += \"std_hour_36,std_hour_37,std_hour_38,std_hour_39,std_hour_40,\"\n outstr += \"std_hour_41,std_hour_42,std_hour_43,std_hour_44,std_hour_45,\"\n outstr += \"std_hour_46,std_hour_47,std_hour_48,std_hour_49,std_hour_50,\"\n outstr += \"std_hour_51,std_hour_52,std_hour_53,\"\n outstr += \"std_hour_54,std_hour_55,std_hour_56,std_hour_57,\"\n outstr += \"max_hour_1,max_hour_2,max_hour_3,max_hour_4,max_hour_5,\"\n outstr += \"max_hour_6,max_hour_7,max_hour_8,max_hour_9,max_hour_10,\"\n outstr += \"max_hour_11,max_hour_12,max_hour_13,max_hour_14,max_hour_15,\"\n outstr += \"max_hour_16,max_hour_17,max_hour_18,max_hour_19,max_hour_20,\"\n outstr += \"max_hour_21,max_hour_22,max_hour_23,max_hour_24,max_hour_25,\"\n outstr += \"max_hour_26,max_hour_27,max_hour_28,max_hour_29,max_hour_30,\"\n outstr += \"max_hour_31,max_hour_32,max_hour_33,max_hour_34,max_hour_35,\"\n outstr += \"max_hour_36,max_hour_37,max_hour_38,max_hour_39,max_hour_40,\"\n outstr += \"max_hour_41,max_hour_42,max_hour_43,max_hour_44,max_hour_45,\"\n outstr += \"max_hour_46,max_hour_47,max_hour_48,max_hour_49,max_hour_50,\"\n outstr += \"max_hour_51,max_hour_52,max_hour_53,\"\n outstr += \"max_hour_54,max_hour_55,max_hour_56,max_hour_57,\"\n outstr += \"min_hour_1,min_hour_2,min_hour_3,min_hour_4,min_hour_5,\"\n outstr += \"min_hour_6,min_hour_7,min_hour_8,min_hour_9,min_hour_10,\"\n outstr += \"min_hour_11,min_hour_12,min_hour_13,min_hour_14,min_hour_15,\"\n outstr += \"min_hour_16,min_hour_17,min_hour_18,min_hour_19,min_hour_20,\"\n outstr += 
\"min_hour_21,min_hour_22,min_hour_23,min_hour_24,min_hour_25,\"\n outstr += \"min_hour_26,min_hour_27,min_hour_28,min_hour_29,min_hour_30,\"\n outstr += \"min_hour_31,min_hour_32,min_hour_33,min_hour_34,min_hour_35,\"\n outstr += \"min_hour_36,min_hour_37,min_hour_38,min_hour_39,min_hour_40,\"\n outstr += \"min_hour_41,min_hour_42,min_hour_43,min_hour_44,min_hour_45,\"\n outstr += \"min_hour_46,min_hour_47,min_hour_48,min_hour_49,min_hour_50,\"\n outstr += \"min_hour_51,min_hour_52,min_hour_53,\"\n outstr += \"min_hour_54,min_hour_55,min_hour_56,min_hour_57,\"\n outstr += \"hour_zero_crossings,hour_mean_crossings,hour_interquartile_range,\"\n outstr += \"hour_skewness,hour_kurtosis,\"\n outstr += \"sig_hour_1,sig_hour_2,sig_hour_3,sig_hour_4,sig_hour_5,\"\n outstr += \"sig_hour_6,sig_hour_7,sig_hour_8,sig_hour_9,sig_hour_10,\"\n outstr += \"sig_hour_11,sig_hour_12,sig_hour_13,sig_hour_14,sig_hour_15,\"\n outstr += \"sig_hour_16,sig_hour_17,sig_hour_18,sig_hour_19,sig_hour_20,\"\n outstr += \"sig_hour_21,sig_hour_22,sig_hour_23,sig_hour_24,sig_hour_25,\"\n outstr += \"sig_hour_26,sig_hour_27,sig_hour_28,sig_hour_29,sig_hour_30,\"\n outstr += \"sig_hour_31,sig_hour_32,sig_hour_33,sig_hour_34,sig_hour_35,\"\n outstr += \"sig_hour_36,sig_hour_37,sig_hour_38,sig_hour_39,sig_hour_40,\"\n outstr += \"sig_hour_41,sig_hour_42,sig_hour_43,sig_hour_44,sig_hour_45,\"\n outstr += \"sig_hour_46,sig_hour_47,sig_hour_48,sig_hour_49,sig_hour_50,\"\n outstr += \"sig_hour_51,sig_hour_52,sig_hour_53,\"\n outstr += \"sig_hour_54,sig_hour_55,sig_hour_56,sig_hour_57,\"\n outstr += \"ri_ww_acc,ri_wd_acc,ri_sun_acc,ri_mon_acc,ri_tue_acc,\"\n outstr += \"ri_wed_acc,ri_thu_acc,ri_fri_acc,ri_sat_acc,\"\n outstr += \"ri_ww_rot,ri_wd_rot,ri_sun_rot,ri_mon_rot,ri_tue_rot,\"\n outstr += \"ri_wed_rot,ri_thu_rot,ri_fri_rot,ri_sat_rot,\"\n outstr += \"ri_ww_dist,ri_wd_dist,ri_sun_dist,ri_mon_dist,ri_tue_dist,\"\n outstr += \"ri_wed_dist,ri_thu_dist,ri_fri_dist,ri_sat_dist,\"\n outstr += \"cr_hour_1,cr_hour_2,cr_hour_3\"\n return outstr", "def map_datetime_to_int(total_days,start_date):\n datetime_to_int_map = {}\n \n for _days in range(total_days):\n datetime_to_int_map[start_date + datetime.timedelta(days =_days)] = _days\n\n return datetime_to_int_map", "def __init__(self, shift_days: int):\n self.shift_days = shift_days\n\n self.categorical_features = [\n 'dept_id', 'cat_id', 'store_id', 'state_id',\n 'event_type_1', 'month', 'wday',\n 'is_weekend', 'snap'\n ]", "def _default_dates():\n today = datetime.now().date()\n five_days_from_now = today + timedelta(days=5)\n # create readable format, as should be input\n # return [today.strftime('%Y-%m-%d'), five_days_from_now.strftime('%Y-%m-%d')]\n return [today, five_days_from_now]", "def dates_to_fits(date_begin: astropy.time.Time, date_end: astropy.time.Time) -> dict[str, Any]:\n cards: dict[str, Any] = {}\n if date_begin is None and date_end is None:\n # no date headers can be written\n return cards\n\n cards[\"TIMESYS\"] = \"TAI\"\n\n date_avg = None\n if date_begin is not None and date_end is not None:\n date_avg = date_begin + (date_end - date_begin) / 2.0\n\n for fragment, date in ((\"OBS\", date_begin), (\"BEG\", date_begin), (\"END\", date_end), (\"AVG\", date_avg)):\n if date is not None:\n tai = date.tai\n cards[f\"DATE-{fragment}\"] = tai.isot\n cards[f\"MJD-{fragment}\"] = tai.mjd\n\n return cards", "def getDate(self):\n arr=[]\n dayarr=['Saturday','Sunday','Monday','Tuesday','Wednesday','Thursday','Friday']\n if self.debug =='yes':\n arr=self.debugdate\n else:\n 
\n hour = time.strftime('%H')\n month=time.strftime('%m')\n dayname=time.strftime('%A')\n monthname=time.strftime('%B')\n \n if time.strftime('%H') < '16':\n dayname=dayarr[int(time.strftime('%w'))]\n day = '%s' % str(int(time.strftime('%d'))-1)\n #print day\n if day == '0':\n month,day,monthname=self.EOM(month)\n if int(day) < 10:\n day= '0%s' % day\n else:\n day=day\n else:\n day = time.strftime('%d')\n \n arr.append(month)\n arr.append(day)\n arr.append(time.strftime('%Y'))\n arr.append(dayname)\n arr.append(monthname)\n #print arr\n return arr", "def _iterate_days(from_date, to_date):\n if from_date > to_date:\n raise ValueError('from_date %s is > to_date %s', from_date, to_date)\n return rrule.rrule(rrule.DAILY, dtstart=from_date, until=to_date)", "def covid_data()->str:# make it so json file can change days number\n event_log(\"retrieve covid data...\",\"\")\n c = 0\n covid_info= (\n 'https://api.coronavirus.data.gov.uk/v1/data?'\n 'filters=areaType=nation;areaName=england&'\n 'structure={\"date\":\"date\",\"newCases\":\"newCasesByPublishDate\"}'\n )\n response = get(covid_info, timeout=10)\n result = response.json()\n cases_list=[]\n for x in result['data']:\n cases_list.append((str(x['date']) + \" Cases in the country on that day: \" + str(x['newCases'])))\n if c == 6:#displays covid cases for past 7 days\n break\n c+=1\n return cases_list", "def gen_ymd(t,d) -> str:\n ymd = ( str(t.year) + d + str(t.month) + d + str(t.day) )\n return ymd", "def cal():\n this_cal = Kalendar()\n to_display = \"\"\n\n for elements in this_cal.get_all_elements():\n to_display += elements[\"key\"] + \":<BR>\"\n for element in elements[\"value\"]:\n to_display += \"&nbsp;&nbsp;&nbsp;&nbsp;\" + str(element) + \"<BR>\"\n\n return to_display", "def get_weather_details(self, days: int = None):\n forecast = super().get_weather_forecast(self.BASE_URL)\n headers = [\n \"date\",\n \"min_temp\",\n \"max_temp\",\n \"phrase\",\n \"probability\",\n \"wind_speed\"]\n if days is None:\n days = 5\n for number in range(days):\n data = []\n date = forecast[\"DailyForecasts\"][number]['Date']\n date = date[:10]\n data.append(date)\n min_temp = round((int(\n (forecast[\"DailyForecasts\"][number][\"Temperature\"]\n [\"Minimum\"][\"Value\"])) - 32) / 1.8)\n data.append(min_temp)\n max_temp = round((int(\n (forecast[\"DailyForecasts\"][number][\"Temperature\"]\n [\"Maximum\"][\"Value\"])) - 32) / 1.8)\n data.append(max_temp)\n phrase = forecast[\"DailyForecasts\"][number][\"Day\"][\"LongPhrase\"]\n data.append(phrase)\n probability = (forecast[\"DailyForecasts\"][number][\"Day\"]\n [\"RainProbability\"])\n data.append(probability)\n wind_speed = round(int(\n (forecast[\"DailyForecasts\"][number][\"Day\"][\"Wind\"][\"Speed\"]\n [\"Value\"]) / 1.6), 1)\n data.append(wind_speed)\n yield dict(zip(headers, data))", "def plot_all_sic_sic(resolutions, temporal_resolution, temporal_decomposition, detrend, imagefolder = 'images/timeseries/INDICIES/', indicies = ['SAM', 'IPO', 'DMI', 'ENSO']):\n for n, temp_res, temp_decomp, dt in itertools.product(resolutions, temporal_resolution, temporal_decomposition, detrend):\n plot_sic_sic_timeseries(anomlous = 'anomalous' == temp_decomp, temporal_resolution = temp_res, spatial_resolution = n, detrend = dt == 'detrended')", "def get_dates(self):\n now = datetime.now()\n if now.month > 6 and now.month < 9:\n now = datetime(now.year, 6, 1)\n\n term = ReadingsTerm()\n out = list(term.get_year_interval(now)) + [now.month]\n return out", "def create_events_from_list(days):\r\n\tfrom reserver.models 
import Event, EventCategory\r\n\tadded_events_count = 0\r\n\toff_day_event_category = EventCategory.objects.get(name=\"Red day\")\r\n\tfor day in days:\r\n\t\tyear = day[\"date\"].split(\"-\")[0]\r\n\t\tif not Event.objects.filter(start_time__year=year, name=day[\"name\"]).exists():\r\n\t\t\tevent = Event(\r\n\t\t\t\tstart_time = timezone.make_aware(datetime.datetime.strptime(day[\"date\"], '%Y-%m-%d').replace(hour=8)),\r\n\t\t\t\tend_time = timezone.make_aware(datetime.datetime.strptime(day[\"date\"], '%Y-%m-%d').replace(hour=16)),\r\n\t\t\t\tname = day[\"name\"],\r\n\t\t\t\tcategory = off_day_event_category,\r\n\t\t\t\tdescription = \"This day is a Norwegian national holiday.\"\r\n\t\t\t)\r\n\t\t\tevent.save()\r\n\t\t\tadded_events_count += 1\r\n\t\telif not Event.objects.filter(start_time__year=year, name=day[\"name\"], category=off_day_event_category).exists():\r\n\t\t\tevent = Event.objects.get(start_time__year=year, name=day[\"name\"])\r\n\t\t\tevent.category = off_day_event_category\r\n\t\t\tevent.save()\r\n\t\t\tprint(\"Corrected an event category\")\r\n\r\n\tprint(\"Added \" + str(added_events_count) + \" new event(s)\")", "def get_date_paths(from_date, to_date, prefix_tmpl=STD_DATE_PREFIX):\n return [get_date_prefix(d, prefix_tmpl)\n for d in _iterate_days(from_date, to_date)]", "def get_dark_cal_ids(dark_cals_dir=MICA_FILES['dark_cals_dir'].abs):\n dark_cal_ids = sorted([fn for fn in os.listdir(dark_cals_dir)\n if re.match(r'[12]\\d{6}$', fn)])\n dates = [CxoTime(d[:4] + ':' + d[4:]).date for d in dark_cal_ids]\n return OrderedDict(zip(dates, dark_cal_ids))", "def _create_week_dates_text(self):\n week_start = []\n week_end = []\n week_text = []\n week_start.append(self.start_date)\n week_end.append(self.start_date + timedelta(days=6))\n week_start.append(week_end[0] + timedelta(days=1))\n week_end.append(self.display_end_date)\n for i in (0,1):\n week_start_month = week_start[i].strftime(\"%b\")\n week_start_day = week_start[i].strftime(\"%d\").lstrip(\"0\")\n week_end_month = week_end[i].strftime(\"%b\")\n week_end_day = week_end[i].strftime(\"%d\").lstrip(\"0\")\n week_text.append(\"%s %s - %s %s\" %(week_start_month, \n week_start_day, week_end_month, week_end_day))\n return week_text", "def _add_day(cab_data):\n return cab_data.assign(day=lambda x: x.time.dt.strftime('%m%d'))", "def readrawicsd(filename):\n with open(os.path.join(os.getcwd(), 'resources', 'rawICSD', filename + '.csv'), 'rt') as f:\n reader = list(csv.reader(f))\n icsdlist = []\n for row in reader:\n icsd_id = ''\n for character in row[0]:\n try:\n int(character)\n except ValueError:\n icsdlist.append(icsd_id)\n break\n else:\n icsd_id += character\n\n resultstring = '#' + '; #'.join(icsdlist)\n\n return resultstring", "def get_date_list(self, queryset, date_type):\n date_field = self.get_date_field()\n dates_group = [list(qs.dates(date_field, date_type)) for qs in queryset]\n dates = [d.day for d in reduce(lambda a, b: a + b, dates_group, [])]\n\n calendar.setfirstweekday(6) # starts at sunday\n month = self.get_month()\n year = self.get_year()\n cal = calendar.monthcalendar(int(year), int(month))\n\n for i, week in enumerate(cal):\n for j, day in enumerate(week):\n state = models.DATE_STATES[dates.count(day)]\n cal[i][j] = {'day': day, 'state': state}\n return cal", "def it2day(it, dt=1200, start=694224000, sep=''):\n return time.strftime('%Y' + sep + '%m' + sep + '%d', time.gmtime(start+dt*it-86400))", "def calculate_climatology(var, var_dates):\n d_counts=[]\n var_clim = np.zeros_like(var)\n for m in 
range(1,13): #for each month\n mo_ind = (var_dates[1,:]==m)\n day_options = np.unique(var_dates[2,mo_ind])\n \n #print(day_options) #for diagnostics \n for d in range(0,np.size(day_options)): #for each possible day\n d_ind = (mo_ind) & (var_dates[2,:]==day_options[d])\n\n var_days = var[:,:,d_ind]\n var_daysav = np.nanmean(var_days,2)\n \n var_clim[:,:,d_ind] = np.transpose(np.tile(var_daysav,(np.sum(d_ind),1,1)),(1,2,0))\n \n d_counts.append(np.sum(d_ind)) #this is just for diagnostics\n \n return var_clim;", "def gen_date_list(begin_date, end_date):\n begin_tm = time.strptime(begin_date, \"%Y%m%d\")\n end_tm = time.strptime(end_date, \"%Y%m%d\")\n begin_tv = calendar.timegm(begin_tm)\n end_tv = calendar.timegm(end_tm)\n date_list = []\n for tv in xrange(begin_tv, end_tv+86400, 86400):\n date_list.append(time.strftime(\"%Y%m%d\", time.gmtime(tv)))\n return date_list", "def date_info_day(date_str, infile):\n #date_str = str(sys.argv[1])\n #infile = './' + date_str + '.nc'\n\n # prepare date\n year,mon,day = date_str.split('-')\n year_num = int(float(year))\n mon_num = int(float(mon))\n day_num = int(float(day))\n\n\n datesec_calc = []\n val_pr_day = 4\n secstep = 86400/val_pr_day\n sec = [0, 1*secstep, 2*secstep, 3*secstep]\n for j in sec:\n datesec_calc.append(j)\n\n # Open a netCDF file for appending:\n ncfile = Dataset(infile,'a')\n #time_in = ncfile.variables['time'][:]\n #ncfile = Dataset('date_datesec' + date + '.nc','w')\n\n # Create the variable (4 byte integer in this case)\n # first argument is name of variable, second is datatype, third is\n # a tuple with the names of dimensions.\n date_str = ncfile.createVariable('date',dtype('int32').char,('time'))\n datesec = ncfile.createVariable('datesec',dtype('int32').char,('time'))\n\n # Write data to variable:\n date_str[:] = year_num*10000+mon_num*100+day_num\n datesec[:] = datesec_calc\n\n # Add attributes to the variables:\n date_str.long_name = 'current date (YYYYMMDD)'\n datesec.long_name = 'current seconds of current date'\n\n # close the file.\n ncfile.close()\n return", "def test_output_day(self):\n input_ = [\n self.indicator_record(date=datetime.date(2011, 1, 1), value=0.83),\n self.indicator_record(date=datetime.date(2011, 2, 1), value=0.80),\n ]\n output = self.expander._ipca_from_15_expander(input_)\n\n self.assertEqual(output[-1].date.day, 1)", "def dwd_IR_VIS(self,\n sun_zenith_angle_correction=True,\n alpha_sz_day_limit=None,\n alpha_sz_night_limit=None,\n backup_orig_data=False):\n return self._dwd_create_day_night_image(\n 'VIS006', 'IR_108',\n sun_zenith_angle_correction=sun_zenith_angle_correction,\n alpha_sz_day_limit=alpha_sz_day_limit,\n alpha_sz_night_limit=alpha_sz_night_limit,\n backup_orig_data=backup_orig_data)", "def _get_case_dates(self):\n path = \"//path/to/text/text()\"\n return [\n convert_date_string(date_string)\n for date_string in self.html.xpath(path)\n ]" ]
[ "0.6347691", "0.6087289", "0.6072295", "0.59718066", "0.57362044", "0.5708821", "0.56834716", "0.5679318", "0.5669046", "0.56072843", "0.5576914", "0.557137", "0.557137", "0.553295", "0.54978454", "0.5415384", "0.5413129", "0.53640497", "0.5288981", "0.52658474", "0.52517194", "0.52397335", "0.52350885", "0.5234849", "0.5156906", "0.5156129", "0.5150557", "0.5128345", "0.51163435", "0.5114991", "0.51132566", "0.507384", "0.5061931", "0.5061109", "0.5050038", "0.504691", "0.50243956", "0.5020416", "0.5018443", "0.50176114", "0.50168204", "0.50124925", "0.49879882", "0.49876735", "0.4982737", "0.4965481", "0.4961545", "0.49584097", "0.49571124", "0.49556664", "0.49488443", "0.4947532", "0.49420223", "0.4937137", "0.49324402", "0.49244693", "0.49201718", "0.4918494", "0.49090597", "0.49079663", "0.49008286", "0.4888303", "0.48759574", "0.4875176", "0.48719564", "0.48471335", "0.48398674", "0.4838093", "0.4837855", "0.48254043", "0.48009425", "0.47985104", "0.47974372", "0.47947195", "0.47831738", "0.47822326", "0.4778717", "0.47783867", "0.47661227", "0.4758893", "0.47584456", "0.47551444", "0.4749165", "0.47455546", "0.4743396", "0.4743138", "0.47394466", "0.4722736", "0.47116163", "0.47066754", "0.47035918", "0.47002935", "0.4699633", "0.4697021", "0.46918428", "0.46909904", "0.46863863", "0.46830356", "0.46812585", "0.4679857" ]
0.78284
0
Get user profile. Fetches from the user collection by using the user's email as key.
def get_user_profile(email):  # GET
    # NOTE: This method previously called LCS with director credentials in order to retrieve the user's name
    # We will update TeamRU to store names along with our user objects, saving the need to call LCS again
    user_profile = coll("users").find_one({"_id": email})
    if not user_profile:
        return {"message": "User not found"}, 404
    user_profile["user_id"] = user_profile.pop("_id")
    return user_profile, 200
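For context, a minimal sketch of how this handler could be mounted as a web endpoint. The Flask wiring, the MongoDB connection string, the database name "teamru", and the coll() helper are all assumptions made for illustration; the source only shows that coll("users") behaves like a PyMongo collection.

from flask import Flask, jsonify
from pymongo import MongoClient

app = Flask(__name__)
client = MongoClient("mongodb://localhost:27017")  # hypothetical connection string

def coll(name):
    # Assumed helper: return a handle to the named collection; the real
    # project presumably centralizes this elsewhere.
    return client["teamru"][name]  # "teamru" database name is hypothetical

@app.route("/users/<email>", methods=["GET"])
def user_profile_route(email):
    body, status = get_user_profile(email)  # handler defined above
    return jsonify(body), status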
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _getProfileFromUser(self):\n # Make sure user is authenticated\n user = endpoints.get_current_user()\n if not user:\n raise endpoints.UnauthorizedException('Authorization required')\n # Get Profile from datastore\n user_id = user.email()\n p_key = ndb.Key(Profile, user_id)\n profile = p_key.get()\n # Create new Profile if not there\n if not profile:\n profile = Profile(\n key = p_key,\n displayName = user.nickname(),\n mainEmail= user.email(),\n teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),\n )\n profile.put()\n return profile", "def get_user(cls, email=None, user_id=None):\n\n params = {'email': email, 'user_id': user_id}\n user_dict = cls._do_call(\n 'GET', cls.api_endpoint + 'users', params=params)\n return user_dict", "def get_user_by_email(self, strategy, email):\r\n return strategy.storage.user.user_model().objects.get(email=email)", "def helper_get_by_email(user_email):\n user = heart_rate_databases_starter.models.User.objects.raw({\"_id\": user_email}).first() # Get the first user where _id=email\n return user", "def get_info(email):\n # Get the first user where _id=email\n user = models.User.objects.raw({\"_id\": email}).first()\n return user", "def get_user_profile(self):\n return self.request('get', 'id/users')", "def load_user(user_email):\n return User.query.get(user_email)", "def get_user(email, queryset=None):\n if queryset is None:\n queryset = User.objects\n return queryset.get(username=_email_to_username(email))", "def user(email):\r\n return User.objects.get(email=email)", "def get_user(self, user_id):\n _email = self._email_for_user_id(user_id)\n response = self._get('/users?{0}'.format(urllib.urlencode({'search': _email})))\n for _user in response:\n if _user['email'] == _email:\n return _user\n return None", "def get_user_by_email(self, emailid):\n payload = {'appkey': self._lr_object._get_api_key(), 'appsecret': self._lr_object._get_api_secret(),\n 'emailid': emailid}\n url = SECURE_API_URL + \"raas/v1/user\"\n return self._lr_object._get_json(url, payload)", "def get_user(self, email):\n return run_transaction(\n self.sessionfactory,\n lambda session: get_user_txn(session, email))", "def get_user_by_email(cls, user_email):\n\n try:\n user_login_info = User.query.filter_by(email=user_email).one()\n\n return user_login_info\n\n except Exception, error:\n print error", "def read_user_profile():\n logger.debug(\"entering function read_profile\")\n find_query = {\"user_id\": current_user.id}\n project_query = {\"_id\": 0, \"user_id\": 0, \"password\": 0}\n result = run_find_one_query(config.USERS_COL, find_query, project_query, error=True,\n error_msg=NO_USER_ERR_MSG)\n logger.info(\"fetched user profile for %s\", current_user.id)\n response = get_success_response(data=result)\n logger.debug(\"exiting function read_profile\")\n return response", "def get_by_email(self, email):\n user = (\n self.session\n .query(tables.User)\n .filter_by(email=email)\n .first()\n )\n return user", "def retrieve_user(self, email):\n if self.database is None:\n raise Exception(\"No database.\")\n if email is None or len(email) == 0:\n raise Exception(\"Bad parameter.\")\n return self.database.retrieve_user(email)", "def current_user(email):\n for user in Data.users:\n if email == user['email']:\n return user", "def show(self, email):\n\n return User.query.filter_by(email=email).first()", "def getUserbyEmail(self, email):\n\n cursor = self.conn.cursor()\n query = \"SELECT uid, cid, ufirstname, ulastname, udescription, urole, uclassification, email, pin \" \\\n \"FROM Users natural 
inner join Credential \" \\\n \"WHERE email= %s;\"\n cursor.execute(query, (email,))\n result = cursor.fetchone()\n return result", "def users_profile_query(self):\n email_query = request.args.get('email')\n if not email_query:\n self.logger.debug((messages.MISSING_FIELDS_ERROR % \"email\"))\n return messages.ERROR_JSON % (messages.MISSING_FIELDS_ERROR % \"email\"), 400\n try:\n user_data = self.auth_server.profile_query(email_query)\n except UnexistentUserError:\n self.logger.debug(messages.USER_NOT_FOUND_MESSAGE % email_query)\n return messages.ERROR_JSON % (messages.USER_NOT_FOUND_MESSAGE % email_query), 404\n return json.dumps(user_data), 200", "def get_user_by_email(self, email: str):\n try:\n return model_to_dict(\n User.select().where(User.email == email).get())\n except DoesNotExist:\n raise ValueError(HTTPStatus.NOT_FOUND,\n 'User with email {} does not exist'.format(email))\n except Exception:\n raise BaseException(HTTPStatus.INTERNAL_SERVER_ERROR,\n 'Internal server error')", "async def get_by_email(self, email: str) -> Optional[UD]:\n user = await looped_fetch(\n self.async_deta_base.fetch, query={\"email\": email.lower()}\n )\n\n return self.user_db_model(**user) if user else None", "def get_user_by_email(email):\n\n user = User.query.filter(User.email == email).first()\n \n return user", "def get_user(current_user):\n for user in user_db:\n if user['email'] == current_user:\n return user", "def get_user(self, email):\n\n try:\n return self.client.admin_get_user(\n Username=email,\n UserPoolId=self.user_pool_id\n )\n except self.client.exceptions.UserNotFoundException:\n raise Exception('An account with the given email does not exist.')", "def get_my_profile(self):\n\n url = self.api_base_url + \"user/profile\"\n\n try:\n raw_response = self.request_handler.make_request(ApiRequestHandler.GET, url)\n except RequestFailed:\n raise\n\n jsonified_response = json.loads(raw_response.text)\n user_profile = jsonified_response\n\n return user_profile", "def get_user_profile(self):\n return self.user.profile", "def get_user(self):\n try:\n return User.objects.get(id=self.user_id)\n except User.DoesNotExist:\n return AnonymousProfile()", "def get_user_by_email(email):\n user = User.query.filter(User.email == email).first()\n result = userSchema.dump(user)\n return jsonify(result)", "def get(\n user_id=None, discord_id=None, google_id=None, email=None,\n ):\n temp_cursor = user_db.cursor()\n\n pos_selectors = {\n \"user_id\": user_id,\n \"discord_id\": discord_id,\n \"google_id\": google_id,\n \"email\": email,\n }\n\n user = None\n for selector in pos_selectors.keys():\n sel_value = pos_selectors[selector]\n if sel_value is None:\n continue\n user = temp_cursor.execute(\n \"SELECT * FROM users WHERE \" + selector + \" = ?\", (sel_value,)\n ).fetchone()\n\n if user is not None:\n return User_Info.init_from_db(user)\n\n return None", "def get_user_by_email(email):\n\n return User.query.filter(User.email == email).first()", "def get_user_by_email(email):\n\n return User.query.filter(User.email == email).first()", "def get(user_id=None, username=None, email=None, api_key=None):\r\n user_query = User.query\r\n\r\n if username is not None:\r\n return user_query.filter(User.username == username).first()\r\n\r\n if user_id is not None:\r\n return user_query.filter(User.id == user_id).first()\r\n\r\n if email is not None:\r\n return user_query.filter(User.email == email).first()\r\n\r\n if api_key is not None:\r\n return user_query.filter(User.api_key == api_key).first()\r\n\r\n return None", 
"def get_userid_profile(db, user_id):\n return db['user'].find_one({'_id': user_id})", "def find_by_email(cls, email):\n return User.objects.filter(email=email).first()", "def get_user(self, user_id=None, email=None):\n\n if not user_id and not email:\n raise ValueError('Either a user id or email must be provided.')\n\n query = User.objects.all()\n\n if user_id:\n query = query.filter(id=user_id)\n\n if email:\n email = normalize_email_address(email)\n query = query.filter(username=email)\n\n try:\n return query.get()\n except User.DoesNotExist:\n raise UserNotFound('The user with the provided parameters is not found.')", "def profile() -> str:\n session_id = flask.request.cookies.get('session_id')\n user = AUTH.get_user_from_session_id(session_id)\n if user:\n return flask.jsonify({\"email\": user.email}), 200\n else:\n flask.abort(403)", "def user_by_email(email):\n user = User.query.filter(User.email == email).one_or_none()\n return user", "def get_by_email(self, email):\n return self.session.query(User).filter_by(email=email).first()", "def retrieve_user_details(self, email):\n if self.database is None:\n raise Exception(\"No database.\")\n if email is None or len(email) == 0:\n raise Exception(\"Bad parameter.\")\n return self.database.retrieve_user_details(email)", "def get(self, email: str):\r\n args = authParser.parse_args()\r\n\r\n if not validators.email(email):\r\n response = {\r\n \"status\": \"error\",\r\n \"details\": {\r\n \"message\": \"Input a valid email address\"\r\n }\r\n }\r\n return response, http.client.BAD_REQUEST\r\n\r\n user = UserModel.query.filter(UserModel.email == email).first()\r\n\r\n if not user:\r\n # The email doesnt exist\r\n return {\r\n \"status\": \"error\",\r\n \"details\": {\r\n \"message\": \"Not Found\"\r\n }\r\n }, http.client.NOT_FOUND\r\n user = admin_namespace.marshal(user, user_model)\r\n return {\r\n \"status\": \"success\",\r\n \"details\": {\r\n \"result\": user\r\n }\r\n }, http.client.OK", "def get_user(self, email):\n try:\n return RegisterUser.objects.get(email=email)\n except:\n return None", "def profile(self, user, **kwargs):\n # pylint: disable=no-member\n return self._get(API.USER.value.format(user_id=user), **kwargs)", "def get_user_by_email(email):\r\n\t\tif email:\r\n\t\t\ttry:\r\n\t\t\t\tuser = User.objects.filter(email=email, is_active=True)[0]\r\n\t\t\t\treturn user\r\n\t\t\texcept:\r\n\t\t\t\tpass\r\n\r\n\t\treturn None", "def get_user_me(self):\n url = 'users/me'\n result = self.get(url)\n return result.get('user', result)", "def profile_for(email):\n return OrderedDict([(b'email', email), (b'uid', b'10'), (b'role', b'user')])", "def lookup_email(email):\n user = User.objects(email=email).first()\n return user", "def get_user_profile(self, username: str = None) -> Profile:\n if username:\n res: dict[str, Any] = self.api.users.get(user_ids=username).pop()\n else:\n res = self.api.users.get().pop()\n return VkontakteConnector.build_profile(res)", "def profile(self) -> dict:\n endpoint = \"/api/users/profile/\"\n ret = self._request(endpoint=endpoint)\n return ret", "def fetch_user(uid):\n users = find_users(uid=uid)\n if users:\n return users[0]._asdict()\n return (\"Not found\", 404)", "def get_user(self):\n if not self.is_valid():\n return None\n # error checking done in: clean_email\n # NOTE: all emails are stored in lower-case\n e = self.clean_email().lower()\n return User.objects.get(email=e)", "def print_user(email):\n # Get the first user where _id=email\n user = models.User.objects.raw({\"_id\": email}).first()\n 
print(user.email)\n print(user.heart_rate)\n print(user.heart_rate_times)\n return user", "def get(self, username):\n\t\tdb = getattr(g, 'db', None)\n\n\t\tqry = \"SELECT username,email,active,steamid FROM\\\n\t\t\tprofiles WHERE username = %s;\"\n\t\twith db as cursor:\n\t\t\tcursor.execute(qry, (username,))\n\n\t\treturn {'profile':cursor.fetchone()}", "def get_user(self, activation_key):\n try:\n profile = self.get(activation_key=activation_key)\n return profile.user\n except self.model.DoesNotExist:\n return None", "def get_user_profile(user_id, org_id):\r\n config_settings = config.get_config()\r\n db_client = boto3.resource('dynamodb', region_name=config_settings['region'])\r\n table = db_client.Table(config_settings['table_name'])\r\n\r\n try:\r\n response = table.get_item(Key={'user_id':str(user_id), 'org_id': str(org_id)})\r\n except ClientError as e:\r\n logger.error(\"Failed to retrieve profile for user {} and organization {}:{}\"\r\n .format(user_id, org_id, e.response['Error']['Message']))\r\n else:\r\n if \"Item\" in response:\r\n return response[\"Item\"]\r\n\r\n return None", "def get_one_user_by_email(email):\n return UserModel.query.filter_by(email=email, deleted_at=None).first()", "def get_profile():\n logger.debug(\"entering function get_profile\")\n response = read_user_profile()\n logger.debug(\"exiting function get_profile\")\n return jsonify(response)", "def get_user_by_slug(self, slug):\n users = self.get_users({ 'profile_url': slug })\n if len(users) > 0:\n return users[0]", "def getProfile(self):\n # GET /profile\n debugMain('getProfile')\n return self._genericGet('/profile')", "def test_resource_user_resource_get_user_by_email_address_get(self):\n pass", "def get_profile(user):\n if user.is_authenticated():\n # Return the PootleProfile associated with authenticated users\n return user.get_profile()\n else:\n # Anonymous users get the PootleProfile associated with the 'nobody' user\n return User.objects.get(username='nobody').get_profile()", "def get_user_profile() -> ApiResponse:\n user_id = request.args.get('user_id')\n try:\n int(user_id)\n except ValueError as e:\n return RateMyDormApiResponse(None, 400, f\"User id was not a valid integer {e}\").response\n\n connection = get_connection()\n cursor = connection.cursor(buffered=True, named_tuple=True)\n\n params = {'user_id': user_id}\n query = \"\"\"SELECT username, first_name, last_name, email, profile_image, status, profile_bio, user_role\n FROM users\n WHERE user_id = %(user_id)s\n LIMIT 1\"\"\"\n cursor.execute(query, params)\n user = cursor.fetchone()\n logger.debug(user)\n\n payload = {}\n if user:\n reviews, images = get_user_history(user_id, cursor)\n user_dict = convert_single_row_to_dict(user)\n payload['user'] = user_dict\n payload['reviews'] = reviews\n payload['images'] = images\n\n logger.debug(payload)\n connection.close()\n response = RateMyDormApiResponse(payload, 200).response\n return response", "def get_by_slug(self, profile_slug):\n return User.gql(\"WHERE profile_slug = :1\", profile_slug).get()", "def _get_user_by_criteria(id_, email):\n criteria = dict()\n try:\n if id_:\n criteria[\"id_\"] = id_\n elif email:\n criteria[\"email\"] = email\n return User.query.filter_by(**criteria).one_or_none()\n except StatementError as e:\n print(e)\n return None", "def get_profile():\n\n if request['user_id']:\n\n user = User.select().where(User.id == request['user_id']).get()\n uSchema = UserSchema()\n jsonUser = uSchema.dumps(user)\n\n del request['user_id']\n return jsonUser.data\n\n return", "def 
get(self):\n\n user_id = get_jwt_identity()\n user = user_crud.get(user_id)\n if not user:\n abort(404, message=\"User not Found\")\n\n return user", "def search_user_by_email(email: str, session=Depends(transaction)):\n user = get_user_by_email(session, email)\n if user is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND,\n detail=\"User was not found.\")\n return UserResponse(id=user.id, email=user.email, name=user.name,\n address=user.address, latitude=user.latitude,\n longitude=user.longitude)", "def get_user_profile(uid, viewer_uid):\n user = User.find(uid)\n if not user:\n st_raise_error(ErrorCode.USER_NOT_EXIST)\n work_experiences = work_service.get_work_experiences(uid)\n endorsement = get_user_endorsement(uid, current_user.user.id)\n latest_comment = EndorseComment.find_latest_by_uid(uid)\n profile = user.to_dict()\n profile['endorsement'] = endorsement\n profile['work_experiences'] = work_experiences\n if latest_comment:\n profile['latest_comment'] = latest_comment.to_dict()\n contact = Contact.find_by_uid(uid, viewer_uid)\n profile['is_contact'] = contact is not None \\\n and contact.status == ContactStatus.Connected\n return profile", "def user_loader(self, email):\n if not self.um.check_file_exists(\"users.json\"):\n return\n\n if email not in self.users:\n return\n\n user = User()\n user.id = email\n return user", "def user(self):\n u = self.user_info\n return self.user_model.get_by_id(u['user_id']) if u else None", "def lookup_user_name(self, user_email):\n\n url = 'users'\n options = '?query={}'.format(user_email.lower())\n response = self._pagerduty_session_get(url, options).json()\n\n if len(response['users'][0]) == 0:\n return None, None\n\n return response['users'][0]['id'], response['users'][0]['name']", "def get_user(self):\n if \"user\" not in self._data:\n self._data[\"user\"] = User.objects.get(pk=self.kwargs[\"user_id\"])\n return self._data[\"user\"]", "def get(self, username):\n q = \"SELECT * FROM profiles where username = ?\"\n r = self._query(q, (username,), fetch='one')\n try:\n return r\n except Exception as e:\n raise e", "def test_user_retrieve(self):\n self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.get_token())\n response = self.client.get(reverse(\"account:user-profile\"))\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data.get('user').get('email'), \"testuser@gmail.com\")", "def get_user(user_id=None):\n users = storage.all('User')\n user = users.get('User' + \".\" + user_id)\n if user is None:\n abort(404)\n else:\n return jsonify(user.to_dict()), 200", "def get_profile(request):\n collected_values = {}\n\n # Only allow GET requests on this endpoint\n if request.method != 'GET':\n collected_values[\"success\"] = False\n collected_values[\"errmsg\"] = \"Wrong HTTP verb\"\n return JsonResponse(collected_values, status=400)\n\n # Extract params\n uid = request.GET['uid']\n key = request.GET['key']\n\n # Hardcoded key for security\n if key != SUPER_SECURE_STRING:\n collected_values[\"success\"] = False\n collected_values[\"errmsg\"] = \"Invalid Key\"\n return JsonResponse(collected_values, status=400)\n\n # Grab the user's profile information\n users = LUser.objects.filter(user_id=uid)\n user = users[0]\n\n # Collect values\n collected_values[\"user_info\"] = user.get_map()\n collected_values[\"success\"] = True\n\n LOGGER.info(\"Get Profile Result: %s\", user)\n return JsonResponse(collected_values, status=200)", "def get_profile(user_id):\n return User.objects.select_related('userprofile', 
'userstatistics') \\\n .get(pk=user_id)", "def get_user_profile(self):\n\t\treturn Job(SDK.PrlSrv_GetUserProfile(self.handle)[0])", "def get_profile(self):\n endpoint = '/profile'\n return self.get_request(endpoint)", "def get_user(self, uid: typing.Union[str, None] = None, cell: typing.Union[str, None] = None,\n email: typing.Union[str, None] = None) -> tuple:\n if (uid is not None) and (uid != \"\"):\n user_instance: UserModel = UserModel.query(UserModel.uid == uid).get()\n if isinstance(user_instance, UserModel):\n message: str = 'successfully retrieved user by uid'\n return jsonify({'status': True, 'payload': user_instance.to_dict(), 'message': message}), 200\n\n if (cell is not None) and (cell != \"\"):\n user_instance: UserModel = UserModel.query(UserModel.cell == cell).get()\n if isinstance(user_instance, UserModel):\n message: str = 'successfully retrieved user by cell'\n return jsonify({'status': True, 'payload': user_instance.to_dict(), 'message': message}), 200\n\n if (email is not None) and (email != \"\"):\n user_instance: UserModel = UserModel.query(UserModel.email == email).get()\n if isinstance(user_instance, UserModel):\n message: str = 'successfully retrieved user by email'\n return jsonify({'status': True, 'payload': user_instance.to_dict(), 'message': message}), 200\n\n return jsonify({'status': False, 'message': 'to retrieve a user either submit an email, cell or user id'}), 500", "def fetch_user(user_id):\n user = user_collection.find_one({\"_id\": user_id})\n user_bookmarks = list()\n for project_id in user[\"bookmarks\"]:\n project = project_collection.find_one({\"_id\": project_id})\n if project is None:\n continue\n bookmark_details = {\n \"PROJECT_ID\": str(project_id),\n \"projectTitle\": project[\"projectTitle\"],\n \"projectDescription\": project[\"projectDescription\"],\n }\n user_bookmarks.append(bookmark_details)\n user_contributions = list()\n for project_id in user[\"contributions\"]:\n project = project_collection.find_one({\"_id\": project_id})\n if project is None:\n continue\n contribution_details = {\n \"projectTitle\": project[\"projectTitle\"],\n \"projectDescription\": project[\"projectDescription\"],\n }\n user_contributions.append(contribution_details)\n user_dict = {\n \"username\": user[\"username\"],\n \"userid\": user[\"userid\"],\n \"email\": user[\"email\"],\n \"avatar\": user[\"avatar\"],\n \"githubURL\": user[\"githubURL\"],\n \"linkedinURL\": user[\"linkedinURL\"],\n \"stackoverflowURL\": user[\"stackoverflowURL\"],\n \"skills\": user[\"skills\"],\n \"bookmarks\": user_bookmarks,\n \"contributions\": user_contributions,\n }\n return user_dict", "def get_user_by_id(self, user_id: str) -> typing.Optional[User]:\n query_params = {\n \"$select\": \",\".join(\n [\"displayName\", \"id\", \"mail\", \"department\", \"companyName\"]\n ),\n }\n\n request = self._prepare_request(\n method=\"get\",\n resource_path=f\"users/{user_id}\",\n query_params=query_params,\n )\n with requests.Session() as session:\n response = session.send(request=request)\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError as exception:\n if response.status_code == 400:\n return None\n raise exception\n user = response.json()\n return User.from_dict(**user) if user.get(\"id\") else None", "def _find_user_by_email_address(self, email_address):\n # XXX: Maybe model is more appropriate place for such a method\n self.user = meta.session.query(User).filter_by(email=email_address).filter_by(verification=None).first()\n return self.user", "def 
get_user(self):\n session_key = request.get_cookie(\n self.conf['auth.cookie_key'],\n secret=self.conf['auth.cookie_secret']\n )\n if session_key:\n with atomic(self.conf['auth.dbfile']) as cursor:\n try:\n username, email = next(cursor.execute(\"\"\"\n SELECT username, email\n FROM sessions\n INNER JOIN users ON users.userid = sessions.userid\n WHERE sessions.key = ?\n AND sessions.started <= (SELECT\n datetime('now', '+3 hour'))\n \"\"\", (session_key,)))\n except StopIteration:\n return\n else:\n return User(username, email, get_usergroups(cursor, \n username))", "def get_user_by_email(tx: Transaction, user_email: str) -> BoltStatementResult:\n\n # NOTE: tag labels are hardcoded here. If they change in tags csv, must be changed here.\n query = f\"\"\"\n MATCH (user:Person {{email: '{user_email}'}})\n OPTIONAL MATCH (user)-->(skill_tag:Tag:CanHelpWithTag)\n OPTIONAL MATCH (user)-->(passion_tag:Tag:PassionsTag)\n RETURN user, COLLECT(DISTINCT skill_tag.name) AS help_others, COLLECT(DISTINCT passion_tag.name) AS passions\"\"\"\n return tx.run(query)", "def get_user(pk):\n user = UserService(user=pk).get_user_by_id()\n return CustomResponse(data=user).response()", "def get_user_profile_info(token):\n user_profile_endpoint = 'https://api.spotify.com/v1/me'\n headers = {'Authorization': 'Bearer %s' % token}\n\n r = requests.get(user_profile_endpoint, headers=headers)\n if r.status_code != 200:\n return None\n profile = r.json()\n\n display_name = profile['display_name']\n profile_id = profile['id']\n email = profile['email']\n\n return User(display_name=display_name,\n profile_id=profile_id,\n email=email)", "async def get_profile_by_user_id(self, *, user_id: int) -> ProfileInDB:\n profile = await self.db.fetch_one(query=GET_PROFILE_BY_USER_ID_QUERY, values={\"user_id\": user_id})\n if not profile:\n return None\n return ProfileInDB(**profile)", "def getUserProfile(request):\n user = request.user\n serializer = UserSerializer(user, many=False)\n return Response(serializer.data)", "def get_account_for_email(cls, email):\n assert email\n key = '<%s>' % email\n return cls.get_by_key_name(key)", "def fusion_api_get_user(self, uri=None, param='', api=None, headers=None):\n return self.user.get(uri=uri, api=api, headers=headers, param=param)", "def get(self, id):\n\t\ttry:\n\t\t\tflask_app.logger.debug('We are getting the user: %d', id)\n\t\t\treturn user_service.get(id)\n\t\texcept AssertionError as e:\n\t\t\tuser_space.abort(400, e.args[0], status = \"Could not get user\", statusCode = \"400\")\n\t\texcept Exception as e:\n\t\t\tuser_space.abort(500, e.args[0], status = \"Could not get user\", statusCode = \"500\")", "def get_login_user_profile(uid):\n # fetch the user info from db,\n # just in case the info has been updated somewhere\n json_user = User.find(uid).to_dict_with_mobile()\n json_user['work_experiences'] = work_service.get_work_experiences(uid)\n return json_user", "def get(self, username):\n return User.find_by_username_or_email(username)", "def _get_user(self, name: str, users: list, first_time=True) -> Optional[dict]:\n try:\n user = next(filter(lambda x: x['profile'].get('real_name_normalized') == name, users))\n except StopIteration:\n name = slughifi(name).decode('utf-8')\n if first_time:\n return self._get_user(name, users, first_time=False)\n return None\n return user", "def get(self, **kwargs) -> (dict, int):\n params = {\"email\": kwargs[\"email\"]}\n\n if not params[\"email\"]:\n return MissingInformation.get_response(), 400\n\n taken = 
request_utils.value_in_database(\"UserTable\", \"email\", params[\"email\"])\n return {\"success\": True, \"taken\": taken}, 200", "def user_profile(token, u_id):\n # pylint: disable=unused-argument\n # NB: Supressed this warning because token is in fact used in\n # the decorator, however pylint doesn't check for this.\n user = database.get_user_data(u_id)\n return {\"user\": user}", "def get_user_account_from_email(email, default='raise', active_only=True):\n email = email.strip()\n try:\n return auth.models.User.objects.get(email__iexact=email,\n is_active=active_only)\n except auth.models.User.DoesNotExist:\n # User does not exist\n if default == 'raise':\n raise\n else:\n return default\n except auth.models.User.MultipleObjectsReturned:\n # The system expects to only have one user record per email,\n # so let's reraise the error to have it fixed in the database.\n raise auth.models.User.MultipleObjectsReturned(\n 'Found multiple records for user with email %r' % email)", "def get_username_profile(db, username):\n return db['user'].find_one({'username': username})", "def fetch_user(self, username=None,email=None, nationalid=None):\n #Specify query param to use depending on set parameters\n lookup = username if username else email if email else nationalid\n #Specify field to use depending on set parameters\n lookup_name = \"Username\" if username else \"Email\" if email else \"NationalID\"\n\n cursor = DB.instance.connection.cursor()\n query = \"SELECT users.*, usertypes.Name as UserType FROM users INNER JOIN usertypes ON (users.UserTypes_idUserTypes=usertypes.idUserTypes) WHERE {} = '{}'\".format(lookup_name,lookup)\n cursor.execute(query)\n\n return cursor.fetchall()" ]
[ "0.74857944", "0.7336757", "0.7331344", "0.7298807", "0.72768545", "0.72726756", "0.71711785", "0.7168781", "0.7130947", "0.71272796", "0.70798576", "0.70757645", "0.7041153", "0.6998165", "0.6979574", "0.6948253", "0.69294655", "0.69286436", "0.69207954", "0.6915467", "0.6889324", "0.6851269", "0.6831796", "0.68107396", "0.6796974", "0.67915887", "0.67849123", "0.6779606", "0.67747545", "0.6767703", "0.6746741", "0.6746741", "0.6745075", "0.67160803", "0.67008847", "0.66967696", "0.6667437", "0.66525584", "0.66485906", "0.6640964", "0.66395074", "0.6638115", "0.6614909", "0.66033083", "0.65926147", "0.65923053", "0.65457416", "0.65454316", "0.65370077", "0.65244913", "0.65115523", "0.6497214", "0.6488762", "0.6480366", "0.64563465", "0.64071906", "0.6398771", "0.63882023", "0.6357339", "0.635431", "0.6344992", "0.63332784", "0.63282776", "0.6325696", "0.63137186", "0.63085836", "0.630095", "0.6294203", "0.62928414", "0.62746215", "0.6268606", "0.6268482", "0.6247581", "0.6242491", "0.62385005", "0.6219381", "0.6216113", "0.62160414", "0.6211217", "0.6199708", "0.6199612", "0.61945075", "0.6186669", "0.6180995", "0.6175045", "0.6172664", "0.6158132", "0.6149945", "0.6149587", "0.61484647", "0.6146759", "0.612436", "0.6123439", "0.61122805", "0.6106082", "0.610149", "0.60961145", "0.60853857", "0.6083687", "0.60769796" ]
0.8088444
0
Endpoint to get multiple user profiles at once
def get_user_profiles(args):  # GET
    limit = args.get("limit", type=int) if args.get("limit") else 0
    # NOTE checks if the string value of hasateam is equal to "true" because HTTP protocol only passes strings
    hasateam = args.get("hasateam", "").lower() == "true"
    if hasateam:
        users = list(coll("users").find({"hasateam": hasateam}).limit(limit))
    else:
        users = list(coll("users").find({}).limit(limit))
    for user in users:
        user["user_id"] = user.pop("_id")
    return {"user_profiles": users}, 200
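A hypothetical route registration for this bulk endpoint, again assuming Flask. Flask's request.args is a werkzeug MultiDict, whose get(key, type=int) signature matches how the handler reads "limit", so it can be passed through directly.

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route("/users", methods=["GET"])
def user_profiles_route():
    # request.args supports .get("limit", type=int) and .get("hasateam", ""),
    # exactly the interface get_user_profiles expects.
    body, status = get_user_profiles(request.args)
    return jsonify(body), status

# e.g. GET /users?limit=10&hasateam=true returns up to 10 users that have a team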
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def user_list(request):\n if request.method == 'GET':\n user_info = UserData.objects.all()\n serializer = UserProfileSerializer(user_info, many=True)\n return JSONResponse(serializer.data)\n else:\n return JSONResponse('Using wrong api.', status=404)", "def getUserProfile(request):\n user = request.user\n serializer = UserSerializer(user, many=False)\n return Response(serializer.data)", "async def test_retrieve_many(self):\n expected = [{\n '_id': 'id',\n 'name': 'name',\n 'version': 4,\n 'status': 'active'\n }]\n rsps = respx.get(f'{PROVISIONING_API_URL}/users/current/provisioning-profiles') \\\n .mock(return_value=Response(200, json=expected))\n profiles = await provisioning_client.get_provisioning_profiles(5, 'active')\n assert rsps.calls[0].request.url == \\\n f'{PROVISIONING_API_URL}/users/current/provisioning-profiles?version=5&status=active'\n assert rsps.calls[0].request.headers['auth-token'] == 'header.payload.sign'\n assert profiles == expected", "def list_profiles(self, params):\n return self.profiles", "def get_user_profile(self):\n return self.request('get', 'id/users')", "def get(self, request, format=None):\n queryset = request.user.following.all()\n paginated = self.paginate_queryset(queryset)\n return self.get_paginated_response(UserProfileSerializer(paginated, many=True).data)", "def users(self, per_page=None, page=None):\r\n params = base.get_params(None, locals())\r\n url = '{0}/{1}'.format(self.get_url(), 'users')\r\n return http.Request('GET', url, params), parsers.parse_json", "def fetch_all(profile):\n params = {}\n params[\"profile\"] = profile\n response = utils.do_request(instanceprofile, \"get\", params)\n data = utils.get_data(\"InstanceProfiles\", response)\n return data", "def users_get(self) -> Dict[str, list]:\n self.__logger.debug('Eva.users_get called')\n return self.__http_client.users_get()", "def users(self, request, pk):\n store = Store.objects.get(id=pk)\n user_store_ids = UserStore.objects.filter(store=store).values('user__id')\n users = User.objects.filter(id__in=user_store_ids)\n return Response(UserSerializer(users, many=True).data)", "def profiles():\n images = get_uploaded_images()\n records = db.session.query(UserProfile).all()\n return render_template('profiles.html', images=images, records =records)", "def get_profile(request):\n collected_values = {}\n\n # Only allow GET requests on this endpoint\n if request.method != 'GET':\n collected_values[\"success\"] = False\n collected_values[\"errmsg\"] = \"Wrong HTTP verb\"\n return JsonResponse(collected_values, status=400)\n\n # Extract params\n uid = request.GET['uid']\n key = request.GET['key']\n\n # Hardcoded key for security\n if key != SUPER_SECURE_STRING:\n collected_values[\"success\"] = False\n collected_values[\"errmsg\"] = \"Invalid Key\"\n return JsonResponse(collected_values, status=400)\n\n # Grab the user's profile information\n users = LUser.objects.filter(user_id=uid)\n user = users[0]\n\n # Collect values\n collected_values[\"user_info\"] = user.get_map()\n collected_values[\"success\"] = True\n\n LOGGER.info(\"Get Profile Result: %s\", user)\n return JsonResponse(collected_values, status=200)", "def fusion_api_get_server_profiles(self, uri=None, param='', api=None, headers=None):\n return self.profile.get(uri=uri, api=api, headers=headers, param=param)", "def getUsers(client, req):\n client.sendTarget(req[\"id\"], key=\"get.users\", payload={\"payload\": magic.users})", "def profile(self) -> dict:\n endpoint = \"/api/users/profile/\"\n ret = 
self._request(endpoint=endpoint)\n return ret", "def test_users_profile_list_return(self):\n self.client.credentials(\n HTTP_AUTHORIZATION='Token ' + self.login_response.data['token'])\n self.response = self.client.get(\"/api/users/users_list/\")\n self.assertEqual(self.response.status_code, status.HTTP_200_OK)\n self.assertIn('users', self.response.data)", "def get_user_list(self, connection):\n http = get_web_service(connection)\n try:\n req = http.request('GET', connection[\"url\"] + '/users/?_format=json')\n data = json.loads(req.data.decode('utf-8'))\n # print(json.dumps(data, indent=4, sort_keys=True))\n return data\n except urllib3.exceptions.HTTPError as e:\n print(\"Connection error\")\n print(e)", "def profiles(self):\n if not self._profiles:\n self.GetAllProfiles()\n return self._profiles", "def get(self, request):\n all_users = UserModel.objects.all()\n all_users_serializer = UserSerializer(all_users, many=True)\n return Response(all_users_serializer.data)", "def get_profiles(count, credentials_type=constants.API_KEY_CREDENTIALS_TYPE, verbose=False):\n print('getting profiles through {}...'.format(credentials_type))\n\n # getting profiles information\n profile_ids = plus_api.get_unique_profile_ids(count, credentials_type=credentials_type, verbose=verbose)\n profile_urls = list(map(plus_api.profile_id_to_google_plus_url, profile_ids))\n # display_names, image_urls = get_profile_display_names_and_image_urls(profile_ids, credentials_type=credentials_type,\n # verbose=verbose)\n display_names, image_urls = get_profile_display_names_and_image_urls_with_getBatchGet(profile_ids,\n credentials_type=credentials_type,\n verbose=verbose)\n\n # saving as plain text\n txt_filename = 'results/people-api-profiles.txt'\n if verbose:\n print('saving profiles as plain text to {}...'.format(txt_filename))\n result_str = ''\n for profile in zip(profile_ids, profile_urls, display_names, image_urls):\n result_str += '\\n'.join(profile) + '\\n\\n'\n profiles_file = open(txt_filename, 'w', encoding='utf-8')\n profiles_file.write(result_str)\n profiles_file.close()\n\n # saving as json\n json_filename = 'results/people-api-profiles.json'\n if verbose:\n print('saving profiles as json to {}...'.format(json_filename))\n result_list = []\n json_fields = ('profileId', 'profileUrl', 'displayName', 'imageUrl')\n for profile in zip(profile_ids, profile_urls, display_names, image_urls):\n result_list.append(dict(zip(json_fields, profile)))\n json_str = json.dumps(result_list)\n profiles_file = open(json_filename, 'w', encoding='utf-8')\n profiles_file.write(json_str)\n profiles_file.close()", "def get_users():\n return Response(f\"{User.get_all_users()}\", 200, mimetype='text/plain')", "def get_users():\n\n error_on_unauthorized()\n\n users = User.query.order_by(User.id)\n total_num = users.count()\n\n if total_num == 0:\n return jsonify(total=0, uploads=[])\n\n try:\n count = int(request.args.get('max', total_num))\n page = int(request.args.get('page', 1))\n origin = request.args.get('origin', None)\n\n if count <= 0 or page <= 0:\n raise APIError(422, \"Query parameters out of range\")\n\n if origin is not None:\n users = users.filter_by(origin=origin)\n\n begin = (page - 1) * count\n end = min(begin + count, total_num)\n \n return jsonify(total=total_num, users=[u.to_dict() for u in users.all()[begin:end]]), 200\n except ValueError:\n raise APIError(422, \"Invalid query parameter\")", "def get_profile():\n logger.debug(\"entering function get_profile\")\n response = read_user_profile()\n 
logger.debug(\"exiting function get_profile\")\n return jsonify(response)", "def get_users():\n users = storage.all('User')\n users_list = []\n for user in users.values():\n users_list.append(user.to_dict())\n return jsonify(users_list), 200", "def profile():\n token = request.json['token']\n u = user.User.query.filter(user.User.token == token).first()\n if u is None:\n abort(404)\n return jsonify(u.to_dict())", "def get(self, new_data):\n\n user_id = get_jwt_identity()\n user = user_crud.get(user_id)\n if not user.is_superuser:\n abort(401,\n message=\"You do not have permission to view this endpoint\")\n if 'limit' in new_data and 'skip' in new_data:\n users = user_crud.get_multi(skip=new_data['skip'],\n limit=new_data['limit'])\n elif 'limit' in new_data:\n users = user_crud.get_multi(limit=new_data['limit'])\n elif 'skip' in new_data:\n users = user_crud.get_multi(skip=new_data['skip'])\n else:\n users = user_crud.get_multi()\n\n return users", "def get_user_profiles(self):\n print 'inside get user profiles'\n print 'self.username :' + self.username\n g = GoogleAnalyticsAPI(self.username)\n if g:\n print 'GA client exists'\n user_accounts = g.get_user_accounts()\n return user_accounts.get('items')\n else:\n print 'GA client does not exist'\n return []", "def get(self, request):\n current_user = request.user\n user_profile = self.serializer_class(current_user.profile)\n return Response({\n \"message\": PROFILE_MSGS['MY_PROFILE'],\n \"profile\": user_profile.data\n }, status=status.HTTP_200_OK)", "def get_all_users():", "def get_users():\n username = request.args.get('username')\n netAdminToolDB = app.config['DATABASE']\n if username != None:\n users = []\n users.append(netAdminToolDB.get_user_name(username))\n else:\n users = netAdminToolDB.get_user()\n\n userList = []\n for user in users:\n uri = url_for('get_user', user_id=user.id,_external=True)\n #role = netAdminToolDB.get_role(user.role_id)\n userList.append({\n 'id': user.id,\n 'uri': uri,\n 'username': user.username,\n 'display_name': user.display_name,\n 'role': user.role_name\n })\n if userList == []:\n return jsonify({'error': 'No users found'}), 404\n\n return jsonify({'users': userList })", "def GetAllProfiles(self):\n profiles = []\n feed_uri = self._gd_client.GetFeedUri('profiles')\n while feed_uri:\n feed = self._gd_client.GetProfilesFeed(uri=feed_uri)\n profiles.extend(feed.entry)\n feed_uri = feed.FindNextLink()\n self._profiles = profiles", "def user_list(ctx):\n data = ctx.obj.get_all_users()\n output_json_data(data)", "def get_users():\n users = models.User.query.all()\n friends_json = []\n for u in users:\n user = {\n 'id': u.id,\n 'name': u.name,\n 'email': u.email,\n 'regID': u.regid,\n 'photo': u.photo\n }\n friends_json.append(user)\n return jsonify({'users': friends_json}), 200", "def profile():\n\n if not session.get('oauth_token'):\n return redirect(url_for('login'))\n tokenString = \"bearer {0}\".format(session['oauth_token']['access_token'])\n headers = {\"Authorization\": tokenString}\n profileInfo = {'access_token': session['oauth_token']['access_token']}\n\n # get user summary\n userinfourl = '{}/userinfo'.format(baseUAAurl)\n userinfo = json.loads(requests.get(\n userinfourl, headers=headers, verify=False).text)\n session['userinfo'] = userinfo\n profileInfo['userinfo'] = json.dumps(session['userinfo'])\n\n # Method 1 : get user roles by orgs and space\n usersummaryurl = '{0}/v2/users/{1}/summary'.format(\n baseAPIurl, userinfo['user_id'])\n usersummary = json.loads(requests.get(\n usersummaryurl, 
headers=headers, verify=False).text)\n\n if usersummary.get('entity'):\n spaceWiseUserRoles = getSpaceWiseUserRoles(usersummary['entity'])\n else:\n # Method 2 : get user roles by orgs and space\n spaceWiseUserRoles = {}\n spaceurl = baseAPIurl + '/v2/spaces'\n spaceresponse = requests.get(spaceurl, headers=headers, verify=False)\n space_json_data = json.loads(spaceresponse.text)\n for spaceresource in space_json_data['resources']:\n entity = spaceresource['entity']\n spaceGuid = spaceresource['metadata']['guid']\n\n # get all auditors\n auditorurl = baseAPIurl + entity['auditors_url']\n auditorresponse = json.loads(requests.get(\n auditorurl, headers=headers, verify=False).text)\n if isInThisRole(auditorresponse, userinfo['user_name']):\n spaceWiseUserRoles[spaceGuid] = {\n 'role': 'auditor',\n 'name': spaceresource['entity']['name']\n }\n\n # get all developers\n devurl = baseAPIurl + entity['developers_url']\n devresponse = json.loads(requests.get(\n devurl, headers=headers, verify=False).text)\n if isInThisRole(devresponse, userinfo['user_name']):\n spaceWiseUserRoles[spaceGuid] = {\n 'role': 'developer',\n 'name': spaceresource['entity']['name']\n }\n\n # get all managers\n managerurl = baseAPIurl + entity['managers_url']\n managerresponse = json.loads(requests.get(\n managerurl, headers=headers, verify=False).text)\n if isInThisRole(managerresponse, userinfo['user_name']):\n spaceWiseUserRoles[spaceGuid] = {\n 'role': 'manager',\n 'name': spaceresource['entity']['name']\n }\n\n profileInfo['spaceWiseUserRoles'] = json.dumps(spaceWiseUserRoles)\n session['spaceWiseUserRoles'] = spaceWiseUserRoles\n\n # get user apps from all spaces\n url = '{}/v2/apps'.format(baseAPIurl)\n response = requests.get(url, headers=headers, verify=False)\n appsData = json.loads(response.text)\n appsUrls = {}\n\n # user accessible app url\n for resource in appsData['resources']:\n routes_url = baseAPIurl + \\\n resource['entity']['routes_url']\n routes_url_response = json.loads(requests.get(\n routes_url, headers=headers, verify=False).text)\n for app in routes_url_response['resources']:\n hostname = app['entity']['host']\n appsUrls[hostname] = {\n 'url': 'http://{}.local.pcfdev.io'.format(hostname),\n 'space_guid': app['entity']['space_guid'],\n 'userRole': getSpaceRole(spaceWiseUserRoles, app['entity'][\n 'space_guid'], userinfo['user_name'])}\n profileInfo['apps'] = appsUrls\n\n organization_guid = getOrganizationId(session, appsData)\n profileInfo['org_id'] = organization_guid\n profileInfo['org_users'] = json.dumps(getOrganizationUsers(\n session, organization_guid))\n return render_template('profile.html', data=profileInfo)", "def get_users():\r\n page = request.args.get('page', 1, type=int)\r\n per_page = min(request.args.get('per_page', 10, type=int), 100)\r\n data = User.to_collection_dict(User.query, page, per_page, 'api.get_users')\r\n return jsonify(data)", "def getProfile(self):\n # GET /profile\n debugMain('getProfile')\n return self._genericGet('/profile')", "def get_users(self):\n users = []\n page = 1\n while not len(users) % 100:\n users += self._get('/users?{0}'.format(urllib.urlencode({'per_page': 100, 'page': page})))\n if not users:\n break\n page += 1\n return users", "def test_list_profiles(\n api_client, enable_premium_requirement, profile_factory, user_factory\n):\n password = \"password\"\n user = user_factory(has_premium=True, password=password)\n api_client.log_in(user.primary_email.email, password)\n\n profile = profile_factory(km_user__user=user)\n\n url = 
f\"/know-me/users/{user.km_user.pk}/profiles/\"\n response = api_client.get(url)\n\n assert response.status_code == status.HTTP_200_OK\n assert response.json() == [\n {\n \"id\": profile.pk,\n \"url\": api_client.build_full_url(\n f\"/know-me/profile/profiles/{profile.pk}/\"\n ),\n \"created_at\": serialized_time(profile.created_at),\n \"updated_at\": serialized_time(profile.updated_at),\n \"is_private\": profile.is_private,\n \"name\": profile.name,\n \"permissions\": {\"read\": True, \"write\": True},\n \"topics_url\": api_client.build_full_url(\n f\"/know-me/profile/profiles/{profile.pk}/topics/\"\n ),\n }\n ]", "def list(self, request):\n\n user_profile = get_object_or_404(UserProfile, user=request.user)\n #   Get all sent accepted invitations\n sent = user_profile.creator_friendships.filter(status=1)\n # Get all received accepted invitations\n received = user_profile.invited_friendships.filter(status=1)\n #   Combine results to get all friends:\n friends = []\n for friendship in sent:\n friends.append(UserProfileSerializer(friendship.user_2).data)\n for friendship in received:\n friends.append(UserProfileSerializer(friendship.user_1).data)\n return Response(friends, status=rest_status.HTTP_200_OK)", "def profiles():\n \n if 'username' in session:\n profiles = mongo.db.profiles.find()\n return render_template('pages/profiles.html', title='Profiles', profiles = profiles)\n flash('Please login to view user profiles.', 'warning')\n return redirect(url_for('login'))", "async def get_users(request):\n\n page = request.GET.getone(\"page\", None)\n page_size = request.GET.getone(\"page_size\", None)\n filter_name = request.GET.getone(\"q\", \"\")\n filter_admin = request.GET.getone(\"filter_admin\", \"false\")\n\n try:\n count_only = request.GET.getone(\"count_only\").lower() == \"true\"\n except (ValueError, KeyError):\n count_only = False\n\n if page:\n try:\n page = int(page)\n except (ValueError, TypeError):\n return web.Response(text=\"Incorrect value for page\", status=400)\n page = 1 if page < 1 else page\n\n if page_size:\n try:\n page_size = int(page_size)\n except (ValueError, TypeError):\n return web.Response(text=\"Incorrect value for page_size\", status=400)\n page_size = 1 if page_size < 1 else page_size\n\n query = request.cirrina.db_session.query(User)\n\n if filter_admin.lower() == \"true\":\n query = query.filter(User.is_admin)\n\n if filter_name:\n query = query.filter(User.username.like(\"%{}%\".format(filter_name)))\n\n nb_users = query.count()\n query = query.order_by(User.username)\n\n if page and page_size:\n users = query.limit(page_size).offset((page - 1) * page_size).all()\n else:\n users = query.all()\n\n data = {\"total_result_count\": nb_users}\n if not count_only:\n data[\"results\"] = [\n {\"id\": user.id, \"username\": user.username, \"is_admin\": user.is_admin}\n for user in users\n ]\n\n return web.json_response(data)", "def describe_users(AuthenticationType=None, MaxResults=None, NextToken=None):\n pass", "def get_users(self, headers=None, query_params=None):\n uri = self.url + \"/users\"\n uri = uri + build_query_string(query_params)\n return requests.get(uri, headers=headers)", "def users_list(request):\n users = User.objects.all()\n serializer = UserSerializer(users, many=True)\n return Response(serializer.data)", "def list_users(profile=None, api_key=None):\n return salt.utils.pagerduty.list_items(\n \"users\", \"id\", __salt__[\"config.option\"](profile), api_key, opts=__opts__\n )", "def list_user_info(service):\n profile = 
service.users().getProfile(userId='me').execute()\n return profile", "def profiles_names(self):\n url = get_url('profiles')\n response = self._get(url)\n raise_on_error(response)\n return response.json()", "def list_user():\n\tbegin = 0\n\tlength = 25\n\ttry:\n\t\tif request.json != None:\n\t\t\tbegin = int(request.json.get('begin', 0))\n\t\t\tlength = int(request.json.get('length', 25))\n\texcept:\n\t\tabort(403)\n\tif length > 100 :\n\t\tlength = 100\n\tuserList = User.list(begin, length)\n\tif userList == None:\n\t\tabort(400)\n\treturn jsonify({'users': map(lambda(e): e.output(), userList), 'begin': begin, 'length': len(userList)})", "def user_list(request):\r\n params = request.params\r\n order = params.get('order', None)\r\n limit = params.get('limit', None)\r\n user_list = UserMgr.get_list(order=order, limit=limit)\r\n ret = {\r\n 'count': len(user_list),\r\n 'users': [dict(h) for h in user_list],\r\n }\r\n return _api_response(request, ret)", "def get_all(path: str) -> tuple:\n if path == \"all\":\n user_data: dict = request.get_json()\n organization_id: str = user_data.get(\"organization_id\")\n return user_view.get_all_users(organization_id=organization_id)\n if path == \"active\":\n user_data: dict = request.get_json()\n organization_id: str = user_data.get(\"organization_id\")\n return user_view.get_active_users(organization_id=organization_id)\n if path == \"in-active\":\n user_data: dict = request.get_json()\n organization_id: str = user_data.get(\"organization_id\")\n return user_view.get_in_active_users(organization_id=organization_id)\n\n return jsonify({\"status\": False, \"message\": \"general error fetching users\"}), 500", "def get_all_user():\n user = UserModel.objects()\n return jsonify(user), 200", "def get(self, request):\n user = YouYodaUser.objects.get(auth_token=request.headers['Authorization'].replace('Token ', ''))\n serializer = ProfileEditSerializer(user)\n return Response(serializer.data)", "def get_profiles(self):\n profiles = [['Profile name', 'GUID']]\n r = self.system_cursor.execute('{Call wtGetProfileList()}')\n for row in r.fetchall():\n profiles.append([row.PROFILE_NAME, row.PROFILE_GUID])\n return profiles", "def get_users(request):\n\n users_list = User.objects.all().values(\n 'id', 'username', 'first_name', 'last_name'\n )\n\n return HttpResponse(json.dumps(\n {'users': list(users_list)}\n ))", "def retrieve_users(payload):\n selection = User.query.order_by(User.id).all()\n users = []\n for item in selection:\n formatted_user = item.format()\n users.append(formatted_user)\n\n return jsonify({\n 'success': True,\n 'total': len(users),\n 'users': users\n })", "def test_view_all_users_profiles(self):\n self.authorize_user(self.user_login_details)\n response = self.client.get(self.profiles_url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def users(self, site = None):\r\n uids = self.user_ids()\r\n if uids:\r\n users = Account._byID(uids, True, return_dict = False)\r\n return [self.ajax_user(u) for u in users]\r\n else:\r\n return ()", "def get_users(self, *args, **kwargs):\n\n users_data = api.get_users(\n *args,\n api_key=self.__creds.api_key_v2, \n **kwargs)\n return [en.User(creds=self.__creds, **user_data) for user_data in users_data]", "def users_profile_query(self):\n email_query = request.args.get('email')\n if not email_query:\n self.logger.debug((messages.MISSING_FIELDS_ERROR % \"email\"))\n return messages.ERROR_JSON % (messages.MISSING_FIELDS_ERROR % \"email\"), 400\n try:\n user_data = 
self.auth_server.profile_query(email_query)\n except UnexistentUserError:\n self.logger.debug(messages.USER_NOT_FOUND_MESSAGE % email_query)\n return messages.ERROR_JSON % (messages.USER_NOT_FOUND_MESSAGE % email_query), 404\n return json.dumps(user_data), 200", "def get(self):\n\n offset = 0\n limit = Config.get_page_limit()\n args = request.args\n try:\n offset = request.args['offset']\n except Exception:\n pass\n\n try:\n limit = request.args['limit']\n except Exception:\n pass\n\n return self.get_request_handler(request.headers).get_all_users(offset=offset, limit=limit)", "async def all(self):\n log('retrieving profile cards..')\n start = time.monotonic()\n profiles = await asyncio.gather(*list(\n map(lambda profile: self.retrieve(str(profile['id'])), await self.list())\n ))\n elapsed = \"%0.2fs\" % (time.monotonic() - start,)\n log(\"retrieved {} profile cards in {}\".format(len(profiles), elapsed))\n return {\n \"hits\": len(profiles),\n \"updated\": time.time() * 1000,\n \"time\": elapsed,\n \"applicants\": list(filter(None, map(self.parse, profiles)))\n }", "async def get(self):\n await self.handle_request(self.users_api, 1)", "def profile_get(request):\n fields = [\"email\", \"token\", \"filename\"]\n\n # serializes the quert string to a dict (neeto)\n args = request.args\n auth = azure_refresh_token(args[\"token\"])\n if not auth[0]:\n return http400(\"Not Authenticated\")\n query_validation = validate_query_params(args, fields)\n # check that body validation succeeded\n if query_validation[1] != 200 or not validate_photo(args[\"filename\"]):\n return query_validation\n\n profile_storage = Storage(\"biit_profiles\")\n\n try:\n response = {\n \"data\": profile_storage.get(args[\"filename\"]),\n \"access_token\": auth[0],\n \"refresh_token\": auth[1],\n }\n\n return jsonHttp200(\"File Received\", response)\n except:\n return http400(\"File not found\")", "def test_more_profiles(self):\n\n for x in range(0, 10):\n User.objects.create_user(\n username=\"\".join((\"koalabear\", str(x))),\n email=\"\".join((\"koalabear@example.com\", str(x))),\n password=\"\".join((\"secret\", str(x)))\n )\n\n c = Client()\n response = c.get(reverse('profiles:index'))\n self.assertEqual(response.status_code, 200)\n self.assertEqual(len(response.data), 10)", "def get(self):\n queries = {\"wildcard_properties\": []}\n\n fullname_query = request.args.get(\"fullName\", None)\n email_query = request.args.get(\"email\", None)\n\n if fullname_query:\n queries[\"fullName\"] = f\"TextP.startingWith('{fullname_query}')\"\n queries[\"wildcard_properties\"].append(\"fullName\")\n if email_query:\n queries[\"fullName\"] = f\"TextP.startingWith('{email_query}')\"\n queries[\"wildcard_properties\"].append(\"email\")\n\n users = User.filter(limit=10, **queries)\n response = UserListSchema(many=True).dumps(users).data\n\n return jsonify_response(json.loads(response), 200)", "def users():\n access_token = session['access_token']\n return \"%s\" % list_users(access_token)", "def profile(self, name=\"johndoe\"):\r\n url = \"/account/%s\" % name\r\n return self.app.get(url, follow_redirects=True)", "def get(self):\n self.response.headers['Content-Type'] = 'application/json'\n self.response.out.write(self._robot.GetProfileJson())", "def users(self, predicate=None):\n \n if predicate is None:\n return self._get(\"users\").json()\n else:\n return self._get(\"users/search\", params={\"predicate\":predicate}).json()", "def get_users(self):\n fields = ['name', ]\n return self.get_data(\"myUsers\", fields)", "def 
list_network_profiles(self, **params):\r\n return self.get(self.network_profiles_path, params=params)", "def get_users():\n request_filters = request.args.get(\"filters\")\n request_filter_type = request.args.get(\"type\")\n parameters = None\n if request_filter_type and request_filters:\n parameters = {\"type\": request_filter_type, \"filters\": request_filters}\n\n if parameters is None:\n filters = {}\n else:\n if parameters[\"type\"] == \"in\":\n filters = make_filters(FilterType.IN, parameters[\"filters\"])\n elif parameters[\"type\"] == \"and\":\n filters = make_filters(FilterType.AND, parameters[\"filters\"])\n else:\n filters = make_filters(FilterType.OR, parameters[\"filters\"])\n\n users = user_service.get_users(filters)\n if not users:\n resp = make_response(\n dumps({\"status\": False, \"message\": \"No se encontraron usuarios\"}), 404\n )\n resp = make_response(dumps({\"status\": False, \"users\": users}), 200)\n return resp", "def get_profile(self):\n endpoint = '/profile'\n return self.get_request(endpoint)", "def read_user_profile():\n logger.debug(\"entering function read_profile\")\n find_query = {\"user_id\": current_user.id}\n project_query = {\"_id\": 0, \"user_id\": 0, \"password\": 0}\n result = run_find_one_query(config.USERS_COL, find_query, project_query, error=True,\n error_msg=NO_USER_ERR_MSG)\n logger.info(\"fetched user profile for %s\", current_user.id)\n response = get_success_response(data=result)\n logger.debug(\"exiting function read_profile\")\n return response", "def get(self, request, pk, format=None):\n account = Account.objects.get(pk=pk)\n users = account.get_users()\n s = UserSerializer(users)\n return Response(s.data)", "def user_list(request_dict):\n users = User.query.all()\n users_list = list()\n for user in users:\n users_list.append(user)\n\n return JSONTools.user_list_reply(users_list)", "def get_queryset(self):\n queryset = Profile.objects.all()\n userid = self.request.query_params.get('userid', None)\n if userid is not None:\n queryset = queryset.filter(user_id=userid)\n return queryset", "def get_profile(self,fields=('id','first-name','last-name','headline','summary')):\n\n if not self._access_token:\n raise FBError(\"Authentication needed!\")\n \n token = oauth.Token(self._access_token['oauth_token'], self._access_token['oauth_token_secret'])\n client = oauth.Client(self.consumer, token)\n profile_url = self.profile_url % \",\".join(fields)\n resp, content = client.request(profile_url,headers={\"x-li-format\":'json'})\n \n if resp['status'] != '200':\n print resp\n raise FBError(\"Invalid response %s.\" % resp['status'])\n \n try:\n return json.loads(content)\n except Exception, e:\n raise FBError(\"Invalid json %s.\" % unicode(e))", "def get_users():\n users = User.query.order_by(User.id).all()\n users = {user.id: user.username for user in users}\n\n response = jsonify({\"success\": True, \"users\": users})\n\n return response", "def get_users():\n return jsonify([\n users.to_dict()\n for users in models.storage.all('User').values()\n ])", "async def list(self):\n all = (await self.get(self.profiles_list))['results']\n log(\"retrieved participant metadata.\")\n return all or []", "def user_show(ctx, args):\n for user_id in args:\n data = ctx.obj.get_user_by_username(user_id)\n output_json_data(data)", "def get_users(self, params=None):\n url = 'users'\n if params:\n url += '?%s' % urllib.urlencode(params)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return 
service_client.ResponseBodyList(resp, body['users'])", "def list(self , request,*args,**kwargs):\n return super(UsersViewset,self).list(request,args,kwargs)", "def get_users_info(): \n \n data = user_obj.get_users_info()\n return data", "def test_list_profiles(self):\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(len(response.data['results']), 1)", "def getInterestedUsers():", "def get_user_boxes(my_id, user_ids, page):\n my_following = get_following(my_id)\n profiles = UserProfile.objects.select_related('user') \\\n .filter(pk__in=user_ids)\n paginator = Paginator(profiles, 12)\n current_page_user_ids = [profile.pk for profile in\n paginator.page(page)]\n are_following = list(set(my_following) & set(current_page_user_ids))\n\n previous_page, next_page = \"\", \"\"\n current_page = paginator.page(page)\n if current_page.has_previous():\n previous_page = current_page.previous_page_number()\n if current_page.has_next():\n next_page = current_page.next_page_number()\n \n response = [{\n 'id': profile.pk,\n 'full_name': profile.user.get_full_name(),\n 'about_me': escape(profile.about_me),\n 'image_url': get_thumbnail(profile.avatar, '100x100',\n crop='center').url,\n 'url': reverse('user-profile', args=[profile.pk]),\n 'is_following': True if profile.pk in are_following else False,\n 'previous': previous_page,\n 'next': next_page \n } for profile in paginator.page(page)]\n\n return simplejson.dumps(response)", "def get(self):\n return get_all_users()", "def get(self):\n return get_all_users()", "def get(self):\n return get_all_users()", "def get(self):\n return get_all_users()", "def get(self, request, *args, **kwargs):\n # pylint:disable=too-many-locals\n with translate_praw_exceptions(request.user):\n api = self.request.channel_api\n profile_username = self.kwargs[\"username\"]\n profile_user = User.objects.get(username=profile_username)\n object_type = self.kwargs[\"object_type\"]\n listing_params = get_listing_params(self.request)\n\n if object_type == \"posts\":\n serializer_cls = BasePostSerializer\n listing_getter = api.list_user_posts\n else:\n serializer_cls = BaseCommentSerializer\n listing_getter = api.list_user_comments\n\n object_listing = listing_getter(profile_username, listing_params)\n pagination, user_objects = get_pagination_and_reddit_obj_list(\n object_listing, listing_params\n )\n\n if object_type == \"posts\":\n user_objects = proxy_posts(user_objects)\n user_objects = list(\n filter(lambda object: not object.removed, user_objects)\n )\n else:\n spam_comments = Comment.objects.filter(\n comment_id__in=[object.id for object in user_objects], removed=True\n ).values_list(\"comment_id\", flat=True)\n\n user_objects = list(\n filter(lambda object: object.id not in spam_comments, user_objects)\n )\n\n return Response(\n {\n object_type: serializer_cls(\n user_objects,\n many=True,\n context={\n **self.get_serializer_context(),\n \"users\": {profile_username: profile_user},\n },\n ).data,\n \"pagination\": pagination,\n }\n )", "def list(self, request, *args, **kwargs):\n return super(UserViewSet, self).list(request, *args, **kwargs)", "def get_users(self):\r\n\t\tlogger.debug(\"Fetch users\")\r\n\t\t\r\n\t\treturn login.get_users()", "def show_users():\n users_list = []\n all_users = storage.all('User')\n for obj in all_users.values():\n users_list.append(obj.to_dict())\n return jsonify(users_list)", "def profile(request):\n current_user = request.user\n if current_user.profile is None:\n users = 
User.objects.all()\n else:\n matches = Profile.objects.filter(\n ~Q(gender=request.user.profile.gender)).all()\n print(matches)\n for match in matches:\n users = User.objects.filter(id=match.user.id).all()\n print(users)\n return render(request, 'profile.html', {\"users\": users})", "def get_users(filter, api_site_parameter, page = 1, pagesize = 30, sort = 'reputation'):\n path = \"users\"\n results = __fetch_results(path, api_site_parameter, inname= filter, page = page, pagesize = pagesize, sort = sort)\n return results", "def get_all_users(self) -> tuple:\n users_list: dict_list_type = [user.to_dict() for user in UserModel.query().fetch()]\n message: str = 'successfully retrieved active users'\n return jsonify({'status': True, 'payload': users_list, 'message': message}), 200", "def list_profiles(self):\n return self._get(\"posture\", box=BoxList)" ]
[ "0.7123131", "0.7092195", "0.67327285", "0.66832435", "0.6620146", "0.6597439", "0.6545158", "0.65444624", "0.6516381", "0.6486743", "0.6464873", "0.64039516", "0.6396808", "0.63603383", "0.63488567", "0.6325813", "0.63202125", "0.63083386", "0.62850004", "0.62842536", "0.6279527", "0.6278602", "0.62654066", "0.62647605", "0.62625444", "0.6257511", "0.625311", "0.6248016", "0.6215719", "0.6205903", "0.61977583", "0.6195367", "0.61775595", "0.6176527", "0.6169886", "0.6163345", "0.61597264", "0.61267257", "0.61249477", "0.6120301", "0.61152667", "0.60759294", "0.60698277", "0.60598356", "0.6054395", "0.60542715", "0.6053428", "0.6051207", "0.60498667", "0.6045603", "0.6040998", "0.6034327", "0.6023586", "0.6016734", "0.60131127", "0.6008626", "0.600778", "0.60067314", "0.5996815", "0.5981065", "0.5981044", "0.59667057", "0.59651494", "0.59589756", "0.5957387", "0.59497654", "0.5944124", "0.59432805", "0.59386003", "0.59340805", "0.5923561", "0.59160256", "0.5909372", "0.5895863", "0.58909076", "0.5888112", "0.5887687", "0.5887429", "0.58784735", "0.587796", "0.5873687", "0.5868585", "0.5861942", "0.5860199", "0.585971", "0.5858132", "0.58576363", "0.5857296", "0.58533573", "0.58533573", "0.58533573", "0.58533573", "0.5850585", "0.58497334", "0.5847768", "0.5847221", "0.58457214", "0.58385444", "0.5836766", "0.5836293" ]
0.69456804
2
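The first negative in the record above enumerates Cloud Foundry space roles by walking the deprecated v2 API: it lists /v2/spaces, then queries each space's auditors_url, developers_url, and managers_url and checks whether the user appears in the result. A minimal sketch of that lookup pattern, factored into one loop over the three role endpoints; the membership test against entity["username"] is an assumption, since the record's isInThisRole helper is not shown, and later role checks overwrite earlier ones for the same space, matching the snippet's behavior.

import requests

def get_space_roles(base_api_url, headers, user_name):
    """Map space GUID -> {role, name} for one user via the CF v2 API."""
    roles = {}
    spaces = requests.get(base_api_url + "/v2/spaces",
                          headers=headers, verify=False).json()
    for space in spaces["resources"]:
        entity = space["entity"]
        guid = space["metadata"]["guid"]
        # The v2 space entity exposes one membership URL per role.
        for role, url_key in (("auditor", "auditors_url"),
                              ("developer", "developers_url"),
                              ("manager", "managers_url")):
            members = requests.get(base_api_url + entity[url_key],
                                   headers=headers, verify=False).json()
            if any(m["entity"].get("username") == user_name
                   for m in members["resources"]):
                roles[guid] = {"role": role, "name": entity["name"]}
    return roles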
Create user profile Creates a new user profile from the user's email, skills, prizes, and other fields.
def create_user_profile(email, **kwargs):  # POST
    user_exists = coll("users").find_one({"_id": email})
    if user_exists:
        return {"message": "User already exists"}, 400
    # NOTE: It doesn't make sense for a person to have prizes; only a team should have them.
    coll("users").insert_one(
        {
            "_id": email,
            "skills": kwargs["skills"],
            "prizes": kwargs["prizes"],
            "bio": kwargs["bio"],
            "github": kwargs["github"],
            "interests": kwargs["interests"],
            "seriousness": kwargs["seriousness"],
            "team_id": "",
            "hasateam": False,
        }
    )
    return {"message": "User profile successfully created"}, 201
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(self, data):\n # Make User\n username = data['email'].split(\"@\")[0]\n user = User.objects.create_user(**data, username=username, is_verified=False, is_client=True)\n Profile.objects.create(user=user)\n send_confirmation_email.delay(user_pk=user.pk)\n return user", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n profile = UserProfile()\n profile.user = instance\n profile.email=instance.email\n profile.save()", "def create(self, validated_data):\n request = self.context.get('request')\n profile = Profile(**validated_data)\n profile.user = request.user\n profile.save()\n return profile", "def profile_create(faker_obj=fake_init()):\n profile = faker_obj.simple_profile()\n user = User.objects.create(\n username=profile[\"username\"],\n email=profile[\"mail\"],\n password=profile[\"username\"][::-1],\n )\n return user.id", "def create(self, validated_data):\r\n user_data = validated_data.pop('user')\r\n user = UserSerializer.create(UserSerializer(), validated_data = user_data)\r\n profile, created = Profile.objects.update_or_create(user = user,\r\n bio = validated_data.pop('bio'),\r\n location = validated_data.pop('location'),\r\n birth_date = validated_data.pop('birth_date'))\r\n return profile", "def create(self, validated_data):\n user_data = validated_data.pop('user')\n user = UserSerializer.create(UserSerializer(), validated_data=user_data)\n profile, created = Profile.objects.update_or_create(\n user=user,\n avatar=validated_data.pop('avatar'),\n biography=validated_data.pop('biography'),\n link=validated_data.pop('link') \n )\n return profile", "def create (self, validated_data):\n user = models.UserProfile.objects.create_user(\n email = validated_data ['email'],\n name = validated_data ['name'],\n password = validated_data ['password']\n )\n\n return user", "def create(self, validated_data):\n user = UserProfile.objects.create_user(\n email=validated_data['email'],\n first_name = validated_data['first_name'],\n last_name = validated_data['last_name'],\n password = validated_data['password']\n )\n return user", "def create_user_profile(instance, created, **_):\n if created:\n Profile.objects.create(user=instance)", "def create_profile(self, user, *args, **kwargs):\n salt = hashlib.sha1(str(random.random())).hexdigest()[:5]\n activation_key = hashlib.sha1(salt + user.username).hexdigest()\n return self.create(user=user, activation_key=activation_key, **kwargs)", "def create_profile_for_new_user(sender, created, instance, **kwargs):\n if created:\n profile = self.get_model('profile')(user=instance)\n profile.save()", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def create(self, validated_data):\n user = UserProfile.objects.create_user(\n email=validated_data[\"email\"],\n name=validated_data[\"name\"],\n password=validated_data[\"password\"]\n )\n\n return user", "def create(self, validated_data):\n user = models.UserProfile.objects.create_user(\n email=validated_data['email'],\n username=validated_data['username'],\n password=validated_data['password'],\n\n )\n\n return user", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n user_profile = UserProfile.objects.create(user=instance)", "def create(self, 
validated_data):\n\n user = models.UserProfile(\n username=validated_data['username'],\n email=validated_data['email'],\n first_name=validated_data['first_name'],\n mobile_number=validated_data['mobile_number'],\n )\n\n user.set_password(validated_data['password'])\n user.save()\n\n return user", "def createUserProfile(user):\n MyProfile.objects.get_or_create(user=user)", "def create_profile(sender, **kwargs):\n user = kwargs[\"instance\"]\n if kwargs[\"created\"]:\n user_profile = Profile(user=user)\n user_profile.save()", "def create(self, validated_data):\n user = User.objects.create(\n first_name=validated_data.get('first_name'),\n middle_name=validated_data.get('middle_name'),\n last_name=validated_data.get('last_name'),\n email=validated_data.get('email'),\n username=validated_data.get('username'),\n mobile_number=validated_data.get('mobile_number'),\n gender=validated_data.get('gender'),\n is_active=validated_data.get('is_active'),\n country=validated_data.get('country'),\n address=validated_data.get('address'),\n role=validated_data.get('role'),\n )\n if self.context['request'].data.get('file_profile_picture') is not None:\n user.profile_picture = self.context['request'].data['file_profile_picture']\n if self.context['request'].data.get('file_signature') is not None:\n user.signature = self.context['request'].data['file_signature']\n user.set_password(validated_data.get('password'))\n user.save()\n return user", "def create_user(self, *args, **kwargs):\n user = User.objects.create_user(*args, **kwargs)\n return get_profile(user)", "def create(self, validated_data):\n\n user = models.UserProfile(\n email=validated_data['email'],\n name=validated_data['name']\n )\n\n user.set_password(validated_data['password'])\n user.save()\n return user", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n # create new Stellar account\n stellar.api.create_account(user=instance)", "def create(self, validated_data):\n ## overriding default create\n\n user = UserProfile.objects.create_user(\n email = validated_data['email'],\n name = validated_data['name'],\n password=validated_data['password']\n )\n \n return user", "def create_user_profile(sender, instance, created, **kwargs):\n\n if created:\n user_profile = UserProfile.objects.create(user=instance)", "def profile(**kwargs):\n defaults = {'name': 'Test K. 
User', 'bio': 'Some bio.',\n 'website': 'http://support.mozilla.com',\n 'timezone': None, 'country': 'US', 'city': 'Mountain View',\n 'locale': 'en-US'}\n if 'user' not in kwargs:\n u = user(save=True)\n defaults['user'] = u\n defaults.update(kwargs)\n\n p = Profile(**defaults)\n p.save()\n return p", "def create(self, validated_data):\n\n user = models.UserProfile(\n email=validated_data['email'],\n name=validated_data['name']\n )\n\n user.set_password(validated_data['password'])\n user.save()\n\n return user", "def create_profile(self, user):\n salt = sha.new(str(random.random())).hexdigest()[:5]\n activation_key = sha.new(salt+user.username).hexdigest()\n return self.create(user=user,\n activation_key=activation_key)", "def create_profile(self, user):\r\n salt = sha.new(str(random.random())).hexdigest()[:5]\r\n activation_key = sha.new(salt+user.username).hexdigest()\r\n return self.create(user=user,\r\n activation_key=activation_key)", "def create( self , validated_data ) :\n\n user = models.UserProfile(\n email = validated_data[ 'email' ] ,\n name = validated_data[ 'name' ]\n )\n\n user.set_password( validated_data[ 'password' ] )\n user.save( )\n\n return user", "def create_profile(username):\n user = User.objects.create(username=username)\n return Profile.objects.create(user=user)", "def create_profile(sender, **kw):\n user = kw['instance']\n if kw['created']:\n profile = UserProfile(user=user)\n profile.save()", "def create_my_profile(\n body: Optional[UserProfilePrivateCreate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = CreateMyProfile.create(\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def save(self):\n data = self.cleaned_data\n data.pop('password_confirmation')\n user = User.objects.create_user(**data)\n profile = Profile(user=user)\n profile.save()", "def profile(user, **kwargs):\n defaults = {'user': user, 'name': 'Test K. 
User', 'bio': 'Some bio.',\n 'website': 'http://support.mozilla.com',\n 'timezone': None, 'country': 'US', 'city': 'Mountain View'}\n defaults.update(kwargs)\n\n p = Profile(**defaults)\n p.save()\n return p", "def create(self, validated_data):\n\n # Here we actually create a new user.\n user = models.UserProfile(\n email = validated_data['email'],\n name = validated_data['name']\n )\n\n user.set_password(validated_data['password'])\n\n # Here we save the object to the database.\n user.save()\n\n return user", "def create(self, validated_data):\n # We override the create function by accessing the method 'create_user(args)'\n # defined in 'objects' that is an object\n # which references UserProfileManager class into UserProfile class.\n user = models.UserProfile.objects.create_user(\n email = validated_data['email'],\n name = validated_data['name'],\n password = validated_data['password']\n )\n\n return user", "def create_profile(self, user):\n salt = sha.new(str(random.random())).hexdigest()[:5]\n activation_key = sha.new(salt+user.username).hexdigest()\n# prepend \"key_\" to the key_name, because key_names can't start with numbers\n registrationprofile = RegistrationProfile(user=user, activation_key=activation_key)\n db = DB_Session()\n db.add(registrationprofile)\n db.flush()\n db.refresh(registrationprofile)\n db.commit()\n db.close()\n return registrationprofile", "def create_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def create_user(context, params):\n form_user = dict()\n # form_user['edited_by'] = context.user\n if params.get('username'):\n form_user['username'] = params.get('username')\n else:\n form_user['username'] = create_username(params) # 'email_user{}'.format(MISUser.objects.latest('id').id + 1\n form_user['first_name'] = params.get('first_name')\n form_user['last_name'] = params.get('last_name')\n form_person = create_person(params)\n form_user.update(form_person)\n user = User.objects.create(**form_user)\n user.set_password(params.get('password'))\n\n email = {'label': 'Work', 'val': params.get('email'), 'person': user, 'is_main': True}\n create_email(context, email)\n\n user.save()\n return user", "def create_user(self, email, name, phone1, password=None, signed_up=timezone.localtime(),):\n if not email:\n raise ValueError(_('Users must have an email address'))\n\n user = self.model(\n email=self.normalize_email(email),\n name=name,\n phone1=phone1,\n signed_up=signed_up,\n )\n\n user.set_password(password)\n user.save(using=self._db)\n MyUserProfile.objects.create(myuser=user) \n NotifClick.objects.create(myuser=user) \n\n return user", "def create_user(request):\n message = None\n if request.method == 'POST':\n form = UserCreationForm(request.POST)\n if form.is_valid():\n # This creates a new User in the database\n new_user = form.save()\n # now we create a new blank profile, link it to the new user and save it\n new_profile = Profile()\n new_profile.user = new_user\n new_profile.save()\n # Get the user object we just created\n new_user = authenticate(username=form.cleaned_data['username'],\n password=form.cleaned_data['password1'])\n login(request, new_user)\n return HttpResponseRedirect('/index')\n else:\n form = UserCreationForm()\n\n return render(request, 'woofer/show_form.html', {\n 'form' : form,\n 'message' : message,\n 'form_action' : reverse('create-user'),\n 'title' : \"Create Account\"\n })", "def create_profile(self,user):\n salt= sha.new(str(random.random())).hexdigest()[:5]\n activation_key = 
sha.new(salt+user.username).hexdigest()\n\n return RegistrationProfile(user=user,\n activation_key=activation_key)", "async def create_profile_for_user(self, *, profile_create: ProfileCreate) -> ProfileInDB:\n created_profile = await self.db.fetch_one(query=CREATE_PROFILE_FOR_USER_QUERY, values=profile_create.dict())\n return ProfileInDB(**created_profile)", "def create_user_profile(sender, **kwargs):\n\n if kwargs['created']:\n UserProfile.objects.create(user=kwargs['instance'])", "def public_create_user_profile(\n user_id: str,\n body: Optional[UserProfileCreate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicCreateUserProfile.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def create_profile_for_new_users(sender, instance, created, **kwargs):\n if not created:\n return\n\n profile = Profile.objects.filter(user=instance).first()\n if profile is None:\n profile = Profile(user=instance)\n profile.save()", "def create(self, email, password=None, *args, **kwargs):\n user = self.model(email=email, **kwargs)\n user.set_password(password)\n user.save()\n profile = Profile(\n user=user,\n )\n profile.save()\n return user", "def create_user(request):\n serializer = UserProfileSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data,status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status = status.HTTP_400_BAD_REQUEST)", "def create_profile(sender, instance, created, **kwargs):\n if created: \n profile, new = UserProfile.objects.get_or_create(user=instance)", "def create_user(user, first_name, last_name, major, bio):\n return userAccount.objects.create(user=user, first_name=first_name, last_name=last_name, major=major, bio=bio)", "def create_profile(sender, instance, created, **kwargs):\n if created:\n profile, created = UserProfile.objects.get_or_create(user=instance)", "def create_user(username, password, user_fname, user_lname, email, profile_picture=\"/static/img/profile_pictures/default.png\"):\n\n user = User(username=username, password=password, user_fname=user_fname, user_lname=user_lname, profile_picture=profile_picture, email=email)\n\n db.session.add(user)\n db.session.commit()\n\n return user", "def create_profile(sender, **kwargs):\n\n # I import profile here cause i can't import it right in the top.\n from .profiles import Profile\n\n user = kwargs['instance']\n\n Profile.objects.get_or_create(user=user)", "def create(self, validated_data):\n # print('Validated Data',validated_data['profile'].get('group'))\n group = validated_data['profile'].get('group')\n profile_data = validated_data.pop('profile')\n password = validated_data.pop('password')\n user = User(**validated_data)\n user.set_password(password)\n user.save()\n \"\"\"\n After the creation of the user he is added to a particular group.\n \"\"\"\n user.groups.add(Group.objects.get(name=group))\n UserProfile.objects.create(user=user, **profile_data)\n return user", "def create_profile(sender, instance, signal, created, **kwargs):\n \n from phylocommons.models import UserProfile\n \n if created:\n UserProfile(user = instance).save()", "def user_create(client_id, email, password=None, first_name=None, last_name=None, user_info=None):\n # validate if email 
contains actually a valid email address:\n try:\n validate_email(email)\n except ValidationError:\n raise ex.UserError(\"please enter a valid email address\")\n # create account\n user = create_user(email)\n user.first_name = first_name\n user.last_name = last_name\n if password:\n user.set_password(password)\n if user_info:\n for (key, value) in user_info.iteritems():\n if key == \"social\" and value is not None: user.meta['social'] = value\n elif key == \"address\" and value is not None: user.meta['address'] = value\n elif key == \"crm\" and value is not None: user.meta['crm'] = value\n elif key == \"local\" and value is not None: user.meta['local'] = value\n \n user_info = user_to_dict(user, include_name=True)\n\n # build success result\n return user_info", "def create(cls, **kwargs):\n if \"user\" not in kwargs:\n with mute_signals(post_save):\n profile = ProfileFactory.create()\n kwargs[\"user\"] = profile.user\n return super().create(**kwargs)", "def save(self):\n # First save the parent form and get the user.\n new_user = super(SignupFormExtra, self).save()\n\n # Get the profile, the `save` method above creates a profile for each\n # user because it calls the manager method `create_user`.\n # See: https://github.com/django-userena-ce/django-userena-ce/blob/master/userena/managers.py#L65\n profile = new_user.my_profile\n profile.gender = self.cleaned_data['gender']\n profile.education = self.cleaned_data['education']\n profile.birthday = self.cleaned_data['birthday']\n profile.annual_income = self.cleaned_data['annual_income']\n profile.save()\n\n # Userena expects to get the new user from this form, so return the new\n # user.\n return new_user", "def create_profile(sender, instance, signal, created, **kwargs):\n \n from tutablr_app.models import UserProfile\n \n if created:\n UserProfile.objects.get_or_create(user = instance);\n # Do additional stuff here if needed, e.g.\n # create other required related records", "def createProfile(self):\n if self.profile:\n return\n from soc.modules.gsoc.models.profile import GSoCProfile\n user = self.createUser()\n properties = {'link_id': user.link_id, 'student_info': None, 'user': user,\n 'parent': user, 'scope': self.program, 'status': 'active'}\n self.profile = seeder_logic.seed(GSoCProfile, properties)", "def create_user_profile_callback(sender, instance, created, **kwargs):\n try:\n instance.get_profile()\n except UserProfile.DoesNotExist:\n UserProfile.objects.create(user=instance)", "def manage_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)\n else:\n instance.profile.save()", "async def public_create_user_profile_async(\n user_id: str,\n body: Optional[UserProfileCreate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicCreateUserProfile.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def save(self, profile_callback=None):\r\n new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],\r\n password=self.cleaned_data['password1'],\r\n email=self.cleaned_data['email'],\r\n profile_callback=profile_callback)\r\n return new_user", "def create_or_update_user_profile(sender, instance, created, **kwargs):\n _, created = 
UserProfile.objects.get_or_create(user=instance)\n if created and instance.email != \"\":\n instance.profile.email = instance.email\n instance.profile.save()", "def create_user_object(self, request):\r\n user = {\r\n \"first_name\": request.form.get(\"first_name\"),\r\n \"last_name\": request.form.get(\"last_name\"),\r\n \"age\": request.form.get(\"age\"),\r\n \"cpr_number\": request.form.get(\"CPR\"),\r\n \"email\": request.form.get(\"email\"),\r\n \"phone_number\": request.form.get(\"phone_number\"),\r\n \"password\": PasswordHasher().hash(request.form.get(\"password\")),\r\n \"bank_account\": str(BankAccount(\"Savings\", 1000.00).store_account().inserted_id),\r\n \"crypto_wallet\": str(CryptoWallet(\"Bitcoin\", 0.0045).store_account().inserted_id)\r\n }\r\n return user", "def create_registration_profile(self, user):\n salt = hashlib.sha1(str(random.random())).hexdigest()[:10]\n activation_key = hashlib.sha1(salt + user.username).hexdigest()\n\n return self.create(user=user, activation_key=activation_key)", "def add_user(request):\n profile = get_object_or_404(UserProfile, user=request.user)\n\n # make sure only managers and admins can add a team\n if profile.level == 'admin' or profile.level == 'manager':\n\n if request.method == 'POST':\n form = UserProfileForm(request.POST)\n user_email = UserForm(request.POST)\n\n if user_email.is_valid() and form.is_valid():\n user = User.objects.create_user(username=random_username(),\n email=request.POST.get('email'),\n password='EggBox900')\n messages.success(request, 'Profile added successfully')\n\n user.userprofile.first_name = form.data['first_name']\n user.userprofile.last_name = form.data['last_name']\n user.userprofile.company_id = profile.company_id\n # user.userprofile.start_date = form.data['start_date']\n # user.userprofile.end_date = form.data['end_date']\n user.userprofile.level = form.data['level']\n user.userprofile.team = Team.objects.get(pk=form.data['team'])\n user.userprofile.contract_type = form.data['contract_type']\n user.userprofile.contract_percentage = form.data['contract_percentage']\n user.userprofile.agent_goal = form.data['agent_goal']\n user.userprofile.save()\n else:\n messages.error(request, 'Update failed. Please ensure the form is valid.')\n\n users = UserProfile.objects.filter(company_id=profile.company_id)\n\n template = 'profiles/user_management.html'\n context = {\n 'users': users,\n 'profile': profile\n }\n\n return render(request, template, context)\n\n else:\n form = UserProfileForm()\n user_email = UserForm()\n\n template = 'profiles/add_user.html'\n context = {\n 'form': form,\n 'profile': profile,\n 'user_email': user_email\n }\n\n return render(request, template, context)\n else:\n messages.info(request, \"Sorry, you are not authorized to add users. 
Ask a Manager or Admin.\")\n\n return redirect(reverse('planning', ))", "def create_user():\n record = request.get_json()\n if record is None:\n return {\"Error\": \"No data Supplied.\"}, 400\n\n schema = user_schema.load(record)\n\n if UserModel.objects(email=schema['email']):\n return {\"Error\": \"User Data already exists.\"}, 400\n user = UserModel(**schema)\n user.hash_password()\n user.save()\n ser_data = user_schema.dump(user)\n token = Auth.generate_token(ser_data[\"_id\"])\n return {\"message\": \"User Created Successfully\", \"Token\": token, \"id\": str(user.id)}, 200", "def save(self, commit=True):\n user = super(UserCreationForm, self).save(commit=False)\n user.set_password(self.cleaned_data['password1'])\n\n user.save()\n\n # Making user profile and assigning to CPCESU\n # CPCESU\n #group = Organization.objects.get(name='Colorado Plateau')\n\n # New profile with group\n profile = UserProfile(user=user, first_name=self.cleaned_data.get('first_name'),\n last_name=self.cleaned_data.get('last_name'))\n profile.save()\n\n return user", "def _profile(user):\n profile = UserProfile()\n profile.user_id = user.id\n profile.save()", "def create_user_profile(IamUserArn=None, SshUsername=None, SshPublicKey=None, AllowSelfManagement=None):\n pass", "def save(self):\n new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],\n password=self.cleaned_data['password1'],\n email=self.cleaned_data['email'],\n firstname=self.cleaned_data['first_name'],\n lastname=self.cleaned_data['last_name'],\n agree=self.cleaned_data['tos'])\n return new_user", "def create(self, request, **kwargs):\n fullname = request.CLEANED['fullname'] #this is full name actually\n email = request.CLEANED['email']\n password = request.CLEANED['password']\n username = request.CLEANED['username']\n # Prevent repeatly create user with the SAME email\n if User.objects.filter(email=email, is_active=False).exists():\n user = User.objects.filter(email=email, is_active=False)[0]\n user.set_password(password)\n user.save()\n registration = Registration.objects.get(user=user)\n registration.send_activation_mail()\n return registration.to_json()\n\n slug_name = Slug.objects.sluggify(username)\n user = sign_up(slug_name, password, request)\n user.email = email\n user.save()\n user_profile = user.get_profile()\n user_profile.full_name = fullname\n user_profile.save()\n\n registration = Registration.objects.get(user=user)\n registration.send_activation_mail()\n\n return registration.to_json()", "def post (self):\n\t\tobj = request.get_json()\n\n\t\tif (('username' not in obj) or ('email' not in obj) or ('secret' not in obj)):\n\t\t\treturn {\"status\":\"MISSING_PARAMS\"}\n\t\telif (len(obj['username'])<4 or len(obj['username'])>25):\n\t\t\treturn {\"status\":\"USER_NAME_LENGTH\"}\n\n\t\ttry:\n\t\t\tobj['username'].decode('ascii')\n\t\texcept UnicodeEncodeError:\n\t\t\treturn {'status':'INVALID_NAME'}\n\n\t\tdb = getattr(g, 'db', None)\n\t\twith db as cur:\n\t\t\tqry = \"INSERT INTO profiles VALUES (default, %s, %s, FALSE, %s, '', '', NULL, NULL);\"\n\t\t\ttry:\n\n\t\t\t\tsecret = obj['secret']\n\t\t\t\tif isinstance(secret, unicode):\n\t\t\t\t\tsecret = secret.encode('utf-8')\n\n\t\t\t\thashed = hashpw(secret, gensalt())\n\t\t\t\tcur.execute(qry, (obj['username'],obj['email'], hashed))\n\t\t\t\tdb.commit()\n\t\t\t\treturn {\"status\":\"USER_CREATED\"}\n\t\t\texcept Exception as e:\n\t\t\t\tprint \"Error\", e\n\t\t\t\treturn {\"status\":\"USER_EXISTS\"}", "def do_user_create():\n target = User(\n 
request.form['gender'],\n request.form['first_name'],\n request.form['name'],\n request.form['mail'],\n request.form['meter_id'],\n request.form['group_id'],\n secrets.token_hex(33))\n target.set_role(request.form['role'])\n target.nick = request.form['nick']\n db.session.add(target)\n db.session.commit()\n return user_list(\"Created user \" + target.name)", "async def create_my_profile_async(\n body: Optional[UserProfilePrivateCreate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = CreateMyProfile.create(\n body=body,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def make_profile_for_user(sender, instance, **kwargs):\n if kwargs['created']:\n new_profile = ImagerProfile(user=instance)\n new_profile.save()", "def create_account_form(request, post):\n username = post.get(\"username\")\n first_name = post.get(\"first_name\")\n last_name = post.get(\"last_name\")\n email = post.get(\"email\")\n\n phone_number = post.get(\"phone\")\n\n password = post.get(\"password\")\n\n height = float(post.get(\"height\"))\n weight = float(post.get(\"weight\"))\n sex = post.get(\"sex\")\n\n current_medications = post.get(\"medications\")\n allergies = post.get(\"allergies\")\n medical_conditions = post.get(\"medical_conditions\")\n family_history = post.get(\"family_history\")\n additional_info = post.get(\"additional_info\")\n primary_hospital = Hospital.objects.get(pk=post.get(\"primary_hospital\"))\n\n policy_number = int(post.get(\"policy_number\"))\n company = post.get(\"company\")\n\n if User.objects.filter(username=username).exists():\n messages.add_message(request, messages.ERROR, 'User already exists!')\n return False\n\n else:\n new_user = User.objects.create_user(\n username=username, password=password,\n first_name=first_name, last_name=last_name, email=email\n )\n\n new_user_profile = UserProfile.objects.create(\n user=new_user,\n phone_number=phone_number, status=UserStatus.objects.get(pk=3),\n primary_hospital=primary_hospital\n )\n\n medical_info = MedicalInformation.objects.create(\n height=height, weight=weight, sex=sex,\n medical_conditions=medical_conditions,\n allergies=allergies, medications=current_medications,\n family_history=family_history, additional_info=additional_info,\n user=new_user_profile\n )\n\n insurance = Insurance.objects.create(\n policy_number=policy_number, company=company, medical_information=medical_info,\n )\n\n return True", "def test_create_user(profile_data):\n email = \"email@localhost\"\n username = \"username\"\n user = api.create_user(username, email, profile_data, {\"first_name\": \"Bob\"})\n\n assert isinstance(user, User)\n assert user.email == email\n assert user.username == username\n assert user.first_name == \"Bob\"\n\n if \"name\" in profile_data:\n assert user.profile.name == profile_data[\"name\"]\n else:\n assert user.profile.name is None", "def user_profile():\n user = current_user\n user_is_valid = True\n if not user.active:\n flash('This user account is under review. 
Please update your profile '\n + ' and contact the organizing team to access all functions of '\n + 'this platform.', 'warning')\n\n form = UserForm(obj=user, next=request.args.get('next'))\n form.roles.choices = [(r.id, r.name) for r in Role.query.order_by('name')]\n\n # Check conflicting PKs\n if form.email.data != user.email:\n if User.query.filter_by(email=form.email.data).first() is not None:\n flash('This e-mail address is already registered.', 'error')\n user_is_valid = False\n\n if user.sso_id:\n # Do not allow changing password on SSO\n del form.password\n\n # Validation has passed\n if form.is_submitted() and form.validate() and user_is_valid:\n # Assign roles\n user.roles = [Role.query.filter_by(\n id=r).first() for r in form.roles.data]\n del form.roles\n\n # Sanitize username\n user.username = sanitize_input(form.username.data)\n del form.username\n\n # Assign password if changed\n originalhash = user.password\n form.populate_obj(user)\n # Do not allow changing password on SSO\n if not user.sso_id:\n if form.password.data:\n user.set_password(form.password.data)\n else:\n user.password = originalhash\n\n user.updated_at = datetime.utcnow()\n db.session.add(user)\n db.session.commit()\n user.socialize()\n flash('Profile updated.', 'success')\n return redirect(url_for('public.user', username=user.username))\n\n if not form.roles.choices:\n del form.roles\n else:\n form.roles.data = [(r.id) for r in user.roles]\n return render_template('public/useredit.html',\n oauth_type=oauth_type(),\n user=user, form=form,\n active='profile')", "def create_user():\n body = request.get_json(silent=True)\n if body is None:\n abort(400, jsonify(error=\"Not a JSON\"))\n if 'email' not in body:\n abort(400, jsonify(error=\"Missing email\"))\n if 'password' not in body:\n abort(400, jsonify(error=\"Missing password\"))\n user = models.user.User(**body)\n models.storage.new(user)\n models.storage.save()\n return make_response(jsonify(user.to_dict()), 201)", "def _create_user(self, phone_number, password, **extra_fields):\n\n if not phone_number:\n raise ValueError(\"Phone number is missing\")\n\n user = self.model(phone_number=phone_number, **extra_fields)\n user.set_password(password)\n\n user.save(using=self._db)\n\n # Create a Profile object for the user\n profile = Profile(user=user)\n profile.save()\n\n # Create a Cart object for the user\n cart = Cart(user=user)\n cart.save()\n\n # Create a UserAddress object for the user\n address = UserAddress(user=user)\n address.save()\n\n return user", "def register(self, form):\n new_user = form.save(commit=False)\n username_field = getattr(new_user, 'USERNAME_FIELD', 'username')\n # Save lowercased email as username.\n setattr(new_user, username_field, form.cleaned_data['email'].lower())\n new_user.first_name = form.cleaned_data['first_name']\n new_user.last_name = form.cleaned_data['last_name']\n new_user.save()\n new_user = authenticate(username=getattr(new_user, username_field), password=form.cleaned_data['password1'])\n login(self.request, new_user)\n user_registered.send(sender=self.__class__, user=new_user, request=self.request)\n profile, _ = Profile.objects.get_or_create(user=new_user)\n self.request.session['signed_up'] = True\n profile.payment_plan = int(form.cleaned_data['payment_plan'])\n profile.company_name = form.cleaned_data['company']\n profile.phone = form.cleaned_data['phone']\n profile.save(update_fields=['payment_plan', 'company_name', 'phone'])\n if profile.payment_plan != Profile.PAYMENT_PLAN_FREE:\n 
messages.add_message(self.request, messages.INFO,\n 'Congratulations! We won\\'t charge you for this plan for now.')\n return new_user", "def _do_create_account(post_vars):\r\n user = User(username=post_vars['username'],\r\n email=post_vars['email'],\r\n is_active=False)\r\n user.set_password(post_vars['password'])\r\n registration = Registration()\r\n\r\n # TODO: Rearrange so that if part of the process fails, the whole process fails.\r\n # Right now, we can have e.g. no registration e-mail sent out and a zombie account\r\n try:\r\n user.save()\r\n except IntegrityError:\r\n # Figure out the cause of the integrity error\r\n if len(User.objects.filter(username=post_vars['username'])) > 0:\r\n raise AccountValidationError(\r\n _(\"An account with the Public Username '{username}' already exists.\").format(username=post_vars['username']),\r\n field=\"username\"\r\n )\r\n elif len(User.objects.filter(email=post_vars['email'])) > 0:\r\n raise AccountValidationError(\r\n _(\"An account with the Email '{email}' already exists.\").format(email=post_vars['email']),\r\n field=\"email\"\r\n )\r\n else:\r\n raise\r\n\r\n # add this account creation to password history\r\n # NOTE, this will be a NOP unless the feature has been turned on in configuration\r\n password_history_entry = PasswordHistory()\r\n password_history_entry.create(user)\r\n\r\n registration.register(user)\r\n\r\n profile = UserProfile(user=user)\r\n profile.name = post_vars['name']\r\n profile.level_of_education = post_vars.get('level_of_education')\r\n profile.gender = post_vars.get('gender')\r\n profile.mailing_address = post_vars.get('mailing_address')\r\n profile.city = post_vars.get('city')\r\n profile.country = post_vars.get('country')\r\n profile.goals = post_vars.get('goals')\r\n\r\n try:\r\n profile.year_of_birth = int(post_vars['year_of_birth'])\r\n except (ValueError, KeyError):\r\n # If they give us garbage, just ignore it instead\r\n # of asking them to put an integer.\r\n profile.year_of_birth = None\r\n try:\r\n profile.save()\r\n except Exception:\r\n log.exception(\"UserProfile creation failed for user {id}.\".format(id=user.id))\r\n raise\r\n\r\n UserPreference.set_preference(user, LANGUAGE_KEY, get_language())\r\n\r\n return (user, profile, registration)", "def create_account(request):\n if request.method == 'POST':\n\n post = request.POST\n form = forms.RegisterForm(post)\n\n if form.is_valid():\n # create a new user\n user = models.HAWCUser.objects.create_user(post['email'],\n post['password1'])\n user.first_name = post['first_name']\n user.last_name = post['last_name']\n user.full_clean()\n user.save()\n\n # create a new user profile\n profile = models.UserProfile(user=user)\n profile.save()\n\n # after save, log user in\n user = authenticate(username=post['email'],\n password=post['password1'])\n login(request, user)\n return redirect('portal')\n else:\n form = forms.RegisterForm()\n\n return render(request, 'registration/create_account.html', {'form': form})", "def create_or_update_user_profile(sender, instance, created, **kwargs):\n\n # Create profile and set ACTIVE status to account -- TODO : ACTIVE STATUS\n if created:\n Profile.objects.create(user=instance, status=Status.get_or_create_status(strings.ACTIVE_STATUS))\n\n else:\n instance.profile.save()", "def create(self, validated_data):\n\n user_data = {\n \"username\" : validated_data.get(\"username\"),\n \"email\" : validated_data.get(\"email\"),\n \"password\" : validated_data.get(\"password\")\n }\n user = User.objects.create_user(**user_data)\n 
user.save()\n\n account_data = {\n \"phone\" : validated_data.get(\"phone\"),\n \"type\" : validated_data.get(\"type\"),\n \"lat\" : validated_data.get(\"lat\"),\n \"lang\" : validated_data.get(\"lang\"),\n \"center_point\" : validated_data.get(\"center_point\")\n }\n account = Account(user = user, **account_data)\n account.save()\n\n return user", "def create_user():\n first_name = request.form['first_name'].capitalize()\n last_name = request.form['last_name'].capitalize()\n image_url = request.form['image_url']\n\n new_user = User(first_name=first_name, last_name=last_name, image_url=image_url)\n db.session.add(new_user)\n db.session.commit()\n\n return redirect(\"/users\")", "def create(self, body):\n try:\n user_record = UserRecord.create_user(\n email=body[\"email\"],\n password=body[\"password\"],\n display_name=body[\"display_name\"],\n auth=web_sdk.auth,\n )\n complete_register = body.get(\"complete_register\") or False\n user_record.make_claims({\"complete_register\": complete_register})\n user = User(\n uid=user_record.uid,\n email=user_record.email,\n display_name=user_record.display_name,\n phone_number=body.get(\"phone_number\"),\n name=body[\"name\"],\n lastname=body[\"lastname\"],\n headline=body.get(\"headline\"),\n about_me=body.get(\"about_me\"),\n complete_register=complete_register,\n link_video=body.get(\"link_video\"),\n timezone=body.get(\"timezone\"),\n location=body.get(\"location\"),\n )\n\n if \"specialities\" in body:\n user.append_specialities(body[\"specialities\"])\n if \"methods\" in body:\n user.append_methods(body[\"methods\"])\n if \"plans\" in body:\n user.append_plans(body[\"plans\"])\n\n user.add()\n user.save()\n\n return {\"uid\": user_record.uid, \"a\": user_record, \"b\": user}\n except KeyError as ex:\n raise HandlerException(400, \"Bad request: \" + str(ex))", "def create(khoros_object, user_settings=None, login=None, email=None, password=None, first_name=None, last_name=None,\n biography=None, sso_id=None, web_page_url=None, cover_image=None, ignore_exceptions=False):\n # TODO: Add functionality for followers, following, rank, roles, user_avatar and user_badges\n payload = structure_payload(user_settings, login, email, password, first_name, last_name, biography, sso_id,\n web_page_url, cover_image)\n query_url = f\"{khoros_object.core_settings.get('v2_base')}/users\"\n headers = {'content-type': 'application/json'}\n response = api.post_request_with_retries(query_url, payload, auth_dict=khoros_object.auth, headers=headers)\n if not api.query_successful(response) and not ignore_exceptions:\n raise errors.exceptions.UserCreationError(user=payload.get('login'), exc_msg=response.get('message'))\n return response", "def _create_user(self, username, email, password, phone, **extra_fields):\n\n username = self.model.normalize_username(username)\n user = self.model(username=username, email=email, phone=phone, **extra_fields) # using email_id instead of email\n user.set_password(password)\n user.save(using=self._db)\n return user", "def post(self, request):\n\n user_obj = UserProfile.objects.filter(id=request.user.id).first()\n if not user_obj:\n return existence_error('user')\n\n request_json = {\n 'username': request.data.get('username'),\n 'name': request.data.get('name'),\n 'last_name': request.data.get('last_name'),\n 'bio': request.data.get('bio'),\n 'website': request.data.get('website'),\n 'email': request.data.get('email'),\n 'phone_number': request.data.get('phone_number'),\n 'business': request.data.get('business'),\n 'country': 
request.data.get('country'),\n 'city': request.data.get('city')\n }\n\n user_serialized = EditUserProfileSerializer(user_obj, data=request_json, partial=True)\n if not user_serialized.is_valid():\n return validate_error(user_serialized)\n user_serialized.save()\n\n response_json = {\n 'status': True,\n 'message': 'successful',\n 'data': {}\n }\n\n return Response(response_json, status=201)", "def create_new_user(first_name, last_name, email, password):\n \n new_user = User(first_name, last_name, email, password)\n db.session.add(new_user)\n db.session.commit()\n \n # link a root storage folder to the user\n root_folder = Folder()\n db.session.add(root_folder)\n db.session.commit()\n new_user.storage_root_id = root_folder.id\n new_user.storage_root = root_folder\n db.session.commit()\n\n # link usage tracking to the user\n usage = Usage()\n usage.user_id = new_user.id\n new_user.usage = usage\n db.session.add(usage)\n db.session.commit()\n\n # link a billing address to the user\n billing_address = BillingAddress()\n billing_address.user_id = new_user.id\n new_user.billing_address = billing_address\n db.session.add(billing_address)\n db.session.commit()\n\n # link settings to the User\n settings = Settings()\n settings.user_id = new_user.id\n new_user.settings = settings\n db.session.add(settings)\n db.session.commit()", "def create_user():\n usr = request.get_json()\n if not usr:\n abort(400, {'Not a JSON'})\n elif 'email' not in usr:\n abort(400, {'Missing email'})\n elif 'password' not in usr:\n abort(400, {'Missing password'})\n else:\n new_usr = User(**usr)\n storage.new(new_usr)\n storage.save()\n return jsonify(new_usr.to_dict()), 201", "def add_profile():\n \n form=ProfileForm() \n if 'username' in session:\n user = mongo.db.user.find_one({'username': session['username']})\n pro = mongo.db.profiles.find_one({'user_id': user['_id']})\n if pro:\n flash('Sorry, only one profile per user permitted. 
You can update your profile on your dashboard under the profile tab.', 'info')\n return redirect(url_for('dashboard'))\n \n if request.method == 'POST':\n if form.validate_on_submit():\n \n mongo.db.profiles.insert_one({'user_id': user['_id'],\n 'headline': form.headline.data,\n 'bio': form.bio.data,\n 'username': session['username'],\n 'date': datetime.utcnow(),\n 'xp': form.xp.data,\n 'interests': form.interests.data,\n 'stack': form.stack.data,\n 'languages': form.languages.data,\n 'frameworks': form.frameworks.data,\n 'github': form.github.data,\n 'linkedin': form.linkedin.data\n })\n flash('Your profile has been created.', 'success')\n return redirect('profiles')\n \n return render_template('pages/addprofile.html', title='Post',\n form=form, legend='Create your profile')\n \n flash('You need to be logged in to post any content.', 'info')\n return redirect(url_for('login'))", "def create_user(self, email, password, first_name, last_name, phone, credits=0, \n driver=False, drivers_license=None, profile_picture=None, number_of_rides=0, years_of_experience=0):\n password_hash = make_password_hash(email, password)\n return UserModel(email=email, password=password_hash, first_name=first_name, last_name=last_name, \n phone=phone, credits=credits, driver=driver, drivers_license=drivers_license, \n profile_picture=profile_picture, number_of_rides=number_of_rides, \n years_of_experience=years_of_experience)", "def new_profile(request):\n phone = request.POST.get('phone', None)\n name = request.POST.get('name', None)\n email = request.POST.get('email', None)\n auth_token, user_id = UserAuth(phone).user_registration(name, email)\n\n if auth_token is not None:\n return Response(\n {\n 'success': True,\n 'phone': phone,\n 'auth_token': auth_token,\n 'user_id': user_id\n }\n )\n\n else:\n return Response(\n {\n 'success': False\n }\n )" ]
[ "0.75975007", "0.74751395", "0.7427103", "0.73537993", "0.7324293", "0.7319972", "0.7295748", "0.7285775", "0.7275217", "0.7270615", "0.7237489", "0.72268796", "0.72268796", "0.72268796", "0.7216276", "0.7180537", "0.716592", "0.7164825", "0.71644413", "0.7159965", "0.7144012", "0.71275973", "0.7117181", "0.710969", "0.7108435", "0.7101867", "0.7091569", "0.7084894", "0.7069145", "0.70556456", "0.7042848", "0.7038658", "0.7035273", "0.70232856", "0.70221674", "0.69993764", "0.6989211", "0.69726413", "0.69657075", "0.69651645", "0.6937159", "0.69366395", "0.69322026", "0.69269085", "0.69017166", "0.68802136", "0.6877417", "0.6873529", "0.6872183", "0.6866663", "0.685789", "0.6856158", "0.6833686", "0.680395", "0.67711705", "0.6730707", "0.6720938", "0.671246", "0.670604", "0.6704368", "0.6699663", "0.66372544", "0.66332436", "0.66283554", "0.66200924", "0.66150373", "0.6612135", "0.6610039", "0.65935975", "0.6590304", "0.6583902", "0.65834665", "0.6571603", "0.65714025", "0.6567948", "0.6562682", "0.6553807", "0.6547267", "0.6541026", "0.6515373", "0.6503939", "0.649379", "0.64843357", "0.6480678", "0.6478302", "0.64641684", "0.6463921", "0.64618146", "0.6461109", "0.64563257", "0.64545316", "0.6448689", "0.64238584", "0.6400928", "0.6393765", "0.6389912", "0.63879645", "0.6383815", "0.6376287", "0.63686717" ]
0.8317838
0
Update user profile. Update a user profile with new parameters.
def update_user_profile(email, **kwargs):  # PUT
    user = coll("users").find_one({"_id": email})
    if not user:
        return {"message": "User not found"}, 404
    coll("users").update_one({"_id": email}, {"$set": kwargs})
    return {"message": "User profile successfully updated"}, 200
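# Usage sketch, assuming `coll("users")` returns a PyMongo collection and the
# function backs a Flask PUT endpoint (both are assumptions; neither `coll`
# nor the route is defined in this record):
#
#     body, status = update_user_profile("alice@example.com", name="Alice")
#     # -> ({"message": "User profile successfully updated"}, 200) when the
#     #    user exists; ({"message": "User not found"}, 404) otherwise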
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def user_update_profile():\n \n if 'userid' and 'email' not in request.forms:\n return {'status':'Failure','message':'User Id is missing,please try with correct data.'}\n \n data = user_obj.user_update_profile(request.forms)\n return data", "def update_profile():\n logger.debug(\"entering function update_profile\")\n response = update_user_profile(request.json)\n logger.debug(\"exiting function update_profile\")\n return jsonify(response)", "def update_user_profile(request):\n if request.method == 'POST':\n form = UserProfileForm(request.POST)\n\n if form.is_valid():\n user = get_object_or_404(User, pk=request.user.pk)\n user.first_name = request.POST['first_name']\n user.last_name = request.POST['last_name']\n user.profile_picture = request.POST['profile_picture']\n user.save()\n messages.success(request, 'Your profile has been updated!')\n else:\n messages.error(\n request, 'Unable to update your profile. Please try again later.')\n\n return HttpResponseRedirect(request.META.get('HTTP_REFERER', reverse('dev_panel')))", "def update_profile(request, id):\n username = request.POST.get(\"username\")\n first_name, last_name = request.POST.get(\"fullname\").split()\n email = request.POST.get(\"email\")\n phone = request.POST.get(\"phone\")\n address = request.POST.get(\"address\")\n\n userObj = User.objects.get(id=id)\n userObj.first_name = first_name\n userObj.last_name= last_name\n userObj.username= username\n userObj.email = email\n userObj.phone = phone\n userObj.address = address\n userObj.save()\n messages.success(request, \"Your profile is successfully update.\", fail_silently=False)\n return redirect(\"user_profile\", id)", "def update_user_profile(user_info):\n user_id = user_info[\"USER_ID\"]\n user_collection.find_one_and_update(\n {\"_id\": user_id},\n {\n \"$set\": {\n \"username\": user_info[\"username\"],\n \"email\": user_info[\"email\"],\n \"avatar\": user_info[\"avatar\"],\n \"githubURL\": user_info[\"githubURL\"],\n \"linkedinURL\": user_info[\"linkedinURL\"],\n \"stackoverflowURL\": user_info[\"stackoverflowURL\"],\n \"skills\": user_info[\"skills\"],\n }\n },\n upsert=False,\n )", "def update_user_profile(req_data):\n logger.debug(\"entering function update_user_profile\")\n\n update_fields = {}\n for field in req_data:\n update_fields[field] = req_data[field]\n if \"password\" in req_data:\n update_fields[\"password\"] = generate_password_hash(req_data[\"password\"])\n\n find_query = {\"user_id\": current_user.id}\n update_query = {\"$set\": update_fields}\n run_update_one_query(config.USERS_COL, find_query, update_query,\n error=True, error_msg=PROFILE_UPDATE_FAILED_ERR_MSG)\n logger.info(\"Profile update success for %s\", current_user.id)\n\n logger.debug(\"exiting function update_user_profile\")\n return get_success_response(PROFILE_UPDATE_SUCCESS_MSG)", "def users_profile_update(self):\n email_query = request.args.get('email')\n if not email_query:\n self.logger.debug((messages.MISSING_FIELDS_ERROR % \"email\"))\n return messages.ERROR_JSON % (messages.MISSING_FIELDS_ERROR % \"email\"), 400\n token = auth.current_user()[1]\n content = request.form\n password = content[\"password\"] if \"password\" in content else None\n fullname = content[\"fullname\"] if \"fullname\" in content else None\n phone_number = content[\"phone_number\"] if \"phone_number\" in content else None\n photo = Photo.from_bytes(request.files['photo'].stream) if 'photo' in request.files else None\n try:\n self.auth_server.profile_update(email=email_query, user_token=token,\n password=password, 
fullname=fullname,\n phone_number=phone_number, photo=photo)\n except UnauthorizedUserError:\n self.logger.debug(messages.USER_NOT_AUTHORIZED_ERROR)\n return messages.ERROR_JSON % messages.USER_NOT_AUTHORIZED_ERROR, 403\n except UnexistentUserError:\n self.logger.debug(messages.USER_NOT_FOUND_MESSAGE % email_query)\n return messages.ERROR_JSON % (messages.USER_NOT_FOUND_MESSAGE % email_query), 404\n return messages.SUCCESS_JSON, 200", "def update_user_profile_info(user_id, user_fname, user_lname, email):\n \n user=User.query.filter(User.user_id == user_id).first()\n\n if email != None:\n user.update_email(email)\n if user_fname != None:\n user.update_first_name(user_fname)\n if user_lname != None:\n user.update_last_name\n \n db.session.commit()", "def update_profile(name):\r\n user = User.query.filter_by(name=name).first()\r\n if not user:\r\n return abort(404)\r\n if current_user.id != user.id:\r\n return abort(403)\r\n show_passwd_form = True\r\n if user.twitter_user_id or user.google_user_id or user.facebook_user_id:\r\n show_passwd_form = False\r\n usr, apps, apps_created = cached_users.get_user_summary(name)\r\n # Extend the values\r\n current_user.rank = usr.get('rank')\r\n current_user.score = usr.get('score')\r\n # Title page\r\n title_msg = \"Update your profile: %s\" % current_user.fullname\r\n # Creation of forms\r\n update_form = UpdateProfileForm(obj=user)\r\n update_form.set_locales(current_app.config['LOCALES'])\r\n avatar_form = AvatarUploadForm()\r\n password_form = ChangePasswordForm()\r\n external_form = update_form\r\n\r\n\r\n if request.method == 'GET':\r\n return render_template('account/update.html',\r\n title=title_msg,\r\n user=usr,\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n show_passwd_form=show_passwd_form)\r\n else:\r\n # Update user avatar\r\n if request.form.get('btn') == 'Upload':\r\n avatar_form = AvatarUploadForm()\r\n if avatar_form.validate_on_submit():\r\n file = request.files['avatar']\r\n coordinates = (avatar_form.x1.data, avatar_form.y1.data,\r\n avatar_form.x2.data, avatar_form.y2.data)\r\n prefix = time.time()\r\n file.filename = \"%s_avatar.png\" % prefix\r\n container = \"user_%s\" % current_user.id\r\n uploader.upload_file(file,\r\n container=container,\r\n coordinates=coordinates)\r\n # Delete previous avatar from storage\r\n if current_user.info.get('avatar'):\r\n uploader.delete_file(current_user.info['avatar'], container)\r\n current_user.info = {'avatar': file.filename,\r\n 'container': container}\r\n db.session.commit()\r\n cached_users.delete_user_summary(current_user.name)\r\n flash(gettext('Your avatar has been updated! 
It may \\\r\n take some minutes to refresh...'), 'success')\r\n return redirect(url_for('.update_profile', name=current_user.name))\r\n else:\r\n flash(\"You have to provide an image file to update your avatar\",\r\n \"error\")\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n # Update user profile\r\n elif request.form.get('btn') == 'Profile':\r\n update_form = UpdateProfileForm()\r\n update_form.set_locales(current_app.config['LOCALES'])\r\n if update_form.validate():\r\n current_user.id = update_form.id.data\r\n current_user.fullname = update_form.fullname.data\r\n current_user.name = update_form.name.data\r\n current_user.email_addr = update_form.email_addr.data\r\n current_user.privacy_mode = update_form.privacy_mode.data\r\n current_user.locale = update_form.locale.data\r\n db.session.commit()\r\n cached_users.delete_user_summary(current_user.name)\r\n flash(gettext('Your profile has been updated!'), 'success')\r\n return redirect(url_for('.update_profile', name=current_user.name))\r\n else:\r\n flash(gettext('Please correct the errors'), 'error')\r\n title_msg = 'Update your profile: %s' % current_user.fullname\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n\r\n # Update user password\r\n elif request.form.get('btn') == 'Password':\r\n # Update the data because passing it in the constructor does not work\r\n update_form.name.data = user.name\r\n update_form.fullname.data = user.fullname\r\n update_form.email_addr.data = user.email_addr\r\n update_form.ckan_api.data = user.ckan_api\r\n external_form = update_form\r\n if password_form.validate_on_submit():\r\n user = db.session.query(model.user.User).get(current_user.id)\r\n if user.check_password(password_form.current_password.data):\r\n user.set_password(password_form.new_password.data)\r\n db.session.add(user)\r\n db.session.commit()\r\n flash(gettext('Yay, you changed your password succesfully!'),\r\n 'success')\r\n return redirect(url_for('.update_profile', name=name))\r\n else:\r\n msg = gettext(\"Your current password doesn't match the \"\r\n \"one in our records\")\r\n flash(msg, 'error')\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n else:\r\n flash(gettext('Please correct the errors'), 'error')\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n # Update user external services\r\n elif request.form.get('btn') == 'External':\r\n del external_form.locale\r\n del external_form.email_addr\r\n del external_form.fullname\r\n del external_form.name\r\n if external_form.validate():\r\n current_user.ckan_api = external_form.ckan_api.data or None\r\n db.session.commit()\r\n cached_users.delete_user_summary(current_user.name)\r\n flash(gettext('Your profile has been updated!'), 'success')\r\n return redirect(url_for('.update_profile', name=current_user.name))\r\n else:\r\n flash(gettext('Please correct the errors'), 
'error')\r\n title_msg = 'Update your profile: %s' % current_user.fullname\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n # Otherwise return 415\r\n else:\r\n return abort(415)", "def update_my_profile(\n body: Optional[UserProfileUpdate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = UpdateMyProfile.create(\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def edit_profile():\n # handle pre-flight for browsers CORS access\n if request.method == \"OPTIONS\":\n return generate_response()\n # part1: verify that user has logged in and the request is legit\n checked_and_verified, response = check_verify_token(request,login_session)\n if checked_and_verified != True: return response\n # handle the edge case where user is authorized to perform create user but not other method\n if not is_loggedin(login_session):\n response = generate_message(MESSAGE_USER_NOT_LOGGED_IN,401)\n return response\n # part2: check json\n checked_json, response, requested_json = check_json_form(request,MESSAGE_BAD_JSON,MESSAGE_CREATE_USER_NO_JSON)\n if checked_json != True: return response\n # part3: verify json data\n try:\n user_email = login_session[\"login_user_email\"]\n except KeyError:\n # key error means we are offline til this far\n user_email = requested_json[\"email\"]\n # design decision: if there are invalid field names, only update the valid fields.\n # check updates keys and formats\n try:\n update_pairs = convert_to_underscore(requested_json[\"updates\"])\n \n if isinstance(update_pairs,dict) != True:\n response = generate_message(MESSAGE_UPDATE_PROFILE_NON_DICT,400)\n else:\n correct_format,valid_update_pairs, response = process_request_json(User,update_pairs)\n if correct_format == True: \n update_field(User, session, {\"email\": user_email},valid_update_pairs)\n response = generate_message(MESSAGE_UPDATE_PROFILE_SUCCESS,200)\n except KeyError:\n response = generate_message(MESSAGE_UPDATE_PROFILE_NO_ENTRY,400)\n return response", "def put(self, request):\n profile = Profile.get_by_id(request.user.id)\n if not profile:\n return HttpResponse(status=403)\n user = CustomUser.objects.get(id=request.user.id)\n update_data = json.loads(request.body.decode('utf-8'))\n user.update(first_name=update_data.get('first_name'),\n last_name=update_data.get('last_name'))\n profile.update(\n birthday=update_data.get('birthday'),\n gender=update_data.get('gender'),\n hobbies=update_data.get('hobbies'),\n facebook=update_data.get('facebook'))\n data = profile.to_dict()\n return JsonResponse(data, status=200)", "def update_user():", "def update_profile(username):\n\n description = request.json.get('description')\n token = request.headers.get('token')\n\n if description is None:\n return jsonify({'message': 'New description not provided'}), 404\n\n # Token Validation\n token_valid, response = is_token_valid(token)\n if not token_valid:\n return response\n token_username = response\n\n # Privilege handling\n if token_username != username:\n return jsonify({'message': \"You may not edit others profiles\"}), 404\n\n if username not in Profiles.keys():\n return jsonify({'message': 'User {} not 
found'.format(username)}), 404\n\n Profiles[username]['description'] = description\n return Profiles[username]", "def update_user(context, params):\n\n user = User.objects.filter(id=params.get('id')).first()\n if not user:\n raise ValueError(\"user not found\")\n user.language = Language.objects.filter(id=params.get('language_id', None)).first()\n user.deputy = User.objects.filter(id=params.get('deputy_id', None)).first()\n # user.edited_by = context.user\n\n user.save()\n\n update_person(context, user, params)\n\n user.save()\n return user", "def update_user_profile(IamUserArn=None, SshUsername=None, SshPublicKey=None, AllowSelfManagement=None):\n pass", "def update_profile(self, method=\"POST\", id=1, fullname=\"John Doe\",\r\n name=\"johndoe\", locale=\"es\",\r\n email_addr=\"johndoe@example.com\",\r\n new_name=None,\r\n btn='Profile'):\r\n url = \"/account/%s/update\" % name\r\n if new_name:\r\n name = new_name\r\n if (method == \"POST\"):\r\n return self.app.post(url,\r\n data={'id': id,\r\n 'fullname': fullname,\r\n 'name': name,\r\n 'locale': locale,\r\n 'email_addr': email_addr,\r\n 'btn': btn},\r\n follow_redirects=True)\r\n else:\r\n return self.app.get(url,\r\n follow_redirects=True)", "def viewprofile():\n user = current_user\n form = UserUpdateForm(obj=user)\n form.populate_obj(user)\n if form.validate_on_submit():\n form.populate_obj(user)\n\n db.session.commit()\n\n flash('You have successfully edited your profile!')\n return render_template('user/user.html', title=\"View Profile\",\n user=user, form=form, action='Edit')", "def update_user():\n #TODO user update \n pass", "def edit_user():\n if CURR_USER_KEY in session:\n user = g.user\n form = ProfileEditForm(obj=user)\n\n if form.validate_on_submit():\n user.first_name = form.first_name.data\n user.last_name = form.last_name.data\n user.description = form.description.data\n user.email = form.email.data\n user.image_url = form.image_url.data or \"/static/images/default-pic.png\"\n\n db.session.commit()\n\n flash(\"Profile edited.\")\n return redirect(\"/profile\")\n\n return render_template('/profile/edit-form.html', form=form)\n else:\n return redirect('/login')", "def update(\n self, name: str = None, company: str = None, bio: str = None, avatar: str = None\n ):\n query = gql(\n \"\"\"\n mutation UserUpdate($user: UserUpdateInput!) {\n userUpdate(user: $user)\n }\n \"\"\"\n )\n params = {\"name\": name, \"company\": company, \"bio\": bio, \"avatar\": avatar}\n\n params = {\"user\": {k: v for k, v in params.items() if v is not None}}\n\n if not params[\"user\"]:\n return SpeckleException(\n message=\"You must provide at least one field to update your user profile\"\n )\n\n return self.make_request(\n query=query, params=params, return_type=\"userUpdate\", parse_response=False\n )", "def user_profile():\n user = current_user\n user_is_valid = True\n if not user.active:\n flash('This user account is under review. 
Please update your profile '\n + ' and contact the organizing team to access all functions of '\n + 'this platform.', 'warning')\n\n form = UserForm(obj=user, next=request.args.get('next'))\n form.roles.choices = [(r.id, r.name) for r in Role.query.order_by('name')]\n\n # Check conflicting PKs\n if form.email.data != user.email:\n if User.query.filter_by(email=form.email.data).first() is not None:\n flash('This e-mail address is already registered.', 'error')\n user_is_valid = False\n\n if user.sso_id:\n # Do not allow changing password on SSO\n del form.password\n\n # Validation has passed\n if form.is_submitted() and form.validate() and user_is_valid:\n # Assign roles\n user.roles = [Role.query.filter_by(\n id=r).first() for r in form.roles.data]\n del form.roles\n\n # Sanitize username\n user.username = sanitize_input(form.username.data)\n del form.username\n\n # Assign password if changed\n originalhash = user.password\n form.populate_obj(user)\n # Do not allow changing password on SSO\n if not user.sso_id:\n if form.password.data:\n user.set_password(form.password.data)\n else:\n user.password = originalhash\n\n user.updated_at = datetime.utcnow()\n db.session.add(user)\n db.session.commit()\n user.socialize()\n flash('Profile updated.', 'success')\n return redirect(url_for('public.user', username=user.username))\n\n if not form.roles.choices:\n del form.roles\n else:\n form.roles.data = [(r.id) for r in user.roles]\n return render_template('public/useredit.html',\n oauth_type=oauth_type(),\n user=user, form=form,\n active='profile')", "def edit_profile(self, name, username, email):\n return self.app.post('/_editProfile', data = dict(\n name = name,\n username = username,\n email = email\n ), follow_redirects = True)", "def update_user_profile(\n user_id: str,\n body: Optional[UserProfileAdmin] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = UpdateUserProfile.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def update_user_profile(id):\n token = request.json['token']\n u = user.User.query.filter(user.User.token == token).first()\n if u is None:\n abort(404)\n if u.id != id:\n print \"user id is wrong.\" #TODO: Support log system\n abort(500)\n u.name = request.json['name']\n u.nickname = request.json['nickname']\n u.company = request.json['nickname']\n with store_context(fs_store):\n with open(files.path(request.json['header'])) as f:\n u.header_icon.from_file(f)\n db.session.merge(u)\n db.session.commit()\n db.session.merge(u)\n db.session.commit()\n return jsonify(u.to_dict())", "def public_update_user_profile(\n user_id: str,\n body: Optional[UserProfileUpdate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicUpdateUserProfile.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def update_user_personal_data():\n if not 'user' in session:\n raise InvalidUsage(\"Access denied\", 401)\n\n data = request.json\n if 'name' not in data or not data['name']:\n raise InvalidUsage(\"Name must not be empty\", 422)\n if 'surname' not 
in data or not data['surname']:\n raise InvalidUsage(\"Surname must not be empty\", 422)\n if 'currentPassword' not in data or len(data['currentPassword']) < 6:\n raise InvalidUsage(\"Current password must have more then 5 characters\", 422)\n\n database = mysql.get_db()\n cursor = database.cursor()\n activeUser = session.get('user')\n\n query = '''SELECT password\n FROM users\n WHERE users.id = %s'''\n\n cursor.execute(query, (activeUser['id']))\n user = cursor.fetchone()\n\n if not bcrypt.check_password_hash(user['password'], data['currentPassword']):\n raise InvalidUsage(\"Wrong current password\", 401)\n\n query = '''UPDATE users\n SET name = %s, surname = %s\n WHERE id = %s'''\n\n cursor.execute(query, (data['name'], data['surname'], session.get('user')['id']))\n database.commit()\n\n activeUser['name'] = data['name']\n activeUser['surname'] = data['surname']\n session['user'] = activeUser\n\n return jsonify({'message': 'Successfully updated'}), 200", "async def update_my_profile_async(\n body: Optional[UserProfileUpdate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = UpdateMyProfile.create(\n body=body,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def update_user(cls, **kwargs):\n return cls._do_call(\n 'PUT', cls.api_endpoint + 'users', params=kwargs)", "def edit_user_profile(user_id):\n if CURRENT_USER_KEY not in session or session[CURRENT_USER_KEY] != user_id:\n raise Unauthorized()\n\n user = User.query.get_or_404(user_id)\n\n form = UserEditForm(obj=user)\n\n if form.validate_on_submit():\n try:\n user.email = form.email.data\n user.username = form.username.data\n user.first_name = form.first_name.data.capitalize()\n user.last_name = form.last_name.data.capitalize()\n user.image_url = form.image_url.data or User.image_url.default.arg\n user.cover_url = form.cover_url.data or User.cover_url.default.arg\n user.bio = form.bio.data\n\n db.session.commit()\n except IntegrityError:\n db.session.rollback()\n flash(\n \"Email or Username already taken!! Please try again\", 'danger')\n return render_template('edit_profile.html', form=form, user=user, img_src=user.image_url)\n\n flash('Profile Successfully Updated!', 'success')\n return redirect(url_for('show_user_profile', user_id=user.id))\n return render_template('edit_profile.html', form=form, user=user, img_src=user.image_url)", "def edit_profile(request):\n profile_to_edit = get_object_or_404(UserProfile, user=request.user)\n if request.method == \"POST\":\n form = UserProfileForm(request.POST, instance=profile_to_edit)\n if form.is_valid:\n form.save()\n messages.success(request, \"Profile updated succesfully\")\n return redirect('profile')\n else:\n messages.error(request, \"Updated failed. 
\\\n Please ensure the form is valid\")\n else:\n profile_form = UserProfileForm(instance=profile_to_edit)\n template = 'profiles/edit_profile.html'\n context = {\n 'form': profile_form,\n }\n return render(request, template, context)", "def updateProfile( token, user=False, userinfo={'nickname':'newUser','first_name':'newUser'}):\n \n if not user:\n l= list(validName)\n sysrand.shuffle(l)\n l= \"\".join(l)\n print \"Attempting to create a user with the name \"+l\n user=User.objects.create_user(l,'')\n user.save()\n sid = transaction.savepoint()\n updateName( user, str(userinfo['nickname']).replace(' ',''), userinfo['first_name'], sid )\n transaction.savepoint_commit(sid)\n\n try: \n userprofile = user.get_profile()\n userprofile.uid = cPickle.dumps(token) #ensures the token parameter is retreivable and unique\n userprofile.user_id = user.id\n userprofile.save()\n transaction.commit()\n except:\n transaction.rollback()\n return user", "def update_profile(orcid_id, data=None):\n \n u = db.session.query(User).filter_by(orcid_id=orcid_id).first()\n if u:\n u.updated = datetime.utcnow()\n if data:\n u.profile = data\n # save the user\n db.session.begin_nested()\n try:\n db.session.add(u)\n db.session.commit()\n except exc.IntegrityError as e:\n db.session.rollback()\n # per PEP-0249 a transaction is always in progress \n db.session.commit()", "def update_profile(request):\n collected_values = {}\n\n # Only allow POST requests with this endpoint\n if request.method != 'POST':\n collected_values[\"success\"] = False\n collected_values[\"errmsg\"] = \"Wrong HTTP verb\"\n return JsonResponse(collected_values, status=400)\n\n # Extract params\n uid = request.POST.get('user_id')\n username = request.POST.get('username')\n password = request.POST.get('password')\n email = request.POST.get('email')\n profile_picture = request.POST.get('profile_picture')\n image_index = request.POST.get('image_index')\n images_visited = request.POST.get('images_visited')\n friends = request.POST.get('friends')\n security_level = request.POST.get('security_level')\n token = request.POST.get('token')\n info = request.POST.get('info')\n\n # Check auth\n is_valid, collected_values[\"token\"] = check_auth(uid, token, timezone.now())\n if not is_valid:\n collected_values[\"success\"] = False\n collected_values[\"errmsg\"] = \"Invalid Token\"\n return JsonResponse(collected_values, status=400)\n\n user_obj = LUser.objects.filter(user_id=uid)[0]\n\n # Potentiall load object with valid values\n if username is not None: user_obj.username = username\n if password is not None: user_obj.password = password\n if email is not None: user_obj.email = email\n if profile_picture is not None: user_obj.profile_picture = profile_picture\n if image_index is not None: user_obj.image_index = image_index\n if images_visited is not None: user_obj.images_visited = images_visited\n if friends is not None: user_obj.friends = friends\n if security_level is not None: user_obj.security_level = security_level\n if info is not None: user_obj.info = info\n\n # Update user record\n user_obj.save()\n\n # Collect Return values\n collected_values[\"success\"] = True\n\n LOGGER.info(\"Update Profile Result: %s\", collected_values)\n return JsonResponse(collected_values, status=200)", "def profile():\n\n if not g.user:\n flash(\"Access unauthorized.\", \"danger\")\n return redirect(\"/\")\n\n form = UserEditForm(obj=g.user)\n\n if form.validate_on_submit():\n if not User.authenticate(g.user.username, form.data[\"password\"]):\n flash(\"Invalid password.\", 
\"danger\")\n return render_template('/users/edit.html', form=form) \n # data = {k:v for k,v in form.data.items() if k != \"csrf_token\"}\n # data[\"image_url\"] = data[\"image_url\"] or None\n # data[\"header_image_url\"] = data[\"header_image_url\"] or None\n\n g.user.username = form.data[\"username\"]\n g.user.email = form.data[\"email\"]\n g.user.image_url = form.data[\"image_url\"] or None\n g.user.header_image_url = form.data[\"header_image_url\"] or None\n g.user.bio = form.data[\"bio\"]\n\n db.session.commit()\n\n flash(\"Profile edited!\", \"success\")\n return redirect(f'/users/{g.user.id}')\n\n return render_template('/users/edit.html', form=form)", "def save_user_profile(instance, **_):\n instance.profile.save()", "def update_user(self):\n self.client.force_authenticate(user=self.user)\n self.response = self.client.patch(\n reverse(\n 'edit_account',kwargs={ 'pk': self.user.id}),\n self.updated_data, format='json'\n )\n self.user = CustomUser.objects.get(username=self.user.username)", "def edit_user_profile(request):\n user = request.user\n user_profile = UserProfile.objects.filter(user=user)[0]\n if request.method == 'POST':\n form = MemberProfileForm(request.POST)\n additional_form = MemberAdditionalProfileForm(request.POST)\n if form.is_valid() and additional_form.is_valid():\n cd = form.cleaned_data\n user.first_name = cd['first_name']\n user.last_name = cd['last_name']\n user.email = cd['email']\n user.save()\n if 'picture' in request.FILES:\n file = request.FILES['picture']\n user_profile.picture.save(file.name, file, save=True)\n user_profile.gravatar = additional_form.cleaned_data['gravatar']\n user_profile.save()\n return HttpResponseRedirect('/')\n else:\n form = MemberProfileForm(instance=request.user)\n additional_form = MemberAdditionalProfileForm(instance=user_profile)\n return render_to_response('edit_profile.html', locals())", "def updateUserProfile(request):\n user = request.user\n serializer = UserSerializerWithToken(user, many=False)\n\n # If the user has put in two names, separate it into first_name and last_name and save that data.\n try:\n first_name = request.data['name'].split()[0]\n last_name = request.data['name'].split()[1]\n\n user.first_name = first_name\n user.last_name = last_name\n user.username = request.data['email']\n user.email = request.data['email']\n\n # Only modify the password if the field isn't empty.\n if request.data['password'] != '':\n user.password = make_password( request.data['password'] )\n\n user.save()\n return Response(serializer.data)\n\n # For users who enter one name.\n except:\n user.first_name = request.data['name']\n user.last_name = ''\n user.username = request.data['email']\n user.email = request.data['email']\n\n if request.data['password'] != '':\n user.password = make_password( request.data['password'] )\n\n user.save()\n return Response(serializer.data)", "def edit_profile(request):\n if request.method == 'POST':\n form = EditProfileForm(request.POST, instance=request.user)\n\n if form.is_valid():\n form.save()\n messages.success(request, 'Profile updated successfully.')\n return redirect('profile')\n\n else:\n messages.error(request, 'Invalid entry, please try again.')\n return redirect('edit_profile')\n else:\n form = EditProfileForm(instance=request.user)\n return render(request, 'accounts/edit_profile.html', {'form': form})", "def do_user_update():\n targetUsers = User.query.filter_by(id=request.form['id']).all()\n if not any(targetUsers):\n return user_list(\"Unknown user.\")\n\n targetUser = 
targetUsers[0]\n\n targetUser.first_name = request.form['first_name']\n targetUser.name = request.form['name']\n targetUser.nick = request.form['nick']\n targetUser.mail = request.form['mail']\n targetUser.role = request.form['role']\n targetUser.state = request.form['state']\n targetUser.gender = request.form['gender']\n targetUser.meter_id = request.form['meter_id']\n targetUser.group_id = request.form['group_id']\n\n db.session.commit()\n return user_list(\"Updated user \" + targetUser.name)", "def test_can_update_user_profile(self):\n self.update_user()\n self.assertEqual(self.user.first_name, self.updated_data['first_name'])\n self.assertEqual(self.user.last_name, self.updated_data['last_name'])\n self.assertEqual(self.user.email, self.updated_data['email'])", "def user_update(user_id, user_info):\n user = lookup_user_by_id(user_id)\n for (key, value) in user_info.iteritems():\n if key == \"first_name\" and value is not None:\n user.first_name = value\n elif key == \"last_name\" and value is not None:\n user.last_name = value\n elif key == \"email\" and value is not None:\n try:\n lookup_user_by_email(value)\n except:\n user.email = value\n elif key == \"password\" and value is not None:\n user.set_password(value)\n elif key == \"active\" and value is not None:\n if value:\n user.activate()\n else:\n user.deactivate()\n send_activation_mail.delay(user_id)\n elif key == \"social\" and value is not None:\n user.meta['social'] = value\n elif key == \"address\" and value is not None:\n user.meta['address'] = value\n elif key == \"crm\" and value is not None:\n user.meta['crm'] = value\n elif key == \"local\" and value is not None:\n user.meta['local'] = value\n return user_to_dict(user)", "def profile():\n\n form = EditUserForm(obj=g.user)\n\n if form.validate_on_submit():\n if User.authenticate(g.user.username, form.password.data):\n g.user.username = form.username.data\n g.user.email = form.email.data\n g.user.image_url = form.image_url.data\n g.user.header_image_url = form.header_image_url.data\n g.user.bio = form.bio.data\n g.user.private = form.private.data\n db.session.commit()\n return redirect(f'/users/{g.user.id}')\n flash('Incorrect password', 'danger')\n return render_template('users/edit.html', user_id=g.user.id, form=form)", "def updateUser(self, payload):\n\t\turl = \"https://habitica.com/api/v3/user\"\n\t\treturn(putUrl(url, self.credentials, payload))", "def update_user(self, username):\n parser_update.add_argument('email', type=validate_email,\n required=False, nullable=False,\n help=\"Email must be formatted correctly\")\n\n parser_update.add_argument('phoneNumber', type=validate_phonenumber,\n required=False, nullable=False,\n help=\"Enter a valid phone number\")\n\n parser_update.add_argument('firstname', type=validate_characters,\n required=False, nullable=False,\n help=\"First name must be formatted correctly\")\n\n parser_update.add_argument('lastname', type=validate_characters,\n required=False, nullable=False,\n help=\"Last name must be formatted correctly\")\n\n parser_update.add_argument('othernames', type=validate_characters,\n required=False, nullable=False,\n help=\"Other name must be formatted correctly\")\n\n user = self.get_user(username)\n if user is None:\n return None\n\n args = parser_update.parse_args()\n new_data = {\n 'email': request.json.get('email', user['email']).lower(),\n 'firstname': request.json.get('firstname', user['firstname']).capitalize(),\n 'lastname': request.json.get('lastname', user['lastname']).capitalize(),\n 'othernames': 
request.json.get('othernames', user['othernames']).capitalize(),\n 'phoneNumber': request.json.get('phoneNumber', user['phonenumber']),\n }\n\n getEmail = self.get_user(new_data['email'])\n verification_status = True\n\n if user['email'] != new_data['email']:\n if getEmail is not None:\n return 'email exists'\n verification_status = False\n\n query = \"\"\"UPDATE users SET firstname=%s,lastname=%s,othernames=%s,\\\n email=%s,phonenumber=%s,emailverified=%s WHERE username=%s\"\"\"\n values = new_data['firstname'], new_data['lastname'], new_data['othernames'], new_data['email'], new_data['phoneNumber'], verification_status, username\n\n conn = self.db\n cursor = conn.cursor()\n cursor.execute(query, values)\n conn.commit()\n return new_data", "def update_adminhod_view(request):\r\n # get current adminhod.\r\n adminhod = get_object_or_404(AdminHOD, user__id=request.user.id) \r\n # display adminhod's initial data.\r\n user_form = UpdateUserForm(\r\n request.POST or None,\r\n staff_student=adminhod, \r\n instance=adminhod,\r\n initial={'full_name': adminhod.user.full_name,\r\n 'email': adminhod.user.email, \r\n })\r\n if request.method == 'POST':\r\n if user_form.is_valid():\r\n # update adminhod.\r\n adminhod.user.full_name = user_form.cleaned_data.get(\"full_name\")\r\n adminhod.user.email = user_form.cleaned_data.get(\"email\")\r\n adminhod.user.save()\r\n # Display success message.\r\n messages.success(request, f'Your profile has been updated successfully.', extra_tags='update-adminhod-profile')\r\n return redirect('adminhod:update-adminhod-profile') \r\n context = {'user_form':user_form}\r\n return render(request, 'adminhod/update_adminhod_profile.html', context)", "def post(self, request):\n\n user_obj = UserProfile.objects.filter(id=request.user.id).first()\n if not user_obj:\n return existence_error('user')\n\n request_json = {\n 'username': request.data.get('username'),\n 'name': request.data.get('name'),\n 'last_name': request.data.get('last_name'),\n 'bio': request.data.get('bio'),\n 'website': request.data.get('website'),\n 'email': request.data.get('email'),\n 'phone_number': request.data.get('phone_number'),\n 'business': request.data.get('business'),\n 'country': request.data.get('country'),\n 'city': request.data.get('city')\n }\n\n user_serialized = EditUserProfileSerializer(user_obj, data=request_json, partial=True)\n if not user_serialized.is_valid():\n return validate_error(user_serialized)\n user_serialized.save()\n\n response_json = {\n 'status': True,\n 'message': 'successful',\n 'data': {}\n }\n\n return Response(response_json, status=201)", "def test_update_profile(self):\n self.cim.update_profile(\n customer_id=u\"222\",\n description=u\"Foo bar baz quz\",\n email=u\"dialtone@gmail.com\",\n customer_profile_id=u\"122\"\n )", "def patch(self, request):\n\n user_obj = UserProfile.objects.filter(id=request.user.id).first()\n if not user_obj:\n return existence_error('user')\n\n request_json = {\n 'profile_photo': request.data.get('profile_photo')\n }\n\n user_serialized = EditUserProfileSerializer(user_obj, data=request_json, partial=True)\n if not user_serialized.is_valid():\n return validate_error(user_serialized)\n user_serialized.save()\n\n response_json = {\n 'status': True,\n 'message': 'successful',\n 'data': {}\n }\n\n return Response(response_json, status=201)", "def update_user_profile_deep(id):\n token = request.json['token']\n u = user.User.query.filter(user.User.token == token).first()\n if u is None:\n abort(404)\n if u.id != id:\n print \"user id is 
wrong.\" #TODO: Support log system\n abort(500)\n u.name = request.json['name']\n u.title = request.json['title']\n u.company = request.json['company']\n u.nickname = request.json['company']\n u.phone_number = request.json['phone_number']\n u.email = request.json['email']\n if request.json['password'] != None and request.json['password'] != '':\n u.password = request.json['password']\n db.session.merge(u)\n db.session.commit()\n return jsonify(u.to_dict())", "def update_user_info(user, save=True):\n p = bayou.Person.from_default_services(user.username)\n\n user.email = p.email if p.email else user.email\n user.first_name = p.first_name if p.first_name else user.first_name\n user.last_name = p.surname if p.surname else user.last_name\n\n if save:\n user.save()\n\n return user", "def edit_profile(request):\r\n\r\n user = request.user\r\n profile = Profile.objects.for_user(user)\r\n\r\n if request.method != 'POST':\r\n profile_form = ProfileForm(instance=profile)\r\n user_form = UserForm(instance=user)\r\n else:\r\n profile_form = ProfileForm(request.POST, instance=profile)\r\n user_form = UserForm(request.POST, instance=user)\r\n\r\n if profile_form.is_valid() and user_form.is_valid():\r\n profile_form.save()\r\n user_form.save()\r\n\r\n return HttpResponseRedirect(reverse('epic.core.views.view_profile', kwargs={}))\r\n\r\n return render_to_response(\r\n 'core/edit_profile.html',\r\n {'profile_form': profile_form, 'user_form': user_form,},\r\n context_instance=RequestContext(request))", "def profile(request):\n if request.method == 'POST':\n form = UpdateForm(request.POST, instance=request.user)\n \n if form.is_valid():\n form.save()\n messages.success(request, f'Your account has been updated!')\n return redirect('profile')\n else:\n form = UpdateForm(instance=request.user)\n\n context = {\n 'title': 'Profile',\n 'form': form,\n 'prices': get_pix_price(),\n 'colors_pack': Colors_pack.objects.all().prefetch_related('contains'),\n }\n return render(request, 'users/profile.html', context)", "def profile_edit():\n form = ProfileForm(obj=current_user)\n\n if form.validate_on_submit():\n form.populate_obj(current_user)\n\n try:\n correct = True\n db.session.commit()\n\n flash(_('Profile updated correctly'), 'success')\n\n return render_template('admin/profile/edit.html', form=form)\n\n except IntegrityError:\n # Email already exists\n correct = False\n form.errors.email.append(_('Email is already registered'))\n\n return render_template('admin/profile/edit.html', form=form)\n\n except Exception:\n # Catch anything unknown\n correct = False\n\n flash(_('Failed to update profile, contact an administrator'), 'error')\n\n return render_template('admin/profile/edit.html', form=form)\n\n finally:\n if not correct:\n db.session.rollback()\n\n return render_template('admin/profile/edit.html', form=form)", "async def public_update_user_profile_async(\n user_id: str,\n body: Optional[UserProfileUpdate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicUpdateUserProfile.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def putProfile(profileType,value):\n # PUT /profile/$profileType\n pass", "def update_user(self, user_id, **kwargs):\n user = self.get(user_id, raise_error=True)\n if 'display_name' in 
kwargs:\n user.display_name = kwargs['display_name']\n if 'email' in kwargs:\n user.email = kwargs['email']\n if 'verified' in kwargs:\n user.verified = kwargs['verified']\n self.session.add(user)", "def update(self, user: U) -> None:\n ...", "def update_profile(profile_id):\n \n profile = mongo.db.profiles\n profile.find_one_and_update({'_id': ObjectId(profile_id)},\n {'$set': {'date': datetime.utcnow(),\n 'headline': request.form.get('headline'),\n 'bio': request.form.get('bio'),\n 'xp': request.form.get('xp'),\n 'interests': request.form.get('interests'),\n 'stack': request.form.get('stack'),\n 'languages': request.form.get('languages'),\n 'frameworks': request.form.get('frameworks'),\n 'github': request.form.get('github'),\n 'linkedin': request.form.get('linkedin')\n }\n }\n )\n return redirect(url_for('dashboard'))", "def update_user(user_id):\n\n user = User.query.get_or_404(user_id)\n user.first_name = request.form[\"edit_first_name\"]\n user.last_name = request.form[\"edit_last_name\"]\n user.image_url = request.form[\"edit_image_url\"]\n\n db.session.add(user)\n db.session.commit()\n return redirect(\"/users\")", "def update_user_profile(self,\n account_id: str,\n iam_id: str,\n *,\n firstname: str = None,\n lastname: str = None,\n state: str = None,\n email: str = None,\n phonenumber: str = None,\n altphonenumber: str = None,\n photo: str = None,\n **kwargs\n ) -> DetailedResponse:\n\n if account_id is None:\n raise ValueError('account_id must be provided')\n if iam_id is None:\n raise ValueError('iam_id must be provided')\n headers = {}\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='update_user_profile')\n headers.update(sdk_headers)\n\n data = {\n 'firstname': firstname,\n 'lastname': lastname,\n 'state': state,\n 'email': email,\n 'phonenumber': phonenumber,\n 'altphonenumber': altphonenumber,\n 'photo': photo\n }\n data = {k: v for (k, v) in data.items() if v is not None}\n data = json.dumps(data)\n headers['content-type'] = 'application/json'\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n\n path_param_keys = ['account_id', 'iam_id']\n path_param_values = self.encode_path_vars(account_id, iam_id)\n path_param_dict = dict(zip(path_param_keys, path_param_values))\n url = '/v2/accounts/{account_id}/users/{iam_id}'.format(**path_param_dict)\n request = self.prepare_request(method='PATCH',\n url=url,\n headers=headers,\n data=data)\n\n response = self.send(request)\n return response", "def profile(request):\n profile = get_object_or_404(UserProfile, user=request.user)\n\n if request.method == 'POST':\n form = UserProfileForm(request.POST, instance=profile)\n user_email = UserForm(instance=request.user)\n if form.is_valid():\n form.save()\n messages.success(request, 'Profile updated successfully')\n else:\n messages.error(request, 'Update failed. Please ensure the form is valid.')\n # doesnt work yet: Email is not saved:\n\n if user_email.is_valid():\n user_email.save()\n messages.success(request, 'Profile updated successfully')\n else:\n messages.error(request, 'Update failed. 
Please ensure the form is valid.')\n else:\n form = UserProfileForm(instance=profile)\n user_email = UserForm(instance=request.user)\n\n template = 'profiles/profile.html'\n context = {\n 'form': form,\n 'user_email': user_email,\n 'on_profile_page': True,\n 'profile': profile\n }\n\n return render(request, template, context)", "def update_user_profile(sender, instance, created, **kwargs):\n if created:\n GameplanUser.objects.create(user=instance)\n instance.gameplanuser.save()", "def update(self, profile: Dict[datetime.time, float]) -> None:\n\n if self._profile is None:\n self._profile = profile\n else:\n self._profile.update(profile)", "def edit_profile():\n form = EditProfileForm()\n if request.method == 'GET':\n form.first_name.data = current_user.first_name\n form.first_name.data = current_user.first_name\n form.last_name.data = current_user.last_name\n form.email.data = current_user.email\n form.address_1.data = current_user.address_1\n form.address_2.data = current_user.address_2\n form.city.data = current_user.city\n form.state.data = current_user.state\n form.zipcode.data = current_user.zipcode\n form.telephone.data = current_user.telephone\n if form.validate_on_submit():\n form.last_name.data = form.last_name.data\n current_user.first_name = form.first_name.data\n current_user.last_name = form.last_name.data\n current_user.email = form.email.data\n current_user.address_1 = form.address_1.data\n current_user.address_2 = form.address_2.data\n current_user.city = form.city.data\n current_user.state = form.state.data\n current_user.zipcode = form.zipcode.data\n current_user.telephone = form.telephone.data\n db.session.commit()\n flash(('Your changes have been saved.'))\n\n return redirect(url_for('edit_profile'))\n\n return render_template('edit_profile.html', title=('Edit Profile'),\n form=form)", "async def update_user_profile_async(\n user_id: str,\n body: Optional[UserProfileAdmin] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = UpdateUserProfile.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def update_user(request):\n post = request.POST.dict()\n user = post.get('user_id')\n if user is None:\n response = {'status':-1, 'status_message':'No user_id specified'}\n return HttpResponse(json.dumps(response))\n try:\n user_obj = User.objects.get(id=user)\n except User.DoesNotExist:\n response = {'status':-1, 'status_message':'Invalid user_id: {}'.format(user)}\n return HttpResponse(json.dumps(response))\n user_obj.first_name = post.get('first_name')\n user_obj.last_name = post.get('last_name')\n password = post.get('password')\n if password and password != \"**********\":\n # update the password\n user_obj.set_password(password)\n if post.get('username'):\n user_obj.username = post['username']\n user_obj.email = post.get('email')\n user_obj.is_superuser = json.loads(post.get('is_admin', 'false'))\n user_obj.is_active = json.loads(post.get('is_enabled', 'false'))\n user_obj.save()\n response = {'status': 1, 'status_message': 'Success'}\n return HttpResponse(json.dumps(response))", "def test_040_update_user(self):\n\n testflow.step(\"Updating user %s\", TEST_USER2)\n assert USER_CLI.run(\n 'edit',\n TEST_USER2,\n attribute='firstName=userX2',\n )[0]", "def update_user(self, user):\n # 
type: (dict) -> dict\n self.request_url = \"{0}/{1}/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['id'])\n return self.__create_request(payload=user, request_type=self.REQUEST_PUT, version=\"v1\")", "def profile(request):\n profile = get_object_or_404(User, username=request.user)\n\n if request.method == 'POST':\n form = SignupForm(request.POST, instance=profile)\n if form.is_valid():\n form.save()\n messages.success(request, 'Profile updated successfully')\n\n form = SignupForm()\n\n template = 'profiles/profile.html'\n context = {\n 'form': form,\n 'page': 'account',\n }\n\n return render(request, template, context)", "def update_user(self, username, password, fullname, description, email):\n params = {\n \"f\" : \"json\",\n \"username\" : username\n }\n if password is not None:\n params['password'] = password\n if fullname is not None:\n params['fullname'] = fullname\n if description is not None:\n params['description'] = description\n if email is not None:\n params['email'] = email\n uURL = self._url + \"/users/update\"\n return self._con.post(path=uURL, postdata=params)", "def updateUser(self, uID, ufirstname, ulastname, udescription, urole, uclassification):\n cursor = self.conn.cursor()\n query= \"UPDATE Users \"\\\n \"SET ufirstname= %s, ulastname= %s, udescription= %s, urole= %s, uclassification= %s \"\\\n \"WHERE uID= %s; \"\n cursor.execute(query,(ufirstname, ulastname, udescription, urole, uclassification,uID,))\n self.conn.commit()\n return uID", "def edit_user(user_id):\n if request.method == 'GET':\n # init form with current user:\n form = ProfileForm(\n nickname = session[Session.PROFILE][\"nickname\"], \n location = session[Session.PROFILE][\"location\"],\n about_me = session[Session.PROFILE][\"about_me\"]\n )\n if request.method == 'POST': \n # init form with POSTed form:\n form = ProfileForm(request.form)\n\n if form.validate(): \n # update backend:\n response = service_user_management.patch(\n id = f'auth0|{user_id}', \n nickname = form.nickname.data, \n location = form.location.data,\n about_me = form.about_me.data\n )\n\n # success:\n if 'identities' in response: \n try:\n # update db:\n delegated_user = DelegatedUser.query.get_or_404(\n user_id, \n description='There is no user with id={}'.format(user_id)\n )\n delegated_user.nickname = form.nickname.data\n # update:\n db.session.add(delegated_user)\n # write\n db.session.commit()\n\n # update session:\n session[Session.PROFILE][\"nickname\"] = form.nickname.data\n session[Session.PROFILE][\"location\"] = form.location.data\n session[Session.PROFILE][\"about_me\"] = form.about_me.data\n \n # on successful profile update, flash success\n flash('Your profile was successfully updated.')\n\n return redirect(url_for('.show_user', user_id = user_id))\n except:\n db.session.rollback()\n # on unsuccessful registration, flash an error instead.\n flash('An error occurred. 
New account could not be created.')\n finally:\n db.session.close()\n # failure:\n else:\n flash(response['message']) \n else:\n # for debugging only:\n flash(form.errors)\n \n return render_template('users/forms/user.html', form=form, user_id=user_id)", "def update_user(user_id):\n user = User.query.get_or_404(user_id)\n user.first_name = request.form['first_name']\n user.last_name = request.form['last_name']\n user.image_url = request.form['image_url']\n\n db.session.add(user)\n db.session.commit()\n\n return redirect(\"/users\")", "def update_user_data(self, new_user: User):\n self.user_data.update_user_data(new_user)", "def partial_update(self, request, pk=None):\n\n user_to_update = WhoYouUser.objects.get(pk=pk)\n\n requester = WhoYouUser.objects.get(user=request.auth.user)\n if requester != user_to_update:\n return Response({\"message\": \"Permission denied\"}, status=status.HTTP_401_UNAUTHORIZED)\n\n if \"profile_image_path\" in request.data:\n format, imgstr = request.data[\"profile_image_path\"].split(';base64,')\n ext = format.split('/')[-1]\n data = ContentFile(base64.b64decode(imgstr), name=f'{pk}-{uuid.uuid4()}.{ext}')\n user_to_update.profile_image_path = data\n\n if \"cover_image_path\" in request.data:\n user_to_update.cover_image_path = request.data[\"cover_image_path\"]\n\n user_to_update.save()\n\n return Response({}, status=status.HTTP_204_NO_CONTENT)", "def update_user(user_id, data):\n logging.debug(\"Uptating user: user_id={}\".format(user_id))\n return ask('appusers/{0}'.format(user_id), data, 'put')", "def save_user_profile(sender, instance, **kwargs):\n instance.profile.save()", "def save_user_profile(sender, instance, **kwargs):\n instance.profile.save()", "def update(self, uid, body, user_auth):\n user_record = UserRecord.get_user(uid, auth=admin_sdk.auth)\n user = Admin.query.filter_by(uid=user_record.uid).first()\n\n if not user_record or not user:\n raise HandlerException(404, \"Not found user\")\n\n if user_auth[\"uid\"] == uid:\n raise HandlerException(\n 401, \"Logged user can't modify own profile in this endpoint\"\n )\n\n if not user_auth[\"b\"].has_access(user.privileges, True):\n raise HandlerException(\n 401,\n \"Logged user doesn't have sufficient permissions \\\n to create a user with equal or higher privileges\",\n )\n\n user_record.serialize(body)\n user_record.update_user()\n\n if \"privileges\" in body:\n user_record.make_claims(\n {\"admin\": True, \"access_level\": body[\"privileges\"]}\n )\n\n user.serialize(body)\n user.save()\n\n return {\"uid\": user_record.uid, \"a\": user_record, \"b\": user}", "def update_login_profile(self, user_name, password):\r\n params = {'UserName' : user_name,\r\n 'Password' : password}\r\n return self.get_response('UpdateLoginProfile', params)", "def update_policy_profile(self, profile, body=None):\r\n return self.put(self.policy_profile_path % (profile), body=body)", "def update_user(user_id):\n user = User.query.get_or_404(user_id)\n user.first_name = request.form['first_name']\n user.last_name = request.form['last_name']\n user.image_url = request.form['image_url']\n\n\n db.session.add(user)\n db.session.commit()\n flash(f\"{user.full_name} user has been edited.\")\n\n return redirect(\"/users\")", "def edit_profile(request, userId):\n try:\n try:\n user = User.objects.get(pk=userId)\n profile = Profile.objects.get(user_id=userId)\n user.first_name = request.data['first_name'] if 'first_name' in request.data else user.first_name\n user.last_name = request.data['last_name'] if 'last_name' in request.data else user.last_name\n user.email = request.data['email'] if 'email' in request.data else user.email\n profile.phone = request.data['phone'] if 'phone' in request.data else profile.phone\n profile.profile_img_path = request.data['profile_img_path'] if 'profile_img_path' in request.data else profile.profile_img_path\n profile.information = request.data['information'] if 'information' in request.data else profile.information\n user.save()\n profile.save()\n return JsonResponse({\n \"statusCode\": 200,\n \"statusText\": \"Success\",\n \"message\": \"Save profile success\",\n \"error\": False\n })\n\n except ObjectDoesNotExist:\n return JsonResponse({\n \"statusCode\": 404,\n \"statusText\": \"Not Found\",\n \"message\": \"User Not Exist\",\n \"error\": True\n })\n except:\n return JsonResponse({\n \"statusCode\": 500,\n \"statusText\": \"Internal Server\",\n \"message\": \"Internal Server\",\n \"error\": True\n })", "def edit_basic_info(request):\n if request.POST:\n request.user.first_name = request.POST['first_name']\n request.user.last_name = request.POST['last_name']\n request.user.email = request.POST['email']\n request.user.save()\n request.user.userprofile.phone_number = request.POST['phone']\n request.user.userprofile.save()\n messages.add_message(request, messages.SUCCESS, 'Your changes have been saved.')\n return redirect('base_dashboard')\n\n return render(request, 'edit_basic_info.html', {'the_user': request.user})", "def update(self, request, *args, **kwargs):\n username = kwargs.get(\"user\")\n response = super().update(request, *args, **kwargs)\n cache.set(f\"{USER_PROFILE_PREFIX}{username}\", response.data)\n return response", "def edit_profile(request):\n profile = request.user.profile\n form = forms.ProfileForm(instance=profile)\n\n if request.method == 'POST':\n if settings.SYSTEM_MAINTENANCE_NO_UPLOAD:\n # Allow submitting the form, but do not allow the photo to\n # be modified.\n if 'delete_photo' in request.POST or request.FILES:\n raise ServiceUnavailable()\n\n if 'edit_profile' in request.POST:\n # Update the profile and return to the same page. Place a message\n # at the top of the page: 'your profile has been updated'\n form = forms.ProfileForm(data=request.POST, files=request.FILES,\n instance=profile)\n if form.is_valid():\n form.save()\n messages.success(request, 'Your profile has been updated.')\n elif 'delete_photo' in request.POST:\n profile.delete_photo()\n messages.success(request, 'Your profile photo has been deleted.')\n\n if not form.errors:\n form = forms.ProfileForm(instance=profile)\n\n return render(request, 'user/edit_profile.html', {'form':form})", "def update_user(id):\n pass", "def test_update_user_profile(setup_client, setup_user):\n client = setup_client\n user = setup_user\n payload = {\n \"name\": \"New name\",\n \"role\": \"Purchaser\",\n \"password\": \"New password\"\n }\n res = client.patch(ME_URL, payload)\n user.refresh_from_db()\n assert res.status_code == status.HTTP_200_OK\n assert user.name == payload[\"name\"]\n assert user.role == payload[\"role\"]\n assert user.check_password(payload[\"password\"])\n assert res.status_code == status.HTTP_200_OK", "def public_update_user_profile_status(\n user_id: str,\n body: Optional[UserProfileStatusUpdate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicUpdateUserProfileStatus.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def test_update_user_profile(self):\r\n payload = {\r\n 'name': 'new_name',\r\n 'password': 'password123'\r\n }\r\n\r\n res = self.client.patch(ME_URL, payload)\r\n\r\n self.user.refresh_from_db()\r\n\r\n self.assertEqual(self.user.name, payload['name'])\r\n self.assertTrue(self.user.check_password(payload['password']))\r\n self.assertEqual(res.status_code, status.HTTP_200_OK)", "def update_user_profile_status(\n user_id: str,\n body: Optional[UserProfileStatusUpdate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = UpdateUserProfileStatus.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def user_profile(request):\n user_profile = get_object_or_404(UserProfile, user=request.user)\n # Create a new instance of the user profile form\n if request.method == 'POST':\n form = UserProfileForm(request.POST, instance=user_profile)\n if form.is_valid():\n form.save()\n messages.success(request, 'Profile updated successfully')\n else:\n messages.error(\n request, 'Update failed. Please ensure the form is valid.')\n else:\n form = UserProfileForm(instance=user_profile)\n orders = user_profile.orders.all()\n\n template = 'user_profiles/user_profile.html'\n context = {\n 'form': form,\n 'orders': orders,\n 'on_profile_page': True\n }\n\n return render(request, template, context)", "def update_user():\n json = request.json\n name = json[\"name\"]\n email = json[\"email\"]\n pwd = json[\"pwd\"]\n user_id = json[\"user_id\"]\n if name and email and pwd and user_id and request.method == \"POST\":\n # save edits\n sql = \"UPDATE users SET user_name=%s, user_email=%s, \" \\\n \"user_password=%s WHERE user_id=%s\"\n data = (name, email, pwd, user_id)\n try:\n conn = mysql.connect()\n cursor = conn.cursor()\n cursor.execute(sql, data)\n conn.commit()\n resp = jsonify(\"User updated successfully!\")\n resp.status_code = 200\n cursor.close()\n conn.close()\n return resp\n except Exception as exception:\n return jsonify(str(exception))\n else:\n return jsonify(\"Please provide id, name, email and pwd\")", "def fusion_api_edit_user(self, body, uri, api=None, headers=None):\n return self.user.update(body, uri, api, headers)", "def edit_profile(request, pk=None):\n profiledetails = UserProfile.objects.filter(user=request.user).first()\n if UserProfile.objects.filter(user=request.user or request.user.is_superuser):\n\n if request.method == \"POST\":\n profile_details_form = UserProfileForm(request.POST, request.FILES, instance=profiledetails)\n if profile_details_form.is_valid():\n profiledetails = profile_details_form.save()\n messages.success(request, 'Your profile has been updated!')\n return redirect(user_profile)\n else:\n profile_details_form = UserProfileForm(instance=profiledetails)\n else:\n return HttpResponseForbidden()\n \n return render(request, 'newprofiledetails.html', {'profile_details_form': profile_details_form})", "def update_account_with(self, id_, **kwargs):\n self.update_user_with(id_, **kwargs)\n self.update_profile_with(id_, **kwargs)\n # TODO:\n # self.update_prefecture_with(id_, kwargs)", "def fusion_api_edit_server_profile(self, body, uri, api=None, headers=None, param=''):\n return self.profile.update(body, uri, api, headers, param=param)", "def test_update_user_profile(self):\n payload = {'name': 'new name', 'password': 'newpassword123'}\n\n response = self.client.patch(ME_URL, payload)\n\n self.user.refresh_from_db()\n self.assertEqual(self.user.name, payload['name'])\n self.assertTrue(self.user.check_password(payload['password']))\n self.assertEqual(response.status_code, status.HTTP_200_OK)" ]
[ "0.8154847", "0.7868579", "0.7719946", "0.77043855", "0.7689022", "0.7664817", "0.7529994", "0.75082785", "0.7425593", "0.7420964", "0.73797953", "0.7358667", "0.73411655", "0.7204369", "0.71413976", "0.71375716", "0.70822394", "0.7041348", "0.70147234", "0.6986787", "0.69684273", "0.69623977", "0.69507056", "0.6949552", "0.6940251", "0.6921688", "0.68978983", "0.68848807", "0.68656933", "0.68616533", "0.6858408", "0.68489516", "0.6835714", "0.68272287", "0.67921865", "0.678093", "0.67724085", "0.674086", "0.6734096", "0.67338115", "0.67256296", "0.6724413", "0.6717732", "0.66985273", "0.66924036", "0.6674833", "0.6663417", "0.6662179", "0.66610587", "0.66591775", "0.6652376", "0.6639268", "0.6629874", "0.6610397", "0.6610334", "0.6598399", "0.6587208", "0.65856504", "0.6582207", "0.6567475", "0.65589327", "0.65583926", "0.6557363", "0.6553749", "0.6541145", "0.65371066", "0.65094167", "0.65024203", "0.6497391", "0.6492596", "0.64899015", "0.6484774", "0.6475125", "0.64723", "0.6463438", "0.6463429", "0.6463029", "0.6460248", "0.6458801", "0.6458801", "0.6455922", "0.64490366", "0.64430207", "0.64393413", "0.64173806", "0.6416918", "0.64006037", "0.6399814", "0.63800955", "0.6375535", "0.63693064", "0.6359453", "0.6358327", "0.6353676", "0.6347949", "0.63431436", "0.6336595", "0.6331841", "0.6328094", "0.6324041" ]
0.7519383
7
Add a channel to watch. Gif attachments that break mobile clients will be removed in these channels.
async def watch(self, ctx, channel: discord.TextChannel): channel_list = await self.config.guild(ctx.guild).watching() if channel.id not in channel_list: channel_list.append(channel.id) await self.config.guild(ctx.guild).watching.set(channel_list) await ctx.send(f"{self.bot.get_channel(channel.id).mention} will have bad gifs removed.")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def addChannel(self, channel):\n c = SubElement(self.root, 'channel')\n self.setattr(c, 'id', channel['id'])\n\n # Display Name\n for display_name in channel['display-name']:\n dn = SubElement(c, 'display-name')\n self.settext(dn, display_name)\n\n # Icon\n if 'icon' in channel:\n self.seticons(c, channel['icon'])\n\n # URL\n if 'url' in channel:\n for url in channel['url']:\n u = SubElement(c, 'url')\n self.settext(u, url, with_lang=False)", "async def add(self, ctx, channel: discord.TextChannel):\n config = await self.config.guild(ctx.guild).channels()\n if channel.id in config:\n return await ctx.send(\"This channel is already a spoiler channel.\")\n await ctx.send(\"Channel added to the spoiler channel list.\")\n config.append(channel.id)\n await self.config.guild(ctx.guild).channels.set(config)", "def add_channel(self, channel):\n self._channels[channel.fileno] = channel\n self._poller.add(channel.fileno, channel._events)", "def add_channel(self, channel):\n if channel in self.clients:\n return False\n self.clients[channel] = []\n return True", "def addchan(channel):", "def add(self, name, chan_id):\r\n self.channels.append(Channel(name, chan_id))", "def test_add_channel_adds_channel(self):\n channel = Mock()\n with mock.patch.object(self.notifier, \"_silenced_channels\") as silenced_channels:\n self.notifier.add_channel(channel)\n silenced_channels.__setitem__.assert_called_with(channel, self.notifier._current_loop)", "def add_channel(self, channel: discord.Channel):\n to_exec = \"INSERT INTO channel(channel_id, server_id, enabled) VALUES(%s, %s, 1)\"\n self.__cursor.execute(to_exec, (str(channel.id), str(channel.server.id)))\n self.__connection.commit()", "def _add_channel(self, chan_id: str):\n if not chan_id in self._messages:\n self._messages[chan_id] = {}\n else:\n raise ValueError(\"ReactionListener tried to create space for an already listened channel!\")", "def add_channel(self, channel):\n self.task.ai_channels.add_ai_voltage_chan(channel)", "async def add(self, ctx, *, channel: discord.VoiceChannel):\n auto_channels = await self.config.guild(ctx.guild).auto_channels()\n if auto_channels is None:\n auto_channels = []\n auto_channels.append(channel.id)\n await self.config.guild(ctx.guild).auto_channels.set(auto_channels)\n await ctx.send(\n _(\"Startchannel used for automatic voicechannels added: {channel}\").format(\n channel=channel.name\n )\n )", "def _onaddchannel(self):\n\n self._fileinfolayout.insertWidget(\n self._fileinfolayout.count() - 1,\n ChannelInfoWidget(self._channels)\n )", "async def add(self, ctx, channel : discord.Channel):\r\n \r\n server = ctx.message.server\r\n if server.id not in self.set:\r\n self.server_init(server)\r\n await self.bot.say(\"Server initialized!\")\r\n if channel.id in self.set[server.id][\"channels\"]:\r\n await self.bot.say(\":x: This channel is already a counting channel!\")\r\n return\r\n self.set[server.id][\"channels\"][channel.id] = {\"last\": None, \"count\": 0, \"goal\": 0, \"strict\": False}\r\n self.save()\r\n await self.bot.edit_channel(channel,topic = \"Next message must start with 1\")\r\n await self.bot.say(\"Channel added!\")", "async def addchannel(self, ctx, channel: discord.TextChannel):\n guild = ctx.message.guild\n excluded_channels = await self.config.guild(guild).excluded_channels()\n\n for excluded_channel in excluded_channels:\n if excluded_channel == channel.id:\n await ctx.send(\n \"%s already added to channel exclusion list\" % channel.name\n )\n return\n\n excluded_channels.append(channel.id)\n await self.config.guild(guild).excluded_channels.set(excluded_channels)\n await ctx.send(\"%s added to channel exclusion list\" % channel.name)", "def _new_channel_added(self, channel_name):\r\n if self._match_filter(channel_name):\r\n self._add_filter_channel(channel_name)", "async def channel(self, ctx, channel: discord.TextChannel):\r\n server = ctx.guild\r\n self._logs[str(server.id)][\"channel\"] = str(channel.id)\r\n dataIO.save_json(self._logs_file, self._logs)\r\n await ctx.send(f\"<#{str(channel.id)}> has been set as the modlog channel {self.bot.get_emoji(470063310386233344)}\")", "async def cmd_galtogglechannel(self, ctx, channel):\n\n # ===== GET CHANNEL ID\n try:\n ch_id = int(channel.lower().replace('<').replace('>').replace('#').strip())\n\n except ValueError:\n ctx.send_help('galtogglechannel', delete_after=Gallery.delete_after)\n \n ret_msg=\"\"\n\n # ===== REMOVE CHANNEL ID FROM LIST\n if ch_id in self.cogset['channel_ids']:\n self.cogset['channel_ids'].remove(ch_id)\n\n ret_msg = f\"<#{ch_id}> is no longer a gallery channel.\"\n\n ###=== DELETE LOGGED MESSAGES FROM DATABASE\n await self.db.execute(pgCmds.DEL_GALL_MSGS_FROM_CH, ch_id, self.cogset['guild_id'])\n\n # ===== ADD CHANNEL ID TO LIST\n else:\n self.cogset['channel_ids'] = list(set(self.cogset['channel_ids']) + {ch_id})\n ret_msg = f\"<#{ch_id}> has been made a gallery channel.\"\n\n # ===== SAVE SETTINGS \n await cogset.SAVE(self.cogset, cogname=self.qualified_name)\n\n # ===== END\n await ctx.channel.send(content=ret_msg, delete_after=Gallery.delete_after)\n return", "def on_channel_change(self, new_channel):\n pass", "def add(self, irc, msg, args, channel, updateInterval):\n res = self._checkDBhasChannel(channel)\n if res is True:\n irc.reply(\"Channel exist in database.\", prefixNick=True)\n else:\n if updateInterval < 60 or updateInterval > 600:\n irc.reply(\"Update interval must be greater or equal 60 seconds AND maxvalue = 600 (10 minutes)\", prefixNick=True)\n else:\n SQL = 'INSERT INTO registry (channel, isActive, updateInterval) VALUES (?, ?, ?)'\n SQLargs = (channel, 1, updateInterval)\n self._SQLexec(SQL, SQLargs)\n self.lock.acquire()\n v0 = channel # channel\n v1 = 1 # isactive\n v2 = updateInterval # updateinterval\n v3 = 0 # lastcheckdtime\n v4 = 0 # Per channel last know bug\n list_ = [v0, v1, v2, v3, v4]\n self.channelscontrol.append(list_)\n self.lock.release()\n irc.reply(\"Channel added and activated.\", prefixNick=True)\n g = threading.Thread(target=self._getLastBug, args=(irc, channel,))\n g.setDaemon(True)\n g.start()", "def new_channel(session, channel):\n session.create_chan_event.clear()\n key = b64encode(messaging.common.pkc_encrypt(get_random_bytes(\n config.SECURE_CHANNEL_KEY_SIZE_BYTES), session.encryption_key)).decode()\n msg = {\n kk.typ: kk.add_user,\n kk.inviter: session.user,\n kk.invitee: session.user,\n kk.chid: channel,\n kk.chkey: key\n }\n msg[kk.signature] = b64encode(\n messaging.common.create_msg_sig(session, msg)).decode()\n messaging.common.send_msg(session.sock, msg, key=session.symkey)", "def subscribe(self, inst, channel):\r\n if channel not in self._channels:\r\n self._channels[channel] = TalkChannel(channel, print_messages=self.verbose, timeref=self._timeref)\r\n self._channels[channel].subscribe(inst)", "def _add_control_channel(self, attrs):\n _cable_data = {}\n _cable_data[\"crate\"] = self._crate\n _cable_data[\"module\"] = self._module\n _cable_data[\"channel\"] = int(attrs.get('number', \"\"))\n _cable_data[\"name\"] = str(attrs.get('name', \"\"))\n self._data.append(_cable_data)", "def channel(self, channel):\n allowed_values = [\"whatsapp\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and channel not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `channel` ({0}), must be one of {1}\" # noqa: E501\n .format(channel, allowed_values)\n )\n\n self._channel = channel", "def _set_channel_(self, channel):\n self._channel = channel", "async def _daily_channel(self, ctx: commands.Context, channel: discord.TextChannel = None):\n self.check_if_exist(ctx.guild)\n\n self.daily_guilds[str(ctx.guild.id)][\"channel\"] = channel.id\n self.daily_info.update(\"guilds\", self.daily_guilds)\n await ctx.reply(\"New daily post channel is {0}\".format(channel.mention))", "async def unwatch(self, ctx, channel: discord.TextChannel):\r\n channel_list = await self.config.guild(ctx.guild).watching()\r\n if channel.id in channel_list:\r\n channel_list.remove(channel.id)\r\n else:\r\n return await ctx.send(\"Channel is not being watched.\")\r\n await self.config.guild(ctx.guild).watching.set(channel_list)\r\n await ctx.send(f\"{self.bot.get_channel(channel.id).mention} will not have bad gifs removed.\")", "def addChannel(self, *args):\n return _osgAnimation.Animation_addChannel(self, *args)", "async def defchannel(self, ctx, channel: str):\n self.data_check(ctx)\n server = ctx.message.server\n\n self.riceCog2[server.id][\"defchannel\"] = channel\n dataIO.save_json(self.warning_settings,\n self.riceCog2)\n await self.bot.say(\"Log channel is now: **{}**\".format(channel))", "async def handleChannelCreate(self, channel: discord.abc.GuildChannel):\n self.logger.info(\n \"Channel creation has been detected. Name: %s, ID: %s\", channel.name, channel.id\n )\n\n if not isinstance(channel, discord.TextChannel):\n return\n\n if channel.name == AH_CHANNEL:\n self.logger.info(\"%s detected, applying exceptions\", AH_CHANNEL)\n ctx = await self.getContext(channel)\n if not ctx:\n return\n await self.notifyChannel(ctx)\n await self.makeHighlightChanges(ctx, channel)\n await self.makeStarboardChanges(ctx, channel)\n await self.makeWordFilterChanges(ctx, channel)\n async with self.config.guild(channel.guild).get_attr(KEY_CHANNEL_IDS)() as channelIds:\n channelIds[channel.id] = {\"time\": datetime.now().timestamp()}", "def new_channel(self, *args, **kwargs):\n logger.debug('creating channel -> connection.channel(%r, %r)' % (args, kwargs))\n if self.enabled:\n channel = self.connection.channel(*args, **kwargs)\n self._channels.append(channel)\n return channel\n else:\n return None", "async def channel(self, ctx: commands.Context, channel: discord.TextChannel):\n self.channel = str(channel.id)\n await self._update_db()\n\n await ctx.send(f\"Done! {channel.mention} is the Starboard Channel now!\")", "def set_channel(self, channel):\n self.response['channel'] = channel", "async def votechannel_add(self, ctx, channel: discord.TextChannel, reaction_type=None):\n if reaction_type is None:\n channel_type = \"voting\"\n elif reaction_type.lower() in [\"rate\", \"rating\"]:\n channel_type = \"rating\"\n elif reaction_type.lower() in [\"vote\", \"voting\"]:\n channel_type = \"voting\"\n else:\n raise exceptions.Warning(f\"Unknown reaction type `{reaction_type}`\", help_footer=True)\n\n await self.bot.db.execute(\n \"\"\"\n INSERT INTO voting_channel (guild_id, channel_id, voting_type)\n VALUES (%s, %s, %s)\n ON DUPLICATE KEY UPDATE\n voting_type = VALUES(voting_type)\n \"\"\",\n ctx.guild.id,\n channel.id,\n channel_type,\n )\n self.bot.cache.votechannels.add(channel.id)\n await util.send_success(\n ctx, f\"{channel.mention} is now a voting channel of type `{channel_type}`\"\n )", "def set_channel(cls, channel):\n cls.channel = channel", "async def managechannels(self, ctx:commands.Context):", "def register(self, cli: Client, channel: str) -> None:\n subscribers = self._channels_to_subscribers.get(channel, [])\n subscribers.append(cli)\n self._channels_to_subscribers[channel] = subscribers", "def setChannel(self, channel, ircChannel):\n channel = channel.lower()\n self.channels[channel] = ircChannel\n self.flush()", "async def addjoinchannel(self, ctx: commands.Context, channel: discord.TextChannel):\n db_session = self.bot.create_db_session()\n\n existing = db_session.query(Channel).filter(Channel.id == channel.id).one_or_none()\n if existing:\n existing.joinable = True\n else:\n db_session.add(Channel(id=channel.id, name=channel.name, joinable=True))\n\n db_session.commit()\n db_session.close()\n await ctx.send(f\"{channel.mention} was added as a joinable channel.\")", "def add_channels(self, channels):\n for i in range(len(channels)):\n self.task.ai_channels.add_ai_voltage_chan(channels[i])", "async def set_audit_channel(self, ctx: Context, channel: discord.TextChannel) -> None:\n\n assert ctx.guild is not None # handle by `cog_check`\n\n try:\n await execute_query(\n self.bot.database,\n 'INSERT INTO LOGGING (GUILD_ID, CHANNEL_ID, BITS) VALUES (?, ?, ?) '\n 'ON CONFLICT(GUILD_ID) DO UPDATE SET CHANNEL_ID=EXCLUDED.CHANNEL_ID',\n (ctx.guild.id, channel.id, 0)\n )\n await ctx.send(f'Updated the logging channel to {channel.mention}.')\n\n except aiosqliteError:\n await ctx.send('Failed to update the logging channel.')", "async def set_starboard_channel(self, ctx: commands.Context, channel: discord.TextChannel):\n self.check_if_exist(ctx.guild)\n\n self.starboard_guilds = self.starboard_info.find(\"guilds\")\n try:\n self.starboard_guilds[str(ctx.guild.id)][\"channel\"] = channel.id\n except:\n self.starboard_guilds[str(ctx.guild.id)] = {}\n self.starboard_guilds[str(ctx.guild.id)][\"channel\"] = channel.id\n self.starboard_info.update(\"guilds\", self.starboard_guilds)\n\n await ctx.reply(\"Starred messages will be sent to {0}\".format(channel.mention))", "def test_add_channel_skips_start_with_channels(self):\n with mock.patch.object(self.notifier, \"_silenced_channels\"):\n self.notifier.add_channel(Mock())\n self.notifier_start_mock.assert_not_called()", "def push(self, client, channel, data, status_code):\n if client not in self.storage:\n self.storage[client] = {}\n if channel not in self.storage[client]:\n self.storage[client][channel] = []\n self.storage[client][channel].append((data, status_code))", "async def on_channel_create(self, channel):\n if channel.is_private:\n return\n\n role = await self.get_role(channel.server)\n if not role:\n return\n\n await self.setup_channel(channel, role)", "def subscribe(self, channel, **kwargs):\n pass", "def add_chnl(self):\n\n # new widget\n chnl = QWidget()\n \n # load elements\n uic.loadUi(self.chnl_ui, chnl)\n\n # connect remove button\n chnl.rmv.clicked.connect(lambda _: self.rmv_chnl(chnl))\n\n # add to list\n self.chnls.append(chnl)\n\n # add to layout\n self.channels_layout.addWidget(chnl)", "async def game_channel(self, ctx, *, channel: discord.TextChannel=None):\n if channel is None:\n channel_id = self.data[ctx.guild.id]['channel']\n if channel_id is None:\n return await ctx.send(\"There is no games channel for this server.\")\n return await ctx.send(f\"The current games channel is {self.bot.get_channel(channel_id)}.\")\n self.data[ctx.guild.id]['channel'] = channel.id\n return await ctx.send(f\"The games channel is now set to {channel}\")", "def modify_channel(self, channel):\n self._poller.modify(channel.fileno, channel._events)", "async def set_channel(self, ctx, channel):\n cyphon = discord.utils.get(ctx.message.server.members, id=\"186835826699665409\")\n\n if self.check_channel(ctx):\n if self.check_permission(ctx) or ctx.message.author == cyphon:\n self.stream_channel = channel\n await self.bot.say(\"Channel sucessfully assigned.\")\n else:\n await self.bot.send_message(ctx.message.author, \"You don't have permission to execute that command.\")", "async def add_bot_channels(self, guild):\n api_cog = self.bot.get_cog('RR_API')\n channelInfo = await api_cog.get_channel_info(guild.id)\n if not channelInfo:\n print(\"Server Name Not in DB, Can't add channels. Server: \" + str(guild.id))\n return\n category_id = channelInfo['categoryid']\n future_id = channelInfo['futurechannelid']\n past_id = channelInfo['pastchannelid']\n loot_id = channelInfo['lootchannelid']\n commands_id = channelInfo['commandschannelid']\n\n await self.addcategory(guild.id, category_id, future_id, past_id, loot_id, commands_id)", "def test_adding_media_to_channel(self):\n videos = [\n make_video(title='test title', media_id='1'),\n make_video(title='test title 2', media_id='2'),\n ]\n channels = [make_channel(title='test channel', media_ids=['1'], collection_id='3')]\n set_resources_and_sync(videos, channels)\n c = mpmodels.Channel.objects.filter(sms__id='3').first()\n self.assertIsNotNone(c)\n self.assertEqual(len(c.items.all()), 1)\n channels[0]['custom']['sms_collection_media_ids'] = 'collection_media_ids:1,2:'\n channels[0]['updated'] += 1\n set_resources_and_sync(videos, channels)\n self.assertEqual(len(c.items.all()), 2)\n # also check playlist\n playlist = mpmodels.Playlist.objects.filter(sms__id='3').first()\n self.assertEqual(len(playlist.media_items), 2)", "def set_channel(self, c, channel):\n try:\n self.binding.set_switcher_channel(channel)\n except Exception, e:\n self.handle_wavemeter_error(e)\n return False\n\n return True", "async def remove(self, ctx, channel: discord.TextChannel):\n config = await self.config.guild(ctx.guild).channels()\n if not channel.id in config:\n return await ctx.send(\"This channel is not a spoiler channel.\")\n config.remove(channel.id)\n await self.config.guild(ctx.guild).channels.set(config)\n await ctx.send(\"Channel removed from the spoiler channel list.\")", "def _add_vlsb_channel(self, attrs):\n _cable_data = {}\n _cable_data[\"vlsb_computer_id\"] = self._vlsb_computer_id\n _cable_data[\"vlsb_geo_number\"] = self._vlsb_geo_number\n _cable_data[\"vlsb_channel\"] = int(attrs.get('number', \"\"))\n _cable_data[\"tracker_no\"] = int(attrs.get('trackerNumber', \"\"))\n _cable_data[\"station\"] = int(attrs.get('station', \"\"))\n _cable_data[\"plane\"] = int(attrs.get('plane', \"\"))\n _cable_data[\"channel\"] = int(attrs.get('channel', \"\"))\n self._data.append(_cable_data)", "async def watchlist(self, ctx):\r\n channel_list = await self.config.guild(ctx.guild).watching()\r\n msg = \"Bad gifs will be removed in:\n\"\r\n for channel in channel_list:\r\n channel_obj = self.bot.get_channel(channel)\r\n if channel_obj is None: # Catch deleted/unexisting channels\r\n continue\r\n msg += f\"{channel_obj.mention}\n\"\r\n await ctx.send(msg)", "async def channel(self, ctx):\n pass", "async def _na_channel(self, ctx: Context, *, channel: discord.TextChannel):\n\n await self.config.guild(ctx.guild).na_channel_id.set(channel.id)\n\n await ctx.message.add_reaction(CHECK_MARK)", "def channel(self):\n if not hasattr(self, '_channel'):\n self._channel = self.new_channel()\n return self._channel", "def linkTrackToChannel(*args, **kwargs):\n pass", "async def ccdeny(self, ctx, channel: discord.TextChannel):\n channel_list = await self.config.guild(ctx.guild).channel_deny()\n if channel.id not in channel_list:\n channel_list.append(channel.id)\n await self.config.guild(ctx.guild).channel_deny.set(channel_list)\n await ctx.send(f\"{channel.mention} was added to the deny list for chatchart.\")", "async def channel_add(\n self, ctx: commands.Context, channel: Union[discord.TextChannel, discord.VoiceChannel], *team_ids: int):\n if set(team_ids) - set(self.teams):\n await ctx.send('Missing data for the following team IDs: %s' % (\n ', '.join(map(str, set(team_ids) - set(self.teams))),))\n return\n\n await asyncio.gather(*[\n self._permit_team_in_channel(self.teams[team_id], channel)\n for team_id in team_ids],\n return_exceptions=True)\n await ctx.send('Added team%s `%s` to channel %s' % (\n nl.s(len(team_ids)),\n '`, `'.join(self.teams[team_id].username for team_id in team_ids),\n channel.mention))", "def join(self, channel):\n raise NotImplementedError", "async def set_channel(self, ctx, *, channel: discord.Channel=None):\n\n server = ctx.message.server\n\n temp = self.bot.dota_ticker_settings.get(server.id)\n\n if temp is None or not temp['enabled']:\n await self.bot.say('The match ticker has not been enabled on this server.')\n return\n\n if channel is None:\n await self.bot.say('No channel name or mention received.')\n return\n\n settings = {'enabled': True, 'channel_id': channel.id}\n\n await self.bot.dota_ticker_settings.put(server.id, settings)\n await self.bot.say('The match ticker has been enabled on {0.mention}.'.format(channel))", "def subscribe(self, channel: str) -> None:\n print(f'{self._name} starts subscribing to channel-[{channel}]')\n self._server.register(self, channel)", "def set_channel(self, channel_name, value):\n try:\n cm = self.__core.get_service(\"channel_manager\")\n cdb = cm.channel_database_get()\n channel = cdb.channel_get(channel_name)\n try:\n typing_value = channel.type()(value)\n except Exception:\n traceback.print_exc()\n return\n channel.consumer_set(Sample(time.time(), typing_value))\n except Exception:\n traceback.print_exc()", "async def blacklist_channel(\n self, ctx: commands.Context, channel: discord.TextChannel\n ):\n if str(channel.id) in self.channel_blacklist:\n self.channel_blacklist.remove(str(channel.id))\n await self._update_db()\n removed = True\n else:\n self.channel_blacklist.append(str(channel.id))\n await self._update_db()\n removed = False\n\n await ctx.send(f\"{'Un' if removed else None}Blacklisted {channel.mention}\")\n return", "def set_channel(self, channel):\n self.l1.setText(\"Channel: \" + str(channel))", "def getChannel(self, channel):\n channel = channel.lower()\n if channel in self.channels:\n return self.channels[channel]\n else:\n c = IrcChannel()\n self.channels[channel] = c\n return c", "def active_channel(self, channel):\n old_timeout = self.resource.timeout\n self.resource.timeout = 500\n if channel in self.channel_list:\n self.scpi.set_active_channel(channel)\n else:\n print('Channel %i not in list of channels. Create channel first'\n % channel)\n set_channel = self.scpi.query_active_channel()\n self.resource.timeout = old_timeout\n return set_channel", "def set_channel(self, channel_name, value):\n try:\n cm = self.__core.get_service(\"channel_manager\")\n cdb = cm.channel_database_get()\n channel = cdb.channel_get(channel_name)\n try:\n print \"in set_channel\" #if this is not displayed => this function is not called => it must be deleted...\n typing_value = channel.type()(value)#what is going on here?! I don't know...\n except:\n traceback.print_exc()\n return\n channel.consumer_set(Sample(time.time(), typing_value))\n except Exception:\n traceback.print_exc()", "def join(self, channel):\n self.channels[channel.name.lower()] = channel\n channel.protocol = self.protocol\n self.protocol.join(channel.name)", "async def _set_channels(self, ctx: Context):\n\n guild: discord.Guild = ctx.guild\n\n signup = await guild.create_text_channel(\"sign-ups\")\n await self.config.guild(guild).signup_channel.set(signup.id)\n\n host_role = await self.role_from_config(guild, \"host_id\")\n\n na_overwrites = {\n guild.default_role: discord.PermissionOverwrite(\n read_messages=False\n ),\n host_role: discord.PermissionOverwrite(\n read_messages=True,\n send_messages=True\n ),\n guild.me: discord.PermissionOverwrite(\n read_messages=True,\n send_messages=True\n )\n }\n\n nightaction = await guild.create_text_channel(\n \"night-action\", overwrites=na_overwrites\n )\n await self.config.guild(guild).na_channel_id.set(nightaction.id)\n\n txt = _(\n \"Sign-ups: {}\\nNight Actions: {}\"\n ).format(\n signup.mention, nightaction.mention,\n )\n\n embed = discord.Embed(\n color=0x37BFFF, title=\"Created Channels!\", description=txt\n )\n\n try:\n await ctx.send(embed=embed)\n except discord.Forbidden:\n await ctx.send(\"Created required channels!\")\n await ctx.send(txt)", "async def setcoachchannel(self, ctx, channel: int):\r\n if ctx.guild.id == 445092370006933505:\r\n await self.config.guild(ctx.guild).coachchannel.set(int(channel))\r\n await ctx.send(\"You set {} as the coaching channel\".format(channel))\r\n else:\r\n await ctx.send(\"This command only works in the Legend eSports server, join us at: https://discord.gg/GGuCXDn\")", "def channel(self, channel: int, /) -> \"TimerChannel\" | None:", "async def set_channel(self, ctx: commands.Context, channel: discord.TextChannel = None):\n if channel is not None:\n await self.config.guild(ctx.guild).autopostchannel.set(channel.id)\n await ctx.send(\"Auto-post channel has been set to {}\".format(channel.mention))\n else:\n await self.config.guild(ctx.guild).autopostchannel.set(None)\n await ctx.send(\"Auto-post channel has been cleared\")", "async def mutechannel(self, ctx, channel: str):\n self.data_check(ctx)\n server = ctx.message.server\n\n self.riceCog2[server.id][\"mutechannel\"] = channel\n dataIO.save_json(self.warning_settings,\n self.riceCog2)\n await self.bot.say(\"Mute channel is now: **{}**\".format(channel))", "def channel_id(self, channel_id):\n \n self._channel_id = channel_id", "async def log_channel(self, ctx, channel: discord.TextChannel = None):\n if not channel:\n channel = ctx.channel\n await self.config.logChannel.set(channel.id)\n await ctx.send(f\"Set {channel.mention} as the log channel.\")", "def joinedChannel(self, channel, users):\n pass", "def _update_channel(self, channel, data):\n logging.info(\"Update channel `%s' information in DB\", data['name'])\n\n channel.update(data)\n channel.user = self.q(o.User).filter(o.User.slackid ==\n data['creator']).one_or_none()\n channel.purpose = self._get_create_obj(data['purpose'], o.Purpose,\n channel)\n channel.topic = self._get_create_obj(data['topic'], o.Topic, channel)\n self.session.flush()", "def subscribe(self, client, api_key, channel):\n if channel not in self.clients:\n return False\n pair = (client, api_key)\n if pair in self.clients[channel]:\n return False\n\n self.clients[channel].append(pair)\n return True", "async def setwelcomechannel(self, ctx, *, channel : discord.TextChannel = None):\n\n isAdmin = ctx.message.author.permissions_in(ctx.message.channel).administrator\n if not isAdmin:\n checkAdmin = self.settings.getServerStat(ctx.message.guild, \"AdminArray\")\n for role in ctx.message.author.roles:\n for aRole in checkAdmin:\n # Get the role that corresponds to the id\n if str(aRole['ID']) == str(role.id):\n isAdmin = True\n\n # Only allow admins to change server stats\n if not isAdmin:\n await ctx.channel.send('You do not have sufficient privileges to access this command.')\n return\n\n if channel == None:\n self.settings.setServerStat(ctx.message.guild, \"WelcomeChannel\", \"\")\n if self._getDefault(ctx.guild):\n msg = 'Welcome and goodbye messages will be displayed in the default channel (**{}**).'.format(self._getDefault(ctx.guild).mention)\n else:\n msg = \"Welcome and goodbye messages will **not** be displayed.\"\n await ctx.channel.send(msg)\n return\n\n # If we made it this far - then we can add it\n self.settings.setServerStat(ctx.message.guild, \"WelcomeChannel\", channel.id)\n\n msg = 'Welcome and goodbye messages will be displayed in **{}**.'.format(channel.mention)\n await ctx.channel.send(msg)", "async def channel_stats(self, ctx, channel: discord.TextChannel = None):\n channel = channel or ctx.channel\n embed = discord.Embed(\n title=f\"Stats for **{channel.name}**\",\n description=f\"{'Category: {}'.format(channel.category.name) if channel.category else 'This channel is not in a category'}\",\n color=discord.Color.blurple(),\n )\n embed.add_field(name=\"Channel Guild\",\n value=ctx.guild.name, inline=False)\n embed.add_field(name=\"Channel Id\", value=channel.id, inline=False)\n embed.add_field(\n name=\"Channel Topic\",\n value=f\"{channel.topic if channel.topic else 'No topic.'}\",\n inline=False,\n )\n embed.add_field(name=\"Channel Position\",\n value=channel.position, inline=False)\n embed.add_field(\n name=\"Channel Slowmode Delay\", value=channel.slowmode_delay, inline=False\n )\n embed.add_field(name=\"Channel is nsfw?\",\n value=channel.is_nsfw(), inline=False)\n embed.add_field(name=\"Channel is news?\",\n value=channel.is_news(), inline=False)\n embed.add_field(\n name=\"Channel Creation Time\", value=channel.created_at, inline=False\n )\n embed.add_field(\n name=\"Channel Permissions Synced\",\n value=channel.permissions_synced,\n inline=False,\n )\n embed.add_field(name=\"Channel Hash\", value=hash(channel), inline=False)\n\n await ctx.message.delete()\n await ctx.send(embed=embed)", "async def setup(self, ctx):\n self.report_channel = ctx.message.channel\n with open('data/report_channel.json', 'w') as f:\n json.dump({\"channel\": self.report_channel.id}, f)\n await ctx.send('This channel is now the report channel')", "async def register(self, ctx:commands.Context, channel_type):\r\n\r\n if not channel_type in [POOL_CHANNEL, SHOP_CHANNEL]:\r\n await ctx.send(f'{channel_type} is not a valid channel type\\n_Channel types:_\\npool\\nshop')\r\n\r\n channel_type = await self.GetChannelType(ctx.guild, ctx.channel.id)\r\n if channel_type == 'none':\r\n await self.AddSpecializedChannel(ctx.guild, ctx.channel.id, channel_type)\r\n await ctx.send(f'<#{ctx.channel.id}> is now a {channel_type}!')\r\n else:\r\n await ctx.send(f'<#{ctx.channel.id}> is already a {channel_type}!')", "async def addcategory(self, guild_id, category_id, future_channel_id, past_channel_id,\n loot_channel_id, commands_channel_id):\n updateDB = False\n guild = self.bot.get_guild(guild_id)\n\n if not guild:\n return\n\n # Check if channel exists in db and not guild\n if category_id is not None:\n category = guild.get_channel(int(category_id))\n if category is None:\n updateDB = True\n category = await guild.create_category('\\U0001F432 ReadyRaider Bot')\n\n # Category not in DB, create new one and a cmd-channel \\U0001F\n # 432 = DRAGON\n else:\n updateDB = True\n category = await guild.create_category('\\U0001F432 ReadyRaider Bot')\n\n # Check if channel exists in db and not guild\n if future_channel_id is not None:\n future_channel = guild.get_channel(int(future_channel_id))\n if future_channel is None:\n updateDB = True\n future_channel_id = await self.addfuturechannel(guild, category)\n else:\n True\n # await future_channel.edit(category=category) No longer forces the use of the category\n else:\n updateDB = True\n future_channel_id = await self.addfuturechannel(guild, category)\n\n # Check if channel exists in db and not guild\n if loot_channel_id is not None:\n loot_channel = guild.get_channel(int(loot_channel_id))\n if loot_channel is None:\n updateDB = True\n loot_channel_id = await self.addlootchannel(guild, category)\n else:\n True\n # await loot_channel.edit(category=category) No longer forces the use of the category\n else:\n updateDB = True\n loot_channel_id = await self.addlootchannel(guild, category)\n\n # Check if channel exists in db and not guild\n if past_channel_id is not None:\n past_channel = guild.get_channel(int(past_channel_id))\n if past_channel is None:\n updateDB = True\n past_channel_id = await self.addpastchannel(guild, category)\n else:\n True\n # await past_channel.edit(category=category)\n else:\n updateDB = True\n past_channel_id = await self.addpastchannel(guild, category)\n\n # Check if channel exists in db and not guild\n if commands_channel_id is not None:\n commands_channel = guild.get_channel(int(commands_channel_id))\n if commands_channel is None:\n updateDB = True\n commands_channel_id = await self.addcommandschannel(guild, category)\n else:\n True\n # await commands_channel.edit(category=category)\n else:\n updateDB = True\n commands_channel_id = await self.addcommandschannel(guild, category)\n\n if updateDB:\n api_cog = self.bot.get_cog('RR_API')\n guildData = await api_cog.guild_data(guild.id)\n rr_id = guildData['_id']\n body = {\"guild\": rr_id,\n \"pastchannelid\": str(past_channel_id),\n \"futurechannelid\": str(future_channel_id),\n \"lootchannelid\": str(loot_channel_id),\n \"commandsid\": str(commands_channel_id),\n \"categoryid\": str(category.id)\n }\n headers = {\"Authorization\": \"Bearer \" + self.bot.api_key}\n r = requests.post(self.bot.channelAPI, data=body, headers=headers)", "async def _cmdf_setchannel(self, substr, msg, privilege_level):\n ch_obj = None\n if len(substr) == 0:\n ch_obj = msg.channel\n else:\n ch_obj = self._client.search_for_channel(substr, enablenamesearch=True, serverrestriction=self._server)\n\n if ch_obj is None:\n buf = \"**Error:** Channel not found. No changes were made.\"\n else:\n self._ch_msg_channelid = ch_obj.id\n self._save_settings()\n buf = \"In-channel greeting messages will now be sent in \" + utils.ch_to_mention(ch_obj) + \".\"\n await self._client.send_msg(msg, buf)\n return", "def enable(self, name, channel):\n if channel not in self.disabled_extensions:\n self.disabled_extensions[channel] = set()\n\n if name not in self.extension_names:\n return False\n\n logger.info('Enabling %s on %s' % (name, channel))\n self.disabled_extensions[channel].discard(name)\n\n return True", "def part(self, channel):\n raise NotImplementedError", "def channel(self):\n raise NotImplementedError", "def open_channel(self):\n # LOGGER.info('Creating a new channel')\n self._connection.channel(on_open_callback=self.on_channel_task_open)\n self._connection.channel(on_open_callback=self.on_channel_ctrl_open)", "async def remove(self, ctx, channel : discord.Channel):\r\n \r\n server = ctx.message.server\r\n if server.id not in self.set:\r\n await self.bot.say(\":x: Uninitialized server!\")\r\n return\r\n if channel.id not in self.set[server.id][\"channels\"]:\r\n await self.bot.say(\":x: This is not a counting channel!\")\r\n return\r\n del self.set[server.id][\"channels\"][channel.id]\r\n self.save()\r\n await self.bot.edit_channel(channel,topic = None)\r\n await self.bot.say(\"Channel removed!\")", "def on_done(self, input):\n\n input = input.strip()\n\n if re.match('https?://', input, re.I) == None:\n show_error(u\"Unable to add the channel \\\"%s\\\" since it does not appear to be served via HTTP (http:// or https://).\" % input)\n return\n\n settings = sublime.load_settings('Package Control.sublime-settings')\n channels = settings.get('channels', [])\n if not channels:\n channels = []\n channels.append(input)\n settings.set('channels', channels)\n sublime.save_settings('Package Control.sublime-settings')\n sublime.status_message(('Channel %s successfully ' +\n 'added') % input)", "def open_channel(self):\n logger.info('Creating a new channel')\n self._connection.channel(on_open_callback=self.on_channel_open)", "def hop_channel(self, channel):\n self.logger.info(\"Hopping to channel %s\", channel)\n os.system(f\"iwconfig {self.interface} channel {channel}\")", "async def set_channel(self, ctx: commands.Context):\n if ctx.message.author.id != conf.user:\n return None\n\n new_channel = ctx.channel.id\n\n conf.channel = new_channel\n\n log.info(f\"Bot channel set to channel with: #{ctx.channel} (ID:{ctx.channel.id})\")\n await ctx.message.channel.send(f\"✅ Set bot channel for {ctx.message.author} to #{ctx.channel}\")", "def set_channel(self, channel_value):\n error_message = \"Value '{chan}' is not \".format(chan=channel_value)\n # Make sure channel_value is an int before getting the number of keys to send\n try:\n channel_value = int(channel_value)\n assert(0<=channel_value<=999)\n except AssertionError:\n raise TVError(error_message + \"in the channel range\", 'set_channel')\n except ValueError:\n raise TVError(error_message + \"an integer\", 'set_channel')\n except:\n raise TVError(\"Unexpected error !\", 'set_fm')\n for char in str(channel_value).zfill(3):\n self.send_key('BTN_' + char)\n time.sleep(0.1)", "def open_channel(self):\n self.logger.info('creating channel')\n self._connection.channel(on_open_callback=self.on_channel_opened)", "def open_channel(self):\n self.logger.info('Creating a new channel')\n self._connection.channel(on_open_callback=self.on_channel_open)", "def on_channel_open(self, new_channel):\n\t\tself.channel = new_channel\n\t\tself.declare_queue(self.on_queue_declared)" ]
[ "0.7233943", "0.72161865", "0.7117447", "0.6998894", "0.69869983", "0.6717073", "0.6696119", "0.66698676", "0.6659829", "0.6645461", "0.66428185", "0.66339123", "0.66172534", "0.6603776", "0.63936055", "0.6305807", "0.6265757", "0.6263904", "0.6182621", "0.60905236", "0.605285", "0.60348904", "0.6033484", "0.60299104", "0.6024671", "0.59868586", "0.5982515", "0.59743285", "0.59682035", "0.59603083", "0.5956341", "0.5953155", "0.5944141", "0.59328204", "0.5917584", "0.5891411", "0.5874783", "0.583456", "0.583208", "0.58239067", "0.5792721", "0.578955", "0.5780077", "0.5760339", "0.57549965", "0.57515246", "0.57492805", "0.57437634", "0.5733871", "0.5728085", "0.57169247", "0.56830454", "0.5635646", "0.56252855", "0.56200266", "0.559825", "0.55870396", "0.55828446", "0.5582018", "0.55798966", "0.55777365", "0.5571824", "0.5568862", "0.5563303", "0.55545783", "0.5536587", "0.5526873", "0.5508298", "0.55054265", "0.5504355", "0.5501801", "0.5500033", "0.5491788", "0.5491055", "0.5481393", "0.54698104", "0.5466707", "0.5460676", "0.54307413", "0.5422025", "0.542143", "0.5398787", "0.5386632", "0.5366084", "0.53652686", "0.535813", "0.53470814", "0.5341061", "0.53402334", "0.53373325", "0.53304", "0.5323839", "0.5322875", "0.5311789", "0.5310932", "0.5308384", "0.5308265", "0.53065634", "0.53062177", "0.5295267" ]
0.71405834
2
List the channels being watched.
async def watchlist(self, ctx): channel_list = await self.config.guild(ctx.guild).watching() msg = "Bad gifs will be removed in:\n" for channel in channel_list: channel_obj = self.bot.get_channel(channel) if channel_obj is None: # Catch deleted/unexisting channels continue msg += f"{channel_obj.mention}\n" await ctx.send(msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _channels_list(self):\n result = self.slack.api_call(\"channels.list\")\n\n if not result.get(\"ok\"):\n logging.error(result['error'])\n return None\n\n return result['channels']", "def get_channels(self):\n return self.channels", "def channels(self):\n return self._channels", "def channels(self):\n return self._channels.keys()", "def get_channels():\n r = slack.channels.list().body\n return [ c for c in r['channels'] if c['is_member'] ]", "async def list_channel(self, ctx: MyContext):\n channels = self.db_get_channels(ctx.guild.id)\n if not channels: # we can't send an empty list\n await ctx.send(\n await self.bot._(\n ctx.guild.id, \"wormhole.error.no-channels\", p=ctx.prefix\n )\n )\n return\n txt = \"\\n\".join([c.to_str() for c in channels])\n await ctx.send(txt)", "def get_channels():\n\tchannels = slack.get_channels()\n\treturn jsonify(channels=channels.body['channels'])", "def channels(self):\n return [channel for channel in self.client.channels if channel.has_nick(self)]", "def channels(self):\r\n return v3.Channels(self)", "def getChannels(self) -> List:\n\t\tif self._taking_off:\n\t\t\tself.takeOff()\n\n\t\tif self._landing:\n\t\t\tself.land()\n\n\t\treturn self._altHoldController.getChannels() + [2000]", "def channels(self):\n if not self.is_loaded():\n return []\n else:\n return ipmi_channels()", "async def listchannels(self, ctx: commands.Context):\n db_session = self.bot.create_db_session()\n channels_query = db_session.query(Channel).filter(Channel.joinable == True).order_by(Channel.name)\n db_session.close()\n\n header_message = \"Here is a list of the joinable channels\"\n channel_list = \"\\n\".join(channel.name for channel in channels_query)\n footer_messge = (\"To join or leave one of these channels, use the !joinchannel and !leavechannel commands.\\n\"\n \"To join multiple channels, separate them with a space.\")\n\n message = discord.Embed()\n message.title = \"Joinable Channels\"\n message.description = channel_list\n message.set_footer(text=footer_messge)\n\n await ctx.send(embed=message)", "def channels():\n channels = db.session.query(Channel).all()\n return render_template(\"admin/channels.html\", channels=channels)", "def get_channels(self):\n response = self.client.api_call(\n f'conversations.list?types={cfg.CHANNEL[\"types\"]}&exclude_archived={cfg.CHANNEL[\"exclude_archived\"]}'\n )\n assert response['ok']\n return response['channels']", "def collect_channels(session):\n channel_objects = []\n brew_channels = session.listChannels()\n\n for brew_channel in brew_channels:\n channel_objects.append(channel(brew_channel[\"name\"], brew_channel[\"id\"]))\n\n return channel_objects", "def showChannels(self):\n print(\"Channels:\")\n for c in self.channels:\n if c.role != channel_pb2.Channel.Role.DISABLED:\n cStr = stripnl(MessageToJson(c.settings))\n print(\n f\" {channel_pb2.Channel.Role.Name(c.role)} psk={pskToString(c.settings.psk)} {cStr}\")\n publicURL = self.getURL(includeAll=False)\n adminURL = self.getURL(includeAll=True)\n print(f\"\\nPrimary channel URL: {publicURL}\")\n if adminURL != publicURL:\n print(f\"Complete URL (includes all channels): {adminURL}\")", "def channels_listall(token):\n channels_results = channels.list()\n channels_list = []\n for channel in channels_results:\n channels_list.append(\n {\"channel_id\": channel[\"channel_id\"], \"name\": channel[\"name\"]}\n )\n return {\"channels\": channels_list}", "def get_list_youtube_channels_check(self):\n return self.bot_data_file[\"youtube\"][\"channels\"]", "def get_channels_json(self):\n logging.debug(f\"Getting all Slack channels...\")\n return self.get_list_json(\"conversations\")[\"channels\"]", "async def votechannel_list(self, ctx):\n channels = await self.bot.db.execute(\n \"\"\"\n SELECT channel_id, voting_type FROM voting_channel WHERE guild_id = %s\n \"\"\",\n ctx.guild.id,\n )\n if not channels:\n raise exceptions.Info(\"There are no voting channels on this server yet!\")\n\n rows = []\n for channel_id, voting_type in channels:\n rows.append(f\"<#{channel_id}> - `{voting_type}`\")\n\n content = discord.Embed(\n title=f\":1234: Voting channels in {ctx.guild.name}\", color=int(\"3b88c3\", 16)\n )\n await util.send_as_pages(ctx, content, rows)", "def list_all_channels(_response=Response, _db=Depends(get_db)):\n\n res_status, _data = ChatController(_db).list_channels()\n\n _response.status_code = res_status\n\n return {\"data\": _data}", "def channels_list(token):\n auth_u_id = get_id_from_token(token)\n all_channels = channels.query(\"all_members\", \"contains\", auth_u_id)\n channels_list = []\n for channel in all_channels:\n channels_list.append(\n {\"channel_id\": channel[\"channel_id\"], \"name\": channel[\"name\"]}\n )\n return {\"channels\": channels_list}", "def get_channels(\n self,\n on_channel_open: Callable[[str], None],\n on_catastrophic_disconnect: Callable[[str], None],\n on_message: Callable[[str, \"Packet\"], None],\n ) -> List[\"Channel\"]:\n raise NotImplementedError()", "def channels(self) -> int:\n return self._channels", "async def _list(self, ctx):\n config = await self.config.guild(ctx.guild).channels()\n data = [self.bot.get_channel(x).mention for x in config]\n if ctx.channel.id in config:\n destination = ctx.author\n else:\n destination = ctx\n if not data:\n return await destination.send(\"There are no channels.\")\n await destination.send(\", \".join(data))", "def list_channels():\n user = getpass.getuser()\n base_path = \"C:\\\\Users\\\\\" + user + \"\\\\Documents\\\\Eve\\\\logs\\\\Chatlogs\\\\\"\n today = datetime.datetime.utcnow().strftime(\"%Y%m%d\")\n most_recent = {}\n for filename in os.listdir(base_path):\n filename = filename[:-4]\n full_filename = filename\n time = filename[-6:]\n filename = filename[:-7]\n date = filename[-8:]\n channel_name = filename[:-9]\n if date == today:\n channel = Channel()\n channel.file_name = full_filename\n channel.dir = base_path\n channel.channel_name = channel_name\n channel.date = date\n channel.time = time\n if most_recent.get(channel_name):\n newest_channel = most_recent.get(channel_name)\n if int(time) > int(newest_channel.time):\n most_recent[channel_name] = channel\n else:\n most_recent[channel_name] = channel\n\n return most_recent", "def get_user_channels(self):\n\n request = self.youtube.subscriptions().list(\n part='snippet',\n mine=True,\n order='alphabetical'\n )\n subscriptions = []\n while request:\n response = request.execute()\n subscriptions.append(response)\n request = self.youtube.subscriptions().list_next(request, response)\n\n channels = {}\n for subscription in subscriptions:\n for channel in subscription['items']:\n channel_title = channel['snippet']['title']\n channel_id = channel['snippet']['resourceId']['channelId']\n channels[channel_title] = channel_id\n\n return channels", "def extract_channels(self, index: int) -> ListLike:\n cmd_pieces = self[index].split()\n channels = []\n for i, piece in enumerate(cmd_pieces):\n if piece in [\"--channel\", \"-c\"]:\n channels.append(cmd_pieces[i + 1])\n return channels", "def foundInChannels(self, cls:\"Client\") -> List[\"Channel\"]:\n\n\t\tret:List[\"Channel\"] = []\n\n\t\tfor channel_name in self.found_in:\n\n\t\t\tCh:\"Channel\" = cls.channels.get(channel_name, None)\n\t\t\tif Ch: ret.append(Ch)\n\n\t\treturn ret", "def get_channels(cls):\n class_members = getmembers(cls)\n\n channels = []\n for name, member in class_members:\n if isinstance(member, CommonBase.BaseChannelCreator):\n channels.append((name, member))\n return channels", "def list_channels(title=None, uri=None):\r\n # Set plugin category. It is displayed in some skins as the name\r\n # of the current section.\r\n xbmcplugin.setPluginCategory(_handle, 'Channels')\r\n\r\n # Set plugin content. It allows Kodi to select appropriate views\r\n # for this type of content -- didn't use this since it's not working well\r\n # with the video item.\r\n # xbmcplugin.setContent(_handle, 'videos')\r\n\r\n # Get channels.\r\n result = _get_data(uri or 'https://api.hotstar.com/o/v1/channel/list?perPage=1000')\r\n # Iterate through categories\r\n\r\n for channel in result['items']:\r\n # Channel JSON structure.\r\n # {\r\n # \"title\": \"Star Vijay\",\r\n # \"categoryId\": 748,\r\n # \"contentId\": 824,\r\n # \"uri\": \"https://api.hotstar.com/o/v1/channel/detail?id=12&avsCategoryId=748&contentId=824&offset=0&size=20\r\n # &pageNo=1&perPage=20\",\r\n # \"description\": \"A Tamil general entertainment channel with family drama, comedy and reality shows.\",\r\n # \"assetType\": \"CHANNEL\",\r\n # \"genre\": [\r\n # \"LiveTV\"\r\n # ],\r\n # \"lang\": [\r\n # \"Tamil\"\r\n # ],\r\n # \"showCnt\": 137\r\n # },\r\n #\r\n _add_directory_item(\r\n parent_title=title,\r\n title=channel['title'],\r\n content_id=channel['contentId'],\r\n genre=channel.get('genre'),\r\n description=channel['description'],\r\n uri=channel['uri'],\r\n action='programs',\r\n image=get_thumbnail_image(channel)\r\n )\r\n\r\n if not uri:\r\n # Add Sports\r\n _add_directory_item(\r\n title='HotStar Sports',\r\n description='Sports',\r\n content_id=821,\r\n genre='Sports',\r\n uri='https://api.hotstar.com/o/v1/page/1327?tas=30',\r\n action='program_details',\r\n country_code='CA'\r\n )\r\n # Movies\r\n _add_directory_item(\r\n title='HotStar Movies',\r\n content_id=821,\r\n genre='Movies',\r\n description='Movies',\r\n uri='https://api.hotstar.com/o/v1/page/1328?tas=30',\r\n action='program_details',\r\n country_code='CA'\r\n )\r\n\r\n # TV\r\n _add_directory_item(\r\n title='HotStar TV',\r\n content_id=821,\r\n description='TV',\r\n genre='TV',\r\n uri='https://api.hotstar.com/o/v1/page/1329?tas=30',\r\n action='program_details',\r\n country_code='CA'\r\n )\r\n\r\n # Genre\r\n _add_directory_item(\r\n title='HotStar Genres',\r\n content_id=821,\r\n description='Genres',\r\n genre='Genre',\r\n uri='https://api.hotstar.com/o/v1/genre/list?perPage=1000',\r\n action='programs',\r\n )\r\n\r\n _add_search_item()\r\n\r\n # Add a sort method for the virtual folder items (alphabetically, ignore articles)\r\n xbmcplugin.addSortMethod(_handle, xbmcplugin.SORT_METHOD_LABEL)\r\n\r\n # Finish creating a virtual folder.\r\n xbmcplugin.endOfDirectory(_handle)", "def _read_channels(self, info):\n channels = []\n if info.desc().child(\"channels\").empty():\n return channels\n\n channel = info.desc().child(\"channels\").child(\"channel\")\n for _ in range(info.channel_count()):\n channel_name = channel.child_value(\"label\")\n # If the data stream has a TRG channel, rename it so it doesn't\n # conflict with the marker channel.\n if channel_name == 'TRG' and self._marker_inlets:\n channel_name = \"TRG_device_stream\"\n channels.append(channel_name)\n channel = channel.next_sibling()\n\n for appended_channel in self._appended_channels:\n channels.append(appended_channel)\n\n trg_marker_index = self._trigger_inlet_index()\n for i, inlet in enumerate(self._marker_inlets):\n col = inlet_name(inlet)\n if i == trg_marker_index:\n col = 'TRG'\n channels.append(col)\n\n return channels", "async def list(self, ctx):\n cyphon = discord.utils.get(ctx.message.server.members, id=\"186835826699665409\")\n\n if self.check_channel(ctx):\n if self.check_permission(ctx) or ctx.message.author == cyphon:\n message = []\n message.append(\"```\\n\")\n if self.check_channel(ctx):\n if self.check_permission(ctx) or ctx.message.author == cyphon:\n if len(self.twitch_streams) > 0:\n for stream in self.twitch_streams:\n message.append(stream[\"NAME\"] + \"\\n\")\n else:\n message.append(\"No streams found!\")\n message.append(\"```\")\n output = ''.join(message)\n await self.bot.say(output)\n else:\n await self.bot.send_message(ctx.message.author, \"You don't have permission to execute that command.\")", "def get_channels(self):\n bus_name = self.telepathy_conn.requested_bus_name\n connection_path = self.telepathy_conn.object_path\n channels = [self.telepathy_text_chan.object_path,\n self.telepathy_tubes_chan.object_path]\n\n print('%r: bus name is %s, connection is %s, channels are %r',\n self, bus_name, connection_path, channels)\n return bus_name, connection_path, channels", "def get_channels_for(self, server, nick):\n channels = []\n for channel in self.serverchans[server.lower()].values():\n if irc.strings.lower(nick) in channel.users:\n channels.append(channel)\n return channels", "def channels(self):\n return [cc for cc in list(self.dataset.data_vars)]", "def get_channels(self):\n return [self.afos, \"%s...\" % (self.afos[:3], )]", "def channel_list(self):\n return_str = self.scpi.query_channel_catalog().split(',')\n channel_dct = {}\n for i in range(int(len(return_str)/2)):\n channel_dct[int(return_str[2 * i])] = return_str[2 * i + 1]\n return channel_dct", "def get_channels(cj): \n opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))\n channels = opener.open(\"http://www.douban.com/j/app/radio/channels\")\n channel_list = json.loads(channels.read())\n return channel_list[\"channels\"]\n # print channel_list", "def _channelList_changed(self):\n self.oscilloscope.visibleChannels = self.channelList", "def getChannelsByName(self, unit, channels): \n\t\treturn self.selectChannelsByName(unit, channels, dontSelect = 1)", "def channels(message):\n load_users(message._client.users)\n for x in message._client.channels:\n chan = message._client.channels[x]\n if 'is_member' in chan:\n if chan['is_member']:\n message.reply(\"{} ({})\".format(chan['name'], chan['id']))\n# message.reply(pretty_json(chan, True))\n elif 'is_im' in chan:\n print(chan)\n friendlyname = chan['user']\n try:\n friendlyname = chan['user'].name\n except KeyError:\n pass\n message.reply(\"User channel: {} ({})\".format(friendlyname,\n chan['id']))", "def iter_channels(self) -> Iterable[\"MChannel\"]:\n for server in self.servers.values():\n yield from server.channels.values()", "async def fetch_dm_channels(self):\n data = await self.http.get_dm_channels()\n channels = []\n for dm_channel_data in data.get('channels', data):\n dm_channel = self.http.create_channel(data=dm_channel_data)\n channels.append(dm_channel)\n\n return channels", "async def managechannels(self, ctx:commands.Context):", "def get_channels(self, 
uuid=None, address=None):\n params = self._build_params(uuid=uuid, address=address)\n return self._get_query('channels', params, Channel)", "async def used_channels(request: web.Request) -> web.Response:\n\n session_factory = get_session_factory_from_request(request)\n\n with session_factory() as session:\n channels = await get_channels(session)\n\n response = web.json_response({\"channels\": channels})\n response.enable_compression()\n return response", "def GetChannels(self, type='tv', group=2):\n self.logger.debug(\"Loading XBMC PVC channel list.\")\n try:\n xbmc = Server(self.url('/jsonrpc', True))\n return xbmc.PVR.GetChannels(channelgroupid=int(group), properties=['thumbnail'])\n except:\n return", "def get_channels(kwargs, limit=100):\n conn = engine.connect()\n\n fields = [field.name for field in ChannelStatusForm()]\n\n # make sure all the values in kwargs are actual fields\n kwargs = dict(item for item in kwargs.items() if item[0] in fields)\n\n query = \"SELECT * FROM current_channel_status \"\n if len(kwargs):\n query += \"WHERE %s \" % (\" AND \".join([\"%s = %%(%s)s\" % (item[0], item[0]) for item in kwargs.items()]))\n query += \"ORDER BY crate, slot, channel LIMIT %i\" % limit\n\n result = conn.execute(query, kwargs)\n\n if result is None:\n return None\n\n keys = result.keys()\n rows = result.fetchall()\n\n return [dict(zip(keys,row)) for row in rows]", "def channels(self): # type: (...) -> List[BlendingRangePair]\n return self._channels", "def database_channels(self):\n return [c for c in self.values() if isinstance(c, DatabaseChannel)]", "def list(sw, args):\n parser = argparse.ArgumentParser(\n prog='space channel list',\n description='List channels in spacewalk.'\n )\n parser.add_argument(\n 'type',\n choices=[\n 'all',\n 'user',\n 'popular',\n 'retired',\n 'shared',\n 'software',\n 'vendor'\n ],\n default='popular',\n help=\"Type of search you would like to perform\"\n )\n parser.add_argument(\n '--format',\n choices=[\n 'raw',\n 'json',\n 'pretty'\n ],\n default='pretty',\n required=False\n )\n parser.add_argument(\n '--popcount',\n default=None,\n help=('channels with at least this many systems ' +\n 'subscribed will be returned')\n )\n\n api_calls = {\n 'all': 'channel.listAllChannels',\n 'user': 'channel.listMyChannels',\n 'popular': 'channel.listPopularChannels',\n 'retired': 'channel.listRetiredChannels',\n 'shared': 'channel.listSharedChannels',\n 'software': 'channel.listSoftwareChannels',\n 'vendor': 'channel.listVendorChannels'\n }\n\n p = parser.parse_args(args)\n\n if p.type == 'popular' and not p.popcount:\n print(\"Popular requires popcount arg.\")\n parser.print_help()\n return False\n\n if p.popcount:\n popcount = int(p.popcount)\n results = sw.call(\n api_calls[p.type],\n popcount\n )\n else:\n results = sw.call(\n api_calls[p.type]\n )\n if results == []:\n print(\"Empty result set.\")\n\n channels = []\n for result in results:\n channels.append(result)\n\n if p.format == 'pretty':\n \"\"\"\n int \"id\"\n string \"label\"\n string \"name\"\n string \"provider_name\"\n int \"packages\"\n int \"systems\"\n string \"arch_name\"\n \"\"\"\n if p.type == \"software\":\n t = prettytable.PrettyTable([\n \"Label\",\n \"Name\",\n \"Parent Label\",\n \"End Of Life\",\n \"Arch\"\n ])\n t.align[\"Label\"] = \"l\"\n t.align[\"Name\"] = \"l\"\n t.align[\"Parent Label\"] = \"l\"\n t.padding_width = 1\n for c in results:\n\n t.add_row([\n c['label'],\n c['name'],\n c['parent_label'],\n c['end_of_life'],\n c['arch']\n ])\n else:\n t = prettytable.PrettyTable([\n 
\"Label\",\n \"Name\",\n \"Provider Name\",\n \"Packages\",\n \"Systems\",\n \"Arch Name\"\n ])\n t.align[\"Label\"] = \"l\"\n t.align[\"Name\"] = \"l\"\n t.align[\"Packages\"] = \"r\"\n t.align[\"Systems\"] = \"r\"\n t.align[\"Provider Name\"] = \"l\"\n t.padding_width = 1\n for c in results:\n\n t.add_row([\n c['label'],\n c['name'],\n c['provider_name'],\n c['packages'],\n c['systems'],\n c['arch_name']\n ])\n print(t)\n\n elif p.format == 'json':\n output = json.dumps(dict(channels=channels))\n print(output)\n else:\n for result in results:\n print(result)\n return results", "def channels_playing(self):\n channels = c_int()\n real = c_int()\n ckresult(\n _dll.FMOD_System_GetChannelsPlaying(self._ptr, byref(channels), byref(real))\n )\n return so(channels=channels.value, real_channels=real.value)", "def get_clients(self, channel):\n if channel not in self.clients.keys():\n return []\n return self.clients[channel]", "def channels(message):\n for channel in message._client.channels:\n if 'is_member' in channel:\n message.reply(\"{} ({})\".format(channel['name'], channel['id']))\n elif 'is_im' in channel:\n #print(channel)\n friendlyname = channel['user']\n try:\n friendlyname = channel['user'][\"name\"]\n except (KeyError, AttributeError):\n pass\n message.reply(\"User channel: {} ({})\".format(friendlyname,\n channel['id']))", "def get_activechannels(self,):\n\n channels_nibble = self._read('CSR')[0] >> 4\n channels = []\n\n for i in reversed (range (4)):\n if channels_nibble >> i > 0:\n channels.append(i)\n channels_nibble -= 2**i\n\n channels.reverse()\n\n return channels", "def _get_cloudwatch_subscriptions(self):\n return self._get_subscriptions(self.cloudwatch_arn)", "def active_channels(cls, user, notification_type):\n if notification_type not in NOTIFICATION_TYPES:\n raise ValueError(\"You asked for an invalid notification_type\")\n\n try:\n setting = cls.objects.get(user=user, notification_type=notification_type)\n except cls.DoesNotExist:\n # No setting equals all channels\n return CHANNELS\n\n if not setting.enabled:\n # Everything is disabled when the enabled flag is False\n return []\n\n # When enabled is True return all valid channels in the channels field.\n return list(set(setting.channels or []) & set(CHANNELS))", "def get_channels_from_file():\n with open(\"data/channels.json\", \"r\") as read_file:\n channels = json.load(read_file)\n\n return channels", "def __iter__(self):\n for channel in self.channels.itervalues():\n yield channel", "def watch_list(self) -> list:\n return []", "def launch_channels(self) -> None:\n live_run = self.get_live_run()\n\n channels = live_run.architect.get_channels(\n self._on_channel_open,\n self._on_catastrophic_disconnect,\n self._on_channel_message,\n )\n for channel in channels:\n self._register_channel(channel)\n\n async def launch_status_task():\n self._status_task = asyncio.create_task(self._ping_statuses_while_alive())\n\n live_run.loop_wrap.execute_coro(launch_status_task())", "def channels(self) -> Tuple[chans.Channel]:\n return self.operands", "def getDescriptorChannels(self): # real signature unknown; restored from __doc__\n pass", "def data_channels(self) -> List[ChannelData]:\n return [self.__map_channels[self.__map_idx[idx]] for idx in sorted(self.__map_idx.keys())]", "def db_get_channels(self, guildID: int):\n query = \"SELECT rowid, * FROM wormhole_channel WHERE guildID = ?\"\n channels = self.bot.db_query(query, (guildID,), astuple=True)\n # come as: (rowid, name, channelID, guildID, type, webhookID,\n # webhookTOKEN)\n 
res: List[WormholeChannel] = []\n for row in channels:\n res.append(WormholeChannel(*row[1:5]))\n res[-1].id = row[0]\n return res if len(res) > 0 else None", "def names(self, channel, *args, **kwargs):\n pass", "def GetChannelGroups(self, type='tv'):\n self.logger.debug(\"Loading XBMC PVC channel list.\")\n try:\n xbmc = Server(self.url('/jsonrpc', True))\n return xbmc.PVR.GetChannelGroups(channeltype=type)\n except ValueError:\n return", "def list():\n\n return {\"cncs\": [{\"id\": id.split(\"/\")[-1]} for id in sorted(flask.current_app.redis.keys(\"/cnc/*\"))]}", "def get_channels(youtube, channelId):\n channels_response = youtube.channels().list(\n id=AUTH_USER_CHANNEL_ID,\n part=\"contentDetails\"\n ).execute()\n \n return channels_response[\"items\"]", "def rooms(self):\n\n channels = self.conn.channels.keys()\n return [IRCMUCRoom(node=channel) for channel in channels]", "def get_num_channels(self):\n return _uhd_swig.rx_streamer_get_num_channels(self)", "def list(self):\n\t\tif self.client is None:\n\t\t\traise UsageError(\"Not connected!\")\n\t\treturn self.client.list_conns()", "async def list(self, ctx):\r\n try:\r\n if ctx.message.server.id not in self.adkillr:\r\n await self.bot.say(\"There are no filters set for this server.\")\r\n else:\r\n await self.bot.say(\"The current filters are\\n{}.\".format(\", \".join(self.adkillr[ctx.message.server.id]['filters'])))\r\n except KeyError:\r\n await self.bot.say(\"There are no filters set for this server.\")", "def get_channels_for_brand(\n brand_id: BrandID, *, only_non_archived: bool = False\n) -> list[NewsChannel]:\n stmt = (\n select(DbNewsChannel)\n .filter_by(brand_id=brand_id)\n .order_by(DbNewsChannel.id)\n )\n\n if only_non_archived:\n stmt = stmt.filter_by(archived=False)\n\n db_channels = db.session.scalars(stmt).all()\n\n return [_db_entity_to_channel(db_channel) for db_channel in db_channels]", "def refresh_chanlist(self):\n self._chanlist.delete(0, Tix.END)\n for name in sorted(self._channel_frames.keys(), _k.cmp_channels):\n self._chanlist.insert(Tix.END, name)", "def __redrawChannels(self):\n self.__channelWin.clear()\n all_chans = self._client.getChannels()\n all_chans.sort(key=lambda c: c.getName())\n count = min(len(all_chans), self.__channelWin.getmaxyx()[0])\n show = all_chans[:count]\n for c in show:\n cur = self._client.currentChannel() == c\n if cur:\n attr = curses.A_REVERSE\n elif c in self._client.getJoined():\n attr = curses.A_BOLD\n else:\n attr = curses.A_DIM\n if c != self._client.getNoneChannel():\n self.__channelWin.addstr(\n \"{chan}\\n\".format(chan=c.getName()),\n attr\n )", "def get_channel_names(self, datapath):\n self.logger.debug(\"get_channel_names: for %s\", datapath)\n names = glob.glob(datapath+\".*\")\n self.logger.debug(\"get_channel_names: from %s\", names)\n channel_names = []\n for name in names:\n channel_names.append(\"%02d\" % int(os.path.splitext(name)[1][1:]))\n return channel_names", "def print(self):\r\n for e in self.channels:\r\n print(e)", "def _scan_radio_channels(self, cradio, start=0, stop=125):\n return list(cradio.scan_channels(start, stop, (0xff,)))", "def select_channels(channels, limitfile):\n sed_ok_decay = CollectLimits.is_decay_limits(limitfile)\n sed_ok_ann = CollectLimits.is_ann_limits(limitfile)\n ochans = []\n for chan in channels:\n chan_is_decay = chan.find('_decay') >= 0\n if chan_is_decay:\n if sed_ok_decay:\n ochans.append(chan)\n else:\n if sed_ok_ann:\n ochans.append(chan)\n return ochans", "def comchans(self, nick):\n comchannels = 0\n for chan in 
self.chandb:\n if nick in chan:\n comchannels += 1\n return comchannels", "def channels(self, val):\n if type(val) in [types.TupleType, types.ListType]:\n \n # If we have a real list\n if len([x for x in val if x in SENSORS]) == len(val):\n \n #if all the values correspond to possible channels\n self._channels = tuple(val) # Make sure it's a tuple\n \n else:\n \n # Should raise an \"Unknown Channel\" error of sorts\n pass\n else:\n # Should raise an exception\n pass", "def channels(self,station=[]):\n #{{{ function to return list of valid channels\n chans = {}\n\n if station:\n\n for sta in station:\n if sta in self.stachan_cache:\n\n for ch in self.stachan_cache[sta]:\n\n chans[ch] = 1\n else:\n\n return False\n else:\n\n for st in self.stachan_cache.keys():\n\n for ch in self.stachan_cache[st]:\n\n chans[ch] = 1\n\n return chans.keys()", "def test_channel_list1():\n reset_data()\n user1 = auth_register(\"123eff45\", \"xxx\", \"yyyy\", email=\"hi@gmail.com\")\n owner1 = auth_register(\"123eff45\", \"xxx\", \"yyyy\", email=\"hii@gmail.com\")\n channel1_1 = channels_create(owner1['token'], \"channel1\", True)['channel_id']\n channel_join(user1['token'], channel1_1)\n channel_list1 = channels_list(user1['token'])\n channels = [channel['channel_id'] for channel in channel_list1]\n assert channels == [channel1_1]\n print(\"=========pass test1 : only one channel in channel_list========\")", "def redis_client_list(self):\n def func(server):\n return server.server.client_list()\n self.__run_redis_cmd(func)", "def get_physical_output_channels(self):\r\n bufsize = 1024\r\n buf = ctypes.create_string_buffer(bufsize)\r\n NIDAQ_dll.DAQmxGetDevAOPhysicalChans(self.dev_id.encode('ascii'),\r\n ctypes.byref(buf), bufsize)\r\n channel_list = buf_to_list(buf)\r\n channel_list = [channel.lstrip(self.dev_id+'/') for channel in channel_list]\r\n return channel_list", "def test_switch_channels(self):\n\t\t# not available yet, experimental\n\t\tpass", "def _get_all_filtered_channels(self, topics_without_signature):\n mpe_address = self.get_mpe_address()\n event_signature = self.ident.w3.sha3(\n text=\"ChannelOpen(uint256,uint256,address,address,address,bytes32,uint256,uint256)\").hex()\n topics = [event_signature] + topics_without_signature\n logs = self.ident.w3.eth.getLogs(\n {\"fromBlock\": self.args.from_block, \"address\": mpe_address, \"topics\": topics})\n abi = get_contract_def(\"MultiPartyEscrow\")\n event_abi = abi_get_element_by_name(abi, \"ChannelOpen\")\n channels_ids = [get_event_data(event_abi, l)[\n \"args\"][\"channelId\"] for l in logs]\n return channels_ids", "def updateChannels(self):\n self.__redrawChannels()\n self.__update()", "def handle_state(token: str = \"\"):\n helper = SlackHelper(token)\n\n response = paginated_api_call(\n api_method=helper.client.conversations_list,\n response_objects_name=\"channels\",\n exclude_archived=0,\n types=\"public_channel, private_channel\",\n )\n return {\"channels\": response}", "def get_monitored_changes(self) -> List:\n pass", "def list(self, nick, channel):\n if len(self.locks.keys()) == 0:\n return (channel, \"There are no registered resources\")\n else:\n return (channel, \"List of registered resources: %s\" %\n ', '.join(sorted(self.locks.keys())))", "def software_channels(self):\n channels = c_int()\n ckresult(_dll.FMOD_System_GetSoftwareChannels(self._ptr, byref(channels)))\n return channels.value", "def ordered_channel_names(self):\n channel_list = []\n for k in self.__dict__.keys():\n if k.startswith('channel_'):\n channel_list.append(\n 
[int(k.split('channel_')[1]), self.__dict__[k]]\n )\n channel_list.sort()\n if len(channel_list) == 0:\n print('********* warning!! empty channel list - are there ay channel_N attributes? ')\n return [i[1] for i in channel_list]", "def print_all(self):\r\n for e in self.channels:\r\n e.print()", "def get_api_default_channel_list(self):\n url = \"http://api.applezhuan.com/api/c/get_default_channellist?&\"\n params = {\n \"android_id\": self.mobile.android_id,\n \"platform\": \"2\",\n \"av\": \"2\",\n \"type\": \"1\",\n \"time\": self.get_current_time,\n \"ov\": self.mobile.os,\n \"lon\": self.mobile.lon,\n \"lat\": self.mobile.lat,\n \"device_name\": \"dpi\",\n \"device_code\": self.device_code,\n \"brand\": self.mobile.brand,\n \"mac\": self.mobile.mac,\n \"vn\": \"1.0.2\",\n \"network\": self.mobile.network\n }\n params_str = self.encrypt.get_secret_param(params)\n url = url + \"s=\" + params_str\n headers = {\n \"Accept-Language\": \"zh-CN,zh;q=0.8\",\n \"User-Agent\": \"Mozilla/5.0 (Linux; U; Android \" + self.mobile.os + \"; zh-cn; GT-N7100 Build/\" +\n self.mobile.brand + \") AppleWebKit/534.30\"\n \" (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30\",\n \"Host\": \"api.applezhuan.com\",\n \"Connection\": \"Keep-Alive\",\n \"Accept-Encoding\": \"gzip\",\n \"Cookie\": self.cookie\n }\n\n res = requests.get(url, headers=headers)\n # print(res.text)\n result = json.loads(res.text)\n return result[\"d\"]", "def GetChannelNames(vDataSet):\r\n nc = vDataSet.GetSizeC()\r\n ret = []\r\n for i in range(nc):\r\n name = vDataSet.GetChannelName(i)\r\n ret.append(name)\r\n\r\n return ret", "def channels(self) -> int:\n return len(self._channel_arrays)", "def get_db_channels(self, origin=None):\n\n if not origin:\n origins_list = list(self.list.keys())\n else:\n origins_list = origin.lower()\n\n if isinstance(origins_list, str):\n origins_list = [origins_list]\n\n for origin in origins_list:\n\n self.fhdhr.logger.info(\"Checking for %s Channel information stored in the database.\" % origin)\n channel_ids = self.fhdhr.db.get_fhdhr_value(\"channels\", \"list\", origin) or []\n\n if len(channel_ids):\n self.fhdhr.logger.info(\"Found %s existing channels in the database.\" % str(len(channel_ids)))\n\n for channel_id in channel_ids:\n channel_obj = Channel(self.fhdhr, self.id_system, origin=origin, channel_id=channel_id)\n channel_id = channel_obj.dict[\"id\"]\n self.list[origin][channel_id] = channel_obj" ]
[ "0.79604846", "0.77818024", "0.7657704", "0.76543194", "0.76405656", "0.75859505", "0.7379902", "0.7354642", "0.7337159", "0.73244745", "0.73033804", "0.7295559", "0.72613245", "0.7203369", "0.7156832", "0.7063657", "0.7061825", "0.7007366", "0.6981566", "0.6963767", "0.6926028", "0.6876751", "0.68682545", "0.6794832", "0.6789178", "0.6751764", "0.6725386", "0.67018276", "0.66852415", "0.66827106", "0.6658148", "0.6605034", "0.660225", "0.6589116", "0.65860355", "0.658056", "0.6552479", "0.6542476", "0.65106153", "0.6485537", "0.6475711", "0.64534765", "0.6401827", "0.63964987", "0.63727486", "0.63616574", "0.6329009", "0.6322852", "0.63123333", "0.6301329", "0.6289052", "0.6244059", "0.62387764", "0.6222434", "0.61909276", "0.6182818", "0.6182393", "0.61668", "0.6151563", "0.61282116", "0.6099001", "0.5982438", "0.59630346", "0.59629893", "0.59536034", "0.59425914", "0.59185517", "0.5911294", "0.59006405", "0.58765596", "0.58601654", "0.58186644", "0.5809344", "0.5806665", "0.5805992", "0.5802548", "0.5800681", "0.5787374", "0.57790565", "0.57778555", "0.5775059", "0.57693475", "0.5766712", "0.5765728", "0.57637316", "0.57620144", "0.5759668", "0.57525194", "0.57449746", "0.57431185", "0.57223845", "0.5689102", "0.5682801", "0.5674938", "0.5673554", "0.56668526", "0.5664007", "0.5657998", "0.5656056", "0.5644728" ]
0.6865468
23
Remove a channel from the watch list.
async def unwatch(self, ctx, channel: discord.TextChannel): channel_list = await self.config.guild(ctx.guild).watching() if channel.id in channel_list: channel_list.remove(channel.id) else: return await ctx.send("Channel is not being watched.") await self.config.guild(ctx.guild).watching.set(channel_list) await ctx.send(f"{self.bot.get_channel(channel.id).mention} will not have bad gifs removed.")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_channel(self, channel):\n self._channels.pop(channel.fileno, None)\n\n try:\n self._poller.remove(channel.fileno, channel._events)\n except (IOError, OSError):\n log.exception(\"Error while removing %r.\" % channel)", "def remove(self, irc, msg, args, channel):\n res = self._checkDBhasChannel(channel)\n if res is True:\n SQL = 'DELETE FROM registry WHERE channel = ?'\n SQLargs = (channel,)\n self._SQLexec(SQL, SQLargs)\n self.lock.acquire()\n for x in range(0, len(self.channelscontrol)):\n v0 = str(self.channelscontrol[x][0])\n if v0 == channel:\n self.channelscontrol.pop(x)\n break\n self.lock.release()\n irc.reply(\"Channel removed from DB.\", prefixNick=True)\n else:\n irc.reply(\"Channel does not exist in DB.\", prefixNick=True)", "def remove_channel(self, channel):\n to_exec = \"DELETE FROM channel WHERE channel_id = %s\"\n self.__cursor.execute(to_exec, (str(channel.id),))\n self.__connection.commit()", "def test_remove_channel_removes_channel(self):\n channel = Mock()\n with mock.patch.object(self.notifier, \"_silenced_channels\") as silenced_channels:\n self.notifier.remove_channel(channel)\n silenced_channels.__delitem__.assert_called_with(channel)", "async def remove(self, ctx, channel: discord.TextChannel):\n config = await self.config.guild(ctx.guild).channels()\n if not channel.id in config:\n return await ctx.send(\"This channel is not a spoiler channel.\")\n config.remove(channel.id)\n await self.config.guild(ctx.guild).channels.set(config)\n await ctx.send(\"Channel removed from the spoiler channel list.\")", "async def remove(self, ctx, channel : discord.Channel):\r\n \r\n server = ctx.message.server\r\n if server.id not in self.set:\r\n await self.bot.say(\":x: Uninitialized server!\")\r\n return\r\n if channel.id not in self.set[server.id][\"channels\"]:\r\n await self.bot.say(\":x: This is not a counting channel!\")\r\n return\r\n del self.set[server.id][\"channels\"][channel.id]\r\n self.save()\r\n await self.bot.edit_channel(channel,topic = None)\r\n await self.bot.say(\"Channel removed!\")", "def _onremove(self):\n self._channellist.remove(self)\n self.deleteLater()", "async def votechannel_remove(self, ctx, *, channel: discord.TextChannel):\n await self.bot.db.execute(\n \"DELETE FROM voting_channel WHERE guild_id = %s and channel_id = %s\",\n ctx.guild.id,\n channel.id,\n )\n self.bot.cache.votechannels.discard(channel.id)\n await util.send_success(ctx, f\"{channel.mention} is no longer a voting channel.\")", "def drop_channel(self, channel):\n return self.clients.pop(channel, None)", "async def remove(self, ctx, *, channel: discord.VoiceChannel):\n auto_channels = await self.config.guild(ctx.guild).auto_channels()\n auto_channels.remove(channel.id)\n await self.config.guild(ctx.guild).auto_channels.set(auto_channels)\n await ctx.send(\n _(\"Startchannel used for automatic voicechannels removed: {channel}\").format(\n channel=channel.name\n )\n )", "def unsubscribe(self, channel, update_handler=None):\n pass", "def unsubscribe(self, inst, channel):\r\n if channel not in self._channels:\r\n raise ValueError(\"Channel {} not exists!\".format(channel))\r\n self._channels[channel].unsubscribe(inst)\r\n return\r\n # TODO: ?delete channels if there is no subscribers\r\n # if len(self._channels[channel].subscribers) == 0:\r\n # del self._channels[channel]\r", "def cleanup(self, channel=None):\n # if `channel` is given, only those channels are cleaned up,\n # otherwise everything is cleaned up\n if channel:\n # check whether `channel` is a number and 
if so wrap it in a list containing only that number\n # this is important, because a for loop cannot iterate over a number,\n # in my case it can only iterate over a list\n if type(channel) == int:\n channel = [channel]\n for c in channel:\n # delete the channel `c` from the dictionary `self.channels`\n del self.channels[c]\n print(f"cleanup of channel {c}")\n else:\n print("cleanup")\n self.channels = {}", "def remove_event_detect(self, channel):\n self._check_mode()\n # remove the channel from the list `self.events`\n self.events.remove(channel)\n print(f"event detect for channel {channel} removed")", "async def removechannel(self, ctx, channel: discord.TextChannel):\n guild = ctx.message.guild\n excluded_channels = await self.config.guild(guild).excluded_channels()\n\n if channel.id in excluded_channels:\n excluded_channels.remove(channel.id)\n await self.config.guild(guild).excluded_channels.set(excluded_channels)\n await ctx.send(\"Removed %s from channel exclusion list.\" % channel.name)\n else:\n await ctx.send(\"%s is not excluded channel.\" % channel.name)", "def remove(self, channel, nick, comment=\"\"):\n time.sleep(1)\n self.s.send(\"REMOVE %s %s%s\\n\" % (channel, nick, (comment and (\" :\" + comment))))\n logger.log(\"REMOVE %s %s%s\" % (channel, nick, (comment and (\" :\" + comment)))).LogSend()", "def unsubscribe(self, client, channel):\n clients = self.clients.get(channel)\n if clients is None:\n return False\n index = None\n for i, pair in enumerate(clients):\n if pair[0] != client:\n continue\n index = i\n break\n if index is not None:\n del self.clients[channel][index]\n return True", "async def watch(self, ctx, channel: discord.TextChannel):\r\n channel_list = await self.config.guild(ctx.guild).watching()\r\n if channel.id not in channel_list:\r\n channel_list.append(channel.id)\r\n await self.config.guild(ctx.guild).watching.set(channel_list)\r\n await ctx.send(f\"{self.bot.get_channel(channel.id).mention} will have bad gifs removed.\")", "def test_remove_channel_stops_loop(self):\n with mock.patch.object(self.notifier, \"_silenced_channels\", __bool__=lambda _: False):\n self.notifier.remove_channel(Mock())\n self.notifier_stop_mock.assert_called_once()", "def remove(self, channels=None):\n if channels is None:\n channels = self.get_channels()\n self.remove_from_frames(\n self.data, self.integration.frames, channels)", "def part(self, channel):\n\n self._pubsub.unsubscribe('cluster:%s' % channel)", "def test_remove_channel_skips_stop_with_channels(self):\n self.notifier.remove_channel(Mock())\n self.notifier_stop_mock.assert_not_called()", "def channel_leave(token, channel_id):\n auth_u_id = get_id_from_token(token)\n channel = channels.get(channel_id)\n if channel is None:\n raise ValueError(\"channel does not exist\")\n if auth_u_id not in channel[\"all_members\"]:\n raise AccessError(\"Authorised user is not a member of the channel.\")\n channels.remove(channel_id, \"all_members\", auth_u_id)", "def left(self, channel):\n ss = self.findSessions(channel)[0]\n self.sessions.remove(ss)", "def dropchan(channel):", "async def blacklist_channel(\n self, ctx: commands.Context, channel: discord.TextChannel\n ):\n if str(channel.id) in self.channel_blacklist:\n self.channel_blacklist.remove(str(channel.id))\n await self._update_db()\n removed = True\n else:\n self.channel_blacklist.append(str(channel.id))\n await self._update_db()\n removed = False\n\n await ctx.send(f\"{'Un' if removed else None}Blacklisted {channel.mention}\")\n return", "def 
delete_channel(channel_id: NewsChannelID) -> None:\n db.session.execute(\n delete(DbNewsChannel).where(DbNewsChannel.id == channel_id)\n )\n db.session.commit()", "def deleteChannel(channelName):\n renameTo = ''.join([char if char.isnumeric() else '_' for char in str(datetime.utcnow())])\n renameChannel(channelName, renameTo)\n\n post(f\"https://slack.com/api/conversations.archive?{parse.urlencode({'channel' : channelNameToID(renameTo)})}\", headers=slackHeader(current_user.slackUserToken))\n return \"Zulip deleted a Slack channel\"", "async def test_removed_notifier(self):\n await self.cog._unsilence(self.text_channel)\n self.cog.notifier.remove_channel.assert_called_once_with(self.text_channel)", "async def removejoinchannel(self, ctx: commands.Context, channel: discord.TextChannel):\n db_session = self.bot.create_db_session()\n\n try:\n existing = db_session.query(Channel).filter(Channel.id == channel.id).one()\n existing.joinable = False\n except NoResultFound:\n await ctx.send(f\"There was no record for {channel.mention}. The channel is not currently joinable.\")\n return\n\n db_session.commit()\n db_session.close()\n await ctx.send(f\"{channel.mention} was removed as a joinable channel.\")", "def remove_watcher(self, watcher):\n log.debug(\"Removing watcher: {}\".format(watcher))\n self._watchers.remove(watcher)", "async def channel_remove(\n self, ctx: commands.Context, channel: discord.abc.GuildChannel, *team_ids: int):\n if set(team_ids) - set(self.teams):\n await ctx.send('Missing data for the following team IDs: %s' % (\n ', '.join(map(str, set(team_ids) - set(self.teams))),))\n return\n\n await asyncio.gather(*[\n self._forbid_team_in_channel(self.teams[team_id], channel)\n for team_id in team_ids],\n return_exceptions=True)\n await ctx.send('Removed team%s `%s` from channel %s' % (\n nl.s(len(team_ids)),\n '`, `'.join(self.teams[team_id].username for team_id in team_ids),\n channel.mention))", "def channel_removeowner(token, channel_id, u_id):\n auth_u_id = get_id_from_token(token)\n channel = channels.get(channel_id)\n if channel is None:\n raise ValueError(\"channel_id does not exist.\")\n if u_id not in channel[\"owners\"]:\n raise ValueError(\"user is not an owner\")\n user = users.get(auth_u_id)\n if auth_u_id not in channel[\"owners\"] and user[\"is_admin\"] is False:\n raise AccessError(\"You do not have permission to remove owners\")\n\n channels.remove(channel_id, \"owners\", u_id)", "def unmute(guild, channel):\n\tlogger.info('Unmuting channel {}::{}...', guild.name, channel.name)\n\tif str(guild.id) in Settings.muted_channels:\n\t\tif str(channel.id) in Settings.muted_channels[str(guild.id)]:\n\t\t\tSettings.muted_channels[str(guild.id)].remove(str(channel.id))", "async def watchlist(self, ctx):\r\n channel_list = await self.config.guild(ctx.guild).watching()\r\n msg = \"Bad gifs will be removed in:\\n\"\r\n for channel in channel_list:\r\n channel_obj = self.bot.get_channel(channel)\r\n if channel_obj is None: # Catch deleted/unexisting channels\r\n continue\r\n msg += f\"{channel_obj.mention}\\n\"\r\n await ctx.send(msg)", "def kickedFrom(self, channel, kicker, message):\n ss = self.findSessions(channel)[0]\n self.sessions.remove(ss)", "def unsubscribe(self, client, channel_id):\n clients = self.clients.get(channel_id, None)\n if not clients:\n return\n\n if client in clients:\n clients.remove(client)\n\n if not clients:\n # no client subscribed on this channel...\n del self.clients[channel_id]\n\n if channel_id in self._subscribed_channels:\n # the channel maybe 
is under subscribing - ignore it\n self.subscriber.punsubscribe(channel_id)", "def channel_leave(token, channel_id):\n\n # Check if token is valid and raise AccessError if not\n curr_id = database.get_current_user(token)\n\n # check if user is a member of channel with channel_ID and return AccessError if not\n user_channel = is_user_channel_member(channel_id, curr_id)\n if user_channel is False:\n raise error.AccessError(description=\"user is not a member of this channel\")\n\n # remove user with u_id from the channel (from member_ids)\n curr_channel = database.get_channel_data(channel_id)\n\n curr_channel[\"member_ids\"].remove(curr_id)\n # if user is an owner it removes them as an owner as well\n for owner_id in curr_channel[\"owner_ids\"]:\n if owner_id == curr_id:\n curr_channel[\"owner_ids\"].remove(curr_id)\n\n database.set_channel_data(curr_channel)", "def rmv_chnl(self, chnl):\n\n chnl = self.chnls.pop(self.chnls.index(chnl))\n\n self.channels_layout.removeWidget(chnl)\n\n chnl.setParent(None)", "async def unlink(self, ctx: MyContext):\n query = \"SELECT * FROM wormhole_channel WHERE channelID = ?\"\n wh_channel = self.bot.db_query(\n query, (ctx.channel.id,), astuple=True, fetchone=True\n )\n # comes as: (name, channelID, guildID, type, webhookID, webhookTOKEN)\n if len(wh_channel) == 0:\n await ctx.send(await self.bot._(ctx.guild.id, \"wormhole.error.not-linked\"))\n return\n query = \"DELETE FROM wormhole_channel WHERE channelID = ? AND name = ?\"\n async with ClientSession() as session:\n webhook = discord.Webhook.partial(\n wh_channel[4], wh_channel[5], session=session\n )\n await webhook.delete()\n self.bot.db_query(query, (wh_channel[0], ctx.channel.id))\n await ctx.send(\n await self.bot._(ctx.guild.id, \"wormhole.success.channel-unlinked\")\n )", "async def handleChannelDelete(self, channel: discord.abc.GuildChannel):\n self.logger.info(\n \"Channel deletion has been detected. 
Name: %s, ID: %s\", channel.name, channel.id\n )\n\n if not isinstance(channel, discord.TextChannel):\n return\n\n async with self.config.guild(channel.guild).get_attr(KEY_CHANNEL_IDS)() as channelIds:\n if str(channel.id) in channelIds:\n self.logger.info(\"%s detected, removing exceptions\", AH_CHANNEL)\n ctx = await self.getContext(channel)\n if not ctx:\n return\n await self.notifyChannel(ctx, remove=True)\n await self.makeHighlightChanges(ctx, channel, remove=True)\n await self.makeStarboardChanges(ctx, channel, remove=True)\n await self.makeWordFilterChanges(ctx, channel, remove=True)\n del channelIds[str(channel.id)]", "def channel_removeowner(token, channel_id, u_id):\n # Check if token is valid and raise AccessError if not\n curr_id = database.get_current_user(token)\n # gets current channel data\n curr_channel = database.get_channel_data(channel_id)\n # gets the permissions of current user from database\n user_perms = database.get_permission_dict(curr_id)\n\n u_id_permission = database.get_permission_dict(u_id)\n if u_id_permission[\"permission_id\"] == 1:\n raise error.AccessError(description=\"user being removed is the owner of the slackr\")\n\n # checks if u_id is not an owner of the channel\n # also checks if current auth user is an owner of the channel\n is_u_owner = False\n is_curr_owner = False\n for owner_id in curr_channel[\"owner_ids\"]:\n if u_id == owner_id:\n is_u_owner = True\n if curr_id == owner_id:\n is_curr_owner = True\n if is_u_owner is False:\n raise error.InputError(description=\"user being removed is not an owner of the channel\")\n\n\n # if the auth user is owner of slackr, allows him to remove u_id as owner\n if user_perms[\"permission_id\"] == 1:\n # removes the user from channel_owner\n curr_channel[\"owner_ids\"].remove(u_id)\n # if the auth user is an owner of the channel, allow him to remove u_id as owner of channel\n elif is_curr_owner is True:\n # adds the user into channel_owner\n curr_channel[\"owner_ids\"].remove(u_id)\n # else the auth user is not an owner and thus cannot use addowner\n else:\n raise error.AccessError(description=\"\"\"Authorised user user is not an owner of the channel,\n or of the slackr\"\"\")", "def unsubscribe(self, chanel_name):\n name = 'unsubscribe'\n\n self._send_websocket_request(name, chanel_name)", "def del_badchannel(self):\n text = 'Channel number: \\n(e.g.: 3, 5, 8-12)'\n uinp, ok = QInputDialog.getText(None, 'Delete bad channel', text)\n if ok:\n uinp = uinp.replace(' ', '') # removes blank spaces\n ch_str = uinp.split(',') # splits csv\n try:\n ch_list = []\n for elem in ch_str:\n if '-' in elem: # if given a range e.g. 7-12\n elem_lims = elem.split('-')\n seq = range(int(elem_lims[0]), int(elem_lims[1]) + 1)\n ch_list.extend(seq)\n else: # if given a single value\n ch_list.append(int(elem))\n self.model.BadChannelDel(ch_list=ch_list)\n except Exception as ex:\n print(str(ex))", "async def clear(self, ctx):\n await self.config.guild(ctx.guild).channels.clear()\n await ctx.send(\"Spoiler channel list cleared.\")", "async def remove_bot_channels(self, guild_id):\n api_cog = self.bot.get_cog('RR_API')\n channelInfo = await api_cog.get_channel_info(guild_id)\n\n if not channelInfo:\n print(\"Server Name Not in DB, Can't delete channels. 
Server: \" + str(guild_id))\n return\n if channelInfo['futurechannelid']:\n await self.bot.get_channel(int(channelInfo['futurechannelid'])).delete()\n if channelInfo['pastchannelid']:\n await self.bot.get_channel(int(channelInfo['pastchannelid'])).delete()\n if channelInfo['lootchannelid']:\n await self.bot.get_channel(int(channelInfo['lootchannelid'])).delete()\n if channelInfo['commandschannelid']:\n await self.bot.get_channel(int(channelInfo['commandschannelid'])).delete()\n if channelInfo['categoryid']:\n await self.bot.get_channel(int(channelInfo['categoryid'])).delete()", "def drop_message(self, client, channel, i):\n del self.storage[client][channel][i]", "async def logremove(self, ctx):\n if await check_if_logged(channel_id=ctx.channel.id):\n c.execute(\"DELETE FROM logging.channels WHERE channelid = %s\", (ctx.channel.id,))\n DBconn.commit()\n await ctx.send(\"> **This channel is no longer being logged.**\")\n else:\n await ctx.send(f\"> **This channel is not being logged.**\")", "def delete_at_index(self, idx):\n del self.timeseries[idx]\n del self.freq[idx]\n del self.ch_name[idx]\n del self.units[idx]\n\n if self.trigger_idx == idx:\n LGR.warning(\"Removing trigger channel - are you sure you are doing\" \"the right thing?\")\n self.trigger_idx = 0", "def deleteChannel(self, channelIndex):\n ch = self.channels[channelIndex]\n if ch.role != channel_pb2.Channel.Role.SECONDARY:\n raise Exception(\"Only SECONDARY channels can be deleted\")\n\n # we are careful here because if we move the \"admin\" channel the channelIndex we need to use\n # for sending admin channels will also change\n adminIndex = self.iface.localNode._getAdminChannelIndex()\n\n self.channels.pop(channelIndex)\n self._fixupChannels() # expand back to 8 channels\n\n index = channelIndex\n while index < self.iface.myInfo.max_channels:\n self.writeChannel(index, adminIndex=adminIndex)\n index += 1\n\n # if we are updating the local node, we might end up *moving* the admin channel index as we are writing\n if (self.iface.localNode == self) and index >= adminIndex:\n # We've now passed the old location for admin index (and writen it), so we can start finding it by name again\n adminIndex = 0", "def remove_channels(self, *channels):\n channels = set(c.id for c in channels)\n conf_to_remove = set()\n\n # Check every FollowConfig\n for chan_conf in self.follows:\n if set(c.id for c in chan_conf.discord_channels) & channels:\n # Remove the given channels from this FollowConfig\n dchans_to_remove = set(c for c in chan_conf.discord_channels if c.id in channels)\n chan_conf.discord_channels = [c for c in chan_conf.discord_channels if c not in dchans_to_remove]\n\n # If this FollowConfig ended up with 0 channel, save it to remove it later\n if not chan_conf.discord_channels:\n conf_to_remove.add(chan_conf)\n\n if conf_to_remove:\n self.follows = [c for c in self.follows if c not in conf_to_remove]", "async def remove(self, request: RemoveFromWatchlistRequest) -> WatchlistResponse:\n return await self._modify_watchlist(request)", "async def channel(self, ctx, limit: int=100, channel: discord.TextChannel=None):\n\n if channel is None:\n channel = ctx.channel\n\n # noinspection PyUnresolvedReferences\n messages = await channel.purge(limit=limit)\n messages = len(messages)\n\n plural = '' if messages == 1 else 's'\n\n await ctx.send('Purged {} message{}.'.format(messages, plural), delete_after=10)", "def removePlayer(self, player):\n self.players.remove(player)\n for observer in self.observers:\n observer.playerRemoved(player)", 
"def remove(self, bot):\n\n self.bots.remove(bot)", "def part(self, source, channel, reason=None):\n\n self.channel_map[channel].remove(source[0])\n self.nick_map[source[0]].remove(channel)\n\n self.log(\"*** {0:s} has left {1:s} ({2:s})\".format(source[0], channel, reason or \"\"))", "def remove_component(self, c):\n if isinstance(c, ComponentViewer):\n component_id = c.id\n c._view = None\n else:\n component_id = c\n self._clear_component_auto_completion()\n if self._trajlist:\n for traj in self._trajlist:\n if traj.id == component_id:\n self._trajlist.remove(traj)\n component_index = self._ngl_component_ids.index(component_id)\n self._ngl_component_ids.remove(component_id)\n self._ngl_component_names.pop(component_index)\n\n self._remote_call(\n 'removeComponent', target='Stage', args=[\n component_index,\n ])\n\n self._update_component_auto_completion()", "def remove(self, websocket):\n if websocket not in self:\n return\n\n logger.info(\"Removing websocket %s\" % format_addresses(websocket))\n with self.lock:\n fd = websocket.sock.fileno()\n self.websockets.pop(fd, None)\n self.poller.unregister(fd)", "async def togglechannel(self, ctx, channel):\r\n\r\n user = ctx.message.author\r\n channel = await commands.clean_content().convert(ctx, channel)\r\n await ctx.message.delete()\r\n\r\n if channel == \"nsfw\":\r\n\r\n if self.bot.nsfw_role in user.roles:\r\n await user.remove_roles(self.bot.nsfw_role)\r\n await user.send(\"Access to NSFW channels revoked.\")\r\n else:\r\n await user.add_roles(self.bot.nsfw_role)\r\n await user.send(\"Access to NSFW channels granted.\")\r\n else:\r\n await user.send(\"{} is not a togglable channel.\".format(channel))", "def remove_child(self, child):\n if hasattr(child, \"_protected\"):\n raise TypeError(\"You cannot remove channels defined at class level.\")\n if hasattr(child, \"_collection\"):\n collection = getattr(self, child._collection)\n del collection[child.id]\n delattr(self, child._name)", "def remove_servers_channels(self):\n for _hash in self._sections.keys():\n if not re.match(ur'^ server ', _hash) and not re.match(ur'^ channel ', _hash):\n continue\n del self._sections[_hash]", "async def remove(self, ctx, index: int):\n player = self.bot.lavalink.players.get(ctx.guild.id)\n if not player.is_connected:\n return await ctx.send(\"I'm not connected to a voice channel :no_entry:\")\n if not player.is_playing:\n return await ctx.send(\"Nothing is currently playing :no_entry:\")\n if not player.queue:\n return await ctx.send('Nothing is queued :no_entry:')\n if index > len(player.queue) or index < 1:\n return await ctx.send(\"Invalid song index :no_entry:\")\n index -= 1\n removed = player.queue.pop(index)\n\n await ctx.send(\"Removed **\" + removed.title + \"** from the queue <:done:403285928233402378>\")", "def delete_channel_file(self, channel_id: str, file_id: str) -> dict:\n path = '/channels/{}/{}'.format(channel_id, file_id)\n return self._connection.api_request(method='DELETE', path=path)", "def remove(self, chromosome):\n self.chromosome_list.remove(to_chromosome(chromosome))", "def clear(self, channel=None, lines=0):\n f = self.get_channel_frame(channel, create=False)\n if f:\n f.clear(lines)", "def delete_channel_value(self, channel, key):\n channel = self.get_channel_slug(channel)\n session = self.ssession()\n try:\n result = session.query(ChannelValues) \\\n .filter(ChannelValues.channel == channel)\\\n .filter(ChannelValues.key == key) \\\n .one_or_none()\n # ChannelValue exists, delete\n if result:\n session.delete(result)\n 
session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n self.ssession.remove()", "async def deleted_channel(self, ctx, *, channel: ChannelSetting):\n await queries.update_setting(\n ctx,\n \"logging_settings\",\n \"message_log_channel_id\",\n channel.id if channel is not None else None,\n )\n if channel is None:\n await util.send_success(ctx, \"Deleted message logging **disabled**\")\n else:\n await util.send_success(\n ctx, f\"Deleted messages will now be logged to {channel.mention}\"\n )", "def collector_remove(self, msg, args):\n client = self._connect()\n collector_name = args.pop(0)\n collector = sumologic.Collectors(client)\n collector.delete(collector_name)\n message = 'collector {0} deleted.'.format(collector_name)\n self.send(msg.frm,\n message,\n message_type=msg.type,\n in_reply_to=msg,\n groupchat_nick_reply=True)", "def removeCategory(self, c):\n\t\tif c not in self._categories:\n\t\t\treturn\n\t\tself._categories.remove(c)\n\t\tCONNECTOR.removeCategory(self, c)", "async def delete_matches_category(self):\n existing_categories = self.get_channels(\n 'matches', ChannelType.category)\n for c in existing_categories:\n try:\n await asyncio.gather(*(chan.delete() for chan in c.channels))\n await c.delete()\n # We can't delete channels not created by us.\n except discord.HTTPException as e:\n log.warning(e)", "async def togglechannel(self, ctx, channel):\n\n user = ctx.message.author\n await ctx.message.delete()\n\n if channel == \"nsfw\":\n\n if self.bot.nsfw_role in user.roles:\n await user.remove_roles(self.bot.nsfw_role)\n await user.send(\"Access to NSFW channels revoked.\")\n else:\n await user.add_roles(self.bot.nsfw_role)\n await user.send(\"Access to NSFW channels granted.\")\n else:\n await user.send(\"{} is not a togglable channel.\".format(channel))", "async def delete_bot_msg(self, channel):\n await channel.purge(limit=100, check=self.is_me)", "def remove_component(self, component):\n component.remove()\n self._changed = True", "def remove(self, component) -> None:\n pass", "async def channeldelete(ctx):\r\n await ctx.send(\"🉐Deleting all channels...\")\r\n for channel in ctx.guild.channels:\r\n try:\r\n await channel.delete()\r\n except:\r\n print(f\"{Fore.RED}[-]CHANNEL => {Fore.RESET}Failed to delete: {channel}\")", "def remove_challenger(self, old_challenger):\n self.challengers.remove(old_challenger)", "def Remove(self, trackFrame):\n TrackFramePool.Remove(self, trackFrame)\n self.sampleGroup.remove(trackFrame.track.sampleControl)", "async def reddit_remove(self, ctx, subreddit : SubredditConverter, post_type : EnumConverter(Subreddit.PostType) = Subreddit.PostType.hot):\n Subreddit.delete().where(Subreddit.channel_id == ctx.channel.id).where(Subreddit.subreddit == subreddit).execute()\n await ctx.success()", "def remove_watch(callback: Callable[[Event], None]) -> None:\n if callback not in _event_watch_handles:\n warnings.warn(f\"{callback} is not an active event watcher, nothing was removed.\", RuntimeWarning)\n return\n handle = _event_watch_handles[callback]\n lib.SDL_DelEventWatch(lib._sdl_event_watcher, handle)\n del _event_watch_handles[callback]", "def screens_channels_properties_to_remove(self, screens_channels_properties_to_remove: ConfigNodePropertyArray):\n\n self._screens_channels_properties_to_remove = screens_channels_properties_to_remove", "async def leavechannel(self, ctx: commands.Context, channel=\"\"):\n\n # If a channel is not specified, attempt to leave the current channel.\n if (channel == \"\"):\n channel = 
ctx.channel.name\n dm_notify = True\n\n channel_query = self._channel_query(channel)\n\n if channel_query == None:\n await ctx.send(\"Unable to leave that channel.\")\n return\n\n channel = self.bot.get_channel(channel_query.id)\n guild = self.bot.get_guild(SERVER_ID)\n member = guild.get_member(ctx.author.id)\n\n # You can't leave a channel that doesn't exist or you're not in.\n if channel == None or channel.permissions_for(member).is_strict_subset(JOINED_PERMISSIONS):\n await ctx.send(\"Unable to leave that channel.\")\n return\n\n await channel.set_permissions(member, read_messages=False, reason=\"UQCSbot removed.\")\n if dm_notify:\n await ctx.author.send(f\"You've left {channel.mention}\")\n else:\n await ctx.send(f\"You've left {channel.mention}\")", "async def deregister(self, ctx:commands.Context):\r\n\r\n if await self.IsSpecialized(ctx.guild, ctx.channel.id):\r\n channels = await self.config.guild(ctx.guild).channels()\r\n t = channels.pop(str(ctx.channel.id))\r\n await self.config.guild(ctx.guild).channels.set(channels)\r\n await ctx.send(f'<#{ctx.channel.id}> is no longer a {t}')\r\n else:\r\n await ctx.send(f'<#{ctx.channel.id}> was never specialized!')", "def on_remove_command(self, event, index):\n self.pre_check(event)\n self.same_channel_check(event)\n if not self.get_player(event.guild.id).queue:\n api_loop(\n event.channel.send_message,\n \"There aren't any songs queued right now.\",\n )\n elif str(index).lower() == \"all\":\n self.get_player(event.guild.id).queue = list()\n api_loop(event.channel.send_message, \"Cleared playing queue.\")\n elif (str(index).isdigit() and\n 0 <= (int(index) - 1) <=\n len(self.get_player(event.guild.id).queue)):\n yt_dl_object = self.get_player(event.guild.id).pop(int(index) - 1)\n ytdata = self.get_ytdl_values(yt_dl_object.metadata)\n api_loop(\n event.channel.send_message,\n \"Removed index ``{}`` at index ``{}``.\".format(\n ytdata[\"title\"],\n index,\n ),\n )\n else:\n api_loop(event.channel.send_message, \"Invalid index input.\")", "def unregister(self, nick, channel, resourcestr):\n resources, multi = self.getlocks(resourcestr)\n for r in resources:\n if self.locks[r]:\n raise LockBotException('ERROR, resource \"%s\" is locked by %s' %\n (r, self.locks[r]),\n resourcestr, self.verb)\n\n # all clear, unregister resources\n for r in resources:\n del self.locks[r]\n return (channel,\n \"%s: removed resource%s %s\" %\n (nick,\n 's' if multi else '',\n ', '.join(resources)))", "def channel(self, channel: int, /) -> \"TimerChannel\" | None:", "def listen_channel_moderator_remove(self, broadcaster_user_id: str, callback: CALLBACK_TYPE) -> str:\n return self._subscribe('channel.moderator.remove',\n '1',\n {'broadcaster_user_id': broadcaster_user_id},\n callback)", "def disconnectChannel(sock, chan):\n sock.send(\"PART {}\\r\\n\".format(chan).encode(\"utf-8\"))\n console.info(\"Successfully disconnected from {}\".format(chan))", "def removePlayer(self, player):\n\t\tfor i in range(len(self.playerList)):\n\t\t\tif self.playerList [i] == player:\n\t\t\t\tself.playerList[i] = None\n\t\t\t\treturn", "def remove(self, session: \"pwncat.manager.Session\"):", "def remove_from_list (self, video_id):\n return self._update_my_list(video_id=video_id, operation='remove')", "def removePlayer(self, player):\n #if (not self.__configuring) and (player in self.__players):\n if (player in self.__players):\n self.__players.remove(player)\n for event in self.__events:\n if player in event:\n del event[player]\n player.unbind(self)", "def 
remove_notification_for_user(\n self, login, type, channel=\"EmailNotificationChannel\", project=None\n ):", "def delete_notification_channel(\n self,\n name: str,\n retry: Retry | _MethodDefault = DEFAULT,\n timeout: float | None = None,\n metadata: Sequence[tuple[str, str]] = (),\n ) -> None:\n channel_client = self._get_channel_client()\n try:\n channel_client.delete_notification_channel(\n request={\"name\": name}, retry=retry, timeout=timeout, metadata=metadata or ()\n )\n except HttpError as err:\n raise AirflowException(f\"Delete notification channel failed. Error was {err.content}\")", "def remove_player(self, player):\n\t\tself.players.remove(player)", "def delete_slack_generated(self, channel_name=None, channel_id=None):\n self.delete_messages(\n channel_name=channel_name, channel_id=channel_id, confirmation_override=True,\n restrict={'type': 'subtype', 'values': cfg.SUBTYPES}\n )", "def removeCapability(self, capability):\n self.capabilities.remove(capability)", "def remove_cable(cid, comment, release = True):\n \n if release:\n release_cable(cid, comment)\n \n SQL.execute('''\n UPDATE\n cables \n SET\n state = 'removed',\n online = 0,\n comment = ?\n WHERE\n cid = ?\n ;''', (\n comment,\n cid\n ));\n \n vlog(2, 'Marked c%s as removed: %s' % (cid, comment))", "async def vote_clear(ctx: commands.Context):\n session = session_maker()\n old_channel = session.query(Channel).filter_by(channel_id=ctx.channel.id).one_or_none()\n if old_channel is None:\n await ctx.send('This channel was never setup for votes.')\n return\n old_votes = session.query(Vote).filter_by(channel_id=ctx.channel.id).all()\n for old_vote in old_votes:\n session.delete(old_vote)\n session.commit()\n await ctx.send(f'Votes for {ctx.channel} cleared!')", "async def ccallow(self, ctx, channel: discord.TextChannel):\n channel_list = await self.config.guild(ctx.guild).channel_deny()\n if channel.id in channel_list:\n channel_list.remove(channel.id)\n else:\n return await ctx.send(\"Channel is not on the deny list.\")\n await self.config.guild(ctx.guild).channel_deny.set(channel_list)\n await ctx.send(f\"{channel.mention} will be allowed for chatchart use.\")", "def add_channel(self, channel):\n if channel in self.clients:\n return False\n self.clients[channel] = []\n return True" ]
[ "0.78963095", "0.73827446", "0.7304483", "0.72998226", "0.7257182", "0.7189836", "0.7003809", "0.6930595", "0.6920038", "0.68613374", "0.68129927", "0.65488666", "0.6515532", "0.64504415", "0.6387552", "0.6373514", "0.63086313", "0.62170106", "0.61841017", "0.60812473", "0.6033527", "0.6030897", "0.60208887", "0.5997036", "0.5974832", "0.5957408", "0.5930835", "0.5930453", "0.5896084", "0.5870922", "0.5844393", "0.57938063", "0.57223874", "0.5702751", "0.5681743", "0.56728375", "0.5652354", "0.56477034", "0.5586406", "0.55732775", "0.5565177", "0.55318964", "0.5505557", "0.5501219", "0.5492194", "0.54842347", "0.54584235", "0.5429251", "0.53847194", "0.5381765", "0.53784955", "0.53775924", "0.53654057", "0.53635776", "0.53600013", "0.5339902", "0.5314917", "0.5279332", "0.52717406", "0.5266416", "0.5257662", "0.52477527", "0.5214596", "0.5207053", "0.5201622", "0.51887184", "0.5180883", "0.51786053", "0.5170916", "0.5169669", "0.5157139", "0.51468164", "0.51211065", "0.5120542", "0.5096531", "0.5095091", "0.50868005", "0.50831753", "0.5081035", "0.5077395", "0.50744873", "0.50694", "0.5062786", "0.506034", "0.50544", "0.5049292", "0.5044537", "0.5031556", "0.50312454", "0.5008238", "0.5003608", "0.5001242", "0.49846402", "0.49787253", "0.49732193", "0.4967159", "0.49526933", "0.4944951", "0.49228644", "0.49203163" ]
0.7280058
4
Resolver should be able to produce a value for a given key. If the key doesn't exist, it should return None.
def resolve(self, key: str) -> Optional[Any]: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resolve(self, key: str) -> Optional[Any]:\n return self.dict.get(key)", "def get(self, key: K)-> Optional[V]:\n return self._func(key)", "def lookup(self, key):\n n = self.find(key)\n if n:\n return n.value\n else:\n return False", "def _get(self, key):\n try:\n val = getattr(self, f\"_{key}\")\n if val is not None:\n return val\n else:\n self._load()\n return getattr(self, f\"_{key}\")\n except AttributeError:\n return None", "def get(self, key, fallback):\r\n try:\r\n return self[key]\r\n except (KeyError, IndexError):\r\n return fallback", "def get_value(self, key):\r\n if self.hash_table[self.horner_hash(key)] is not None:\r\n if self.hash_table[self.horner_hash(key)].key == key:\r\n return self.hash_table[self.horner_hash(key)].value\r\n else:\r\n return None", "def get(self, key: Union[Any, int]) -> Union[Any, Sequence[Any]]:\n try:\n return[key]\n except KeyError:\n return self.default_factory", "def get_value(self, key: str) -> Optional[str]:\n raise NotImplementedError", "def get(self, key: K) -> Optional[V]:\n return self.mget([key])[0]", "def get_or_call(self, key, callback, ttl=None):\n if self.contains(key):\n res = self[key]\n else:\n res = callback()\n self.set(key, res, ttl=ttl)\n return res", "def get_if_exist(self, data, key):\n if key in data:\n return data[key]\n return None", "def get_value(self, key: str) -> Any:\r\n if key is None:\r\n return self.data\r\n try:\r\n return self.data[key]\r\n except KeyError:\r\n return None", "def _safe_read(self, source: dict, key: str, mapper: Callable[[str], any]) -> any:\n return mapper(source[key]) if key in source else None", "def _resolve_with_default(\n self,\n key: Union[str, int, Enum],\n value: Any,\n default_value: Any = DEFAULT_VALUE_MARKER,\n ) -> Any:\n\n def is_mandatory_missing(val: Any) -> bool:\n return get_value_kind(val) == ValueKind.MANDATORY_MISSING # type: ignore\n\n value = _get_value(value)\n has_default = default_value is not DEFAULT_VALUE_MARKER\n if has_default and (value is None or is_mandatory_missing(value)):\n return default_value\n\n resolved = self._resolve_interpolation(\n key=key,\n value=value,\n throw_on_missing=not has_default,\n throw_on_resolution_failure=not has_default,\n )\n if resolved is None and has_default:\n return default_value\n\n if is_mandatory_missing(resolved):\n if has_default:\n return default_value\n else:\n raise MissingMandatoryValue(\"Missing mandatory value: $FULL_KEY\")\n\n return _get_value(resolved)", "def lookup(self, key):", "def _single_getitem(self, key):\n try:\n return self._dict[key]\n except KeyError:\n return self.default", "def get(self, key: Hashable) -> Any: # type: ignore\n try:\n return[key]\n except (KeyError, TypeError):\n if self.default_factory is None:\n raise KeyError(f'{key} is not in {self.__class__}')\n else:\n try:\n return self.default_factory()\n except TypeError:\n return self.default_factory", "def get(self, key, default=None):\n def find(found_item, _):\n \"\"\" This is the closer function which will be passed to find by key function , if key found than return the value \n otherwise return blanck\"\"\"\n if found_item:\n return found_item[1]\n else:\n return default\n\n return self._find_by_key(key, find)", "def get(\n self,\n key: str,\n ) -> T.Optional[VALUE]:\n record = self._get_record_from_backend(key)\n if record is None:\n return None\n\n if record.expire:\n now = utc_now()\n if (now.timestamp() - record.update_ts) < record.expire:\n return self.deserialize(record.value)\n else:\n return None\n else:\n return 
self.deserialize(record.value)", "def __getitem__(self, key):\n if self._root:\n node = self._getItemHelper(key, self._root)\n if node:\n return node.value\n else:\n return None\n else:\n return None", "def resolve_resolver_value(self, resolver: \"Resolver\") -> Any:\n try:\n return resolver.resolve()\n except RecursiveResolve:\n # Recursive resolve issues shouldn't be masked by a placeholder.\n raise\n except Exception:\n if are_placeholders_enabled():\n placeholder_value = create_placeholder_value(\n resolver, self.placeholder_type\n )\n\n self.logger.debug(\n \"Error encountered while resolving the resolver. This is allowed for the current \"\n f\"operation. Resolving it to a placeholder value instead: {placeholder_value}\"\n )\n return placeholder_value\n raise", "def get(self, key):\n if self.defs:\n name = self.defs[0]\n val = self.defs[1]\n old_self = self.defs[2]\n if key == name:\n return val\n else:\n return old_self.get(key)", "def get(self, key, default=None):\r\n try:\r\n return self.data[key]()\r\n except (KeyError, SleekRefDied):\r\n return default", "def value(self, key):\n item = self.default(key)\n return self.__getSafeValue(key, item)", "def get(self, key):\n if key in self.fields:\n return self.fields.get(key).get()\n return None", "def resolve(self, key: str) -> Any:\n return _ba.resolve_appconfig_value(key)", "def __getitem__(self, key):\n result = mongo['readable-api'].foo.find_one({\"foo\": key})\n if result:\n return self.make_child(key)\n return None", "def get(self, key, default=None):", "def resolve(self, section, key):\n\n return self.sections[section][key]", "def get(self, key):\n if type(key) != str:\n raise TypeError(\"This is not the string you're looking for!\")\n number = self._hash(key)\n stored_key = number if self.function == 'fnv' else key\n try:\n return self.bucket_list[number % self.bucket_number].search(stored_key).stored_value\n except AttributeError:\n return None", "def get(self, key, default=None):\n key = self._validate_key(key)\n sql = u\"\"\"\n SELECT `value` FROM `{table}` WHERE key = ?\n \"\"\".format(table=self.name)\n\n r = self.conn.execute(sql, (key,)).fetchone()\n\n if r:\n return self.convert_out(r['value'])\n\n return default", "def get(self, key):\n # TODO: Check if the given key exists and return its associated value\n hash_key = self._bucket_index(key) # Gets the index of the key\n\n if self.buckets[hash_key].is_empty() is False: # If the hask_key exists\n for key_value_pair in self.buckets[hash_key]: # Iteratre through the value pair\n if key_value_pair[0] is key: # If the key matches\n return key_value_pair[1] # Return the value\n raise KeyError(\"Key doesn't exist\") # If key doesn't exist, return None", "def lookup(self, key):\n k = self.get_position(key)\n\n if self.keys[k] == key:\n return node.values[k]\n\n # Lookup in the child node.\n if self.refs[k+1] == None:\n return None\n return self.refs[k+1].lookup(key)", "def lookup(key, default=None):\n def _lookup(mapping):\n return mapping.get(key, default)\n return _lookup", "def get(self, key):\n if key is None:\n return None # None is not a valid key\n return get_from_subtree(self.root, key)", "def __getitem__(self, key: str) -> Any:\n\n # Make sure the definition is up-to-date\n self._set_definition(self.pyfiguration.definition)\n\n # Make sure the key exists in the definition\n keyDefinition = from_dot_notation(\n field=\".\".join([*self.parents, key]), obj=self.get_definition()\n )\n\n # Keep track of the keys that have been accessed\n if 
isinstance(self.accessStatus.get(key, None), bool):\n self.accessStatus[key] = True\n\n # Get the value from the store\n defaultValue = from_dot_notation(\n field=\".\".join([*self.parents, key]), obj=self.get_definition()\n ).get(\"default\", None)\n if defaultValue is None and \"required\" not in keyDefinition:\n defaultValue = {}\n value = self.store.get(self.__keytransform__(key), defaultValue)\n\n # Perform a predefined set of tests on the value\n self.check_value(self.__keytransform__(key), value)\n\n # Return the checked value\n return value", "def get(self, key):\n\n node = self._get_node(key) # Get the node with the key (if it exists)\n\n if node is None:\n return None\n else:\n return node.value", "def try_read(self, key):\n if key not in self.db:\n return None\n return self.db[key]", "def get(self, key, def_value=None):\n\n index = self._get_hash(key)\n\n if self.table[index] is not None:\n for pair in self.table[index]:\n if key == pair[0]:\n return pair[1]\n\n if def_value is not None:\n return def_value\n\n raise ValueError(f\"can't find value with given key {key}\")", "def get(self, key: str, default=None) -> Any:\n try:\n return self[key][0]\n except KeyError:\n return default", "def get_value(self, key):\n pass", "def resolve(self, key: str) -> Optional[Any]:\n return environ.get(key)", "def get(self, key):\n dkey = digest(key)\n # if this node has it, return it\n if self.storage.get(dkey) is not None:\n return defer.succeed(self.storage.get(dkey))\n node = Node(dkey)\n nearest = self.protocol.router.findNeighbors(node)\n if len(nearest) == 0:\n self.log.warning(\"There are no known neighbors to get key %s\" % key)\n return defer.succeed(None)\n spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)\n return spider.find()", "def __getitem__(self, key):\n exists = self.keys[self._linear_probe(key, \"get\")] is not None # _linear_probe() returns None if not found\n if not exists:\n raise KeyError(\"Error: \" + str(key) + \" does not exist in the table\")\n else:\n location = self._linear_probe(key, \"get\")\n assert self.keys[location] == key, \"Error in linear probing to get()\"\n return self.values[location]", "def first(self, key):\n # Look through the JSON cache\n for name, value in self._json_cache:\n if key == name:\n return value\n\n # Exhaustion\n else:\n return None", "def safely_get_value(dct: Mapping, key: Any,\n default: Union[T, None] = None\n ) -> Union[Any, T]:\n if key in dct:\n return dct[key]\n else:\n return default", "def _map___getitem__(self, key):\n if not isinstance(key, self.keytype):\n raise KeyError('type of key should be ' + repr(self.keytype) + ' but got ' + repr(type(key)))\n if key not in self:\n raise KeyError('key not found')\n return self.second(self.find(key))", "def get(self, key: t.Hashable) -> t.Any:", "def get(self, key):\n if key is None:\n return None\n return self.cache_data.get(key, None)", "def get(self, key):\n dkey = digest(key)\n _log.debug(\"Server:get %s\" % base64.b64encode(dkey))\n # if this node has it, return it\n exists, value = self.storage.get(dkey)\n if exists:\n return defer.succeed(value)\n node = Node(dkey)\n nearest = self.protocol.router.findNeighbors(node)\n if len(nearest) == 0:\n self.log.warning(\"There are no known neighbors to get key %s\" % key)\n return defer.succeed(None)\n spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)\n return spider.find()", "def get(self, key: T) -> Optional[U]:\n return self._store.get(key)", "def get(self, key):\n if key:\n 
return self.cache_data.get(key)\n else:\n return None", "def get(self, key):\n if key and key in self.cache_data:\n return self.cache_data[key]\n return None", "def get(self, key, alternative=None):\n try:\n return self[key]\n except (KeyError, TypeError, ValueError):\n return alternative", "def get(self, key, default=None):\n try:\n return self._get(key)\n except Exception:\n return default", "def get(self, key):\n h = key%self.m\n a = self.a\n if a[h]:\n return a[h].val\n else:\n return -1", "def get(self, key):\n # Your code here\n\n idx = self.hash_index(key)\n\n # check if the index is in range\n if idx >= 0 and idx < self.capacity:\n curr_node = self.hash_table[idx]\n\n # check if any node at index exists\n if curr_node is None:\n return None\n\n # if there's already something at this index\n while curr_node is not None:\n \n # check to see if there is an entry at this index whose key matches the provided key\n while curr_node.key is not key:\n curr_node = curr_node.next\n \n # if we never found an entry with a matching key, return None\n if curr_node.key is not key or curr_node is None:\n return None\n else:\n return curr_node.value\n \n \n # otherwise return None if the index is not in range\n else:\n return None", "async def get(self, key):\n return self.dict.get(key, None)", "def value(\n self, key: _K = 0, default: t.Optional[object] = None\n ) -> t.Any:\n try:\n index = self.index(key)\n except (IndexError, KeyError):\n return default\n else:\n return self[index]", "def get_value(self, key):\n if self.is_key_in_cache(key):\n entry_node, frequency_node = self.__remove_key(key)\n if frequency_node.next.key != frequency_node.key + 1:\n self.__create_frequency_node_after(frequency_node)\n new_frequency_node = frequency_node.next\n self.__add_entry_to_frequency_node(entry_node, new_frequency_node)\n if frequency_node.frequency_cache.size == 0:\n self.__remove_frequency_node(frequency_node)\n self.key_to_frequency_node[key] = new_frequency_node\n return entry_node.value\n else:\n return None", "def __getitem__(self, key: T) -> T:\n return self.lookup(key)", "def resolve(self):\n return resolve(self.val())", "def get(self, key, default=None):\n return self.metadata_dict.get(key, default)", "def getValue(dictionary, key, value):\n if not key in dictionary.keys():\n return value\n else:\n return dictionary[key]", "def get(self, key):\n hi = self.hash_index(key)\n if (self.storage[hi]):\n if(self.storage[hi].next):\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n return current.value\n else:\n return self.storage[hi].value\n\n return None", "def _get(self, key: str):\n pass", "def get(self, key):\r\n if not isinstance(key, str):\r\n raise TypeError(\"Key must be a string\")\r\n\r\n node = self._find_node(key)\r\n if node is None:\r\n return None\r\n else:\r\n return node.value[1]", "def safe_get(self,section,key,default_value=None):\n try:\n return self.get(section,key)\n except:\n return default_value", "def try_parse_field(field_name, value, parser_dict):\n parser = parser_dict.get(field_name) # None if no such entry\n if parser is not None:\n return try_or_none(parser)(value)\n else:\n return value", "def try_parse_field(field_name, value, parser_dict):\n parser = parser_dict.get(field_name) # None if no such entry\n if parser is not None:\n return try_or_none(parser)(value)\n else:\n return value", "def dict_item(dictionary, key):\n try:\n return dictionary.get(key, None)\n except AttributeError:\n # fail silently if something other 
than a dict is passed\n return None", "def lookup(my_dict, my_key, default_value=None):\n if my_key in my_dict:\n return my_dict[my_key]\n else:\n return default_value", "def __getitem__(self, key):\n if key not in self.rules:\n raise KeyError(key)\n return self.get(key)", "def get(self, key: Any, default: Optional[Any] = None) -> Any:\n try:\n return self[key]\n except (KeyError, ValueError, IndexError):\n return default", "def get(self, key, default=None):\n if self.root is not None:\n res = self._get(key, self.root)\n if res:\n return res\n else:\n return default\n return default", "def get(self, key):\n if key and key in self.cache_data.keys():\n return self.cache_data[key]\n else:\n return None", "def get(self, key):\n if key and key in self.cache_data.keys():\n return self.cache_data[key]\n else:\n return None", "def lookup(self, key: T) -> T:\n\n idx: int = self.hash_fn(key) # get an index location for 'key'\n if self.table[idx] is None: # 'key' doesn't exists in hash table\n raise Exception(\"Key doesn't exist in hashtable\")\n else:\n self.key_comparison_counts += 1\n return self.table[self.find(key)][1] # return pair value", "def get(self, key, default=None):\n pass", "def get(self, key, alternative=None):\n try:\n return self[key]\n except KeyError:\n return alternative", "def get(self, key: str, default: t.Optional[object] = None) -> t.Any:\n try:\n index = self.__keys.index(str(key))\n except ValueError:\n return default\n if 0 <= index < len(self):\n return self._super_getitem_single(index)\n else:\n return default", "def get_value(key, obj, default=missing):\n if isinstance(key, int):\n return _get_value_for_key(key, obj, default)\n return _get_value_for_keys(key.split('.'), obj, default)", "def lookup(self, key, default=None):\n hash_key = hash(key) % self.length\n bucket = self.array[hash_key]\n if not bucket:\n return default\n for key_val_pair in bucket:\n k, v = key_val_pair\n if k == key:\n return v", "def __getitem__(self, key: ir.Value) -> ir.Value:\n return ops.MapGet(self, key).to_expr()", "def get(self, key: str) -> Optional[Value]:\n\n return self._blob.get(key)", "def get(self, key):\n _filter = {'_id': key}\n doc = self.collection.find_one(_filter)\n\n if doc and not self._verify_timeout(doc):\n return self._unpickle(doc['value'])", "def get(self, key: str, fn=None):\n value = self._redis.get(key)\n if fn is None:\n return value\n return fn(value)", "def get_value(self, key):\n try:\n return self.map[key]\n except KeyError:\n raise KeyError('key is not in map')", "def retrieve(self, key):\n index = self._hash_mod(key)\n node = self.storage[index]\n while node is not None:\n if node.key == key:\n return node.value\n node = node.next\n return None", "def get(self, key):\n if key in self._db:\n return self._db[key]\n else:\n return None", "def _resolve_schema_key(self, key):\n if super(Hdf5, self).__contains__(key):\n # If the dataset exists in the underlying HDF5 file, just return it\n return key, None\n\n # Straight mapping between the key and a dataset\n key = key.lstrip('/') if tokio.common.isstr(key) else key\n if key in self.schema:\n hdf5_key = self.schema.get(key)\n if super(Hdf5, self).__contains__(hdf5_key):\n return hdf5_key, None\n\n # Key maps to a transformation\n if key in self.dataset_providers:\n return None, self.dataset_providers[key]\n\n errmsg = \"Unknown key %s in %s\" % (key, self.filename)\n raise KeyError(errmsg)", "def get(self, key):\r\n\t\t# return None if the key doesn't exist\r\n\t\tif not self.contains_key(key):\r\n\t\t\treturn 
None\r\n\t\telse:\r\n\t\t\tindex = self.get_index(key) # get the index of the key\r\n\r\n\t\t\t# begin traversal of the linked list until we reach the key\r\n\t\t\tcur_node = self._buckets[index].head\r\n\t\t\twhile cur_node.key != key:\r\n\t\t\t\tcur_node = cur_node.next\r\n\r\n\t\t\treturn cur_node.value", "def find_value(dic, key):\n return dic[key]", "def visit(self, func: Callable[[str], Optional[Any]]) -> Optional[Any]:\n for key, _ in self._recurse():\n result = func(key)\n if result is not None:\n return result", "def get(self, key):\n\t\treturn self.__get(key, key[1:])", "def lookup(self, key):\n # check that this tree actually has a root node\n debug.printMsg(\"Call made to Lookup\")\n debug.printMsg(\"checking if we have a BST\")\n if self.root:\n debug.printMsg(\"Calling Recursive Lookup\")\n (result, err) = self.recursiveLookup(key, self.root)\n # if we did not find anything\n if err: \n debug.printMsg(\"Oops, we couldn't find anything\")\n return None\n else: \n # we found a result\n debug.printMsg(\"we found: \")\n return result\n else:\n debug.printMsg(\"Oops, the BST seems to not exist\")\n # root doesnt exist\n return None", "def get(self, key):", "def get(self, key):", "def get_value(key):\n\n request_dict = RequestFileCom.file_to_dict()\n\n try:\n\n return request_dict[key]\n\n except:\n\n return None", "def _get_value(match_entry: Dict, path0: str) -> any:\n if path0 is None:\n current_el = match_entry\n else:\n path = path0.split('/')\n current_el = match_entry\n for p in path:\n if current_el is None:\n break\n current_el = current_el.get(p)\n return current_el" ]
[ "0.7909129", "0.6813241", "0.67681676", "0.66341573", "0.64758044", "0.6463813", "0.64607257", "0.6392654", "0.6385483", "0.63703537", "0.63212407", "0.6292602", "0.6284046", "0.62764496", "0.619645", "0.6181006", "0.617452", "0.614984", "0.6148691", "0.6125853", "0.61204463", "0.611363", "0.610535", "0.6089134", "0.60884", "0.60846543", "0.60740733", "0.60708284", "0.6038302", "0.6037554", "0.60342723", "0.6034109", "0.60314757", "0.60213065", "0.6017173", "0.6015758", "0.60123986", "0.6001456", "0.5994992", "0.59899116", "0.59817934", "0.59762084", "0.59453714", "0.59381527", "0.59351104", "0.5922778", "0.5918769", "0.5901678", "0.5901575", "0.58990586", "0.58955973", "0.58931106", "0.5892004", "0.58891565", "0.5871098", "0.58665097", "0.5865487", "0.58558285", "0.5842927", "0.5840968", "0.58404326", "0.5838513", "0.58372706", "0.5831573", "0.5830609", "0.5830559", "0.5825645", "0.58072823", "0.58038807", "0.57995063", "0.5793654", "0.57915705", "0.578296", "0.5778612", "0.5775515", "0.57735616", "0.57735616", "0.57693106", "0.5768527", "0.5768501", "0.5761349", "0.5755889", "0.57466114", "0.574628", "0.5740622", "0.57401776", "0.5723776", "0.5719862", "0.5716286", "0.57077986", "0.5704595", "0.57025105", "0.5700118", "0.5698316", "0.56929576", "0.56902814", "0.5679246", "0.5679246", "0.56766874", "0.56752855" ]
0.75538653
1
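The negatives in the row above are largely variations on a single idiom: look a key up in some backing store and fall back to a default (or None) on a miss. For reference, a canonical form of that idiom is sketched below; this is illustrative only and not part of the dataset, and the class and attribute names are assumptions.

from typing import Any, Dict, Optional


class DictBackedStore:
    """Minimal lookup-with-fallback store; the name is illustrative."""

    def __init__(self, data: Optional[Dict[str, Any]] = None) -> None:
        self._data: Dict[str, Any] = dict(data or {})

    def get(self, key: str, default: Any = None) -> Any:
        # EAFP form of the idiom shared by most negatives above:
        # attempt the lookup, fall back to the default on a miss.
        try:
            return self._data[key]
        except KeyError:
            return default


store = DictBackedStore({"host": "localhost"})
assert store.get("host") == "localhost"
assert store.get("port", 5432) == 5432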
Return all values available in the resolver.
def values(self) -> Dict[str, Any]:
    pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getValues(self):\n return self.__get('values')", "def get_values(self):\n \n return []", "def _values(self):\n return self.__values", "def get_values(self):\n raise NotImplementedError(\"Abstract method not implemented.\")", "def values(self):\r\n return self.__values", "def values(self):\n return self[\"values\"]", "def values(self):\n return self[\"values\"]", "def values(self):\n return self._values", "def values(self):\n return self._values", "def values(self):\n return self._values", "def values(self):\n return self._values", "def values (self):\n return self._values", "def values (self):\n return self._values", "def values(self):\n\t\treturn self.myVals", "def values(self):\n return [self[name] for name in self.keys()]", "def values(self) -> list:\n return self.__values", "def __call__(self):\n return self._main._values()", "def valuerefs(self):\r\n return self.data.values()", "def values():", "def values(self):\n return ValueCollection()", "def get_all_variables(self):\n return [self.item]", "def GetValues(self):\n ...", "def GetValues(self):\n ...", "def get_all_values(self):\n return self.display_table.get_all_values(root=self.display_table_root,include=self.params)", "def values(self):\n with self.__plock:\n return map(self.get, self._keys)", "def GetValues(self):", "def values(cls):\n return cls._values", "def values(self, items_list):\n return [self.resolve(value) for value in items_list]", "def values(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:\n return pulumi.get(self, \"values\")", "def values(self):\n return [self[key] for key in self.keys()]", "def values(self):\n return [self[key] for key in self.keys()]", "def values(self):\n return [self[key] for key in self.keys()]", "def values(self):\r\n return [self[k] for k in self]", "def valves(self):\n for name in self._valves:\n yield name, self._data[name]", "def values(self):\n return self._get_storage().items()", "def return_values(self):\r\n\r\n values = list(self.piDD.values())\r\n return values", "async def values(cls):\n result = []\n for key in await ProxyMethod.channel.call_remote(\n \"%s.values\" % (cls.__namespace__,),\n ):\n result.append(cls(key=key))\n return result", "def values(self):\n values = []\n for key in self.keys():\n values.append(self[key])\n return values", "def values(self, *args, **kwargs):\n return [ self._get(doc, *args, **kwargs) for doc in self.keys(*args, **kwargs) ]", "def get_res_values(name):\n pass", "def get_all_variables(self):\n return self.item.get_all_variables()", "def all_values(cls) -> List[str]:\n return list(member.value for member in cls.__members__.values())", "def get_values(self):\n return map(lambda x: x.value(),self)", "def values(self):\n return [ self[x] for x in self ]", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n 
return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self):\n return [self[k] for k in self.keys()]", "def get_variable_values(self, vars):\n raise NotImplementedError()", "def values(self):\n return [_ for _ in self._dict.values()]", "def get_all_variables(self):\n return []", "def get_all(self):\n return [self.get(name) for name in self.factories.iterkeys()]", "def valuerefs(self):\n return [ref(value) for value in self.itervalues()]", "def values(self):\n return self._ctx.values()", "def values(self):\n return [p.value for p in self]", "def get_all(self):\n return self.__items", "def values(self):\n return [i.value for i in self.value]", "def all():\n return current().values", "def values(self) -> List:\n pass", "def get_sub_values(self):\n return list()", "def __call__(self):\n return self.get_items()", "def values(self, values=None):\n return [self[key] for key in self._sequence]", "def values(self) -> List[BaseValue]:\n raise NotImplementedError", "def returnAll(self):\n try:\n # self.checkValName()\n self.conn.execute(self.query, self.val)\n self.results = self.cursor.fetchall()\n except Exception as e:\n print \"Query failed: %s \" % e\n raise", "def values(self):\n return self._list.values()", "def values(self):\n return self.datasource.data[\"values\"]", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def get_all_variables(self):\n raise NotImplementedError()", "def getall(self, key):\n return self.values.get(key, [])", "def _values(self, items):\n # type: (Iterable[weakref.ReferenceType]) -> Iterable[Any]\n return map(self.value, items)", "def values(self):\n return self.d.keys()", "def values(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self):\n x = []\n for k in list(self.keys()):\n x.append(self[k])\n return x", "def values(self):\n return [kvp.value for kvp in self.keyvaluepair_set.all()]", "def get_items(self):\r\n return self.items()", "def get_dynamic_values(self):\n \n # Start with an empty list.\n values = []\n \n # If a static list of values was provided, use that.\n if self._values:\n values.extend(self._values)\n \n # If a dynamic function to get values was provided, extend the values\n # with its return value.\n get_values = getattr(self, 'get_values', None)\n if callable(get_values):\n values.extend(get_values())\n \n # If a list of values to append was provided, do so.\n if self._append_values:\n values.extend(self._append_values)\n \n # Return the list of values.\n return values", "def values(self) -> Iterable[U]:\n return self._store.values()", "def get_values(self) -> list:\r\n values = []\r\n for key, value in 
self._items:\r\n values.append(value)\r\n return values", "def values(self):\r\n my_values = []\r\n for sleek_ref in self.data.values():\r\n try:\r\n my_values.append(sleek_ref())\r\n except SleekRefDied:\r\n pass\r\n return my_values", "def values(self):\n if not self.__values:\n self.rank()\n return self.__values", "def itervaluerefs(self):\r\n return self.data.itervalues()", "def get_all_variables(self):\n out = []\n for i in self.items:\n out += i.get_all_variables()\n return out" ]
[ "0.71192074", "0.71074104", "0.7013543", "0.6938158", "0.68647885", "0.6794239", "0.6794239", "0.6765329", "0.6765329", "0.6765329", "0.6765329", "0.67555463", "0.67555463", "0.6741106", "0.6719452", "0.67193747", "0.66979545", "0.668874", "0.66784626", "0.667716", "0.6666213", "0.66542363", "0.66542363", "0.6620889", "0.66153467", "0.6614304", "0.6600735", "0.65755785", "0.65743417", "0.656299", "0.656299", "0.656299", "0.6542688", "0.65301985", "0.6522858", "0.6522676", "0.65225464", "0.6504222", "0.6501298", "0.64902234", "0.64863855", "0.64832807", "0.64829564", "0.6477167", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.6464348", "0.64600265", "0.6438934", "0.6438128", "0.6432904", "0.64217126", "0.641816", "0.64164174", "0.6412385", "0.64122534", "0.6401091", "0.63998616", "0.63789153", "0.6378632", "0.63657653", "0.63442403", "0.63393533", "0.6334216", "0.63318634", "0.63263553", "0.63263553", "0.63263553", "0.63263553", "0.63263553", "0.63263553", "0.63182586", "0.63122344", "0.6312045", "0.62984914", "0.6286657", "0.6273454", "0.6270745", "0.62523234", "0.6238314", "0.6234463", "0.62339824", "0.6233671", "0.6225189", "0.62244576", "0.6215591" ]
0.0
-1
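The row above pairs the values() docstring with an unimplemented stub (pass), which is consistent with its document_score of 0.0 and document_rank of -1. For contrast, a minimal concrete implementation might look like the sketch below; the DictResolver class and its _values attribute are assumptions, not something the dataset specifies.

from typing import Any, Dict


class DictResolver:
    """Hypothetical resolver backed by an in-memory dict."""

    def __init__(self, values: Dict[str, Any]) -> None:
        self._values = dict(values)  # assumed backing store

    def values(self) -> Dict[str, Any]:
        # Return a copy so callers cannot mutate internal state.
        return dict(self._values)


resolver = DictResolver({"debug": True, "retries": 3})
assert resolver.values() == {"debug": True, "retries": 3}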
Resolver should be able to produce a value for a given key. If the key doesn't exist, it should return None.
def resolve(self, key: str) -> Optional[Any]:
    return environ.get(key)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resolve(self, key: str) -> Optional[Any]:\n return self.dict.get(key)", "def resolve(self, key: str) -> Optional[Any]:\n pass", "def get(self, key: K)-> Optional[V]:\n return self._func(key)", "def lookup(self, key):\n n = self.find(key)\n if n:\n return n.value\n else:\n return False", "def _get(self, key):\n try:\n val = getattr(self, f\"_{key}\")\n if val is not None:\n return val\n else:\n self._load()\n return getattr(self, f\"_{key}\")\n except AttributeError:\n return None", "def get(self, key, fallback):\r\n try:\r\n return self[key]\r\n except (KeyError, IndexError):\r\n return fallback", "def get_value(self, key):\r\n if self.hash_table[self.horner_hash(key)] is not None:\r\n if self.hash_table[self.horner_hash(key)].key == key:\r\n return self.hash_table[self.horner_hash(key)].value\r\n else:\r\n return None", "def get(self, key: Union[Any, int]) -> Union[Any, Sequence[Any]]:\n try:\n return[key]\n except KeyError:\n return self.default_factory", "def get_value(self, key: str) -> Optional[str]:\n raise NotImplementedError", "def get(self, key: K) -> Optional[V]:\n return self.mget([key])[0]", "def get_or_call(self, key, callback, ttl=None):\n if self.contains(key):\n res = self[key]\n else:\n res = callback()\n self.set(key, res, ttl=ttl)\n return res", "def get_if_exist(self, data, key):\n if key in data:\n return data[key]\n return None", "def get_value(self, key: str) -> Any:\r\n if key is None:\r\n return self.data\r\n try:\r\n return self.data[key]\r\n except KeyError:\r\n return None", "def _safe_read(self, source: dict, key: str, mapper: Callable[[str], any]) -> any:\n return mapper(source[key]) if key in source else None", "def _resolve_with_default(\n self,\n key: Union[str, int, Enum],\n value: Any,\n default_value: Any = DEFAULT_VALUE_MARKER,\n ) -> Any:\n\n def is_mandatory_missing(val: Any) -> bool:\n return get_value_kind(val) == ValueKind.MANDATORY_MISSING # type: ignore\n\n value = _get_value(value)\n has_default = default_value is not DEFAULT_VALUE_MARKER\n if has_default and (value is None or is_mandatory_missing(value)):\n return default_value\n\n resolved = self._resolve_interpolation(\n key=key,\n value=value,\n throw_on_missing=not has_default,\n throw_on_resolution_failure=not has_default,\n )\n if resolved is None and has_default:\n return default_value\n\n if is_mandatory_missing(resolved):\n if has_default:\n return default_value\n else:\n raise MissingMandatoryValue(\"Missing mandatory value: $FULL_KEY\")\n\n return _get_value(resolved)", "def lookup(self, key):", "def _single_getitem(self, key):\n try:\n return self._dict[key]\n except KeyError:\n return self.default", "def get(self, key: Hashable) -> Any: # type: ignore\n try:\n return[key]\n except (KeyError, TypeError):\n if self.default_factory is None:\n raise KeyError(f'{key} is not in {self.__class__}')\n else:\n try:\n return self.default_factory()\n except TypeError:\n return self.default_factory", "def get(self, key, default=None):\n def find(found_item, _):\n \"\"\" This is the closer function which will be passed to find by key function , if key found than return the value \n otherwise return blanck\"\"\"\n if found_item:\n return found_item[1]\n else:\n return default\n\n return self._find_by_key(key, find)", "def get(\n self,\n key: str,\n ) -> T.Optional[VALUE]:\n record = self._get_record_from_backend(key)\n if record is None:\n return None\n\n if record.expire:\n now = utc_now()\n if (now.timestamp() - record.update_ts) < record.expire:\n return self.deserialize(record.value)\n 
else:\n return None\n else:\n return self.deserialize(record.value)", "def __getitem__(self, key):\n if self._root:\n node = self._getItemHelper(key, self._root)\n if node:\n return node.value\n else:\n return None\n else:\n return None", "def resolve_resolver_value(self, resolver: \"Resolver\") -> Any:\n try:\n return resolver.resolve()\n except RecursiveResolve:\n # Recursive resolve issues shouldn't be masked by a placeholder.\n raise\n except Exception:\n if are_placeholders_enabled():\n placeholder_value = create_placeholder_value(\n resolver, self.placeholder_type\n )\n\n self.logger.debug(\n \"Error encountered while resolving the resolver. This is allowed for the current \"\n f\"operation. Resolving it to a placeholder value instead: {placeholder_value}\"\n )\n return placeholder_value\n raise", "def get(self, key):\n if self.defs:\n name = self.defs[0]\n val = self.defs[1]\n old_self = self.defs[2]\n if key == name:\n return val\n else:\n return old_self.get(key)", "def get(self, key, default=None):\r\n try:\r\n return self.data[key]()\r\n except (KeyError, SleekRefDied):\r\n return default", "def value(self, key):\n item = self.default(key)\n return self.__getSafeValue(key, item)", "def get(self, key):\n if key in self.fields:\n return self.fields.get(key).get()\n return None", "def resolve(self, key: str) -> Any:\n return _ba.resolve_appconfig_value(key)", "def __getitem__(self, key):\n result = mongo['readable-api'].foo.find_one({\"foo\": key})\n if result:\n return self.make_child(key)\n return None", "def get(self, key, default=None):", "def resolve(self, section, key):\n\n return self.sections[section][key]", "def get(self, key):\n if type(key) != str:\n raise TypeError(\"This is not the string you're looking for!\")\n number = self._hash(key)\n stored_key = number if self.function == 'fnv' else key\n try:\n return self.bucket_list[number % self.bucket_number].search(stored_key).stored_value\n except AttributeError:\n return None", "def get(self, key, default=None):\n key = self._validate_key(key)\n sql = u\"\"\"\n SELECT `value` FROM `{table}` WHERE key = ?\n \"\"\".format(table=self.name)\n\n r = self.conn.execute(sql, (key,)).fetchone()\n\n if r:\n return self.convert_out(r['value'])\n\n return default", "def get(self, key):\n # TODO: Check if the given key exists and return its associated value\n hash_key = self._bucket_index(key) # Gets the index of the key\n\n if self.buckets[hash_key].is_empty() is False: # If the hask_key exists\n for key_value_pair in self.buckets[hash_key]: # Iteratre through the value pair\n if key_value_pair[0] is key: # If the key matches\n return key_value_pair[1] # Return the value\n raise KeyError(\"Key doesn't exist\") # If key doesn't exist, return None", "def lookup(self, key):\n k = self.get_position(key)\n\n if self.keys[k] == key:\n return node.values[k]\n\n # Lookup in the child node.\n if self.refs[k+1] == None:\n return None\n return self.refs[k+1].lookup(key)", "def lookup(key, default=None):\n def _lookup(mapping):\n return mapping.get(key, default)\n return _lookup", "def get(self, key):\n if key is None:\n return None # None is not a valid key\n return get_from_subtree(self.root, key)", "def __getitem__(self, key: str) -> Any:\n\n # Make sure the definition is up-to-date\n self._set_definition(self.pyfiguration.definition)\n\n # Make sure the key exists in the definition\n keyDefinition = from_dot_notation(\n field=\".\".join([*self.parents, key]), obj=self.get_definition()\n )\n\n # Keep track of the keys that have been 
accessed\n if isinstance(self.accessStatus.get(key, None), bool):\n self.accessStatus[key] = True\n\n # Get the value from the store\n defaultValue = from_dot_notation(\n field=\".\".join([*self.parents, key]), obj=self.get_definition()\n ).get(\"default\", None)\n if defaultValue is None and \"required\" not in keyDefinition:\n defaultValue = {}\n value = self.store.get(self.__keytransform__(key), defaultValue)\n\n # Perform a predefined set of tests on the value\n self.check_value(self.__keytransform__(key), value)\n\n # Return the checked value\n return value", "def get(self, key):\n\n node = self._get_node(key) # Get the node with the key (if it exists)\n\n if node is None:\n return None\n else:\n return node.value", "def try_read(self, key):\n if key not in self.db:\n return None\n return self.db[key]", "def get(self, key, def_value=None):\n\n index = self._get_hash(key)\n\n if self.table[index] is not None:\n for pair in self.table[index]:\n if key == pair[0]:\n return pair[1]\n\n if def_value is not None:\n return def_value\n\n raise ValueError(f\"can't find value with given key {key}\")", "def get(self, key: str, default=None) -> Any:\n try:\n return self[key][0]\n except KeyError:\n return default", "def get_value(self, key):\n pass", "def get(self, key):\n dkey = digest(key)\n # if this node has it, return it\n if self.storage.get(dkey) is not None:\n return defer.succeed(self.storage.get(dkey))\n node = Node(dkey)\n nearest = self.protocol.router.findNeighbors(node)\n if len(nearest) == 0:\n self.log.warning(\"There are no known neighbors to get key %s\" % key)\n return defer.succeed(None)\n spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)\n return spider.find()", "def __getitem__(self, key):\n exists = self.keys[self._linear_probe(key, \"get\")] is not None # _linear_probe() returns None if not found\n if not exists:\n raise KeyError(\"Error: \" + str(key) + \" does not exist in the table\")\n else:\n location = self._linear_probe(key, \"get\")\n assert self.keys[location] == key, \"Error in linear probing to get()\"\n return self.values[location]", "def first(self, key):\n # Look through the JSON cache\n for name, value in self._json_cache:\n if key == name:\n return value\n\n # Exhaustion\n else:\n return None", "def safely_get_value(dct: Mapping, key: Any,\n default: Union[T, None] = None\n ) -> Union[Any, T]:\n if key in dct:\n return dct[key]\n else:\n return default", "def _map___getitem__(self, key):\n if not isinstance(key, self.keytype):\n raise KeyError('type of key should be ' + repr(self.keytype) + ' but got ' + repr(type(key)))\n if key not in self:\n raise KeyError('key not found')\n return self.second(self.find(key))", "def get(self, key: t.Hashable) -> t.Any:", "def get(self, key):\n if key is None:\n return None\n return self.cache_data.get(key, None)", "def get(self, key):\n dkey = digest(key)\n _log.debug(\"Server:get %s\" % base64.b64encode(dkey))\n # if this node has it, return it\n exists, value = self.storage.get(dkey)\n if exists:\n return defer.succeed(value)\n node = Node(dkey)\n nearest = self.protocol.router.findNeighbors(node)\n if len(nearest) == 0:\n self.log.warning(\"There are no known neighbors to get key %s\" % key)\n return defer.succeed(None)\n spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)\n return spider.find()", "def get(self, key: T) -> Optional[U]:\n return self._store.get(key)", "def get(self, key):\n if key:\n return self.cache_data.get(key)\n else:\n return None", "def 
get(self, key):\n if key and key in self.cache_data:\n return self.cache_data[key]\n return None", "def get(self, key, alternative=None):\n try:\n return self[key]\n except (KeyError, TypeError, ValueError):\n return alternative", "def get(self, key, default=None):\n try:\n return self._get(key)\n except Exception:\n return default", "def get(self, key):\n h = key%self.m\n a = self.a\n if a[h]:\n return a[h].val\n else:\n return -1", "def get(self, key):\n # Your code here\n\n idx = self.hash_index(key)\n\n # check if the index is in range\n if idx >= 0 and idx < self.capacity:\n curr_node = self.hash_table[idx]\n\n # check if any node at index exists\n if curr_node is None:\n return None\n\n # if there's already something at this index\n while curr_node is not None:\n \n # check to see if there is an entry at this index whose key matches the provided key\n while curr_node.key is not key:\n curr_node = curr_node.next\n \n # if we never found an entry with a matching key, return None\n if curr_node.key is not key or curr_node is None:\n return None\n else:\n return curr_node.value\n \n \n # otherwise return None if the index is not in range\n else:\n return None", "async def get(self, key):\n return self.dict.get(key, None)", "def value(\n self, key: _K = 0, default: t.Optional[object] = None\n ) -> t.Any:\n try:\n index = self.index(key)\n except (IndexError, KeyError):\n return default\n else:\n return self[index]", "def get_value(self, key):\n if self.is_key_in_cache(key):\n entry_node, frequency_node = self.__remove_key(key)\n if frequency_node.next.key != frequency_node.key + 1:\n self.__create_frequency_node_after(frequency_node)\n new_frequency_node = frequency_node.next\n self.__add_entry_to_frequency_node(entry_node, new_frequency_node)\n if frequency_node.frequency_cache.size == 0:\n self.__remove_frequency_node(frequency_node)\n self.key_to_frequency_node[key] = new_frequency_node\n return entry_node.value\n else:\n return None", "def __getitem__(self, key: T) -> T:\n return self.lookup(key)", "def resolve(self):\n return resolve(self.val())", "def get(self, key, default=None):\n return self.metadata_dict.get(key, default)", "def getValue(dictionary, key, value):\n if not key in dictionary.keys():\n return value\n else:\n return dictionary[key]", "def get(self, key):\n hi = self.hash_index(key)\n if (self.storage[hi]):\n if(self.storage[hi].next):\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n return current.value\n else:\n return self.storage[hi].value\n\n return None", "def _get(self, key: str):\n pass", "def get(self, key):\r\n if not isinstance(key, str):\r\n raise TypeError(\"Key must be a string\")\r\n\r\n node = self._find_node(key)\r\n if node is None:\r\n return None\r\n else:\r\n return node.value[1]", "def safe_get(self,section,key,default_value=None):\n try:\n return self.get(section,key)\n except:\n return default_value", "def try_parse_field(field_name, value, parser_dict):\n parser = parser_dict.get(field_name) # None if no such entry\n if parser is not None:\n return try_or_none(parser)(value)\n else:\n return value", "def try_parse_field(field_name, value, parser_dict):\n parser = parser_dict.get(field_name) # None if no such entry\n if parser is not None:\n return try_or_none(parser)(value)\n else:\n return value", "def dict_item(dictionary, key):\n try:\n return dictionary.get(key, None)\n except AttributeError:\n # fail silently if something other than a dict is passed\n return None", "def lookup(my_dict, 
my_key, default_value=None):\n if my_key in my_dict:\n return my_dict[my_key]\n else:\n return default_value", "def __getitem__(self, key):\n if key not in self.rules:\n raise KeyError(key)\n return self.get(key)", "def get(self, key: Any, default: Optional[Any] = None) -> Any:\n try:\n return self[key]\n except (KeyError, ValueError, IndexError):\n return default", "def get(self, key, default=None):\n if self.root is not None:\n res = self._get(key, self.root)\n if res:\n return res\n else:\n return default\n return default", "def get(self, key):\n if key and key in self.cache_data.keys():\n return self.cache_data[key]\n else:\n return None", "def get(self, key):\n if key and key in self.cache_data.keys():\n return self.cache_data[key]\n else:\n return None", "def lookup(self, key: T) -> T:\n\n idx: int = self.hash_fn(key) # get an index location for 'key'\n if self.table[idx] is None: # 'key' doesn't exists in hash table\n raise Exception(\"Key doesn't exist in hashtable\")\n else:\n self.key_comparison_counts += 1\n return self.table[self.find(key)][1] # return pair value", "def get(self, key, default=None):\n pass", "def get(self, key, alternative=None):\n try:\n return self[key]\n except KeyError:\n return alternative", "def get(self, key: str, default: t.Optional[object] = None) -> t.Any:\n try:\n index = self.__keys.index(str(key))\n except ValueError:\n return default\n if 0 <= index < len(self):\n return self._super_getitem_single(index)\n else:\n return default", "def get_value(key, obj, default=missing):\n if isinstance(key, int):\n return _get_value_for_key(key, obj, default)\n return _get_value_for_keys(key.split('.'), obj, default)", "def lookup(self, key, default=None):\n hash_key = hash(key) % self.length\n bucket = self.array[hash_key]\n if not bucket:\n return default\n for key_val_pair in bucket:\n k, v = key_val_pair\n if k == key:\n return v", "def __getitem__(self, key: ir.Value) -> ir.Value:\n return ops.MapGet(self, key).to_expr()", "def get(self, key: str) -> Optional[Value]:\n\n return self._blob.get(key)", "def get(self, key):\n _filter = {'_id': key}\n doc = self.collection.find_one(_filter)\n\n if doc and not self._verify_timeout(doc):\n return self._unpickle(doc['value'])", "def get(self, key: str, fn=None):\n value = self._redis.get(key)\n if fn is None:\n return value\n return fn(value)", "def get_value(self, key):\n try:\n return self.map[key]\n except KeyError:\n raise KeyError('key is not in map')", "def retrieve(self, key):\n index = self._hash_mod(key)\n node = self.storage[index]\n while node is not None:\n if node.key == key:\n return node.value\n node = node.next\n return None", "def get(self, key):\n if key in self._db:\n return self._db[key]\n else:\n return None", "def _resolve_schema_key(self, key):\n if super(Hdf5, self).__contains__(key):\n # If the dataset exists in the underlying HDF5 file, just return it\n return key, None\n\n # Straight mapping between the key and a dataset\n key = key.lstrip('/') if tokio.common.isstr(key) else key\n if key in self.schema:\n hdf5_key = self.schema.get(key)\n if super(Hdf5, self).__contains__(hdf5_key):\n return hdf5_key, None\n\n # Key maps to a transformation\n if key in self.dataset_providers:\n return None, self.dataset_providers[key]\n\n errmsg = \"Unknown key %s in %s\" % (key, self.filename)\n raise KeyError(errmsg)", "def get(self, key):\r\n\t\t# return None if the key doesn't exist\r\n\t\tif not self.contains_key(key):\r\n\t\t\treturn None\r\n\t\telse:\r\n\t\t\tindex = self.get_index(key) # get the 
index of the key\r\n\r\n\t\t\t# begin traversal of the linked list until we reach the key\r\n\t\t\tcur_node = self._buckets[index].head\r\n\t\t\twhile cur_node.key != key:\r\n\t\t\t\tcur_node = cur_node.next\r\n\r\n\t\t\treturn cur_node.value", "def find_value(dic, key):\n return dic[key]", "def visit(self, func: Callable[[str], Optional[Any]]) -> Optional[Any]:\n for key, _ in self._recurse():\n result = func(key)\n if result is not None:\n return result", "def get(self, key):\n\t\treturn self.__get(key, key[1:])", "def lookup(self, key):\n # check that this tree actually has a root node\n debug.printMsg(\"Call made to Lookup\")\n debug.printMsg(\"checking if we have a BST\")\n if self.root:\n debug.printMsg(\"Calling Recursive Lookup\")\n (result, err) = self.recursiveLookup(key, self.root)\n # if we did not find anything\n if err: \n debug.printMsg(\"Oops, we couldn't find anything\")\n return None\n else: \n # we found a result\n debug.printMsg(\"we found: \")\n return result\n else:\n debug.printMsg(\"Oops, the BST seems to not exist\")\n # root doesnt exist\n return None", "def get(self, key):", "def get(self, key):", "def get_value(key):\n\n request_dict = RequestFileCom.file_to_dict()\n\n try:\n\n return request_dict[key]\n\n except:\n\n return None", "def _get_value(match_entry: Dict, path0: str) -> any:\n if path0 is None:\n current_el = match_entry\n else:\n path = path0.split('/')\n current_el = match_entry\n for p in path:\n if current_el is None:\n break\n current_el = current_el.get(p)\n return current_el" ]
[ "0.7909129", "0.75538653", "0.6813241", "0.67681676", "0.66341573", "0.64758044", "0.6463813", "0.64607257", "0.6392654", "0.6385483", "0.63703537", "0.63212407", "0.6292602", "0.6284046", "0.62764496", "0.619645", "0.6181006", "0.617452", "0.614984", "0.6148691", "0.6125853", "0.61204463", "0.611363", "0.610535", "0.6089134", "0.60884", "0.60846543", "0.60740733", "0.60708284", "0.6038302", "0.6037554", "0.60342723", "0.6034109", "0.60314757", "0.60213065", "0.6017173", "0.6015758", "0.60123986", "0.6001456", "0.5994992", "0.59899116", "0.59817934", "0.59453714", "0.59381527", "0.59351104", "0.5922778", "0.5918769", "0.5901678", "0.5901575", "0.58990586", "0.58955973", "0.58931106", "0.5892004", "0.58891565", "0.5871098", "0.58665097", "0.5865487", "0.58558285", "0.5842927", "0.5840968", "0.58404326", "0.5838513", "0.58372706", "0.5831573", "0.5830609", "0.5830559", "0.5825645", "0.58072823", "0.58038807", "0.57995063", "0.5793654", "0.57915705", "0.578296", "0.5778612", "0.5775515", "0.57735616", "0.57735616", "0.57693106", "0.5768527", "0.5768501", "0.5761349", "0.5755889", "0.57466114", "0.574628", "0.5740622", "0.57401776", "0.5723776", "0.5719862", "0.5716286", "0.57077986", "0.5704595", "0.57025105", "0.5700118", "0.5698316", "0.56929576", "0.56902814", "0.5679246", "0.5679246", "0.56766874", "0.56752855" ]
0.59762084
42
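Taken together, the two docstrings in this section describe a small resolver interface: resolve(key) returns a value or None, and values() exposes everything the resolver can produce. The sketch below combines that interface with the environment-backed variant from the row above; the abstract base class and its method names are inferred from the queries, not confirmed by the source.

import os
from abc import ABC, abstractmethod
from typing import Any, Dict, Optional


class Resolver(ABC):
    """Interface inferred from the two docstrings; assumed, not confirmed."""

    @abstractmethod
    def resolve(self, key: str) -> Optional[Any]:
        """Produce a value for `key`, or None if it doesn't exist."""

    @abstractmethod
    def values(self) -> Dict[str, Any]:
        """Return all values available in the resolver."""


class EnvResolver(Resolver):
    """Environment-backed variant, as in the document above."""

    def resolve(self, key: str) -> Optional[Any]:
        return os.environ.get(key)

    def values(self) -> Dict[str, Any]:
        return dict(os.environ)


resolver = EnvResolver()
assert resolver.resolve("PATH") == os.environ.get("PATH")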
Resolver should be able to produce a value for a given key. If the key doesn't exist, it should return None.
def resolve(self, key: str) -> Optional[Any]:
    return self.dict.get(key)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resolve(self, key: str) -> Optional[Any]:\n pass", "def get(self, key: K)-> Optional[V]:\n return self._func(key)", "def lookup(self, key):\n n = self.find(key)\n if n:\n return n.value\n else:\n return False", "def _get(self, key):\n try:\n val = getattr(self, f\"_{key}\")\n if val is not None:\n return val\n else:\n self._load()\n return getattr(self, f\"_{key}\")\n except AttributeError:\n return None", "def get(self, key, fallback):\r\n try:\r\n return self[key]\r\n except (KeyError, IndexError):\r\n return fallback", "def get_value(self, key):\r\n if self.hash_table[self.horner_hash(key)] is not None:\r\n if self.hash_table[self.horner_hash(key)].key == key:\r\n return self.hash_table[self.horner_hash(key)].value\r\n else:\r\n return None", "def get(self, key: Union[Any, int]) -> Union[Any, Sequence[Any]]:\n try:\n return[key]\n except KeyError:\n return self.default_factory", "def get_value(self, key: str) -> Optional[str]:\n raise NotImplementedError", "def get(self, key: K) -> Optional[V]:\n return self.mget([key])[0]", "def get_or_call(self, key, callback, ttl=None):\n if self.contains(key):\n res = self[key]\n else:\n res = callback()\n self.set(key, res, ttl=ttl)\n return res", "def get_if_exist(self, data, key):\n if key in data:\n return data[key]\n return None", "def get_value(self, key: str) -> Any:\r\n if key is None:\r\n return self.data\r\n try:\r\n return self.data[key]\r\n except KeyError:\r\n return None", "def _safe_read(self, source: dict, key: str, mapper: Callable[[str], any]) -> any:\n return mapper(source[key]) if key in source else None", "def _resolve_with_default(\n self,\n key: Union[str, int, Enum],\n value: Any,\n default_value: Any = DEFAULT_VALUE_MARKER,\n ) -> Any:\n\n def is_mandatory_missing(val: Any) -> bool:\n return get_value_kind(val) == ValueKind.MANDATORY_MISSING # type: ignore\n\n value = _get_value(value)\n has_default = default_value is not DEFAULT_VALUE_MARKER\n if has_default and (value is None or is_mandatory_missing(value)):\n return default_value\n\n resolved = self._resolve_interpolation(\n key=key,\n value=value,\n throw_on_missing=not has_default,\n throw_on_resolution_failure=not has_default,\n )\n if resolved is None and has_default:\n return default_value\n\n if is_mandatory_missing(resolved):\n if has_default:\n return default_value\n else:\n raise MissingMandatoryValue(\"Missing mandatory value: $FULL_KEY\")\n\n return _get_value(resolved)", "def lookup(self, key):", "def _single_getitem(self, key):\n try:\n return self._dict[key]\n except KeyError:\n return self.default", "def get(self, key: Hashable) -> Any: # type: ignore\n try:\n return[key]\n except (KeyError, TypeError):\n if self.default_factory is None:\n raise KeyError(f'{key} is not in {self.__class__}')\n else:\n try:\n return self.default_factory()\n except TypeError:\n return self.default_factory", "def get(self, key, default=None):\n def find(found_item, _):\n \"\"\" This is the closer function which will be passed to find by key function , if key found than return the value \n otherwise return blanck\"\"\"\n if found_item:\n return found_item[1]\n else:\n return default\n\n return self._find_by_key(key, find)", "def get(\n self,\n key: str,\n ) -> T.Optional[VALUE]:\n record = self._get_record_from_backend(key)\n if record is None:\n return None\n\n if record.expire:\n now = utc_now()\n if (now.timestamp() - record.update_ts) < record.expire:\n return self.deserialize(record.value)\n else:\n return None\n else:\n return self.deserialize(record.value)", "def 
__getitem__(self, key):\n if self._root:\n node = self._getItemHelper(key, self._root)\n if node:\n return node.value\n else:\n return None\n else:\n return None", "def resolve_resolver_value(self, resolver: \"Resolver\") -> Any:\n try:\n return resolver.resolve()\n except RecursiveResolve:\n # Recursive resolve issues shouldn't be masked by a placeholder.\n raise\n except Exception:\n if are_placeholders_enabled():\n placeholder_value = create_placeholder_value(\n resolver, self.placeholder_type\n )\n\n self.logger.debug(\n \"Error encountered while resolving the resolver. This is allowed for the current \"\n f\"operation. Resolving it to a placeholder value instead: {placeholder_value}\"\n )\n return placeholder_value\n raise", "def get(self, key):\n if self.defs:\n name = self.defs[0]\n val = self.defs[1]\n old_self = self.defs[2]\n if key == name:\n return val\n else:\n return old_self.get(key)", "def get(self, key, default=None):\r\n try:\r\n return self.data[key]()\r\n except (KeyError, SleekRefDied):\r\n return default", "def value(self, key):\n item = self.default(key)\n return self.__getSafeValue(key, item)", "def get(self, key):\n if key in self.fields:\n return self.fields.get(key).get()\n return None", "def resolve(self, key: str) -> Any:\n return _ba.resolve_appconfig_value(key)", "def __getitem__(self, key):\n result = mongo['readable-api'].foo.find_one({\"foo\": key})\n if result:\n return self.make_child(key)\n return None", "def get(self, key, default=None):", "def resolve(self, section, key):\n\n return self.sections[section][key]", "def get(self, key):\n if type(key) != str:\n raise TypeError(\"This is not the string you're looking for!\")\n number = self._hash(key)\n stored_key = number if self.function == 'fnv' else key\n try:\n return self.bucket_list[number % self.bucket_number].search(stored_key).stored_value\n except AttributeError:\n return None", "def get(self, key, default=None):\n key = self._validate_key(key)\n sql = u\"\"\"\n SELECT `value` FROM `{table}` WHERE key = ?\n \"\"\".format(table=self.name)\n\n r = self.conn.execute(sql, (key,)).fetchone()\n\n if r:\n return self.convert_out(r['value'])\n\n return default", "def get(self, key):\n # TODO: Check if the given key exists and return its associated value\n hash_key = self._bucket_index(key) # Gets the index of the key\n\n if self.buckets[hash_key].is_empty() is False: # If the hask_key exists\n for key_value_pair in self.buckets[hash_key]: # Iteratre through the value pair\n if key_value_pair[0] is key: # If the key matches\n return key_value_pair[1] # Return the value\n raise KeyError(\"Key doesn't exist\") # If key doesn't exist, return None", "def lookup(self, key):\n k = self.get_position(key)\n\n if self.keys[k] == key:\n return node.values[k]\n\n # Lookup in the child node.\n if self.refs[k+1] == None:\n return None\n return self.refs[k+1].lookup(key)", "def lookup(key, default=None):\n def _lookup(mapping):\n return mapping.get(key, default)\n return _lookup", "def get(self, key):\n if key is None:\n return None # None is not a valid key\n return get_from_subtree(self.root, key)", "def __getitem__(self, key: str) -> Any:\n\n # Make sure the definition is up-to-date\n self._set_definition(self.pyfiguration.definition)\n\n # Make sure the key exists in the definition\n keyDefinition = from_dot_notation(\n field=\".\".join([*self.parents, key]), obj=self.get_definition()\n )\n\n # Keep track of the keys that have been accessed\n if isinstance(self.accessStatus.get(key, None), bool):\n 
self.accessStatus[key] = True\n\n # Get the value from the store\n defaultValue = from_dot_notation(\n field=\".\".join([*self.parents, key]), obj=self.get_definition()\n ).get(\"default\", None)\n if defaultValue is None and \"required\" not in keyDefinition:\n defaultValue = {}\n value = self.store.get(self.__keytransform__(key), defaultValue)\n\n # Perform a predefined set of tests on the value\n self.check_value(self.__keytransform__(key), value)\n\n # Return the checked value\n return value", "def get(self, key):\n\n node = self._get_node(key) # Get the node with the key (if it exists)\n\n if node is None:\n return None\n else:\n return node.value", "def try_read(self, key):\n if key not in self.db:\n return None\n return self.db[key]", "def get(self, key, def_value=None):\n\n index = self._get_hash(key)\n\n if self.table[index] is not None:\n for pair in self.table[index]:\n if key == pair[0]:\n return pair[1]\n\n if def_value is not None:\n return def_value\n\n raise ValueError(f\"can't find value with given key {key}\")", "def get(self, key: str, default=None) -> Any:\n try:\n return self[key][0]\n except KeyError:\n return default", "def get_value(self, key):\n pass", "def resolve(self, key: str) -> Optional[Any]:\n return environ.get(key)", "def get(self, key):\n dkey = digest(key)\n # if this node has it, return it\n if self.storage.get(dkey) is not None:\n return defer.succeed(self.storage.get(dkey))\n node = Node(dkey)\n nearest = self.protocol.router.findNeighbors(node)\n if len(nearest) == 0:\n self.log.warning(\"There are no known neighbors to get key %s\" % key)\n return defer.succeed(None)\n spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)\n return spider.find()", "def __getitem__(self, key):\n exists = self.keys[self._linear_probe(key, \"get\")] is not None # _linear_probe() returns None if not found\n if not exists:\n raise KeyError(\"Error: \" + str(key) + \" does not exist in the table\")\n else:\n location = self._linear_probe(key, \"get\")\n assert self.keys[location] == key, \"Error in linear probing to get()\"\n return self.values[location]", "def first(self, key):\n # Look through the JSON cache\n for name, value in self._json_cache:\n if key == name:\n return value\n\n # Exhaustion\n else:\n return None", "def safely_get_value(dct: Mapping, key: Any,\n default: Union[T, None] = None\n ) -> Union[Any, T]:\n if key in dct:\n return dct[key]\n else:\n return default", "def _map___getitem__(self, key):\n if not isinstance(key, self.keytype):\n raise KeyError('type of key should be ' + repr(self.keytype) + ' but got ' + repr(type(key)))\n if key not in self:\n raise KeyError('key not found')\n return self.second(self.find(key))", "def get(self, key: t.Hashable) -> t.Any:", "def get(self, key):\n if key is None:\n return None\n return self.cache_data.get(key, None)", "def get(self, key):\n dkey = digest(key)\n _log.debug(\"Server:get %s\" % base64.b64encode(dkey))\n # if this node has it, return it\n exists, value = self.storage.get(dkey)\n if exists:\n return defer.succeed(value)\n node = Node(dkey)\n nearest = self.protocol.router.findNeighbors(node)\n if len(nearest) == 0:\n self.log.warning(\"There are no known neighbors to get key %s\" % key)\n return defer.succeed(None)\n spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)\n return spider.find()", "def get(self, key: T) -> Optional[U]:\n return self._store.get(key)", "def get(self, key):\n if key:\n return self.cache_data.get(key)\n else:\n return None", 
"def get(self, key):\n if key and key in self.cache_data:\n return self.cache_data[key]\n return None", "def get(self, key, alternative=None):\n try:\n return self[key]\n except (KeyError, TypeError, ValueError):\n return alternative", "def get(self, key, default=None):\n try:\n return self._get(key)\n except Exception:\n return default", "def get(self, key):\n h = key%self.m\n a = self.a\n if a[h]:\n return a[h].val\n else:\n return -1", "def get(self, key):\n # Your code here\n\n idx = self.hash_index(key)\n\n # check if the index is in range\n if idx >= 0 and idx < self.capacity:\n curr_node = self.hash_table[idx]\n\n # check if any node at index exists\n if curr_node is None:\n return None\n\n # if there's already something at this index\n while curr_node is not None:\n \n # check to see if there is an entry at this index whose key matches the provided key\n while curr_node.key is not key:\n curr_node = curr_node.next\n \n # if we never found an entry with a matching key, return None\n if curr_node.key is not key or curr_node is None:\n return None\n else:\n return curr_node.value\n \n \n # otherwise return None if the index is not in range\n else:\n return None", "async def get(self, key):\n return self.dict.get(key, None)", "def value(\n self, key: _K = 0, default: t.Optional[object] = None\n ) -> t.Any:\n try:\n index = self.index(key)\n except (IndexError, KeyError):\n return default\n else:\n return self[index]", "def get_value(self, key):\n if self.is_key_in_cache(key):\n entry_node, frequency_node = self.__remove_key(key)\n if frequency_node.next.key != frequency_node.key + 1:\n self.__create_frequency_node_after(frequency_node)\n new_frequency_node = frequency_node.next\n self.__add_entry_to_frequency_node(entry_node, new_frequency_node)\n if frequency_node.frequency_cache.size == 0:\n self.__remove_frequency_node(frequency_node)\n self.key_to_frequency_node[key] = new_frequency_node\n return entry_node.value\n else:\n return None", "def __getitem__(self, key: T) -> T:\n return self.lookup(key)", "def resolve(self):\n return resolve(self.val())", "def get(self, key, default=None):\n return self.metadata_dict.get(key, default)", "def getValue(dictionary, key, value):\n if not key in dictionary.keys():\n return value\n else:\n return dictionary[key]", "def get(self, key):\n hi = self.hash_index(key)\n if (self.storage[hi]):\n if(self.storage[hi].next):\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n return current.value\n else:\n return self.storage[hi].value\n\n return None", "def _get(self, key: str):\n pass", "def get(self, key):\r\n if not isinstance(key, str):\r\n raise TypeError(\"Key must be a string\")\r\n\r\n node = self._find_node(key)\r\n if node is None:\r\n return None\r\n else:\r\n return node.value[1]", "def safe_get(self,section,key,default_value=None):\n try:\n return self.get(section,key)\n except:\n return default_value", "def try_parse_field(field_name, value, parser_dict):\n parser = parser_dict.get(field_name) # None if no such entry\n if parser is not None:\n return try_or_none(parser)(value)\n else:\n return value", "def try_parse_field(field_name, value, parser_dict):\n parser = parser_dict.get(field_name) # None if no such entry\n if parser is not None:\n return try_or_none(parser)(value)\n else:\n return value", "def dict_item(dictionary, key):\n try:\n return dictionary.get(key, None)\n except AttributeError:\n # fail silently if something other than a dict is passed\n return None", "def 
lookup(my_dict, my_key, default_value=None):\n if my_key in my_dict:\n return my_dict[my_key]\n else:\n return default_value", "def __getitem__(self, key):\n if key not in self.rules:\n raise KeyError(key)\n return self.get(key)", "def get(self, key: Any, default: Optional[Any] = None) -> Any:\n try:\n return self[key]\n except (KeyError, ValueError, IndexError):\n return default", "def get(self, key, default=None):\n if self.root is not None:\n res = self._get(key, self.root)\n if res:\n return res\n else:\n return default\n return default", "def get(self, key):\n if key and key in self.cache_data.keys():\n return self.cache_data[key]\n else:\n return None", "def get(self, key):\n if key and key in self.cache_data.keys():\n return self.cache_data[key]\n else:\n return None", "def lookup(self, key: T) -> T:\n\n idx: int = self.hash_fn(key) # get an index location for 'key'\n if self.table[idx] is None: # 'key' doesn't exists in hash table\n raise Exception(\"Key doesn't exist in hashtable\")\n else:\n self.key_comparison_counts += 1\n return self.table[self.find(key)][1] # return pair value", "def get(self, key, default=None):\n pass", "def get(self, key, alternative=None):\n try:\n return self[key]\n except KeyError:\n return alternative", "def get(self, key: str, default: t.Optional[object] = None) -> t.Any:\n try:\n index = self.__keys.index(str(key))\n except ValueError:\n return default\n if 0 <= index < len(self):\n return self._super_getitem_single(index)\n else:\n return default", "def get_value(key, obj, default=missing):\n if isinstance(key, int):\n return _get_value_for_key(key, obj, default)\n return _get_value_for_keys(key.split('.'), obj, default)", "def lookup(self, key, default=None):\n hash_key = hash(key) % self.length\n bucket = self.array[hash_key]\n if not bucket:\n return default\n for key_val_pair in bucket:\n k, v = key_val_pair\n if k == key:\n return v", "def __getitem__(self, key: ir.Value) -> ir.Value:\n return ops.MapGet(self, key).to_expr()", "def get(self, key: str) -> Optional[Value]:\n\n return self._blob.get(key)", "def get(self, key):\n _filter = {'_id': key}\n doc = self.collection.find_one(_filter)\n\n if doc and not self._verify_timeout(doc):\n return self._unpickle(doc['value'])", "def get(self, key: str, fn=None):\n value = self._redis.get(key)\n if fn is None:\n return value\n return fn(value)", "def get_value(self, key):\n try:\n return self.map[key]\n except KeyError:\n raise KeyError('key is not in map')", "def retrieve(self, key):\n index = self._hash_mod(key)\n node = self.storage[index]\n while node is not None:\n if node.key == key:\n return node.value\n node = node.next\n return None", "def get(self, key):\n if key in self._db:\n return self._db[key]\n else:\n return None", "def _resolve_schema_key(self, key):\n if super(Hdf5, self).__contains__(key):\n # If the dataset exists in the underlying HDF5 file, just return it\n return key, None\n\n # Straight mapping between the key and a dataset\n key = key.lstrip('/') if tokio.common.isstr(key) else key\n if key in self.schema:\n hdf5_key = self.schema.get(key)\n if super(Hdf5, self).__contains__(hdf5_key):\n return hdf5_key, None\n\n # Key maps to a transformation\n if key in self.dataset_providers:\n return None, self.dataset_providers[key]\n\n errmsg = \"Unknown key %s in %s\" % (key, self.filename)\n raise KeyError(errmsg)", "def get(self, key):\r\n\t\t# return None if the key doesn't exist\r\n\t\tif not self.contains_key(key):\r\n\t\t\treturn None\r\n\t\telse:\r\n\t\t\tindex = 
self.get_index(key) # get the index of the key\r\n\r\n\t\t\t# begin traversal of the linked list until we reach the key\r\n\t\t\tcur_node = self._buckets[index].head\r\n\t\t\twhile cur_node.key != key:\r\n\t\t\t\tcur_node = cur_node.next\r\n\r\n\t\t\treturn cur_node.value", "def find_value(dic, key):\n return dic[key]", "def visit(self, func: Callable[[str], Optional[Any]]) -> Optional[Any]:\n for key, _ in self._recurse():\n result = func(key)\n if result is not None:\n return result", "def get(self, key):\n\t\treturn self.__get(key, key[1:])", "def lookup(self, key):\n # check that this tree actually has a root node\n debug.printMsg(\"Call made to Lookup\")\n debug.printMsg(\"checking if we have a BST\")\n if self.root:\n debug.printMsg(\"Calling Recursive Lookup\")\n (result, err) = self.recursiveLookup(key, self.root)\n # if we did not find anything\n if err: \n debug.printMsg(\"Oops, we couldn't find anything\")\n return None\n else: \n # we found a result\n debug.printMsg(\"we found: \")\n return result\n else:\n debug.printMsg(\"Oops, the BST seems to not exist\")\n # root doesnt exist\n return None", "def get(self, key):", "def get(self, key):", "def get_value(key):\n\n request_dict = RequestFileCom.file_to_dict()\n\n try:\n\n return request_dict[key]\n\n except:\n\n return None", "def _get_value(match_entry: Dict, path0: str) -> any:\n if path0 is None:\n current_el = match_entry\n else:\n path = path0.split('/')\n current_el = match_entry\n for p in path:\n if current_el is None:\n break\n current_el = current_el.get(p)\n return current_el" ]
[ "0.75538653", "0.6813241", "0.67681676", "0.66341573", "0.64758044", "0.6463813", "0.64607257", "0.6392654", "0.6385483", "0.63703537", "0.63212407", "0.6292602", "0.6284046", "0.62764496", "0.619645", "0.6181006", "0.617452", "0.614984", "0.6148691", "0.6125853", "0.61204463", "0.611363", "0.610535", "0.6089134", "0.60884", "0.60846543", "0.60740733", "0.60708284", "0.6038302", "0.6037554", "0.60342723", "0.6034109", "0.60314757", "0.60213065", "0.6017173", "0.6015758", "0.60123986", "0.6001456", "0.5994992", "0.59899116", "0.59817934", "0.59762084", "0.59453714", "0.59381527", "0.59351104", "0.5922778", "0.5918769", "0.5901678", "0.5901575", "0.58990586", "0.58955973", "0.58931106", "0.5892004", "0.58891565", "0.5871098", "0.58665097", "0.5865487", "0.58558285", "0.5842927", "0.5840968", "0.58404326", "0.5838513", "0.58372706", "0.5831573", "0.5830609", "0.5830559", "0.5825645", "0.58072823", "0.58038807", "0.57995063", "0.5793654", "0.57915705", "0.578296", "0.5778612", "0.5775515", "0.57735616", "0.57735616", "0.57693106", "0.5768527", "0.5768501", "0.5761349", "0.5755889", "0.57466114", "0.574628", "0.5740622", "0.57401776", "0.5723776", "0.5719862", "0.5716286", "0.57077986", "0.5704595", "0.57025105", "0.5700118", "0.5698316", "0.56929576", "0.56902814", "0.5679246", "0.5679246", "0.56766874", "0.56752855" ]
0.7909129
0
Return all values available in the resolver.
def values(self) -> Dict[str, Any]:
    return self.dict.copy()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getValues(self):\n return self.__get('values')", "def get_values(self):\n \n return []", "def _values(self):\n return self.__values", "def get_values(self):\n raise NotImplementedError(\"Abstract method not implemented.\")", "def values(self):\r\n return self.__values", "def values(self):\n return self[\"values\"]", "def values(self):\n return self[\"values\"]", "def values(self):\n return self._values", "def values(self):\n return self._values", "def values(self):\n return self._values", "def values(self):\n return self._values", "def values (self):\n return self._values", "def values (self):\n return self._values", "def values(self):\n\t\treturn self.myVals", "def values(self):\n return [self[name] for name in self.keys()]", "def values(self) -> list:\n return self.__values", "def __call__(self):\n return self._main._values()", "def valuerefs(self):\r\n return self.data.values()", "def values():", "def values(self):\n return ValueCollection()", "def get_all_variables(self):\n return [self.item]", "def GetValues(self):\n ...", "def GetValues(self):\n ...", "def get_all_values(self):\n return self.display_table.get_all_values(root=self.display_table_root,include=self.params)", "def values(self):\n with self.__plock:\n return map(self.get, self._keys)", "def GetValues(self):", "def values(cls):\n return cls._values", "def values(self, items_list):\n return [self.resolve(value) for value in items_list]", "def values(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:\n return pulumi.get(self, \"values\")", "def values(self):\n return [self[key] for key in self.keys()]", "def values(self):\n return [self[key] for key in self.keys()]", "def values(self):\n return [self[key] for key in self.keys()]", "def values(self):\r\n return [self[k] for k in self]", "def valves(self):\n for name in self._valves:\n yield name, self._data[name]", "def values(self):\n return self._get_storage().items()", "def return_values(self):\r\n\r\n values = list(self.piDD.values())\r\n return values", "async def values(cls):\n result = []\n for key in await ProxyMethod.channel.call_remote(\n \"%s.values\" % (cls.__namespace__,),\n ):\n result.append(cls(key=key))\n return result", "def values(self):\n values = []\n for key in self.keys():\n values.append(self[key])\n return values", "def values(self, *args, **kwargs):\n return [ self._get(doc, *args, **kwargs) for doc in self.keys(*args, **kwargs) ]", "def get_res_values(name):\n pass", "def get_all_variables(self):\n return self.item.get_all_variables()", "def all_values(cls) -> List[str]:\n return list(member.value for member in cls.__members__.values())", "def get_values(self):\n return map(lambda x: x.value(),self)", "def values(self):\n return [ self[x] for x in self ]", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n 
return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self) -> Sequence[str]:\n return pulumi.get(self, \"values\")", "def values(self):\n return [self[k] for k in self.keys()]", "def get_variable_values(self, vars):\n raise NotImplementedError()", "def values(self):\n return [_ for _ in self._dict.values()]", "def get_all_variables(self):\n return []", "def get_all(self):\n return [self.get(name) for name in self.factories.iterkeys()]", "def valuerefs(self):\n return [ref(value) for value in self.itervalues()]", "def values(self):\n return self._ctx.values()", "def values(self):\n return [p.value for p in self]", "def get_all(self):\n return self.__items", "def values(self):\n return [i.value for i in self.value]", "def all():\n return current().values", "def values(self) -> List:\n pass", "def get_sub_values(self):\n return list()", "def __call__(self):\n return self.get_items()", "def values(self, values=None):\n return [self[key] for key in self._sequence]", "def values(self) -> List[BaseValue]:\n raise NotImplementedError", "def returnAll(self):\n try:\n # self.checkValName()\n self.conn.execute(self.query, self.val)\n self.results = self.cursor.fetchall()\n except Exception as e:\n print \"Query failed: %s \" % e\n raise", "def values(self):\n return self._list.values()", "def values(self):\n return self.datasource.data[\"values\"]", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def get_all_variables(self):\n raise NotImplementedError()", "def getall(self, key):\n return self.values.get(key, [])", "def _values(self, items):\n # type: (Iterable[weakref.ReferenceType]) -> Iterable[Any]\n return map(self.value, items)", "def values(self):\n return self.d.keys()", "def values(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"values\")", "def values(self):\n x = []\n for k in list(self.keys()):\n x.append(self[k])\n return x", "def values(self):\n return [kvp.value for kvp in self.keyvaluepair_set.all()]", "def get_items(self):\r\n return self.items()", "def get_dynamic_values(self):\n \n # Start with an empty list.\n values = []\n \n # If a static list of values was provided, use that.\n if self._values:\n values.extend(self._values)\n \n # If a dynamic function to get values was provided, extend the values\n # with its return value.\n get_values = getattr(self, 'get_values', None)\n if callable(get_values):\n values.extend(get_values())\n \n # If a list of values to append was provided, do so.\n if self._append_values:\n values.extend(self._append_values)\n \n # Return the list of values.\n return values", "def values(self) -> Iterable[U]:\n return self._store.values()", "def get_values(self) -> list:\r\n values = []\r\n for key, value in 
self._items:\r\n values.append(value)\r\n return values", "def values(self):\r\n my_values = []\r\n for sleek_ref in self.data.values():\r\n try:\r\n my_values.append(sleek_ref())\r\n except SleekRefDied:\r\n pass\r\n return my_values", "def values(self):\n if not self.__values:\n self.rank()\n return self.__values", "def itervaluerefs(self):\r\n return self.data.itervalues()", "def get_all_variables(self):\n out = []\n for i in self.items:\n out += i.get_all_variables()\n return out" ]
[ "0.71192074", "0.71074104", "0.7013543", "0.6938158", "0.68647885", "0.6794239", "0.6794239", "0.6765329", "0.6765329", "0.6765329", "0.6765329", "0.67555463", "0.67555463", "0.6741106", "0.6719452", "0.67193747", "0.66979545", "0.668874", "0.66784626", "0.667716", "0.6666213", "0.66542363", "0.66542363", "0.6620889", "0.66153467", "0.6614304", "0.6600735", "0.65755785", "0.65743417", "0.656299", "0.656299", "0.656299", "0.6542688", "0.65301985", "0.6522858", "0.6522676", "0.65225464", "0.6504222", "0.6501298", "0.64902234", "0.64863855", "0.64832807", "0.64829564", "0.6477167", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.64687", "0.6464348", "0.64600265", "0.6438934", "0.6438128", "0.6432904", "0.64217126", "0.641816", "0.64164174", "0.6412385", "0.64122534", "0.6401091", "0.63998616", "0.63789153", "0.6378632", "0.63657653", "0.63442403", "0.63393533", "0.6334216", "0.63318634", "0.63263553", "0.63263553", "0.63263553", "0.63263553", "0.63263553", "0.63263553", "0.63182586", "0.63122344", "0.6312045", "0.62984914", "0.6286657", "0.6273454", "0.6270745", "0.62523234", "0.6238314", "0.6234463", "0.62339824", "0.6233671", "0.6225189", "0.62244576", "0.6215591" ]
0.0
-1
Run a command, returning output. Raise an exception if it fails.
def run_or_die(command):
    (status, stdio) = commands.getstatusoutput(command)
    if status != 0:
        raise Exception("command '%s' failed with exit status %d and output '%s'" % (command, status, stdio))
    return stdio
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run(command):\n\n out = \"\"\n try:\n out = str(subprocess.check_output(command,\n shell=True,\n universal_newlines=True))\n except subprocess.CalledProcessError as e:\n raise RuntimeError(\n 'Failed to execute command %s: %s' % (e.cmd, e.returncode))\n else:\n return out", "def run(self, command):\n try:\n print(f\"RUNNING: {command}\")\n print(\"-\" * 80)\n print(subprocess.check_output(command, shell=True).decode('utf-8'))\n except subprocess.CalledProcessError as e:\n print(f\"ERROR calling '{command}'\")\n print(\"-\" * 20)\n print(e.output and e.output.decode('utf-8'))\n sys.exit(-1)", "def run_command(cmd, input=None, ignore_error=False):\n print(\"Running command: {}\".format(cmd))\n try:\n output = subprocess.check_output(\n cmd.split(),\n universal_newlines=True,\n input=input,\n )\n print(output)\n return output\n except subprocess.CalledProcessError as e:\n print(\"Failed to run {}. The error output is:\\n{}\".format(cmd, e.output))\n if not ignore_error:\n raise", "def run(self,command):\n #--------------------------------------------------------------------------\n res = subprocess.run(command,stdout=subprocess.DEVNULL,stderr=subprocess.STDOUT).returncode\n return res", "def run(self, command):\n log.debug(\"Executing command: \" + str(command))\n\n output, error = \"\", \"\"\n p = subprocess.Popen(command.full_command, stdout=subprocess.PIPE)\n\n for line in p.stdout:\n output += line\n log.debug(line)\n stdout, error = p.communicate()\n\n return output, error", "def run_command(cmd, dry_run=False, verbose=False, allow_error=False):\n if dry_run or verbose:\n print_info(quoted_cmd(cmd))\n if dry_run:\n return \"\", \"\"\n\n process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n stdout, stderr = process.communicate()\n\n if allow_error or process.returncode == 0:\n return stdout.decode(\"utf-8\"), stderr.decode(\"utf-8\")\n else:\n raise RuntimeError(\n f\"error running command: {quoted_cmd(cmd)}\\n{stderr.decode('utf-8')}\"\n )", "def run_command(cmd):\n if cmdl_args.debug:\n print(f'==DEBUG== Executing {cmd}')\n # In debug mode, keep the output. 
Otherwise, redirect it to devnull.\n out = tempfile.NamedTemporaryFile(suffix='.out', prefix=f'{cmd[0]}_', dir='./', delete=False)\n err = tempfile.NamedTemporaryFile(suffix='.err', prefix=f'{cmd[0]}_', dir='./', delete=False)\n else:\n out = open(os.devnull, 'w')\n err = open(os.devnull, 'w')\n\n return_value = subprocess.call(cmd, stdout=out, stderr=err)\n\n out.close()\n err.close()\n\n if return_value == 0:\n None\n else:\n print(f'==ERROR== {cmd} failed with return value {return_value}')\n\n if cmdl_args.debug:\n print(f'==DEBUG== See {out.name} and {err.name} for more details about the command execution.')\n\n return return_value", "def run(cmd, dieOnError=True):\n\n\tps = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)\n\texitcode = ps.returncode\n\tstdout,stderr = ps.communicate()\n\treturn exitcode, stdout, stderr", "def call(command):\n cmd = join_and_sanitize(command)\n proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n shell=True)\n result, _err = proc.communicate()\n return result", "def run_command(command):\n process = subprocess.Popen(\n command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n response, error = process.communicate()\n return response.decode().rstrip('\\n'), error.decode().rstrip('\\n')", "def _execute(self, cmd):\r\n stdout, stderr, return_code = self._remote_client.run_remote_cmd(cmd)\r\n if return_code:\r\n raise exceptions.ArgusError(\r\n \"Command {command!r} failed with \"\r\n \"return code {return_code!r}\"\r\n .format(command=cmd,\r\n return_code=return_code))\r\n return stdout, stderr", "def run(cmd):\n print(cmd)\n r = os.system(cmd)\n if r:\n print(\"ERROR: command returned {0}\".format(r))\n sys.exit(r)", "def _execute_command(self, cmd):\n LOG.info(\"Executing: %s\" % cmd)\n status, stdout, stderr = self.client.execute(cmd)\n if status:\n raise RuntimeError(\"Failed executing command: \",\n cmd, stderr)\n return stdout", "def call(self, cmd):\n exitcode, _stdout, _stderr = self.run(cmd, nonzero_e = None)\n return exitcode", "def run(cmd, directory, fail_ok=False, verbose=False):\n if verbose:\n print(cmd)\n p = subprocess.Popen(cmd,\n cwd=directory,\n stdout=subprocess.PIPE)\n (stdout, _) = p.communicate()\n if p.returncode != 0 and not fail_ok:\n raise RuntimeError('Failed to run {} in {}'.format(cmd, directory))\n return stdout", "def run(self, command):\n try:\n output = self.session.send_command(command)\n output = output.split('\\n')\n return output\n except Exception as e:\n backup_logger.exception(f'Exception {e} occurred while running command. 
'\n f'Trying again with send_command_expect')\n try:\n output = self.session.send_command(command, expect_string='#')\n output = output.split('\\n')\n return output\n except Exception as e:\n backup_logger.exception(f'Exception {e} occurred again while running command with expect.')\n return None", "def shell_call(cmd):\n try:\n x = subprocess.run(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True\n )\n ret = (x.returncode, str(x.stdout, \"utf-8\"), str(x.stderr, \"utf-8\"))\n return ret\n except subprocess.SubprocessError as e:\n logger.error(\"System error running command: \" + str(cmd))\n logger.error(str(e.output))\n raise RuntimeError()", "def run_command(command):\n\n return subprocess.run(\n command,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n universal_newlines=True)", "def run(cmd):\n print ' '.join(cmd)\n try:\n check_call(cmd)\n except CalledProcessError as cpe:\n print \"Error: return code: \" + str(cpe.returncode)\n sys.exit(cpe.returncode)", "def execute(self):\n\n (output, error) = self.process.communicate()\n\n if self.process.returncode != 0:\n decoded = self.decode_output(error)\n\n if not decoded:\n return \"Unkown error. for %s\" % (self.command)\n\n print(decoded)\n exit(1)\n return self.decode_output(output)", "def _run_command(command, cwd, output=True, decode=False, loop=None):\n loop = loop or asyncio.get_event_loop()\n\n if output:\n out = asyncio.subprocess.PIPE\n else:\n out = None\n\n process = yield from asyncio.create_subprocess_shell(\n command, loop=loop, stdout=out, stderr=out,\n limit=GIT_COMMAND_BUFFER_SIZE, cwd=cwd)\n\n if output:\n # communicate() also waits on the process termination\n stdout, stderr = yield from process.communicate()\n if decode:\n stdout = stdout.decode(sys.getdefaultencoding())\n stderr = stderr.decode(sys.getdefaultencoding())\n else:\n stdout, stderr = None, None\n yield from process.wait()\n\n if process.returncode:\n raise base.AiogitException(\n (stderr or stdout).decode(sys.getdefaultencoding()))\n\n return stdout, stderr", "def run_command(cmd):\n proc = subprocess.Popen(\n cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n )\n stdout, stderr = proc.communicate()\n return proc.returncode, stdout, stderr", "def run_command(command, raise_on_try=True):\n try:\n p = subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n close_fds=True)\n outdata,errdata = p.communicate()\n err = p.wait()\n except OSError, message:\n raise RuntimeError, \"%s subprocess error:\\n %s\" % \\\n (command, str(message))\n if err != 0 and raise_on_try:\n raise RuntimeError, '%s failed with exit code %d\\n%s' % \\\n (str(command), err, errdata)\n return outdata,errdata", "def _run_cmd(self, cmd, args=[], allow_fail=True, cwd=None):\n cwd = cwd or self.package_dir\n result = _run_cmd([cmd] + list(args), cwd=cwd)\n if result[2] and not allow_fail:\n raise Exception(\"Command failed retcode=%s\" % result[2])\n return result", "def call_command(command, env=None, cwd=None):\n\n try:\n LOG.debug('Run %s', ' '.join(command))\n out = subprocess.check_output(command,\n bufsize=-1,\n env=env,\n stderr=subprocess.STDOUT,\n cwd=cwd)\n LOG.debug(out)\n return out, 0\n except subprocess.CalledProcessError as ex:\n LOG.debug('Running command \"%s\" Failed.', ' '.join(command))\n LOG.debug(str(ex.returncode))\n LOG.debug(ex.output)\n return ex.output, ex.returncode\n except OSError as oerr:\n LOG.warning(oerr.strerror)\n return oerr.strerror, oerr.errno", "def run(cmd):\n cmd = str(cmd)\n\n if 
env['verbose']:\n sys.stdout.write('--> %s\\n' % cmd)\n\n cmd_list = shlex.split(cmd)\n\n p = subprocess.Popen(\n cmd_list,\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE\n )\n\n return p.communicate()", "def execute(command):\n process = subprocess.Popen(command, stdout=subprocess.PIPE)\n return process.communicate()", "def run(cmd: str, verbose: bool = False):\n\n if verbose:\n print(cmd)\n\n out = subprocess.check_output(cmd, shell=True).decode(\"utf-8\")\n\n if verbose:\n print(out)\n\n return out", "def run(self, command):\r\n boto.log.debug('running:%s on %s' % (command, self.server.instance_id))\r\n status = 0\r\n try:\r\n t = self._ssh_client.exec_command(command)\r\n except paramiko.SSHException:\r\n status = 1\r\n std_out = t[1].read()\r\n std_err = t[2].read()\r\n t[0].close()\r\n t[1].close()\r\n t[2].close()\r\n boto.log.debug('stdout: %s' % std_out)\r\n boto.log.debug('stderr: %s' % std_err)\r\n return (status, std_out, std_err)", "def run_command(command, **kwargs):\n command_str = ' '.join(command)\n logging.info('Running: {}'.format(command_str))\n try:\n exit_status = call(command, **kwargs)\n except OSError as e:\n message = \"Command '{}' failed due to O/S error: {}\".format(command_str, str(e))\n raise CommandError({\"message\": message})\n if exit_status != 0:\n message = \"Command '{}' failed with non-zero exit status: {}\".format(command_str, exit_status)\n raise CommandError({\"message\": message})", "def cli(self, cmd):\n p1 = Popen(cmd,stdout=PIPE, shell=True)\n output = p1.communicate()\n if p1.returncode != 0 :\n print('error returned from shell command: %s was %s'%(cmd,output[0]))\n return output[0],p1.returncode", "def runCommand(command):\n process = subprocess.Popen(command, shell=True,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n\n return process.communicate()", "def execute(cmd) :\n return os.system( cmd )", "def _run_command(command):\n full_command = \"xcrun simctl %s\" % (command,)\n # Deliberately don't catch the exception - we want it to bubble up\n return subprocess.check_output(full_command, universal_newlines=True, shell=True)", "def run_subprocess(command):\n if verbose:\n print \"Running \" + str(command)\n proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n output = proc.communicate()[0]\n if verbose:\n print \"Output: \" + output\n\n if proc.returncode != 0:\n raise CalledProcessError(command, proc.returncode, output)\n else:\n return output", "def execute(self, command):\n class_name = self.__class__.__name__\n logger.debug(\"%s: Executing: %s\", class_name, command)\n\n exit_code, stdout, stderr = self._execute(command)\n\n logger.debug(\"%s: Command returned %d\", class_name, exit_code)\n if stdout != '':\n for line in stdout.split('\\n'):\n logger.debug(\"%s: stdout: %s\", class_name, line)\n if stderr != '':\n for line in stderr.split('\\n'):\n logger.debug(\"%s: stderr: %s\", class_name, line)\n\n return exit_code, stdout, stderr", "def run(cmd, proc_stdout = sys.stdout, proc_stderr = sys.stderr,\n check = True):\n print cmd\n proc = subprocess.Popen(cmd, shell=True, bufsize=-1,\n stdout=proc_stdout, stderr=proc_stderr)\n output, errors = proc.communicate()\n sts = proc.wait()\n if check is True and sts != 0:\n raise RuntimeError(\"Command: %s exited with non-zero status %i\" % (cmd, sts))\n return output, errors", "def do_command(): # pragma: no cover\n args = parse_args(sys.argv[1:])\n status = run(args)\n sys.exit(status)", "def run(cmd, shell=False, cwd=None):\n 
try:\n out = check_output(cmd, shell=shell, cwd=cwd, stderr=STDOUT)\n except CalledProcessError as ex:\n return ex.returncode, ex.output\n else:\n return 0, out", "def run_command(self,command):\n from subprocess import Popen, PIPE, STDOUT\n if command == '':\n raise RuntimeError('no command for run_command :(')\n # print 'Running: ', command #debug\n proc = Popen([command], shell=True, stderr=PIPE)\n proc.wait()\n exitcode = proc.returncode\n if exitcode != 0:\n # print exitcode,'label:', self.calc_dir\n error='%s exited with error code %i in %s' % (\n command,exitcode,self.calc_dir)\n stdout,stderr = proc.communicate()\n print 'shell output: ',stdout,stderr\n raise RuntimeError(error)\n return 0", "def RunCommand(cmd):\n logging.debug(\"Running cmd %s\", cmd)\n\n p = subprocess.Popen(cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=True)\n o, e = p.communicate()\n s = p.returncode\n\n if s != 0:\n return (s, e)\n\n return (s, o)", "def run_command(cmd, dry=False):\n logger.debug(\"Running command: {}\".format(cmd))\n\n if dry:\n logger.debug(\"Dry mode specified, not actually running command\")\n return\n\n p = subprocess.Popen(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True\n )\n stdout, stderr = p.communicate()\n\n if p.returncode == 0:\n return stdout + stderr\n else:\n raise RuntimeError(\"Error running command {}: {}\".format(cmd, str(stderr)))", "def run_cmd(cmd):\n logging.debug('Run command \"'+cmd+'\"')\n try:\n process = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\n process.check_returncode()\n\n except Exception as e:\n logging.exception(str(e) +\"\\nCMD_SHELL : \"+cmd+\"\\nSTDOUT : \"+process.stdout.decode()+\"\\nSTDERR : \"+process.stderr.decode(), exc_info=True)\n #logging.critical(\"{CDM : \"+cmd+\", \"} : \"+cmd)\n #logging.critical(\"STDOUT : \"+process.stdout.decode())\n #logging.critical(\"STDERR : \"+process.stderr.decode())\n #raise e\n\n return process.stdout.decode()", "def run(self, cmd):\n log = logging.getLogger(self.name)\n try:\n retcode = subprocess.call(cmd, shell=True, env=self.augmented_environment())\n\n if retcode < 0:\n log.error('Command received signal %s: %s' % (-retcode, cmd))\n raise zc.buildout.UserError('System error')\n elif retcode > 0:\n log.error('Command failed with exit code %s: %s' % (retcode, cmd))\n raise zc.buildout.UserError('System error')\n except OSError as e:\n log.error('Command failed: %s: %s' % (e, cmd))\n raise zc.buildout.UserError('System error')", "def _run_shell(self, cmd):\n self._logger.info(\"Running command\\n{}\".format(\" \".join(cmd)))\n\n out = subprocess.Popen(\n cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n stdout, stderr = out.communicate()\n result = stdout.decode(encoding='utf-8')\n if stderr:\n error_msg = stderr.decode(encoding='utf-8')\n print(error_msg)\n raise Exception(error_msg)\n\n return result", "def _run_cmd(self, cmd, log_level_debug=False):\n logger.info(\"## Running command: ##\")\n logger.info(cmd)\n\n proc = Popen(cmd, env=self._env, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n stdout, stderr = proc.communicate()\n return_code = proc.returncode\n\n logger.info(\"Return Code: {0}\\n\".format(return_code))\n\n if log_level_debug:\n logger.setLevel(logging.DEBUG)\n\n stdout_data = stdout.decode()\n if len(stdout_data):\n logger.debug(\"### stdout ###\")\n logger.debug(\"{0}\".format(stdout_data))\n\n stderr_data = stderr.decode()\n if len(stderr_data):\n logger.debug(\"### stderr ###\")\n 
logger.debug(\"{0}\".format(stderr.decode()))\n\n return stdout_data, stderr_data", "def Run(command_line):\n print >> sys.stderr, command_line\n return subprocess.check_output(command_line, shell=True)", "def run_cmd(self, cmd):\n command = \" \".join(cmd)\n print(command)\n logging.info(\"Running command \" + command)\n cmdProcess = subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT,\n shell=True)\n for line in cmdProcess.stdout:\n logging.info(line.decode(\"utf-8\").rstrip())\n cmdProcess.wait()\n logging.info('return code: ' + str(cmdProcess.returncode))\n if cmdProcess.returncode != 0:\n raise ValueError('Error in running command with return code: '\n + command\n + str(cmdProcess.returncode) + '\\n')\n logging.info(\"command \" + command + \" ran successfully\")\n return \"success\"", "def run_command(cmd):\n return subprocess.call(cmd, shell=True)", "def run_shell_command(command, checkReturnValue=True, verbose=False):\n process = subprocess.Popen(\n command,\n shell=True,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT,\n universal_newlines=True,\n bufsize=1)\n outText = \"\"\n\n for line in iter(process.stdout.readline, ''):\n if verbose:\n sys.stdout.write(line)\n outText += line\n\n process.communicate()[0]\n \"\"\"\n returnValue = process.returncode\n if checkReturnValue and (returnValue != 0):\n raise Exception(outText)\n \"\"\"\n return outText", "def RunCommand(command):\n proc = subprocess.Popen(command, stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT, shell=False)\n return proc.communicate()[0]", "def run_command(self, cmd, expects=0, shell=False, stdout=PIPE, stderr=PIPE):\n \n # If the command argument is a string\n if isinstance(cmd, str):\n cmd = cmd.split(' ')\n \n # Open the process\n try:\n proc = Popen(cmd, stdout=stdout, stderr=stderr, shell=shell)\n out, err = proc.communicate()\n \n # Make sure the expected return code is found\n if not proc.returncode == expects:\n self.die('Failed to run command \\'{0}\\', ERROR={1}'.format(str(cmd), err))\n \n # Return exit code / stdout / stderr\n return proc.returncode, out, err\n except Exception as e:\n self.die('Failed to run command \\'{0}\\': ERROR={1}'.format(str(cmd), str(e)))", "def _call_command(wrapper, command, no_out=False):\n\n child = subprocess.Popen(command.split(),\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n (out, err) = child.communicate()\n ret = child.returncode\n\n if not (no_out and ret == 0):\n for line in (out + err).splitlines():\n wrapper.pm(line.decode(\"utf-8\"))\n\n if ret != 0:\n if ret < 0:\n cause = \"signal\"\n ret *= -1\n else:\n cause = \"status\"\n\n wrapper.pm(messages[\"process_exited\"].format(command, cause, ret))\n\n return (ret, out)", "def run_cmd(cmd):\n return check_output(cmd, shell=True).decode('utf-8')", "def subprocess_call(command):\n try:\n return_out = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)\n if return_out.strip():\n print return_out\n except subprocess.CalledProcessError, err:\n msg = \"Subprocess call failed!\"\\\n \"\\n command : {0}\"\\\n \"\\n console output: \\n\\n{1}\"\\\n \"\\n error message : {2}\"\\\n \"\\n arguments : {3}\"\\\n \"\\n return-code : {4}\\n\"\\\n .format(err.cmd, err.output, err.message, err.args, err.returncode)\n raise Exception(msg)\n\n return return_out", "def run(command: str,\n **kwargs) -> str:\n if 'errors' not in kwargs:\n kwargs['errors'] = 'ignore'\n log.info(command)\n try:\n cmd_list = shlex.split(command)\n result = subprocess.run(cmd_list, check=True,\n 
stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n **kwargs)\n except subprocess.CalledProcessError as err:\n raise ValueError(f'failed to execute {command}: {err.stderr}')\n except FileNotFoundError as err:\n raise ValueError(f'failed to execute {command}: file not found')\n return result.stdout # No split. See __doc__.", "def run_command(self, command, timeout=None, stdout=True):\n print('Running \"{}\"...'.format(command))\n output = self._shell.run_command(\n command, timeout=timeout, async_=False\n )\n if stdout:\n print(output)\n print(\"Done!\")\n return output", "def run_command(cmd):\n if env.PY2 and isinstance(cmd, unicode):\n cmd = cmd.encode(sys.getfilesystemencoding())\n\n # In some strange cases (PyPy3 in a virtualenv!?) the stdout encoding of\n # the subprocess is set incorrectly to ascii. Use an environment variable\n # to force the encoding to be the same as ours.\n sub_env = dict(os.environ)\n encoding = output_encoding()\n if encoding:\n sub_env['PYTHONIOENCODING'] = encoding\n\n proc = subprocess.Popen(\n cmd,\n shell=True,\n env=sub_env,\n stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT\n )\n output, _ = proc.communicate()\n status = proc.returncode\n\n # Get the output, and canonicalize it to strings with newlines.\n if not isinstance(output, str):\n output = output.decode(output_encoding())\n output = output.replace('\\r', '')\n\n return status, output", "def execute(self, *args, **options):\n show_traceback = options.get('traceback', False)\n\n try:\n self.stdout = options.get('stdout', sys.stdout)\n self.stderr = options.get('stderr', sys.stderr)\n\n output = self.handle(*args, **options)\n if output:\n self.stdout.write(output)\n\n except CommandError as exception:\n if show_traceback:\n traceback.print_exc()\n else:\n self.stderr.write(\n smart_str(self.style.ERROR('Error: %s\\n' % exception)))\n sys.exit(1)", "def run_cmd(cmd, args, path=None, raise_error=True):\n\n if path is not None:\n # Transparently support py.path objects\n path = str(path)\n\n p = sp.Popen([cmd] + list(args), stdout=sp.PIPE, stderr=sp.PIPE,\n cwd=path)\n streams = tuple(s.decode('latin1').strip() for s in p.communicate())\n return_code = p.returncode\n\n if raise_error and return_code != 0:\n raise RuntimeError(\n \"The command `{0}` with args {1!r} exited with code {2}.\\n\"\n \"Stdout:\\n\\n{3}\\n\\nStderr:\\n\\n{4}\".format(\n cmd, list(args), return_code, streams[0], streams[1]))\n\n return streams + (return_code,)", "def cmd_run(cmd):\n return subprocess.run(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n check=True).stdout.decode(\"utf-8\")", "def execute(cmd, fail_ok=False, merge_stderr=False):\n cmdlist = shlex.split(cmd)\n result = ''\n result_err = ''\n stdout = subprocess.PIPE\n stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE\n proc = subprocess.Popen(cmdlist, stdout=stdout, stderr=stderr)\n result, result_err = proc.communicate()\n result = result.decode('utf-8')\n if not fail_ok and proc.returncode != 0:\n raise exceptions.CommandFailed(proc.returncode, cmd, result,\n result_err)\n return result", "def call(command, working_directory=BASE_DIR):\r\n LOG.info(command)\r\n p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=working_directory, shell=True)\r\n out, err = p.communicate()\r\n return (out, err)", "def run_command(cmd, shell=False):\n\tlog.debug(\"Running command: \" + ' '.join(cmd))\n\tprocess = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=shell)\n\tcmd_out = ''\n\tcmd_err = ''\n\twhile 
True:\n\t\tout = process.stdout.readline()\n\t\tif out == '' and process.poll() != None:\n\t\t\tcmd_err = process.stderr.read()\n\t\t\tbreak\n\t\tif out != '':\n\t\t\tsys.stdout.write(out)\n\t\t\tsys.stdout.flush()\n\t\t\tcmd_out += out\n\t\t\t\n\tif cmd_err != '':\n\t\tlog.warning(\"Error running command: \" + cmd_err)\n\treturn cmd_out, cmd_err, process.returncode", "def subprocess_run(cmd):\n print(shlex.join(cmd))\n try:\n ret = subprocess.run(cmd, capture_output=True,\n text=True, env=os.environ.copy(), check=True)\n if (ret.stdout):\n print(ret.stdout)\n return ret\n except subprocess.CalledProcessError as e:\n if (e.stderr):\n print(e.stderr)\n raise e", "def run(cmd: List[str]) -> int:\n logger.debug('cmd: %s', ' '.join(cmd))\n child = Popen(cmd, stdout=PIPE, stderr=PIPE)\n stdoutdata, stderrdata = child.communicate()\n\n if stdoutdata.strip():\n log_std('stdout', stdoutdata.decode(),\n logging.DEBUG if child.returncode == 0 else logging.ERROR)\n\n if stderrdata.strip():\n log_std('stderr', stderrdata.decode(), logging.ERROR)\n\n logger.debug(\"returncode %s\", child.returncode)\n return child.returncode", "def run_cmd( command ):\n return subprocess.check_output( command ).decode( \"utf-8\" )", "def run_command(cmd_str):\n cmd = Command(\"Local Command\", cmd_str)\n cmd.run(validateAfter = True)\n results = cmd.get_results()\n\n if results.rc != 0:\n return results.stderr.strip()\n else:\n return results.stdout.strip()", "def _execute(self, command):\n _, stdout, stderr = self.ssh_client.exec_command(command)\n exit_code = stdout.channel.recv_exit_status()\n stdout = stdout.read().decode().strip()\n stderr = stderr.read().decode().strip()\n\n return exit_code, stdout, stderr", "def _exec_cmd(self, cmd):\n proc = subprocess.Popen(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\n (out, err) = proc.communicate()\n ret = proc.returncode\n logging.debug('cmd: %s, stdout: %s, stderr: %s, ret: %s', cmd, out,\n err, ret)\n if ret == 0:\n return out\n else:\n raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)", "def runCommand(command):\n None", "def check_output(command):\n process = Popen(command, shell=True, stdout=PIPE)\n output, err = process.communicate()\n if process.returncode == 0: # success\n return output\n else:\n raise RuntimeError(\"Command {0} running unsuccessfully\".format(command))", "def execute(self, cmd, cwd=None, capture_output=False, env=None, raise_errors=True):\n logging.info('Executing command: {cmd}'.format(cmd=str(cmd)))\n stdout = subprocess.PIPE if capture_output else None\n process = subprocess.Popen(cmd, cwd=cwd, env=env, stdout=stdout)\n output = process.communicate()[0]\n returncode = process.returncode\n if returncode:\n # Error\n if raise_errors:\n raise subprocess.CalledProcessError(returncode, cmd)\n else:\n logging.info('Command returned error status %s', returncode)\n if output:\n logging.info(output)\n return returncode, output", "def exec_cmd(cmd):\n print(' '.join(str(e) for e in cmd))\n try:\n res = subprocess.run(cmd, capture_output=True, check=True)\n print(res.stdout.decode(\"utf8\"))\n return res\n except subprocess.CalledProcessError as err:\n logging.error(err.stderr)\n raise err", "def run(command_list: List[str]) -> str:\n r = subprocess.run(command_list, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, encoding='utf-8')\n logging.debug(f\"subprocess.run({command_list}) got {r.returncode}.\")\n if r.returncode != 0:\n logging.error(f\"subprocess.run({command_list}) failed with code {r.returncode}.\")\n 
return f\"Error {r.returncode} trying to run({command_list})\"\n return r.stdout", "def _run_command(self, command=None, message=None, logger=None, log_stdout=True, shell=False, timeout=None, expected_failure=False):\n try:\n if not timeout:\n timeout = self._timeout\n\n if self.tc and 'spt' in command:\n pdata = self.tool_communicate(command=command, message=message, logger=logger,\n log_output=log_stdout, expected_failure=expected_failure)\n else:\n cmd = Command(command)\n pdata = cmd.run(message=message, logger=logger, log_stdout=log_stdout,\n shell=shell, timeout=timeout, expected_failure=expected_failure)\n return pdata\n\n except TimeoutExpiredError as exc:\n raise exc\n\n except (RuntimeError, AttributeError):\n raise RuntimeError", "def run_command(command, cwd=None):\n def decode_when_needed(result):\n \"\"\" check_output returns bytes or string depend on python version \"\"\"\n return result.decode('utf-8') if isinstance(result, bytes) else result\n\n try:\n directory = os.path.abspath(cwd) if cwd else os.getcwd()\n logging.debug('exec command %s in %s', command, directory)\n output = subprocess.check_output(command,\n cwd=directory,\n stderr=subprocess.STDOUT)\n return decode_when_needed(output).splitlines()\n except subprocess.CalledProcessError as ex:\n ex.output = decode_when_needed(ex.output).splitlines()\n raise ex", "def _exec_cmd(self, cmd, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):\n result = job.run(cmd, ignore_status=True, timeout=timeout)\n ret, out, err = result.exit_status, result.stdout, result.stderr\n\n logging.debug(\"cmd: %s, stdout: %s, stderr: %s, ret: %s\", cmd, out,\n err, ret)\n if \"Result: Parcel\" in out:\n return parsing_parcel_output(out)\n if ignore_status:\n return out or err\n if ret == 1 and DEVICE_NOT_FOUND_REGEX.match(err):\n raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)\n else:\n return out", "def run_cmd(self, cmd):\r\n if 'shell_id' in dir(self):\r\n #checking for the shell_id created in winrm object\r\n command_id = self.conn.run_command(self.shell_id, cmd)\r\n std_out, std_err, status_code = self.conn.get_command_output(\r\n self.shell_id, command_id)\r\n #runs the command and returns output,error,statuscode\r\n return std_out, std_err, status_code", "def call(command, check=True, pipe_stdout=False, retries=0, **run_kwargs):\n kwargs = dict(universal_newlines=True, stderr=_PIPE)\n kwargs.update(run_kwargs)\n\n if pipe_stdout:\n kwargs.setdefault('stdout', _PIPE)\n\n retried = 0\n while True:\n result = _run(command, **kwargs)\n\n if result.returncode and retried < retries:\n retried += 1\n continue\n break\n\n if check and result.returncode:\n raise _RuntimeException('\\n'.join((\n 'Error while running:', ' '.join(command), '',\n (result.stderr or result.stdout or\n warn('See stdout for more information.')).strip())))\n\n return result", "def run_with_output(self, cmd, end_strs=None, timeout=301, timeout_exception=True, api_call='write'):\n if api_call == 'write':\n self.write(cmd)\n out = ''\n else:\n out = self.runsingle(cmd)\n time.sleep(1)\n out += self.gather_output(cmd, out, end_strs, timeout, timeout_exception) # gather last of data buffer\n return out", "def run_command(cmd, cmd_input=None, ok_exit_codes=None):\n proc = make_subprocess(cmd, stdout=True, stderr=True, stdin=True,\n close_fds=True)\n return finish_subprocess(proc, cmd, cmd_input=cmd_input,\n ok_exit_codes=ok_exit_codes)", "def run(self, cmd, out_display=None, err_display=None, **kwargs):\n if os.name == 'nt':\n loop = 
asyncio.ProactorEventLoop() # for subprocess' pipes on Windows\n asyncio.set_event_loop(loop)\n else:\n loop = asyncio.get_event_loop()\n result = loop.run_until_complete(self.arun(cmd, out_display, err_display, **kwargs))\n return result", "def systemCommand(command):\n\n commStatus, commOut = commands.getstatusoutput(command)\n # If our command fails, abort entirely and notify CloudKick\n if commStatus != 0:\n sys.stderr.write('Error: Failure when executing the following ')\n sys.stderr.write(\"command: '%s'\\n\" % (command,))\n sys.stderr.write(\"Exit status: %d\\n\" % (commStatus,))\n sys.stderr.write(\"Output: %s\\n\\n\" % (commOut,))\n sys.stderr.write('status err System command failure: ')\n sys.stderr.write('%s\\n' % (command,))\n sys.exit(1)\n # If we get a 0 exit code, all is well. Return the data.\n else:\n return commOut", "def run_cmd(cmd, **kwargs):\n log.info(f\"Executing command: {cmd}\")\n if isinstance(cmd, str):\n cmd = shlex.split(cmd)\n r = subprocess.run(\n cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdin=subprocess.PIPE,\n **kwargs\n )\n log.debug(f\"CMD output: {r.stdout.decode()}\")\n if r.stderr:\n log.error(f\"CMD error:: {r.stderr.decode()}\")\n if r.returncode:\n raise CommandFailed(\n f\"Error during execution of command: {cmd}.\"\n f\"\\nError is {r.stderr.decode()}\"\n )\n return r.stdout.decode()", "def Run(cmd):\n return os.popen(cmd).read()", "def run_cmd(cmd):\n cmdl = cmd.split(\" \")\n try:\n p = subprocess.Popen(cmdl, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n so, se = p.communicate()\n except subprocess.CalledProcessError, e:\n sys.stderr.write(\"Error encountered in running '\" + cmd +\n \"'. Return status is '\" + str(e.returncode) + \"'\\n\")\n sys.exit(1)\n except:\n sys.stderr.write(\"Unknown error encountered in running 'qhost -j -xml'.\\n\")\n sys.exit(1)\n return so", "def command(self, command, out=False, err=False):\n \n if out:\n std_out = subprocess.PIPE\n else:\n std_out = None\n \n if not err:\n std_err = subprocess.PIPE\n else:\n std_err = None\n \n \n proc = subprocess.Popen(command, stdout = std_out, stderr=std_err)#std_out)\n out, err = proc.communicate()\n \n return out, err", "def run_command(cmd, redirect_output=True, check_exit_code=True):\n # The subprocess module is used to spawn child processes\n if redirect_output:\n stdout = subprocess.PIPE\n else:\n stdout = None\n # The cwd argument runs the child process with ROOT as its working directory\n proc = subprocess.Popen(cmd, cwd=ROOT, stdout=stdout)\n # communicate() returns (stdoutdata, stderrdata)\n output = proc.communicate()[0]\n if check_exit_code and proc.returncode != 0:\n # A non-zero return code means the command failed\n raise Exception('Command \"%s\" failed.\\n%s' % (' '.join(cmd), output))\n return output", "def execute_stdout(command):\n try:\n output = subprocess.check_output([command], stderr=subprocess.STDOUT,\n shell=True)\n return 0, output\n except subprocess.CalledProcessError as excp:\n return excp.returncode, excp.output", "def run_command(*args):\n cmd = sp.Popen(args, shell=True, stdout=sp.PIPE, stderr=sp.STDOUT, encoding='utf-8')\n stdout, _ = cmd.communicate()\n\n if cmd.returncode != 0:\n raise ValueError(f\"Running `{args[0]}` failed with return code {cmd.returncode}, output: \\n {stdout}\")\n else:\n return stdout.strip('\\n')", "def runCommand(self, cmd, stdin=None, env=None):\n\n\t mycmd=subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\n\t output, error=mycmd.communicate()\n\t while not mycmd.wait():\n\t \t# do stuff\n\t \treturn 0\n\n\n\n\t #if not isList(cmd):\n\t #cmd = shlex.split(cmd)\n\t #opts = 
dict(stderr=subprocess.PIPE, stdout=subprocess.PIPE)\n\t #if env:\n\t # opts.update(env=env)\n\t #if stdin:\n\t # opts.update(stdin=subprocess.PIPE)\n\t # stdout, stderr=subprocess.Popen(cmd, **opts).communicate(stdin)\n\t #else :\n\t # stdout, stderr=subprocess.Popen(cmd, **opts).communicate()\n\t #return stdout, stderr", "def exec_command(cmd):\n exit_code, stdout_text, stderr_text = exec_test_command(cmd)\n\n expected_exit_code = 0\n\n if expected_exit_code != exit_code:\n print(\"cmd = %s [%d != %d]\" % (str(cmd), expected_exit_code, exit_code))\n print(\"STDOUT= %s\" % stdout_text)\n print(\"STDERR= %s\" % stderr_text)\n\n assert expected_exit_code == exit_code\n return stdout_text", "async def _run_cmd(self, cmd, timeout=5):\n try:\n self._flush_buffer()\n self.pexpect_child.sendline(cmd)\n ret = self.pexpect_child.expect_exact(\n [self.cmd_prompt, pexpect.TIMEOUT], timeout=timeout\n )\n stdout = self.parse_cmd_output(self.pexpect_child.before) if ret == 0 else \"\"\n self.pexpect_child.sendline(\"echo $?\")\n ret = self.pexpect_child.expect_exact(\n [self.cmd_prompt, pexpect.TIMEOUT], timeout=timeout\n )\n exit_status = self.parse_cmd_output(self.pexpect_child.before) if ret == 0 else -1\n try:\n exit_status = int(exit_status)\n except ValueError:\n exit_status = -1\n return exit_status, stdout\n except Exception as e:\n self.applog.exception(\"Exception occured --> _run_command\", exc_info=e)\n raise", "def checked_subprocess_run(command):\n args = shlex.split(command)\n completed = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n\n out = completed.stdout.decode()\n err = completed.stderr.decode()\n\n # Print the subprocess output to include in the test output\n print(out, file=sys.stdout)\n print(err, file=sys.stderr)\n\n # After printing the output, raise an exception on a non-zero exit status.\n completed.check_returncode()\n\n return out, err", "def system_command(cmd, logger, throw_exception=True, return_output=False, issue_error=True, timeout=None):\n\n # launch command\n status = 0\n try:\n logger.debug('SysCmd: '+cmd)\n s = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.STDOUT)\n except Exception as e:\n if issue_error:\n logger.error(e)\n logger.error('Problem with launching system command: '+cmd)\n status = -1\n\n # wait for command termination\n if not status:\n try:\n if timeout_supported and timeout:\n s.wait(timeout=int(timeout))\n else:\n s.wait()\n except subprocess.TimeoutExpired:\n logger.error('Timeout for system command: '+cmd)\n s.kill()\n s.wait()\n status = -2\n except Exception as e:\n logger.error(e)\n logger.error('Problem with waiting for system command: '+cmd)\n status = -3\n\n # wait for command termination and log output to logger\n lines=''\n if status != -1:\n while True:\n line = s.stdout.readline()\n if not line:\n break\n lines += line.decode('ascii')\n if not return_output:\n logger.debug(' Out: '+line.rstrip())\n if not status:\n status = s.returncode\n\n # trow exception if requested\n if status: \n if throw_exception:\n raise StopError('Error with system command: '+cmd)\n else:\n if issue_error:\n logger.error('Error with system command: '+cmd)\n\n if return_output:\n return (status,lines)\n else:\n return status", "def run(self):\n try:\n self.runCommand()\n except TortugaException as ex:\n print(ex.getErrorMessage())\n raise SystemExit(ex.getErrorCode())\n except SystemExit:\n raise\n except Exception as ex:\n print(str(ex))\n raise SystemExit(-1)", "def exec_command(cmd):\n with 
subprocess.Popen(\n cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT,\n shell=True) as p:\n stdout, _ = p.communicate()\n if p.returncode != 0:\n logger.error(stdout)\n return None\n\n return stdout", "def run_command(command: str) -> str:\n path_command = f\"PATH={shell_path()} {command}\"\n status, output = getstatusoutput(path_command)\n if status == 0:\n return output\n raise ShellError(status, output)", "def excecute_command(command):\n command = command.split(' ')\n process_respose = subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n output, error = process_respose.communicate()\n\n if output:\n output = output.decode('utf-8')\n if error:\n error = error.decode('utf-8')\n\n return output, error" ]
[ "0.7624283", "0.74565816", "0.7429833", "0.7369673", "0.7366693", "0.7308195", "0.72908425", "0.72728914", "0.72698665", "0.72682434", "0.724774", "0.7206386", "0.72033876", "0.7180701", "0.7153353", "0.71275276", "0.71244043", "0.71204966", "0.71131384", "0.7102511", "0.7098788", "0.7093381", "0.7062345", "0.70539755", "0.70496464", "0.7044533", "0.7042709", "0.7041325", "0.7030069", "0.70289475", "0.7020048", "0.70175064", "0.7014024", "0.7011644", "0.69929194", "0.698877", "0.6978145", "0.6973104", "0.69707805", "0.69701064", "0.69646865", "0.69609356", "0.69452125", "0.69375604", "0.6931659", "0.6930682", "0.69303566", "0.6910938", "0.69094753", "0.6909068", "0.69001085", "0.6890764", "0.68833345", "0.6872049", "0.68679285", "0.6867243", "0.68666726", "0.68623126", "0.6861342", "0.6860519", "0.68473345", "0.68429184", "0.68334013", "0.6825492", "0.6815349", "0.68113613", "0.68084604", "0.68074334", "0.6806517", "0.68002087", "0.6798225", "0.67961943", "0.67932415", "0.67923725", "0.67923725", "0.6791874", "0.6789506", "0.6788822", "0.678839", "0.6786864", "0.6784886", "0.6781922", "0.67795527", "0.6775856", "0.6769748", "0.67676276", "0.6767197", "0.67665726", "0.6766548", "0.67609054", "0.676044", "0.6760161", "0.67591697", "0.6758928", "0.67551774", "0.6751858", "0.67426825", "0.67425287", "0.67399067", "0.67316324" ]
0.70195824
31
Determines the number of files each node will process in a scatter-gather environment
import math


def number_of_files_per_node(files, number_of_nodes):
    """Determine how many files each node will process in a scatter-gather environment."""
    files_per_node = float(len(files)) / float(number_of_nodes)
    if files_per_node > 0.:
        # Any non-empty file list lands here: round down so no node is over-assigned.
        return int(math.floor(files_per_node))
    else:
        # Only reached for an empty file list; math.ceil(0.0) is still 0.
        return int(math.ceil(files_per_node))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fileCount(self):\n pass", "def countDataSize(self,filename):\n \n d = h5py.File(filename,'r')\n features = d['spectrometer/features'][:]\n select = self.selectData(features.astype(float), self.ifeature, d)\n N = len(features[select])\n d.close()\n\n N = (N//self.offsetLen) * self.offsetLen\n\n N = N*self.Nfeeds\n\n self.chunks += [[int(self.Nsamples), int(self.Nsamples+N)]]\n self.datasizes += [int(N/self.Nfeeds)]\n self.Nsamples += int(N)", "def numberFiles(self):\n return self.n", "def countDataSize(self,filename):\n \n try:\n d = h5py.File(filename,'r')\n except:\n print(filename)\n return \n\n N = 0\n scan_edges = d['level2/Statistics/scan_edges'][:]\n for (start,end) in scan_edges:\n N += (end-start)//self.offsetLen * self.offsetLen\n d.close()\n\n N = N*self.Nfeeds\n\n self.chunks += [[int(self.Nsamples), int(self.Nsamples+N)]]\n self.datasizes += [int(N/self.Nfeeds)]\n self.Nsamples += int(N)", "def __len__(self):\n return self._num_samples_per_file * len(self._files) // self._world_size", "def getFileCount(self) -> int:\n ...", "def num_partitions(self): # -> int:\n ...", "def number_of_workers():\n return (cpu_count() * 2) + 1", "def fileCounter(directory):", "def __number_of_files(self):\n self.__get_files()\n return len(self.files)", "def n_file(self):\n self.assert_is_dir_and_exists()\n n = 0\n for _ in self.select_file(recursive=True):\n n += 1\n return n", "def count_data_items(fileids, train=True):\n sizes = 28000 if train else 22500\n return len(fileids) * sizes", "def totalfiles(self):\n return len([sz for sz in self.iterate()])", "def num_partitions(self): # -> None:\n ...", "def n_total_files(self):\n return len(self.fileinfo)", "def number_of_workers():\n return (multiprocessing.cpu_count() * 2) + 1", "def get_num_chunks(self) -> int:", "def get_number_files(dataset):\n HOME = os.environ['HOME']\n # cmds = ['das_client.py', '--query', 'summary dataset=%s' % dataset, '--format=json',\n # '--key=%s/.globus/userkey.pem' % HOME, '--cert=%s/.globus/usercert.pem' % HOME]\n cmds = ['das_client.py', '--query', 'summary dataset=%s' % dataset, '--format=json']\n output = subprocess.check_output(cmds, stderr=subprocess.STDOUT)\n summary_dict = json.loads(output)\n return int(summary_dict['data'][0]['summary'][0]['nfiles'])", "def num_chunking_units(self):\n if self._source_paths:\n return len(self._source_paths)\n return 1", "def n_subfile(self):\n self.assert_is_dir_and_exists()\n n = 0\n for _ in self.select_file(recursive=False):\n n += 1\n return n", "def num_dataload_workers() -> int:\n return 4 if common_util.is_linux() else 0", "def num_processes():\n return 1", "def part1():\n program = read_input()\n root = build_filesystem(program)\n all_sizes = root.make_size_list()\n return sum(size for size in all_sizes if size <= 100000)", "def get_amount_of_data(directory: str):\n size = sum([os.path.getsize(os.path.join(directory, item)) for item in os.listdir(directory) if os.path.isfile(os.path.join(directory, item))])\n print(size)\n return size", "def numberFiles(self):\n with open(self.inputfile) as fin:\n for n, _ in enumerate(fin, start=1): pass\n self.n = n\n return self.n", "def num_partitions(self): # -> Unknown:\n ...", "def total_files_to_process(self) -> float:\n return pulumi.get(self, \"total_files_to_process\")", "def getnrfiles(self):\n return len(self.filenames)", "async def num_fomod_files_to_install(self):\n n = 0\n for f in self.fomod.files_to_install:\n if f.type == \"folder\":\n n += await self.count_folder_contents(f.source)\n else:\n n += 1\n\n 
return n", "def getNumStatDataFiles(self):\n return self.nStatDataFiles", "def num_processes(self, new_value):", "def bpCount(file):\n amount_bp = len(file)\n return amount_bp", "def total_number():\r\n total_number = 0\r\n file_read = read_file()\r\n for key in file_read:\r\n total_number = total_number + len(file_read[key])\r\n return total_number", "def _get_num_objects_per_step(self, worker_id=0):\n data_layer = self.get_data_layer(worker_id)\n num_images = tf.shape(data_layer.input_tensors['source_tensors'][0])[0]\n return num_images", "def get_space_used():\n files = jobtracker.query(\"SELECT * FROM files \" \\\n \"WHERE status IN ('added', 'downloaded', 'unverified')\")\n\n total_size = 0\n for file in files:\n total_size += int(file['size'])\n return total_size", "def count_number_of_reads(filename: Path) -> int:\n\tif filename.suffix == '.gz':\n\t\tcommand = f\"zcat {filename}\"\n\telse:\n\t\tcommand = f\"cat {filename}\"\n\tprocess = subprocess.Popen(command.split(), stdout = subprocess.PIPE)\n\toutput = subprocess.check_output([\"wc\", \"-l\"], stdin = process.stdout)\n\n\treads = int(output.strip()) / 4\n\treturn int(reads)", "def n_tasks(self) -> int:\n pass", "def len(self):\n # print(self.processed_file_names)\n return self.len_", "def chunk_size(self) -> global___Expression:", "def get_num_of_images(self):", "def numcpu () :\n import multiprocessing\n return multiprocessing.cpu_count()", "def get_ncpu():\n from multiprocessing import cpu_count\n return cpu_count()", "def size(self, gather=True):\n raise NotImplementedError", "def count(train_dir):\r\n path = train_dir\r\n count = 0\r\n for fn in os.listdir(path): #fn 表示的是文件名\r\n count = count + 1\r\n return count", "def number_of_batches(self):\n return int(np.floor(len(self.file_paths_list) / self.batch_size))", "def determine_num_chunks(self, total_lines, socket=0, max_entries=5000):\n \n ret, msg = self.newportxps._xps.GatheringDataMultipleLinesGet(socket, 0, 1)\n num_entries = len(msg.split(';'))\n print('Number of columns saved:' + str(num_entries))\n max_lines = max_entries / num_entries\n nchunks = int(total_lines/max_lines)+1\n num_lines = total_lines \n success = False\n print('Acquiring gather file:')\n while(not success):\n print(' Current number of chunks ' + str(nchunks))\n ret, _ = self.newportxps._xps.GatheringDataMultipleLinesGet(socket, 0, int(num_lines))\n print(' Current number of lines ' + str(num_lines))\n # Did not successfully return all lines\n if(ret < 0):\n nchunks *= 1.5\n num_lines = total_lines/nchunks \n \n else:\n print('Gatherfile successfully chunked')\n success = True \n \n if(num_lines < 10):\n raise AttributeError('XPS not reading even though small enough chunks')\n return int(nchunks)", "async def get_counts_for_file(\n file_name: str,\n score: int = 0,\n par_length: int = 0,\n co_occ: int = 0,\n limit_collection: List[str] = Query([]),\n):\n limitcollection_positive, limitcollection_negative = get_collection_files_regex(\n limit_collection, get_language_from_filename(file_name)\n )\n query_graph_result = get_db().AQLQuery(\n query=main_queries.QUERY_TOTAL_NUMBERS,\n batchSize=100000,\n bindVars={\n \"filename\": file_name,\n \"score\": score,\n \"parlength\": par_length,\n \"coocc\": co_occ,\n \"limitcollection_positive\": limitcollection_positive,\n \"limitcollection_negative\": limitcollection_negative,\n },\n )\n return {\"parallel_count\": query_graph_result.result[0]}", "def get_num_files(self):\n\t\tif self.num_files_in_set is None and self.set_type == 
FAST5SET_TARBALL:\n\t\t\tself.num_files_in_set = len(self.files)\n\t\treturn self.num_files_in_set", "def get_total_n_cpu(self) -> int:", "def target_totalfiles(self):\n return self._cfg.get('totalfiles', None)", "def _total_size_controller_multi_fs(controller_fs_new_list):\n total_size = 0\n for fs in controller_fs_new_list:\n if fs.name == constants.FILESYSTEM_NAME_DATABASE:\n total_size += (2 * fs.size)\n else:\n total_size += fs.size\n return total_size", "def files_processed(self) -> float:\n return pulumi.get(self, \"files_processed\")", "def __len__(self):\n # print(\"len: \" + str(math.floor(len([name for name in os.listdir(self.imgs_dir) if os.path.isfile(self.imgs_dir+'//'+name)])/self.batch_size)-1)\n return math.floor(len([name for name in os.listdir(self.imgs_dir) if\n os.path.isfile(self.imgs_dir + '//' + name)]) / self.batch_size)", "def _n_workers(self, processes: int = 2) -> int:\n if 2 <= processes <= cpu_count():\n n_workers = processes\n else:\n n_workers = cpu_count()\n return n_workers", "def __number_of_jobs__(self):\n # | - __number_of_jobs__\n num_jobs = 0\n\n # Regular jobs\n if self.job_var_lst is not None:\n num_jobs = len(self.job_var_lst)\n\n # Individual dir jobs\n if self.indiv_dir_lst is not None:\n num_jobs += len(self.indiv_dir_lst)\n\n\n return(num_jobs)\n # __|", "def ncpu ( events ) :\n #\n n_cores = numcpu() \n if n_cores <= 1 : return ROOT.RooFit.NumCPU ( 1 ) ## fake!!! \n #\n n = events // _nemax\n if n <= 1 : return ROOT.RooFit.NumCPU ( 1 ) ## fake!!! \n #\n num = min ( n , n_cores , _ncmax )\n if not _ncpus : _ncpus.append ( num ) \n #\n return ROOT.RooFit.NumCPU ( num )", "def output_mb(self):\n total_output_size = sum([t.shuffle_mb_written for t in self.tasks])\n return total_output_size", "def count():", "def get_size(files):\n somesize = 0\n for f in files:\n somesize += int(f.get('file_size'))\n return somesize", "def num_instances_msp(infile_name):\n\tinfile = open(infile_name)\n\tnum_instances = 0\n\tfor line in infile:\n\t\tif line.startswith(\"Name: \"):\n\t\t\tnum_instances += 1\n\treturn(num_instances)", "def n_file_elements(cls):\n \n return randint(1, (3 * Root.size))", "def size(path):", "def file_count(self) -> int:\n if self.dataset is None:\n raise ValueError('No known dataset found!')\n return self._max_file_count", "def __len__(self):\n\n return math.ceil(len(self.img_files) * self.gen_count / self.batch_size)", "def __len__(self):\n return int(np.ceil(len(self.image_filenames) / (self.batch_size)))", "def _num_nodes(self):\n return len(self._nid2partid)", "def get_space_used():\n fs.get_space_used()", "def test_kernel_srcs_count(dataset: linux.LinuxSourcesDataset):\n # FIXME(cec): This value does not appear to stable across platforms, but it\n # should be.\n assert abs(len(dataset.kernel_srcs) - 310) < 10", "def pool_size():\r\n if DESIRED_THREADS > 1:\r\n return min(DESIRED_THREADS, multiprocessing.cpu_count())\r\n else:\r\n raise Exception(\"ARG ERROR: DESIRED_THREADS is not valid\")", "def calculate_chunk_size(thread_count, item_count):\n chunk_size = int(item_count / (thread_count * 10))\n if chunk_size < 1:\n chunk_size = 1\n if chunk_size > 20:\n chunk_size = 20\n return chunk_size", "def countsubcatchments(inputfilename=FileSettings.settingsdict['inputfilename']):\r\n global count\r\n with open(inputfilename, 'r') as swmmput:\r\n contents = swmmput.readlines()\r\n count = len(contents)\r\n return(count)", "def max_files(self):\n\n return 10 ** self.int_len(self.cnt_files())", "def size(self):\r\n return sum(pool.size() 
for pool in self.host_to_pool.values())", "def fs_files_used(self):\n return self._fs_files_used", "def get_num_samples(org_dir, file_names):\n count = 0\n # Loop through the files, which then loop through the trees\n for filename in file_names:\n # Skip files that are not .mrg\n if not filename.endswith('.mrg'):\n continue\n # File is .mrg. Start processing\n file_dir = os.path.join(org_dir, filename)\n with open(file_dir, 'r', encoding='utf-8') as reader:\n content = reader.readlines()\n for _ in content:\n count += 1\n\n return count", "def test_number_of_files(self):\n\n url = [\"http://archive.ics.uci.edu/ml/machine-learning-databases/wine/wine.data\",\n \"http://golakjsd.com/jl2kais\",\n \"http://stackoverflow.com/questions/17730173/python-cant-get-full-path-name-of-file\"]\n\n returned_fname=requester.batch_url_to_csv(url, fnames=[\"test_fname\",\n \"test2_fname\",\n \"test3_fname\"])\n number_files=len(returned_fname)\n self.assertEqual(number_files, 1)", "def test_all_srcs_count(dataset: linux.LinuxSourcesDataset):\n # FIXME(cec): This value does not appear to stable across platforms, but it\n # should be.\n assert abs(len(dataset.all_srcs) - 26091) < 1000", "def getThreads():\r\n return multiprocessing.cpu_count()", "def _total_size_controller_fs(controller_fs_new, controller_fs_list):\n total_size = 0\n\n for fs in controller_fs_list:\n size = fs['size']\n if controller_fs_new and fs['name'] == controller_fs_new['name']:\n size = controller_fs_new['size']\n if fs['name'] == \"database\":\n size = size * 2\n total_size += size\n\n LOG.info(\n \"_total_size_controller_fs total filesysem size %s\" % total_size)\n return total_size", "def file_count(self) -> str:\n return pulumi.get(self, \"file_count\")", "def get_num_servers():\n return 1", "def num_processes(self):\n return 1", "def get_number_of_files(directory: str):\n\n number_of_files = len([item for item in os.listdir(directory) if os.path.isfile(os.path.join(directory, item))])\n print(number_of_files)\n return number_of_files", "def node_count(self) -> int:\n return pulumi.get(self, \"node_count\")", "def number_of_data_nodes(self):\n return int(self._data['number_of_data_nodes'])", "def count_timepoints(sc, session, files):\n tuples = zip(range(len(files)), files)\n files_sc = sc.parallelize(tuples)\n\n def count_planes(kv):\n index, path2 = kv\n try:\n from ScanImageTiffReader import ScanImageTiffReader\n img = ScanImageTiffReader(path2).data()\n except Exception:\n import tifffile\n img = tifffile.imread(path2)\n return img.shape[0]\n\n data2 = files_sc.map(count_planes).collect()\n frame_numbers = np.array(data2)\n vol_numbers = frame_numbers / len(session.fieldMask)\n return vol_numbers.astype(int)", "def num_links(self):\n count=0.0\n for cluster in self.clusters:\n if self.clusters[cluster] == self.clusters[cluster].antecessor:\n numberofmembers=self.clusters[cluster].number_of_members\n count+=numberofmembers\n return count", "def get_num_nodes(self):\n assert self.is_fitted_\n nthreads = _process_nthreads(self.nthreads)\n n_nodes, n_terminal = self._cpp_obj.get_n_nodes(ctypes.c_bool(self._is_extended_).value,\n ctypes.c_int(nthreads).value)\n return n_nodes, n_terminal", "def filesInSeries_determine():\n def du(path):\n \"\"\"disk usage in human readable format (e.g. 
'2,1GB')\"\"\"\n return subprocess.check_output(['du','-sh', path]).split()[0].decode('utf-8')\n\n def duRaw(path):\n root = Path(path)\n return sum(f.stat().st_size for f in root.glob('**/*') if f.is_file())\n\n series_uid = self.processDicomField(dcm_info, \"SeriesInstanceUID\")\n str_seriesMapFile = os.path.join(self.series_mapDir, '%s.json' % series_uid)\n\n try:\n with open(str_seriesMapFile, 'r') as f:\n d_seriesInfo = json.load(f)\n str_path = d_seriesInfo[series_uid]\n fileCount = len([n for n in os.listdir(str_path) \\\n if os.path.isfile(os.path.join(str_path, n))])\n str_dirSize = du(str_path)\n dirSizeRaw = duRaw(str_path)\n d_ret = {\n 'status': True,\n 'fileCount': fileCount,\n 'str_dirSize': str_dirSize,\n 'dirSizeRaw': dirSizeRaw\n }\n except:\n d_ret = {\n 'status': False,\n 'fileCount': -1,\n 'str_dirSize': \"unknown\",\n 'dirSizeRaw': -1\n }\n\n return d_ret", "def getNFiles(self, config, base, logger=None):\n if 'nfiles' in config:\n return galsim.config.ParseValue(config, 'nfiles', base, int)[0]\n else:\n return 189", "def fs_size(fs_path):\n import shutil\n\n total, used, free = shutil.disk_usage(fs_path)\n return total", "def get_num_parallel_workers():\n return _config.get_num_parallel_workers()", "def number_of_nodes(self):\n return int(self._data['number_of_nodes'])", "def usedspace(self):\n self.log.info(\"freespace\")\n nbytes = 0\n keys = list(self.downloads.keys())\n keys.sort()\n for key in keys:\n download = self.downloads[key]\n nbytes += download['size']\n self.log.info(\"returning:\" + str(nbytes))\n return nbytes", "def get_array_size():\n tg_file = 'NA_CAS_gauges.txt'\n lines = open(tg_file).readlines()\n tg_nbr = len(lines)\n return tg_nbr", "def get_dataset_size(file_path):\n size = 1\n file_list = tf.io.gfile.glob(file_path)\n for file in file_list:\n for record in tf.compat.v1.io.tf_record_iterator(file, options=tf.io.TFRecordOptions(\n compression_type='GZIP')):\n size += 1\n return size", "def node_count(self) -> int:\n return int(self.graph_tuple_stats.node_count or 0)", "def node_count(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"node_count\")", "def node_count(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"node_count\")", "def num_nodes(self) -> int:\n return pulumi.get(self, \"num_nodes\")" ]
[ "0.6732709", "0.6647272", "0.6646703", "0.65596217", "0.655924", "0.6545861", "0.65446675", "0.6479833", "0.64772725", "0.6464439", "0.6455599", "0.6446589", "0.6436676", "0.641191", "0.6406125", "0.6391951", "0.636867", "0.6327254", "0.63119394", "0.63079107", "0.62990075", "0.62850386", "0.6271203", "0.6271065", "0.6241946", "0.6241917", "0.6211973", "0.62052494", "0.6188796", "0.61354184", "0.61241007", "0.6090062", "0.60858923", "0.60754955", "0.60525525", "0.604401", "0.604211", "0.6033762", "0.60251784", "0.60205466", "0.5997456", "0.59828514", "0.5978248", "0.59686863", "0.5968497", "0.5965308", "0.595912", "0.59559906", "0.59557664", "0.5943467", "0.59426135", "0.5940532", "0.5929232", "0.5925932", "0.5921102", "0.5912135", "0.59106475", "0.59096545", "0.5908762", "0.59066015", "0.5900608", "0.5894192", "0.58916247", "0.5887216", "0.5873621", "0.58656746", "0.5856907", "0.58511716", "0.5848954", "0.58448815", "0.5841122", "0.5839913", "0.5836425", "0.5832822", "0.5831114", "0.5825731", "0.5824718", "0.58237857", "0.5818451", "0.5818338", "0.5818005", "0.58139634", "0.5808201", "0.578328", "0.57739735", "0.57693386", "0.57580364", "0.574639", "0.5745517", "0.57416886", "0.57410467", "0.57378376", "0.5737538", "0.5737311", "0.57358164", "0.57300776", "0.57294875", "0.5725276", "0.5725276", "0.5723013" ]
0.7106992
0
Organizes all files so they are distributed equally across all scatter-gather nodes, based on the total file size to process
import operator


def distribute_files_by_size(file_sizes, dx_file_objects, number_of_nodes):
    """Deal files out to scatter-gather nodes round-robin in ascending size
    order, so every node receives a similar mix of small and large files."""
    files_per_node = number_of_files_per_node(file_sizes, number_of_nodes)  # computed but unused in the original
    sorted_file_sizes = sorted(file_sizes.items(), key=operator.itemgetter(1))
    job_idx = 1
    jobs_object = {}
    for file_name, file_size in sorted_file_sizes:
        # Wrap back to the first node once every node has received a file.
        if job_idx > number_of_nodes:
            job_idx = 1
        try:
            jobs_object[job_idx].append(dx_file_objects[file_name])
        except KeyError:
            jobs_object[job_idx] = [dx_file_objects[file_name]]
        job_idx += 1
    return jobs_object
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _distribute_files(self, distribution='one'):\n for k, files in self.file_lists.items():\n self.idle[k] = False\n if distribution.lower() == 'single':\n self.distribution_comms[k] = None\n if self.comm.rank >= 1:\n self.local_file_lists[k] = None\n self.idle[k] = True\n else:\n self.local_file_lists[k] = files\n elif distribution.lower() == 'even':\n if len(files) <= self.comm.size:\n if self.comm.rank >= len(files):\n self.local_file_lists[k] = None\n self.distribution_comms[k] = None\n self.idle[k] = True\n else:\n self.local_file_lists[k] = [files[self.comm.rank],]\n self.distribution_comms[k] = self.comm.Create(self.comm.Get_group().Incl(np.arange(len(files))))\n else:\n files_per = int(np.floor(len(files) / self.comm.size))\n excess_files = int(len(files) % self.comm.size)\n if self.comm.rank >= excess_files:\n self.local_file_lists[k] = list(files[int(self.comm.rank*files_per+excess_files):int((self.comm.rank+1)*files_per+excess_files)])\n else:\n self.local_file_lists[k] = list(files[int(self.comm.rank*(files_per+1)):int((self.comm.rank+1)*(files_per+1))])\n self.distribution_comms[k] = self.comm", "def gatherfiles(self):\n\t\tfrom subprocess import Popen,PIPE\n\t\timport os\n\t\timport tarfile\n\t\timport glob\n\t\t\n\t\tprint \"=== \",self.nameID,\": Joining all the files in one\"\n\t\t# FIXME: Only there are 1 file, not needed the hadd\n\t\tfinalfile = os.path.join(\"Results\",self.outputfile)\n\t\t# FIXED BUG: just cp when there is only one file, otherwise\n\t\t# there are problems with the TTree\n\t\tif len(self.outputfiles) == 1:\n\t\t\t# Note that when there is only 1 file, always its #task=1\n\t\t\tcommand = [ 'cp', self.outputfiles[1], finalfile ]\n\t\telse:\n\t\t\tcommand = [ 'haddPlus', finalfile ]\n\t\t\tfor f in self.outputfiles.itervalues():\n\t\t\t\tcommand.append( f )\n\t\tp = Popen( command ,stdout=PIPE,stderr=PIPE ).communicate()\n\t\t# Checking if everything was allright\n\t\ttotalevts = self.getevents(finalfile,True)\n\t\tif totalevts != self.nevents:\n\t\t\tmessage = \"\\033[33;1mclustermanager.gatherfiles: WARNING\\033[0m the total file\"\n\t\t\tmessage += \"'\"+finalfile+\"' do not contain all the events:\\n\"\n\t\t\tmessage += \"Total events to be processed:\"+str(self.nevents)+\"\\n\"\n\t\t\tmessage += \"Total events in '\"+finalfile+\"':\"+str(totalevts)+\"\\n\"\n\t\t\tprint message\n\t\t\treturn \n\t\t# If everything was fine, deleting the files \n\t\t# and cleaning the directory\n\t\tfor f in self.outputfiles.itervalues():\n\t\t\tos.remove( f )\n\t\t# Taring and compressing\n\t\tfilestotar = glob.glob(\"./*.*\")\n\t\tfilestotar.append( \".storedmanager\")\n\t\ttar = tarfile.open(os.path.basename(self.cwd)+\".tar.gz\",\"w:gz\")\n\t\tfor f in filestotar:\n\t\t\ttar.add(f)\n\t\ttar.close()\n\t\t# if everything was fine, deleting the files\n\t\tif os.path.exists(os.path.basename(self.cwd)+\".tar.gz\"):\n\t\t\tfor f in filestotar:\n\t\t\t\tos.remove(f)\n\t\telse:\n\t\t\tmessage = \"\\033[33;1mclustermanager.gatherfiles: WARNING\\033[0m I can't manage\\n\"\n\t\t\tmessage += \"to create the backup .tar.gz file\\n\"\n\t\t\tprint message\n\n\t\tprint \"Created \"+finalfile\n\t\tprint \"========= Process Completed =========\"", "def _distribute_data(cfg, file_name, rank=None):\n if rank is None or cfg.DATASET.DISTRIBUTED == False:\n return file_name\n\n world_size = cfg.SYSTEM.NUM_GPUS\n num_files = len(file_name)\n ratio = num_files / float(world_size)\n ratio = int(math.ceil(ratio-1) + 1) # 1.0 -> 1, 1.1 -> 2\n\n extended = [file_name[i % num_files] for i 
in range(world_size*ratio)]\n splited = [extended[i:i+ratio] for i in range(0, len(extended), ratio)]\n\n return splited[rank]", "def clustering(self): \n clusterOfFiles=self.getClusters()\n \n #group files based on the hash of their contents\n self.keyingMethod=md5Hash\n [self.addFile(afile) for acluster in clusterOfFiles for afile in acluster]\n clusterOfFiles=self.getClusters()\n self.showClusters(clusterOfFiles)", "def group_by_size(fileset, min_size=DEFAULT_MIN_SIZE, max_size=DEFAULT_MAX_SIZE,\n min_group_size=2, workers=16):\n return group_by_key(fileset, size_key(min_size=min_size, max_size=max_size),\n min_group_size=min_group_size, workers=workers)", "def test_parallel_dataflow():\n\n if os.path.exists('all.txt'):\n os.remove('all.txt')\n\n # create 5 files with random numbers\n output_files = []\n for i in range(5):\n if os.path.exists('random-%s.txt' % i):\n os.remove('random-%s.txt' % i)\n output_files.append(generate(outputs=[File('random-%s.txt' % i)]))\n\n # concatenate the files into a single file\n cc = concat(inputs=[i.outputs[0]\n for i in output_files], outputs=[File(\"all.txt\")])\n\n # calculate the average of the random numbers\n totals = total(inputs=[cc.outputs[0]])\n print(totals.result())", "def csvs_scattered_to_grouped(path_dir, inlist, outlist, gcols,\n sort=1, scols=None, catalog=\"\", supersede=False):\n\n filelist=[os.path.join(path_dir,i) for i in inlist]\n n_split=len(outlist)\n\n pdfs=pd.read_csv(filelist[0],usecols=gcols)\n pdfs.drop_duplicates(inplace=True)\n\n print(\"csvs_scattered_to_grouped: Collecting items for group.\\n\")\n for i in range(1,len(filelist)):\n pdfs=pdfs.append(pd.read_csv(filelist[i],usecols=gcols),ignore_index=True)\n pdfs.drop_duplicates(inplace=True)\n\n if sort==1:\n pdfs.sort_values(gcols,inplace=True, ascending=True)\n elif sort==-1:\n pdfs.sort_values(gcols,inplace=True, ascending=False)\n\n aa_ed=np.array_split(pdfs, n_split)\n\n if supersede:\n for i in outlist:\n if os.path.isfile(os.path.join(path_dir,i)):\n os.remove(os.path.join(path_dir,i))\n if os.path.isfile(os.path.join(path_dir,str(catalog))):\n os.remove(os.path.join(path_dir,str(catalog)))\n\n print(\"csvs_scattered_to_grouped: Start processing files:\\n\")\n for i in range(0,len(filelist)):\n fi=pd.read_csv(filelist[i],usecols=scols)\n for j,ja in enumerate(aa_ed):\n wrtj=pd.merge(ja, fi, how='inner', on=gcols)\n append_to_csv(wrtj, os.path.join(path_dir,outlist[j]))\n print('csvs_scattered_to_grouped: '+str(i)+' file(s) finished.')\n\n if catalog:\n for i, d in enumerate(aa_ed):\n d['_@_FILE_']=outlist[i]\n append_to_csv(d, os.path.join(path_dir,str(catalog)))\n print('csvs_scattered_to_grouped: Catalog file created.')", "def processSetOfCerFiles(files):\n printHeader()\n \n k = 0\n for f in files:\n k = k + 1\n sz = get_file_size(f)\n with open(f, 'rb') as fb:\n processCerFile(k, fb, sz=sz)", "def dispatch_files_bysize(nb_list, files):\r\n\r\n logging.info('Having {} files to dispatch in {} lists'.format(len(files), nb_list))\r\n #\r\n # 1 - Init N lists of size 0.\r\n #\r\n sublists = {}\r\n for list_id in range(0,nb_list):\r\n sublists[list_id] = {\r\n 'files' : [],\r\n 'size' : 0\r\n }\r\n\r\n #\r\n # 2 - For each file, get the smallest sublist and append file.\r\n #\r\n\r\n def _get_smallest_sublist(sublists):\r\n \"\"\" get the smallest sublist\r\n \"\"\"\r\n smallest_list_id = 0\r\n for list_id, sublist in sublists.items():\r\n if sublist['size'] < sublists[smallest_list_id]['size']:\r\n smallest_list_id = list_id\r\n\r\n return 
smallest_list_id\r\n\r\n for file in files:\r\n logging.info('dispatching {}'.format(file))\r\n list_id = _get_smallest_sublist(sublists)\r\n sublists[list_id]['files'].append(file)\r\n sublists[list_id]['size'] += os.stat(file).st_size\r\n \r\n for list_id, sublist in sublists.items():\r\n logging.warning(' List [{}] Having {} files for a size of {}'.format(list_id, len(sublist['files']), sublist['size'] ))\r\n\r\n return [ sublist['files'] for list_id, sublist in sublists.items()]", "def packFiles(source, filesPerBlock, dest):\n\tfileCount = 1\n\t\n\ttmpFileName = \"tmp.h5\"\t\n\n\n\toutFile = createBlockFile(tmpFileName)\t\n\tfor dirname, subdirs, files in os.walk(source):\t\n\t print 'Scanning ' + dirname + '...'\t\n\t for f in files:\t\n\t if f.endswith('.h5'):\t\n\t inFile = h5py.File(os.path.join(dirname, f), 'r')\t\n\t outFile.copy(inFile, outFile['songs'], f)\t\n\t inFile.close()\t\n\t fileCount = fileCount + 1\t\n\t if(fileCount > filesPerBlock):\t\n\t outFile.close()\t\n\t upload(tmpFileName, bucket)\t\n\t fileCount = 1\t\n\t outFile = createBlockFile(tmpFileName)\t\n\n \toutFile.close()\n \tif fileCount > 1:\n\t \tupload(tmpFileName, bucket)\n\n\tos.remove(tmpFileName)", "def chunk_input(self, input_files, chunksize):\n part_lists = [] # Lists of partial files\n known_nlines = None\n part_suffix = \"\"\n chunk_nlines = chunksize * 2\n\n for input_file in input_files:\n # Count number of lines in the file\n nlines = int(command.execute_with_output(\"wc -l %s\" % input_file)\n .strip().split()[0])\n # Number of lines should be the same in paired files\n if known_nlines is not None:\n msg = \"Mismatched line counts in supposedly paired files: {}\".format(\n input_files)\n assert nlines == known_nlines, msg\n known_nlines = nlines\n\n # Set number of pieces and names\n numparts = (nlines + chunk_nlines - 1) // chunk_nlines\n ndigits = len(str(numparts - 1))\n part_suffix = \"-chunksize-%d-numparts-%d-part-\" % (chunksize, numparts)\n out_prefix_base = os.path.basename(input_file) + part_suffix\n out_prefix = os.path.join(self.chunks_result_dir_local, out_prefix_base)\n\n # Split large file into smaller named pieces\n command.execute(\"split -a %d --numeric-suffixes -l %d %s %s\" %\n (ndigits, chunk_nlines, input_file, out_prefix))\n command.execute_with_retries(f\"aws s3 sync --only-show-errors {self.chunks_result_dir_local}/ {self.chunks_result_dir_s3}/ --exclude '*' --include '{out_prefix_base}*'\")\n\n # Get the partial file names\n partial_files = []\n paths = command.execute_with_output(\"ls %s*\" % out_prefix).rstrip().split(\"\\n\")\n for pf in paths:\n partial_files.append(os.path.basename(pf))\n\n # Check that the partial files match our expected chunking pattern\n pattern = \"{:0%dd}\" % ndigits\n expected_partial_files = [(out_prefix_base + pattern.format(i))\n for i in range(numparts)]\n msg = \"something went wrong with chunking: {} != {}\".format(\n partial_files, expected_partial_files)\n assert expected_partial_files == partial_files, msg\n part_lists.append(partial_files)\n\n # Ex: [[\"input_R1.fasta-part-1\", \"input_R2.fasta-part-1\"],\n # [\"input_R1.fasta-part-2\", \"input_R2.fasta-part-2\"],\n # [\"input_R1.fasta-part-3\", \"input_R2.fasta-part-3\"],...]\n input_chunks = [list(part) for part in zip(*part_lists)]\n return part_suffix, input_chunks", "def distribute_load(files, bins):\n files_size = {file: os.path.getsize(file) for file in files}\n separated_files = binpacking.to_constant_bin_number(files_size, bins)\n return [list(file_dict.keys()) for 
file_dict in separated_files]", "def split_train_into_chunks(chunk_size):\n for syscall_type in SYSCALLS:\n syscalls_split_file = open(f\"{TEMP_DIR}/{syscall_type}-split.train\", \"w\")\n snd_train_path = f\"{FILE_PATH}/{syscall_type}/{syscall_type}.train\"\n with open(snd_train_path) as train_file:\n for syscall in train_file:\n # Generate all n-grams of the current syscall\n n_grams = extract_n_grams(syscall.strip(),chunk_size,unique=True)\n if len(n_grams)==0:\n continue\n # Write n-grams to syscall chunks file\n syscalls_split_file.writelines(n_grams)\n syscalls_split_file.close()", "def data_distribution(path):\r\n \r\n pos_list = os.listdir(path)\r\n dist_list = []\r\n sum = 0\r\n \r\n for i in range(1,31):\r\n obj = str(i)+'_rgb'\r\n count = 0\r\n for pos in pos_list:\r\n object_list_path = os.path.join(path,pos)\r\n \r\n object_list = os.listdir(object_list_path)\r\n \r\n for object_name in object_list:\r\n image_list_path = os.path.join(object_list_path,object_name)\r\n \r\n image_list = os.listdir(image_list_path)\r\n for imagepath in image_list:\r\n if imagepath == 'mm':\r\n continue\r\n if object_name == obj:\r\n count += 1\r\n #print(object_name,obj)\r\n \r\n # print(\"Count of \",obj,\" is\",count)\r\n print(\"Count of \",obj,\" is\",count)\r\n sum +=count\r\n print(i)\r\n dist_list.append(count)\r\n \r\n print(\"Total Sum: \",sum)\r\n return dist_list", "def process_ecr(cas, cas_dir, sortiefile, ncsize):\n\n xcpt = [] # try all files for full report\n # ~~ copy output files ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n for key in cas.out_files:\n submit = cas.out_files[key].split(';')\n tmp_file_name = submit[1]\n file_name = cas.values[key]\n if submit[5] == 'MULTI': # POSTEL3D\n npsize = 1\n while 1: # HORIZONTAL SECTION FILES\n file_name = path.join(cas_dir,\n file_name\\\n + '_{0:03d}'.format(npsize))\n if path.isfile(file_name):\n base, ext = path.splitext(file_name)\n i = 0\n # this would be an infinite loop only if you have an\n # inifite number of files\n while 1:\n i = i + 1\n if not path.isfile(base+'_old'+str(i)+ext):\n break\n shutil.move(file_name, base+'_old'+str(i)+ext)\n tmp_file_name = tmp_file_name+\\\n '_{0:03d}'.format(npsize)\n if not path.isfile(tmp_file_name):\n break\n shutil.move(tmp_file_name, file_name)\n print(' moving: '+ path.basename(file_name))\n npsize = npsize + 1\n npsize = 1\n while 1: # VERTICAL SECTION FILES\n nptime = 1\n v_file = tmp_file_name+\\\n '_{0:03d}'.format(npsize)+'-{0:03d}'.format(nptime)\n if not path.isfile(v_file):\n break\n while 1:\n file_name = path.join(cas_dir,\n file_name+\\\n '_{0:03d}'.format(npsize)+\\\n '-{0:03d}'.format(nptime))\n if path.isfile(file_name):\n base, ext = path.splitext(file_name)\n i = 0\n # this would be an infinite loop only if you have an\n # inifite number of files\n while 1:\n i = i + 1\n if not path.isfile(base+'_old'+str(i)+ext):\n break\n shutil.move(file_name, base+'_old'+str(i)+ext)\n tmp_file_name = tmp_file_name\\\n + '_{0:03d}'.format(npsize)\\\n + '-{0:03d}'.format(nptime)\n if not path.isfile(tmp_file_name):\n break\n shutil.move(tmp_file_name, file_name)\n print(' moving: '+ path.basename(file_name))\n nptime = nptime + 1\n npsize = npsize + 1\n # MAIN MODULE\n elif submit[5] == 'PARAL' and ncsize > 1:\n npsize = 0\n c_base, c_ext = path.splitext(file_name)\n while 1:\n file_name = path.join(cas_dir,\n c_base\\\n + '{0:05d}-{1:05d}'\\\n .format(ncsize-1, npsize)\\\n + c_ext)\n if path.isfile(file_name):\n base, ext = path.splitext(file_name)\n i = 0\n # this would be an infinite 
loop only if you have an\n # inifite number of files\n while 1:\n i = i + 1\n if not path.isfile(base+'_old'+str(i)+ext):\n break\n shutil.move(file_name, base+'_old'+str(i)+ext)\n tmp_file_name_par = tmp_file_name+\\\n '{0:05d}-{1:05d}'.format(ncsize-1, npsize)\n if not path.isfile(tmp_file_name_par):\n break\n shutil.move(tmp_file_name_par, file_name) #shutil.copy2(tmp_file_name,file_name)\n print(' moving: '+ path.basename(file_name))\n npsize = npsize + 1\n elif submit[5] == 'MULTI2':\n for itmp_file_name in listdir('.'):\n if itmp_file_name.count(tmp_file_name) == 1:\n base, ext = path.splitext(file_name)\n new_tmp_file_name = \\\n itmp_file_name.lower()\\\n .replace(tmp_file_name.lower(),\n base)\n new_file_name = path.join(cas_dir, new_tmp_file_name) + ext\n if path.isfile(new_file_name):\n base, ext = path.splitext(new_file_name)\n i = 0\n # this would be an infinite loop only if you have an\n # inifite number of files\n while 1:\n i = i + 1\n if not path.isfile(base+'_old'+str(i)+ext):\n break\n shutil.move(new_file_name, base+'_old'+str(i)+ext)\n shutil.move(itmp_file_name, new_file_name)\n print(' moving: '+ path.basename(new_file_name))\n else:\n file_name = path.join(cas_dir, file_name)\n if path.isfile(file_name):\n base, ext = path.splitext(file_name)\n i = 0\n # this would be an infinite loop only if you have an\n # inifite number of files\n while 1:\n i = i + 1\n if not path.isfile(base+'_old'+str(i)+ext):\n break\n shutil.move(file_name, base+'_old'+str(i)+ext)\n if not path.isfile(tmp_file_name):\n xcpt.append({'name':'process_ecr',\n 'msg':'did not create outfile: '+\\\n path.basename(file_name)+' ('+tmp_file_name+')'})\n continue\n shutil.move(tmp_file_name, file_name)\n print(' moving: '+ path.basename(file_name))\n\n # ~~~ copy the sortie file(s) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n sortiefiles = []\n if sortiefile != None:\n crun = path.basename(sortiefile)\n cref = path.join(cas_dir, sortiefile)\n if not path.isfile(crun):\n xcpt.append({'name':'process_ecr',\n 'msg':'did not create listing file: '+\\\n path.basename(cref)+' ('+crun+')'})\n raise TelemacException(xcpt) # raise full report\n shutil.copy(crun, cref)\n print(' copying: '+ path.basename(cref))\n sortiefiles.append(cref)\n\n # ~~~> If in parallel, also copy the slave log files\n # called PEnnnnn_xxxxx.log\n # for slave x of n but for the last one called the sortie file\n if ncsize > 1:\n for i in range(ncsize-1):\n slavefile = 'PE{0:05d}-{1:05d}.LOG'.format(ncsize-1, i+1)\n base, ext = path.splitext(sortiefile)\n slogfile = base+'_p'+'{0:05d}'.format(i+1)+ext\n crun = slavefile\n cref = path.join(cas_dir, slogfile)\n if not path.isfile(crun):\n xcpt.append({'name':'process_ecr',\n 'msg':'could not find the listing file: '\\\n + crun})\n raise TelemacException(xcpt) # raise full report\n shutil.copy(crun, cref)\n print(' copying: '+ path.basename(cref))\n sortiefiles.append(cref)\n # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n if xcpt != []:\n raise TelemacException(xcpt) # raise full report\n return sortiefiles", "def pass1(self, verbose):\n \n for root, dirs, files in os.walk(self.dir_to_check, topdown=False):\n t_size = 0\n for f in files:\n new_f = os.path.join(root,f) #complete path in case of homonyms\n size = os.path.getsize(new_f)\n t_size += size\n self.cache[new_f] = HumanReadableSize(size)\n t_size += sum ([self.cache[os.path.join(root,d)].val for d in dirs])\n self.cache[root] = HumanReadableSize(t_size)\n if verbose:\n print ('.................... 
Computing size of {}!'.format(root))\n \n #print (self.cache) #debugging", "def distribute_sampling(numSamples, localDevices=None, numChainsPerDevice=1):\n\n global globNumSamples\n\n # Determine number of samples per process\n samplesPerProcess = numSamples // commSize\n\n if rank < numSamples % commSize:\n samplesPerProcess += 1\n\n if localDevices is None:\n\n globNumSamples = numSamples\n\n return samplesPerProcess\n\n numChainsPerProcess = localDevices * numChainsPerDevice\n\n def spc(spp):\n return (spp + numChainsPerProcess - 1) // numChainsPerProcess\n\n a = numSamples % commSize\n globNumSamples = (a * spc(1 + numSamples // commSize) + (commSize - a) * spc(numSamples // commSize)) * numChainsPerProcess\n\n return spc(samplesPerProcess)", "def run(self):\r\n filesizes = {}\r\n # Build up dict with key as filesize and value is list of filenames.\r\n for path, dirs, files in walk( self._path ):\r\n for filename in files:\r\n filepath = joinpath( path, filename )\r\n filesize = stat( filepath ).st_size\r\n filesizes.setdefault( filesize, [] ).append( filepath )\r\n\r\n\r\n #Compare content hash of all files which have the same size\r\n #if two or more files have same hash and size they are added to the queue \r\n for files in [ flist for flist in filesizes.values() if len(flist)>1 ]:\r\n #run over all files in dir with the same size if there is more then one\r\n duplicates = {}\r\n for filepath in files:\r\n with open( filepath ) as openfile:\r\n filehash = md5( openfile.read() ).hexdigest()\r\n if filehash not in duplicates:\r\n duplicates.setdefault(filehash, []).append (filepath)\r\n else:\r\n duplicates[filehash].append(filepath)\r\n for duplicate in [ duplicate for duplicate in duplicates.values() if len(duplicate)>1 ]:\r\n self._queue.put(duplicate)\r\n self._finished_scan[0] = 1", "def main():\n args = parse_args(sys.argv[1:])\n dir = args.directory\n wild = args.wildcard\n rec = args.recursive\n nb_clusters = args.clusters\n bricks = args.bricks\n\n path_for_display = os.path.abspath(dir)\n\n all_paths = None\n if rec:\n all_paths = Path(dir).rglob(wild)\n path_for_display += \" (recursive)\"\n else:\n all_paths = Path(dir).glob(wild)\n path_for_display += \" (non recursive)\"\n\n f_sizes = []\n\n for path in all_paths:\n\n full_path = os.path.join(path.parent, path.name)\n byte_size = os.path.getsize(full_path)\n # print(full_path, byte_size)\n f_sizes.append(byte_size)\n\n f_sizes = np.array(f_sizes)\n # print(f_sizes)\n\n min_byte_length = np.amin(f_sizes)\n max_byte_length = np.amax(f_sizes)\n mean_byte_length = np.mean(f_sizes)\n std_byte_length = np.std(f_sizes)\n median_byte_length = np.median(f_sizes)\n\n histo, bin_edges = np.histogram(f_sizes, nb_clusters)\n histo = histo.astype(\"float32\")\n histo_normalized = (np.copy(histo) / histo.max() * bricks).astype(\"uint32\")\n\n print(\"[BYTESPREAD REPORT]\")\n print()\n print(\"Date: \", datetime.now().strftime(\"%d/%m/%Y %H:%M:%S\"))\n print(\"Directory: \", path_for_display)\n print(\"Filename match: \", wild)\n print(\"Number of files: \", f_sizes.shape[0])\n print(\"Smallest file: \", byteToHumanReadable(min_byte_length))\n print(\"Largest file: \", byteToHumanReadable(max_byte_length))\n print(\"Average size: \", byteToHumanReadable(mean_byte_length))\n print(\"Standard deviation: \", byteToHumanReadable(std_byte_length))\n print(\"Median size: \", byteToHumanReadable(median_byte_length))\n print(\"Histogram intervals: \", byteToHumanReadable(bin_edges[1] - bin_edges[0]))\n print(\"Histogram:\")\n print()\n\n for i 
in range(0, len(bin_edges)-1):\n lower_bound = byteToHumanReadable(bin_edges[i])\n upper_bound = byteToHumanReadable(bin_edges[i+1])\n print(\"|\", \"▓\" * histo_normalized[i], \"[{}-{}], {} files\".format(lower_bound, upper_bound, math.ceil(histo[i])))", "def files_distribute(self):\n self._post('files/distribute')", "def create_input_chunks_distributed(cs, partition, data_dir, file_format):\n if not file_format == \"HDF5\":\n print(\"File format not supported yet. Aborting...\")\n sys.exit(1)\n\n for i in range(6):\n for filename in os.listdir('/disk' + str(i) + '/gtimothee'):\n if filename.endswith(\".json\") or filename.endswith(\".hdf5\"):\n os.remove(os.path.join('/disk' + str(i) + '/gtimothee', filename))\n print(f\"Creating input chunks...\")\n\n disk_index = 0\n repartition_dict = dict()\n\n for i in range(partition[0]):\n for j in range(partition[1]):\n for k in range(partition[2]):\n print(f\"Creating random array... shape: {cs}\")\n arr = da.random.uniform(size=cs)\n print(f\"Done, converting to float16...\")\n arr = arr.astype(np.float16)\n out_filename = f'{i}_{j}_{k}.hdf5'\n print(f\"Building {out_filename} with shape {cs}\")\n data_dirpath = os.path.join('/disk' + str(disk_index), 'gtimothee')\n outfilepath = os.path.join(data_dirpath, out_filename)\n print(f\"Storing on {data_dirpath}...\")\n da.to_hdf5(outfilepath, '/data', arr, chunks=None, compression=None)\n\n repartition_dict[str((i,j,k))] = outfilepath\n\n disk_index += 1\n if disk_index == 6:\n disk_index = 0\n\n print(f\"Writing repartition file...\")\n json_file = os.path.join('/disk0', 'gtimothee', 'repartition_dict.json')\n if os.path.isfile(json_file):\n os.remove(json_file)\n\n with open(json_file, 'w+') as outfile:\n json.dump(repartition_dict, outfile)", "def initiallize_buffer(self):\n assert os.path.isdir(self.directory)\n #sorting files topologically, files' format is -> data_num.h5 \n files_list = sorted(os.listdir(self.directory + '/' + self.name + '/'), key = lambda x: int(x.split(\"_\")[1].split(\".\")[0]))\n self.files_counter = 0\n if files_list != []: \n for file_name in files_list:\n self.memorize(name = file_name, error = 1)\n self.files_counter += 1\n self.files_tracker = file_name\n else:\n self.files_tracker = 'data_-1.h5'", "def number_of_files_per_node(files, number_of_nodes):\n\n files_per_node = float(len(files))/float(number_of_nodes)\n if files_per_node > 0.:\n return int(math.floor(files_per_node))\n else:\n return int(math.ceil(files_per_node))", "def size(**kwargs):\n mpath = kwargs['path']\n if not os.path.exists(mpath):\n print(\"Invalid path\")\n sys.exit(-1)\n\n # Basic Counter variables\n foldercount = 0\n count = 0\n\n # List containing the collected information\n elist = []\n\n # Indices for the 2 dimensional list\n iext = 0\n icount = 1\n icsums = 2\n imins = 3\n imaxs = 4\n\n start_depth = len(mpath.split('/')) - 2\n depth = 0\n\n for root, dirs, files in os.walk(mpath, topdown=True):\n\n indircount = 0\n for name in files:\n pathfile = os.path.join(root, name)\n indircount += 1\n # Extension\n ext = (os.path.splitext(name)[1]).lower()[1:]\n if ext == '': ext = 'no ext'\n # Size\n size = os.stat(pathfile).st_size\n\n # Folder depth\n cdepth = len(os.path.abspath(pathfile).split('/')) - start_depth\n if depth < cdepth: depth = cdepth\n\n # Getting the index of the current file extension using python built-in functions\n try:\n index = list(zip(*elist))[iext].index(ext)\n except IndexError:\n # The list is empty\n index = -1\n except ValueError:\n # The list doesn't contain the 
extension\n index = -1\n\n if index >= 0:\n elist[index][icount] += 1\n elist[index][icsums] += size\n if size < elist[index][imins]: elist[index][imins] = size\n if size > elist[index][imaxs]: elist[index][imaxs] = size\n\n else: # Adding the new extension in the list\n elist.append([ext, 1, size, size, size])\n count += indircount\n\n # Updating the directory count\n for name in dirs:\n foldercount += 1\n\n # Mapping arguments with indices in the list\n dict = {\n 'ext': iext,\n 'count': icount,\n 'size': icsums\n }\n\n # Sorting the list\n elist.sort(key=lambda x: x[dict.get(kwargs['sort'])], reverse=not kwargs['asc'])\n\n print(\"%d files in %d folders max depth: %s\\n\" % (count, foldercount, depth))\n if kwargs['human']:\n print(f\"{'Ext.':<8}{'Count':<13}{'Total':<10}{'Min':<11}{'Max':<13}{'Avg':<9}\")\n for l in elist:\n print(f\"{l[iext]:<7} {l[icount]:<12,d} {sizeformat(l[icsums]):<9} {sizeformat(l[imins]):<10} \\\n{sizeformat(l[imaxs]):<12} {sizeformat(l[icsums] / l[icount]):<9}\")\n else:\n print(f\"{'Ext.':<8}{'Count':<13}{'Total':<13}{'Min':<13}{'Max':<13}{'Avg':<2}\")\n for l in elist:\n print(f\"{l[iext]:<7} {l[icount]:<12,d} {l[icsums]:<12} {l[imins]:<12} {l[imaxs]:<12} \\\n{int(round(l[icsums] / l[icount], 0)):<12}\")", "def compile_global_stats(results_dir='./../data/*/*cr_sizes*hdf5'):\n\n flist = glob.glob(results_dir)\n output = defaultdict(list)\n flist = [f for f in flist if 'nicmos' not in f]\n print(flist)\n flist.append('./../data/STIS/stis_cr_sizes.hdf5')\n results = [dask.delayed(tally_stats)(f) for f in flist]\n results = list(dask.compute(*results, scheduler='processes'))\n\n for instr, data in results:\n output[instr].append(data)\n\n for key in output.keys():\n cr_count = 0\n img_count = 0\n total_exptime = 0\n for val in output[key]:\n cr_count += val.cr_count\n img_count += val.img_count\n total_exptime += val.total_exptime\n output[key] = [cr_count, img_count, total_exptime]\n\n df = pd.DataFrame(output, index=['cr_count', 'img_count', 'total_exptime'])\n print(df)\n print('Total CR count: {}'.format(df.loc['cr_count', :].sum()))\n print('Total number of images analyzed: {}'.format(df.loc['img_count', :].sum()))\n print('Cumulative exposure time: {}'.format(df.loc['total_exptime', :].sum()))", "def getASDistributionbyCpu(num_as,node_files):\n node_list = []\n num_cpus = 0\n for n in node_files:\n with open(n) as nfd:\n dati = json.load(nfd)\n obj = {}\n obj['name'] = n.split(\"/\", 1)[1]\n obj['cpus'] = dati['ansible_facts']['ansible_processor_vcpus']\n obj['as_list'] = []\n node_list.append(obj)\n num_cpus += obj['cpus']\n\n as_per_cpu = num_as / num_cpus\n as_list = []\n if as_per_cpu < 1:\n as_per_cpu = 1\n print(num_cpus)\n print(as_per_cpu)\n # Deploy an even number of ases on the nodes\n as_to_deploy = 1\n for n in node_list:\n for i in range(0,n['cpus']):\n for j in range(0,int(as_per_cpu)):\n if as_to_deploy <= num_as:\n n['as_list'].append(as_to_deploy)\n as_list.append({'as':as_to_deploy, 'node':n['name']})\n as_to_deploy += 1\n\n # Now the nodes are evenly loaded, if we still have something to place, just put it\n # one per core\n if as_to_deploy < num_as:\n for n in node_list:\n for i in range(0,n['cpus']):\n if as_to_deploy <= num_as:\n n['as_list'].append(as_to_deploy)\n as_list.append({'as':as_to_deploy, 'node':n['name']})\n as_to_deploy += 1\n\n return(node_list,as_list)", "def split(self):\n \n spl = self.which('split')\n if spl:\n self.__tmp = \"/tmp\"\n self.__tmpout = \"/tmp/output\"\n if not os.path.exists(self.__tmpout):\n 
os.makedirs(self.__tmpout)\n #os.chdir(\"/tmp\")\n '''\n assume split prog overwrites existing files if\n there is a conflict in file names\n '''\n #thecommand = \"%s -a 3 -b 500k %s %s/%s\" % (spl, self.__filename, self.__tmpout, self.__filename + self.__postfix)\n thecommand = \"%s -a 3 -b 10m %s %s/%s\" % (spl, self.__filename, self.__tmpout, self.__filename + self.__postfix)\n os.system(thecommand)\n dirList=os.listdir(self.__tmpout)\n #self.constructCat(dirList)\n for chunkfilename in dirList:\n #print chunkfilename \n #self.__cat += self.__remotepath + \"/\" + chunkfilename + \" \"\n #print self.__cat\n self.__flist.append(self.__tmpout + \"/\" + chunkfilename)\n #print self.__flist\n self.writeLog(chunkfilename, self.md5(fileName=self.__tmpout + \"/\" + chunkfilename))\n self.__numchunks = len([item for item in os.listdir(self.__tmpout) if os.path.isfile(self.__tmpout + \"/\" + item)])\n else:\n try:\n f = open(self.__filename, 'rb')\n except (OSError, IOError), e:\n raise FileSplitterException, str(e)\n \n bname = (os.path.split(self.__filename))[1]\n # Get the file size\n fsize = os.path.getsize(self.__filename)\n # dynamically calculate number of chunks\n strfsize = str(fsize)\n '''\n in MB's\n 8 - teens\n 9 - hundreds\n 10 - gigabytes\n '''\n if len(strfsize) == 8:\n #self.__numchunks = fsize/100000\n self.__numchunks = fsize/50000\n elif len(strfsize) == 9:\n #self.__numchunks = fsize/1000000\n self.__numchunks = fsize/500000\n elif len(strfsize) == 10:\n #self.__numchunks = fsize/10000000\n self.__numchunks = fsize/5000000\n #print '\\nSplitting file %s into %d chunks' % (self.__filename, self.__numchunks)\n # Get size of each chunk\n self.__chunksize = int(float(fsize)/float(self.__numchunks))\n \n chunksz = self.__chunksize\n total_bytes = 0\n \n for x in range(self.__numchunks):\n #chunkfilename = bname + '-' + str(x+1) + self.__postfix\n chunkfilename = bname + ('-%03d' % (x+1)) + self.__postfix\n # kill residual file if it exists\n if os.path.exists(chunkfilename):\n os.remove(chunkfilename)\n \"\"\"\n if reading the last section, calculate correct\n chunk size.\n \"\"\"\n if x == self.__numchunks - 1:\n chunksz = fsize - total_bytes\n \n try:\n if self.__debug:\n print 'Writing file chunk: %s' % chunkfilename\n data = f.read(chunksz)\n total_bytes += len(data)\n chunkf = file(chunkfilename, 'wb')\n chunkf.write(data)\n chunkf.close()\n #self.__cat += self.__remotepath + \"/\" + chunkfilename + \" \"\n self.__flist.append(chunkfilename)\n self.writeLog(chunkfilename, self.md5(fileName=chunkfilename))\n except (OSError, IOError), e:\n print e\n continue\n except EOFError, e:\n print e\n break\n\n print '\\nSplit complete on file: %s into %d chunks\\n' % (self.__filename, self.__numchunks)\n self.__logfhandle.close()\n #self.__cat += \"> \" + self.__remotepath + \"/\" + self.__filename\n self.set_cat_statement()", "def main(sign, label):\n files = h5files(os.getcwd())\n n_arti_total = 0\n for fname in files:\n man = Combinato(fname, sign, label)\n if not man.initialized:\n continue\n groups = man.get_groups(times=False, spikes=False)\n if 0 in groups.keys():\n n_unassigned = len(groups[0])\n if -1 in groups.keys():\n n_arti = len(groups[-1])\n else:\n n_arti = 0\n\n\n print('{} {} groups, {} artifacts'.\n format(os.path.basename(fname), len(groups), n_arti))\n\n n_arti_total += n_arti\n\n return n_arti_total", "def _distribute_data_to_cluster(self):\n\n for data in self.data:\n _distances = self._calculate_distances(data)\n _cluster = self._get_closest_cluster(_distances)\n 
self.clusters[_cluster].append(data)", "def WriteFileSize(self):\n # Simply a calculation of the number of clusters (e.g. sectors) * 512\n total_size = 0\n for cluster_range in self.cluster_ranges:\n clusters = cluster_range.split(\"-\")\n difference = int(clusters[1]) - int(clusters[0]) + 1\n self.cluster_list.extend(self.CreateList(int(clusters[0]), int(clusters[1])))\n print(f\"Cluster difference between {clusters[1]} and {clusters[0]} is {difference}\")\n total_size += difference*512\n print(f\"Total size has been calculated as {total_size}\")\n with open(self.output_file, \"r+b\") as fh:\n seeker = (self.root_directory_offset*self.sector_size)+((self.index_number-1)*self.directory_index_size)+(self.file_size_offset)\n #s_array = bytearray()\n print(f\"Reversing {total_size}\")\n ba_size = (total_size).to_bytes(4, byteorder='little')\n print(f\"Preparing to write {ba_size} to {seeker}\")\n fh.seek(seeker)\n fh.write(ba_size)\n print(\"File size written to root directory\")\n return True", "def process_files(exp_folders):\n pool = mp.Pool()\n results = pool.imap_unordered(read_and_serialize, exp_folders)\n\n stat = []\n for res in results:\n print(res)\n stat.append(res)\n\n pool.close()\n pool.join()", "def __init__(self, run_dir, sub_dirs=['slices',], num_files=[None,], start_file=1, comm=MPI.COMM_WORLD, **kwargs):\n self.run_dir = os.path.expanduser(run_dir)\n self.sub_dirs = sub_dirs\n self.file_lists = OrderedDict()\n self.comm = comm\n if comm.rank == 0:\n print('reading files from {}'.format(run_dir))\n import sys\n sys.stdout.flush()\n\n for d, n in zip(sub_dirs, num_files):\n files = []\n for f in os.listdir('{:s}/{:s}/'.format(self.run_dir, d)):\n if f.endswith('.h5'):\n file_num = int(f.split('.h5')[0].split('_s')[-1])\n if file_num < start_file: continue\n if n is not None and file_num > start_file+n: continue\n files.append(['{:s}/{:s}/{:s}'.format(self.run_dir, d, f), file_num])\n self.file_lists[d], nums = zip(*sorted(files, key=lambda x: x[1]))\n\n self.local_file_lists = OrderedDict()\n self.distribution_comms = OrderedDict()\n self.idle = OrderedDict()\n self._distribute_files(**kwargs)", "def process():\n print(OPTS)\n\n # Remove dataset files if they exist from before\n p = PATH.proc\n if p.exists():\n shutil.rmtree(p)\n p.mkdir()\n\n with multiprocessing.Pool(OPTS['cpus']) as pool:\n chunks = [0, 1] if OPTS['dev'] else range(100)\n partition_paths = pool.map(_process, chunks)\n\n fastparquet.writer.merge(partition_paths)", "def baseline_rechunk(indir_path, outdir_path, O, I, R, file_format, addition, distributed, debug_mode=False, clean_out_dir=False, dont_write=False):\n\n print(f\"Setting arguments...\")\n global DEBUG_LOCAL\n global DONT_WRITE\n global tracker\n global outdirs_dict, outdir_index\n outdirs_dict = dict()\n outdir_index = 0\n tracker = Tracker()\n DEBUG_LOCAL = True if debug_mode else False\n DONT_WRITE = True if dont_write else False\n\n print(\"Addition mode:\", addition)\n print(\"DONT_WRITE: \", DONT_WRITE)\n\n O, I, R = tuple(O), tuple(I), tuple(R)\n\n file_manager = get_file_manager(file_format)\n\n infiles_partition = get_blocks_shape(R, I)\n infiles_volumes = get_named_volumes(infiles_partition, I)\n outfiles_partition = get_blocks_shape(R, O)\n outfiles_volumes = get_named_volumes(outfiles_partition, O)\n outfiles_volumes = outfiles_volumes.values()\n\n if distributed:\n repartition_dict = None\n \n json_filename = '/disk0/gtimothee/repartition_dict.json'\n if not os.path.isfile(json_filename):\n # print(\"cannot find association dict json 
file\")\n sys.exit(1)\n else:\n pass # print(f\"json file found\")\n\n try: \n with open(json_filename) as f:\n repartition_dict = json.load(f)\n except Exception as e: \n print(e)\n # print(\"error (1)\")\n sys.exit(1)\n\n if repartition_dict == None:\n # print(\"error (2)\")\n sys.exit(1)\n else:\n pass # print(f\"Found reparition dict: {repartition_dict}\")\n\n input_files = repartition_dict.values()\n else:\n input_files = file_manager.get_input_files(indir_path)\n\n t_read = 0\n t_write = 0\n\n vols_written = list()\n nb_infile_openings = 0\n nb_infile_seeks = 0\n nb_outfile_openings = 0\n nb_outfile_seeks = 0\n buffer_index = 1\n for input_file in input_files:\n print(f\"Treating buffer: {buffer_index}...\")\n buffer_index += 1\n nb_infile_openings += 1\n\n involume = get_volume(input_file, infiles_volumes, infiles_partition)\n t1 = time.time()\n if not DONT_WRITE:\n data = file_manager.read_data_from_fp(input_file, slices=None)\n else:\n data = None\n t1 = time.time() - t1\n t_read += t1\n \n for outvolume in outfiles_volumes:\n if hypercubes_overlap(involume, outvolume):\n shape, t2, nb_outfile_seeks_tmp = write_to_outfile(involume, outvolume, data, outfiles_partition, outdir_path, O, file_manager, addition, tracker)\n t_write += t2\n vols_written.append(shape)\n # nb_outfile_openings += 1 already included in nb_outfile_seeks\n nb_outfile_seeks += nb_outfile_seeks_tmp\n \n file_manager.close_infiles()\n\n if DONT_WRITE:\n assert tracker.is_complete(((0,0,0), R))\n\n # print(\"\\nShapes written:\")\n # for row in vols_written: \n # print(row)\n\n if clean_out_dir:\n print(\"Cleaning output directory\")\n file_manager.clean_directory(outdir_path)\n\n get_opened_files()\n\n return t_read, t_write, [nb_outfile_openings, nb_outfile_seeks, nb_infile_openings, nb_infile_seeks]", "def print_local_output_files_stats():\n print \"\\n\\nFILES CREATED:\"\n for filename in os.listdir('../output'):\n filesize = os.path.getsize('../output/' + filename)\n print str(filesize) + \"\\t\" + filename\n print \"\\n\"", "def ilastik_classify_mpi():\n comm = MPI.COMM_WORLD\n rank = comm.Get_rank()\n size = MPI.COMM_WORLD.Get_size()\n name = MPI.Get_processor_name()\n start_time = int(time.time())\n # Allow Ilatisk to use all available threads of the server/compute node.\n threads = int(no_of_threads/1)\n# threads = 1\n # Allow Ilastik to use available memory of the server/compute node.\n ram = ram_size\n# ram = int(ram_size/12)\n if rank == 0:\n print(\"*** size is %d, No of thread is %d, ram size is %d\" % (size, threads, ram))\n # assumes sub-volume image file extension is .hdf5\n input_files = sorted(glob(hdf_subvol_files_location + '/*.hdf5'))\n if not input_files:\n print(\"*** Did not find any file ending with .hdf5 extension ***\")\n return\n # Delete existing files created by ilastik (*.h5 files).\n if rank == 0:\n print(\"Ilastik input files/hdf_files_location\", hdf_subvol_files_location)\n oldoutput_files = sorted(glob(hdf_subvol_files_location + '/*.h5'))\n for file in oldoutput_files:\n print(\"*** Removing old Ilastik created file %s ***\" % file)\n os.remove(file)\n \n comm.Barrier()\n \n data_sets = []\n indices_ds = []\n rightoverlap_ds = []\n leftoverlap_ds = []\n # Get the dataset name in each sub-volume file. 
Dataset name is the same as file name.\n # Convert from unicode to ASCII since Ilastik does not like unicode\n for file in input_files:\n f = h5py.File(file, 'r')\n name, ext = os.path.splitext(os.path.basename(file))\n data_sets.append((file + '/' + name).encode('ascii'))\n indices_ds.append(f['orig_indices'][...])\n rightoverlap_ds.append(f['right_overlap'][...])\n leftoverlap_ds.append(f['left_overlap'][...])\n f.close()\n \n if rank == 0:\n print(\"Number of input/HDF5 files is %d, and Number of processes is %d\" % ((len(data_sets)), size))\n \n # Figure out how many sub-volume files each rank should handle.\n iterations = int(len(data_sets) / size) + (len(data_sets) % size > 0)\n # Divide pixel classification of sub-volume files among processes/ranks. \n for idx in range(iterations):\n if (rank + (size * idx)) >= len(data_sets):\n print(\"\\nBREAKING out, this rank is done with its processing, my rank is %d, number of files is %d, size is %d and idx is %d\" %\n (rank, len(data_sets), size, idx))\n break\n start_loop_time = time.time()\n data_set_name = data_sets[(rank + size * idx)]\n start_classify_time = time.time()\n hdf_dataset_path = classify_pixel_hdf(data_set_name, classifier, threads, ram)\n end_classify_time = time.time()\n classify_time = end_classify_time - start_classify_time\n print(\"Exec time for classification is %d Sec, rank is %d, hdf_dataset_path is %s\" % \n (classify_time, rank, hdf_dataset_path))\n # Create a dataset and save indices of the sub-volume into the whole volume.\n filename, dataset = os.path.split(hdf_dataset_path[0])\n file = h5py.File(filename, 'r+')\n subvol_indx = file.create_dataset('orig_indices', (6,), dtype='uint64')\n subvol_indx[...] = indices_ds[(rank + size * idx)]\n \n # Save the overlap sizes.\n subvol_rightoverlap = file.create_dataset('right_overlap', (3,), dtype='uint8')\n subvol_rightoverlap[...] = rightoverlap_ds[(rank + size * idx)]\n \n subvol_leftoverlap = file.create_dataset('left_overlap', (3,), dtype='uint8')\n subvol_leftoverlap[...] 
= leftoverlap_ds[(rank + size * idx)]\n file.close()\n end_loop_time = time.time()\n file_classify_time = end_loop_time - start_loop_time\n print(\"Exec Time per classifying one file is %d Sec, read/write time is %d Sec and rank is %d\" % \n (file_classify_time, (file_classify_time - classify_time), rank))\n \n end_time = int(time.time())\n exec_time = end_time - start_time\n print(\"*** My Rank is %d, exec time is %d sec - Done with classifying pixels in sub-volume files ***\" % (rank, exec_time))", "def analyze():\n global secondary_shards\n global total_size_secondary\n global total_size_primary\n global max_size_node_name\n global primary_shards\n max_size = 0\n for log_ele in log_elements:\n # req 3\n if log_ele.storage_size > max_size:\n max_size_node_name = log_ele.node_name\n max_size = log_ele.storage_size\n # req 2 and 1\n if log_ele.node_type == \"p\":\n primary_shards = primary_shards+1\n total_size_primary = total_size_primary+log_ele.storage_size\n elif log_ele.node_type == \"r\":\n secondary_shards = secondary_shards+1\n total_size_secondary = total_size_secondary+log_ele.storage_size\n if log_ele.storage_size > (128*1024*1024)*80/100 :\n watermark_breached.append(log_ele.node_name)", "def FSC2(input_dir, num_reps=50, min_sims=100000, max_ecm=20, calc_CI=False, numcores=1, scratch_mb='200', time_scratch=\"01:50:00\", mem=\"200\", print1=False, overwrite=\"None\", fsc2_path=\"/storage/plzen1/home/holcovam/programs/fsc26_linux64/fsc26\"):\n Data_Files = []\n tpl_files = []\n est_files = []\n CI_Data_Files = []\n shlist = []\n\n if input_dir.endswith(\"/\") is False:\n input_dir += \"/\"\n\n for path in os.listdir(input_dir):\n if os.path.isdir(input_dir + path) and path.startswith(\"FSC2input\"):\n samp_name = path.split(\"_\")[1]\n #folder_name = samp_name\n if samp_name + \"_DSFS.obs\" in os.listdir(input_dir + path):\n for i in range(0, num_reps):\n new_file = open(input_dir + path + \"/\" + samp_name + str(i) + \"_DSFS.obs\", 'w')\n with open(input_dir + path + \"/\" + samp_name + \"_DSFS.obs\") as data_file:\n for line in data_file:\n new_file.write(line)\n new_file.close()\n Data_Files.append(input_dir + path + \"/\" + samp_name + str(i) + \"_DSFS.obs\")\n else:\n print(\"Did not find input data file for: \", samp_name)\n if calc_CI == \"True\":\n num_files = 0\n for file in os.listdir(input_dir + path):\n if file.endswith(\"_DSFS.obs\") and file.split(\"_\")[-2].split(\".\")[-1][0:3] == \"rep\" and file != samp_name + \"_DSFS.obs\":\n for i in range(0, num_reps):\n new_file = open(input_dir + path + \"/\" + samp_name + file.split(\"_\")[-2].split(\".\")[-1].split(\"_\")[0]+ \"_\" + str(i) + \"_DSFS.obs\", 'w')\n with open(input_dir + path + \"/\" + file) as data_file:\n for line in data_file:\n new_file.write(line)\n new_file.close()\n CI_Data_Files.append(input_dir + path + \"/\" + samp_name + file.split(\"_\")[-2].split(\".\")[-1].split(\"_\")[0]+ \"_\" + str(i) + \"_DSFS.obs\")\n num_files += 1\n if len(CI_Data_Files) < 1:\n print(\"Did not find bootstrap replicates for: \", samp_name)\n else:\n print(\"Found \", num_files, \" replicate dsfs files for CI calculation for \", samp_name)\n if path.endswith(\".tpl\"):\n tpl_files.append(path)\n est_files.append(path.split(\".\")[0])\n if len(tpl_files) == 0:\n print(\"Did not find any tpl files!! 
Aborting!!\")\n else:\n if calc_CI == \"True\":\n Data_Files = CI_Data_Files\n for file in Data_Files:\n name = file.split(\"_DSFS\")[0]\n samp_name = name.split(\"/\")[-1]\n folder_name = samp_name [0:11]\n for tpl in tpl_files:\n tpl_name = tpl.split(\".tpl\")[0]\n if os.path.isdir(name + \"_\" + tpl_name) is False or overwrite == \"hard\":\n new_tpl = open(name + \"_\" + tpl_name + \".tpl\", 'w')\n new_data = open(name + \"_\" + tpl_name + \"_DSFS.obs\", 'w')\n\n with open(file, 'r') as data:\n for i, line in enumerate(data):\n if i == 1:\n pop_info = line.strip(\"\\n\").strip(\"\\t\").split(\"\\t\")\n pop_num = int(pop_info[0])\n samp_nums = pop_info[-pop_num:]\n new_data.write(line)\n with open(input_dir + tpl, 'r') as template:\n samp_num_lines = pop_num + 4\n for i, line in enumerate(template):\n if i < samp_num_lines:\n new_tpl.write(line)\n elif i == samp_num_lines:\n for num in samp_nums:\n new_tpl.write(num + \"\\n\")\n elif i >= samp_num_lines + len(samp_nums):\n new_tpl.write(line)\n new_est = open(name + \"_\" + tpl_name + \".est\", 'w')\n try:\n with open(input_dir + tpl_name + \".est\") as est:\n for line in est:\n new_est.write(line)\n except FileNotFoundError:\n print(\"Did not find est file for: \", tpl)\n #folder_name = samp_name ''.join(i for i in s if not i.isdigit())\n shname = name + \"_\" + tpl_name + \".sh\"\n shfile5 = open(shname, 'w')\n shfile5.write('#!/bin/bash -e\\n' +\n '#PBS -N '+samp_name+'\\n' +\n '#PBS -l walltime='+str(time_scratch)+'\\n' +\n '#PBS -l select=1:ncpus='+str(numcores)+':mem='+str(mem)+'mb:scratch_local='+str(scratch_mb)+'mb\\n' +\n '#PBS -m abe\\n' +\n '#PBS -j oe\\n\\n' +\n 'module add python-3.4.1-gcc\\n'+\n 'module add python34-modules-gcc\\n'+\n 'trap \\'clean_scratch\\' TERM EXIT\\n'+\n 'if [ ! -d \"$SCRATCHDIR\" ] ; then echo \"Scratch not created!\" 1>&2; exit 1; fi \\n' +\n 'DATADIR=\"/storage/plzen1/home/holcovam/ScanTools\"\\n' +\n 'cp $DATADIR/'+ input_dir + \"FSC2input_\" + folder_name+ \"/\" + samp_name + \"_\" + tpl_name + '* $SCRATCHDIR || exit 1\\n'+\n 'cp '+fsc2_path+' $SCRATCHDIR || exit 1\\n'+\n 'cd $SCRATCHDIR || exit 2\\n' +\n 'echo data loaded at `date`\\n\\n' +\n 'chmod +x fsc26 \\n' +\n #'ls -l \\n' +\n './fsc26 -t ' + samp_name + \"_\" + tpl_name + '.tpl -e ' + samp_name + \"_\" + tpl_name + '.est -n ' + str(min_sims) + ' -u -d -q -L ' + str(max_ecm) + ' -M \\n' + \n 'rm seed.txt \\n'+\n 'rm fsc26\\n'+\n 'rm *DSFS.obs\\n'+\n 'rm *.sh\\n'+\n 'rm *.tpl \\n'+\n 'rm *.est \\n'+\n #'ls -l \\n' +\n 'cp $SCRATCHDIR/*.par $DATADIR/'+ input_dir + \"FSC2input_\" + folder_name+' || exit 1\\n'+\n 'rm *.par \\n'+\n 'cp -r $SCRATCHDIR/* $DATADIR/'+input_dir+' || export CLEAN_SCRATCH=false\\n'+\n 'printf \"\\\\nFinished\\\\n\\\\n\"\\n')\n shfile5.close()\n shlist.append(shname)\n\n############IF PROBLEM WITH EXCESS OF NONCONVERGED CHAINS, COPY /home/majda/alpine/fastsimcoal2/afterWPSG/scripts/notConverged.py here ###################\n\n else:\n print(\"Output for \" + samp_name + \"_\" + tpl_name + \" already exists. 
Use hard_overwrite = True to overwrite.\")\n return shlist", "def get_shards(data_dir, file_list, shard_size, istraining):\n file_idxs = np.arange(0, len(file_list))\n np.random.shuffle(file_idxs) # randomly extract data from files\n\n shard_num = len(file_list) // shard_size\n\n for shard_idx in range(shard_num):\n\n start_idx = shard_idx * shard_size\n end_idx = (shard_idx + 1) * shard_size\n shard_files_idxs = file_idxs[start_idx: end_idx]\n\n all_data, all_label, all_names, all_node_img = [], [], [], []\n for fn in shard_files_idxs:\n\n if not data_dir:\n raw_data = np.load(file_list[fn])\n else:\n raw_data = np.load(os.path.join(data_dir, file_list[fn]))\n\n current_data = raw_data['vgg_features']\n node_img_path = raw_data['img_path']\n # pid = raw_data['pid']\n # time = raw_data['time']\n if len(current_data) < MIN_NUM_POINT:\n # skip WSI of too few patches\n continue\n\n # if len(current_data) > MAX_NUM_POINT:\n # continue\n\n curr_path = file_list[fn]\n\n curr_type = curr_path.split('/')[-4]\n curr_filename = curr_path.split('/')[-1]\n\n if curr_type == 'LUAD':\n # LUAD -> class 0, LUSC -> class 1\n current_label = 0\n else:\n current_label = 1\n\n # if istraining:\n \"random select at most MAX_NUM_POINT nodes for WSI\"\n list_node_idx = np.arange(0, current_data.shape[0])\n np.random.shuffle(list_node_idx)\n sel_ids = list_node_idx[0: MAX_NUM_POINT]\n\n current_data = current_data[sel_ids]\n current_data = np.expand_dims(current_data, 0)\n node_img_path = node_img_path[sel_ids]\n\n all_data.append(current_data)\n all_label.append(current_label)\n all_names.append(curr_filename)\n all_node_img.append(node_img_path)\n\n \"\"\" create numpy for all data and label\"\"\"\n all_label = np.squeeze(np.hstack(all_label))\n\n yield all_data, all_label, all_names, all_node_img", "def run(n):\n base_files = os.listdir(DATA_DIR)\n\n # Keep track of files we have uploaded\n uploaded = []\n\n # Upload files\n for _ in range(n):\n files = random.choices(base_files, k=random.randint(1, 15))\n name, checksum = assemble_file(files)\n upload_file(name)\n os.remove(name)\n uploaded.append((name, checksum))\n\n # Download files\n for name, checksum in uploaded:\n download_and_validate_checksum(name, checksum)\n\n # Validation checks\n check_pack_sizes()\n check_pack_checksums()\n\n print(server_stats())\n\n # Delete all files except for the last two. 
This should force the vacuum to rebalance\n # some packfiles.\n to_delete = uploaded[:-2]\n remaining = uploaded[-2:]\n for name, _ in to_delete:\n delete_file(name)\n\n # Run a vacuum and wait for it to complete\n vacuum_id = vacuum()\n status = None\n for _ in range(10):\n status = vacuum_status(vacuum_id)\n if status != \"RUNNING\":\n break\n time.sleep(1)\n if status != \"SUCCEEDED\":\n raise ValueError(f\"vacuum failed {status}\")\n\n # Check that the remaining files can still be downloaded after the vacuum\n for name, checksum in remaining:\n download_and_validate_checksum(name, checksum)", "def part1():\n program = read_input()\n root = build_filesystem(program)\n all_sizes = root.make_size_list()\n return sum(size for size in all_sizes if size <= 100000)", "def outputFiles(self, filesizelist):\n self.outputs = filesizelist\n self.outputSize = reduce(lambda x,y: x + y[1], filesizelist, 0)", "def process_stat_files(param):\n\n #get the files that are actually in the output directory\n call = ['cp', '-R']\n call.append(param['working_dir']+'results/featureCount/')\n call.append(param['working_dir']+'report/')\n _, _ = subprocess.Popen(call,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE).communicate()\n\n featurecount_file = (param['working_dir']+\n 'results/featureCount/featureCount_stats.txt')\n #extract table\n table = []\n filehandle = open(featurecount_file)\n #header\n table.append(filehandle.readlines()[0].rstrip().split('\\t'))\n table[0] = table[0][1:]\n filehandle.close()\n\n #total number of aligned reads\n tot_reads = param['bam_qc']['unique_aligned_reads']\n counter = [0] * len(param['bam_qc']['unique_aligned_reads'])\n \n filehandle = open(featurecount_file)\n for line in filehandle.readlines()[1:]:\n cur_line = line.rstrip().split('\\t')\n cur_line[0] = re.sub(r'_',' ',cur_line[0])\n if cur_line[0] not in ['Unassigned MultiMapping','Assigned']:\n counter = [ct + int(cr) for ct, cr in zip(counter, cur_line[1:])]\n perc = ([cur_line[0]]+\n MODULE_HELPER.get_percentage(cur_line[1:],\n tot_reads,\n len(cur_line)-1))\n table.append(perc)\n filehandle.close()\n assigned = [tot_reads[idx] - counter[idx] for idx in range(len(tot_reads))]\n perc = ['Assigned'] + MODULE_HELPER.get_percentage(assigned,\n tot_reads,\n len(counter))\n return table", "def getBigFiles(self):\n toMove = {self.resPath:[], self.runPath:[]}\n\n for tgt in toMove.keys():\n for root, dirs, files in os.walk(tgt):\n files = [os.path.join(root, f) for f in files]\n def isBig(f): # only files, no links\n return os.path.isfile(f) and not os.path.islink(f) and os.path.getsize(f)>=self.bigFilesSize\n toMove[tgt].extend([f for f in files if isBig(f)])\n return toMove", "def countDataSize(self,filename):\n \n try:\n d = h5py.File(filename,'r')\n except:\n print(filename)\n return \n\n N = 0\n scan_edges = d['level2/Statistics/scan_edges'][:]\n for (start,end) in scan_edges:\n N += (end-start)//self.offsetLen * self.offsetLen\n d.close()\n\n N = N*self.Nfeeds\n\n self.chunks += [[int(self.Nsamples), int(self.Nsamples+N)]]\n self.datasizes += [int(N/self.Nfeeds)]\n self.Nsamples += int(N)", "def concat(file_list: list, output_dir: str, chunk_size: int=40,\n num_workers: int=None, name: str=None, rate=None,\n trim_silence_threshold: float=0, min_duration: int=None,\n exist_ok=False, verbose_level=0):\n # Todo: remove trim silence feature from this function to a new function\n if len(file_list) == 0:\n raise ValueError('Not possible to process an empty list of files.')\n\n os.makedirs(output_dir, exist_ok=True)\n 
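    # concat_chunks is called below but never defined in this listing; a minimal
    # sketch, assuming each chunk is a list of wav paths and that sox is on PATH
    # (only the name and signature come from the call sites — the rest is assumed;
    # in real use it would live at module scope so ProcessPoolExecutor can pickle it)
    import uuid

    def concat_chunks(file_list, output_path, verbose_level=0):
        # sox concatenates when given several input files and a single output file
        out_name = uuid.uuid4().hex + '.wav'
        os.system('sox -V{} {} {}'.format(verbose_level, ' '.join(file_list),
                                          os.path.join(output_path, out_name)))
        return out_name  # callers re-join this bare name with output_dir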
file_list = update_list_fixing(file_list, target_rate=rate, channels=1,\n min_duration=min_duration,\n verbose_level=verbose_level,\n num_workers=num_workers)\n print('[INFO] creating data set [%s]' % output_dir)\n\n # Temp files (will be removed after concatenation process)\n temp_files = set()\n\n # Concat all files\n print('[INFO] concatenating chunks of size %d' % chunk_size)\n while len(file_list) > 1: # Will concat chunk of files until lasts one left\n print('[remaining files to process: %d]' % len(file_list))\n # Create chunks\n chunks = [file_list[i:i + chunk_size]\n for i in range(0, len(file_list), chunk_size)]\n # Reset file list\n file_list = []\n\n if str(verbose_level) == '2' and workers == 1:\n # This code is duplicated for debugging purposes\n for chunk in chunks:\n temp_file = concat_chunks(file_list=chunk,\n output_path=output_dir + os.sep,\n verbose_level=verbose_level)\n if os.path.isfile(output_dir + os.sep + temp_file):\n # Add file to temp_files:\n temp_files.add(output_dir + os.sep + temp_file)\n # Add file to file_list to process again\n file_list.append(output_dir + os.sep + temp_file)\n else:\n # Make parallel calls to concat chunks\n with concurrent.futures.ProcessPoolExecutor(num_workers) \\\n as executor:\n futures = [\n executor.submit(fn=concat_chunks,\n file_list=chunk,\n output_path=output_dir + os.sep,\n verbose_level=verbose_level)\n for chunk in chunks]\n\n kwargs = {\n 'total': len(futures),\n 'unit': 'chunks',\n 'unit_scale': True,\n 'leave': True\n }\n\n for f in tqdm(concurrent.futures.as_completed(futures),\n **kwargs):\n pass\n\n for f in futures:\n if os.path.isfile(output_dir + os.sep + f.result()):\n # Add file to temp_files:\n temp_files.add(output_dir + os.sep + f.result())\n # Add file to file_list to process again\n file_list.append(output_dir + os.sep + f.result())\n # Remove temporary files:\n if len(temp_files) == 0:\n print(\"[FATAL ERROR]: the concatenated file is missing. 
You might want \"\n \"to run again with the chunk_size=2, workers=1, and the \"\n \"verbosity_level=2 parameters for debugging purposes.\")\n exit(-1)\n final_file = file_list[0]\n temp_files.remove(final_file)\n for file in temp_files:\n try:\n os.remove(file)\n except FileNotFoundError:\n print('[WARN] File not found:', file)\n\n if trim_silence_threshold is not None and trim_silence_threshold > 0:\n temp_file = final_file + '_temp_trs.wav'\n cmd = 'sox -V' + str(verbose_level) + ' ' + final_file + ' ' + \\\n temp_file + ' silence 1 0.1 {}% -1 0.1 {}%'.\\\n format(trim_silence_threshold, trim_silence_threshold)\n os.system(cmd)\n os.remove(final_file)\n os.rename(temp_file, final_file)\n\n if name is not None:\n if os.path.isfile(output_dir + os.sep + name + '.wav'):\n if exist_ok:\n os.remove(output_dir + os.sep + name + '.wav')\n else:\n raise FileExistsError\n os.rename(file_list[0], output_dir + os.sep + name + '.wav')\n\n rmtree(temp_folder)", "def mpirun(self):\n comm = MPI.COMM_WORLD\n rank = comm.Get_rank()\n size = comm.Get_size()\n print rank \n print size\n data = []\n dcds = self.getdcds()\n for i in range(0, len(dcds)):\n pid = i % size \n if pid == rank:\n dcd = dcds[i]\n dcdpath = self.d + \"/\" + dcd\n data.extend(self.metric(self.dcdtopsf(dcd), dcdpath))\n self.write(data)", "def bulk_train(self):\n logger.info(\"collecting subfolders - relations\")\n relations = self.collect_subfolders(self.input_dir)\n logger.info(\"relations - {}\".format(relations))\n\n execution_times = []\n\n for rel, rel_path in tqdm(relations.items(), desc=\"relations\"):\n logger.info(\"collecting training files from {}\".format(rel_path))\n tr_files = self.collect_files(rel_path, self.regexp_train)\n hyper_params = self.get_hyperparams()\n hyper_params['graph'] = tr_files\n\n output_folder = os.path.join(self.output_dir, rel)\n if not os.path.exists(output_folder):\n logger.info(\"creating {} (did not exist)\".format(output_folder))\n os.makedirs(output_folder)\n\n for params in tqdm(ParameterGrid(hyper_params), desc=\"training embedding\"):\n logger.info(\"hyperparams: {}\".format(params))\n train_file = params['graph']\n model_name = self.compute_model_name(params, output_folder)\n logger.info('training starspace model \"{}\" from file \"{}\"'.format(\n model_name, train_file))\n external_output, delta = self.call_starspace(params, train_file, model_name)\n logger.info(\"executed in {:0.2f}s\".format(delta))\n\n logger.info(\"external command output logged in {}\".format(self.external_log))\n if not os.path.exists(self.output_dir):\n logger.info(\"creating {} (did not exist)\".format(self.output_dir))\n os.makedirs(self.output_dir)\n\n with open(self.external_log, 'a') as f:\n f.write(external_output)\n\n execution_times.append(dict({ 'time': delta }, **params))\n \n return execution_times", "def process_datasets(size, counts):\n global FUNCTION_LOGS\n FUNCTION_LOGS.append((f\"-----> Processing size {size}\", counts))\n # process small data sets\n counts = import_data('data',\n f'products_{size}.csv',\n f'customers_{size}.csv',\n f'rentals_{size}.csv')\n logging.info('Imported %d products, %d customers, and %d rentals', *counts)\n\n show_available_products()\n show_rentals('prd0000')\n\n drop_data()", "def process():\n config = read_config()\n \n\n img_dir = config['DEFAULT']['images_directory']\n results_dict = {}\n images = list(get_image_files(img_dir))\n for image in tqdm.tqdm(images):\n info = hash_file(image)\n if info == 0:\n continue\n\n hash_value = info['hash']\n\n if hash_value not 
in results_dict:\n file_name = os.path.basename(info['_id'])\n results_dict[hash_value] = [file_name, 1]\n else:\n results_dict[hash_value][1] += 1\n\n count = list(results_dict.values())\n sorted_count = sorted(count, key=lambda x: x[1], reverse=True)\n \n with ImagesDB(IMG_INFO_DB_FILENAME) as imgDb: \n imgDb.insert_batch(sorted_count)", "def batch_per_file(self):\n return(self.X_x_Y // self.batch_size)", "def __len__(self):\n return self._num_samples_per_file * len(self._files) // self._world_size", "def _assign_sizes(self):", "def run(self, dataset_size=4, n_jobs=-1, starting_block=0):\n data_files = sorted(self.input_directory.glob(\"**/*.txt\"))\n log.info(f\"Creating shape file based on {len(data_files)} samples.\")\n\n n_blocks = int(len(data_files) / dataset_size)\n data_file_blocks = split(data_files, n_blocks)\n dataset_blocks_ids = np.arange(len(data_file_blocks))\n\n if starting_block != 0:\n data_file_blocks = data_file_blocks[starting_block:]\n dataset_blocks_ids = dataset_blocks_ids[starting_block:]\n log.info(f\"Starting at a different block number: {starting_block}.\")\n n_blocks = int(len(data_file_blocks))\n\n log.info(f\"Going through {n_blocks} blocks in parallel.\")\n Parallel(n_jobs=n_jobs)(\n delayed(self.generate_single_block)(data_file_block, dataset_block_id)\n for (data_file_block, dataset_block_id) in tqdm(\n zip(data_file_blocks, dataset_blocks_ids)\n )\n )\n\n log.info(\"Combining the separate index files..\")\n index_floorplan = sorted(self.output_directory.glob(\"index_floorplans_*.csv\"))\n log.info(f\"Found {len(index_floorplan)} index block files.\")\n index_files = pd.concat([pd.read_csv(_file) for _file in index_floorplan])\n index_files = index_files.fillna(0)\n index_files.to_csv(self.output_directory / \"index_floorplans.csv\", index=False)", "def main():\n\n file_name_base = \"./lab-record/result/fairness/\"\n scenarios = ['lan', 'wan1', 'wan2']\n scenario = scenarios[2]\n\n algorithms = [\"bbr\", \"scalable\", \"bic\", \"highspeed\", \"htcp\", \"hybla\",\n \"illinois\", \"vegas\", \"yeah\"]\n names = [\"BBR\", \"Scalable\", \"BIC\", \"High Speed\",\n \"H-TCP\", \"Hybla\", \"Illinois\", \"Vegas\", \"YeAH\"]\n\n test_types = [\"vs_reno\", \"vs_cubic\", \"vs_itself\"]\n\n fsize = 36\n \n index_reno = []\n index_cubic = []\n index_itself = []\n\n data = []\n \n print 'Loadint statistics for ' + file_name_base + '/' + scenario\n\n for algorithm in algorithms:\n for test in test_types:\n path_base = file_name_base + \"/\" + scenario + \"/\" + test + \"/\" + \\\n algorithm + \"/\"\n if test == \"vs_itself\":\n exp_name = names[algorithms.index(algorithm)] + \"_1\"\n con_name = names[algorithms.index(algorithm)] + \"_2\"\n print path_base + exp_name\n print path_base + con_name\n exp_filename = \"/\" + algorithm + \"_1.log\"\n con_filename = \"/\" + algorithm + \"_2.log\"\n process(path_base, exp_filename, con_filename, index_itself)\n if test == \"vs_reno\":\n exp_name = names[algorithms.index(algorithm)]\n con_name = \"Reno\"\n print path_base + exp_name\n print path_base + con_name\n exp_filename = \"/\" + algorithm + \".log\"\n con_filename = \"/reno.log\"\n process(path_base, exp_filename, con_filename, index_reno)\n if test == \"vs_cubic\":\n con_name = \"CUBIC\"\n exp_name = names[algorithms.index(algorithm)]\n print path_base + exp_name\n print path_base + con_name\n exp_filename = \"/\" + algorithm + \".log\"\n con_filename = \"/cubic.log\"\n process(path_base, exp_filename, con_filename, index_cubic)\n\n size = 9\n x = numpy.arange(size)\n\n 
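    # process() evidently fills each index_* list with Jain's fairness index (see
    # the y-axis label below); for flows x_1..x_n the index is
    # (sum x_i)**2 / (n * sum x_i**2). A minimal sketch, with log parsing assumed:
    def jains_index(throughputs):
        # 1.0 means a perfectly fair share; 1/n means one flow starves the rest
        n = len(throughputs)
        return sum(throughputs) ** 2 / (n * sum(t * t for t in throughputs))
    # jains_index([9.5, 10.1]) -> ~0.999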
total_width, n = 1.2, 2.5\n width = 1.0 / n\n x = x - (total_width - width) / 2\n\n for i in range(0, len(x)):\n x[i] += 0.5 * i\n\n # Exp\n fig = plt.figure()\n\n # Con\n con_reno = plt.bar(x + 0 * width - 1.2,\n index_reno,\n width=width,\n label='Against Reno',\n alpha=0.5,\n color=\"darkorange\")\n\n con_cubic = plt.bar(x + 1 * width - 1.2,\n index_cubic,\n width=width,\n label='Against CUBIC',\n alpha=0.5,\n color=\"lawngreen\")\n\n con_itself = plt.bar(x + 2 * width - 1.2,\n index_itself,\n width=width,\n label='Against Another Same CCA',\n alpha=0.5,\n color=\"dodgerblue\")\n\n # Index\n plt.xticks(x + 1.5 * width - 1.2, [\"BBR\", \"Scalable\", \"BIC\", \"High Speed\",\n \"H-TCP\", \"Hybla\", \"Illinois\", \"Vegas\",\n \"YeAH\"],\n fontsize=fsize,\n rotation=\"45\")\n plt.ylabel(\"Jain`s Fairness Index\", fontsize=fsize)\n plt.yticks(fontsize=fsize)\n plt.ylim(0.5, 1.1)\n\n ax = plt.subplot(111)\n ax.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,\n ncol=3, mode=\"expand\", borderaxespad=0., fontsize=fsize)\n\n plt.subplots_adjust(left=0.07, right=0.98, top=0.9, bottom=0.2)\n\n plt.show()", "def estimate_num_spill_files(num_words, key_num_bytes, value_num_bytes, mapreduce_task_io_sort_mb, mapreduce_map_sort_spill_percent):\n # extra bytes added when each (k,v) pair is added to output buffer\n KEY_VALUE_META_DATA_NUM_BYTES = 16\n\n key_len_num_bytes = zero_compress.size_of_zero_compressed_int64(key_num_bytes)\n value_len_num_bytes = zero_compress.size_of_zero_compressed_int64(value_num_bytes)\n\n return math.ceil((num_words * (KEY_VALUE_META_DATA_NUM_BYTES + key_len_num_bytes + key_num_bytes + value_len_num_bytes + value_num_bytes)) /\n (util.MiB_to_bytes(mapreduce_task_io_sort_mb) * mapreduce_map_sort_spill_percent))", "def inputFiles(self, filesizelist):\n self.inputs = filesizelist\n self.inputSize = reduce(lambda x,y: x + y[1], filesizelist, 0)", "def _preprocess(self):\n for f in self._variables:\n self._path.joinpath(f).mkdir(parents=True, exist_ok=True)\n\n for i in tqdm(range(self._size)):\n linear, w = self._get_spectrograms(i)\n self._store_entry(i, linear, w)", "def process_file_metrics(root_dir, in_file_names, file_processors):\n manager = mp.Manager()\n file_metrics = manager.dict()\n\n parameters = [(root_dir, key, file_metrics, file_processors) for key in in_file_names]\n\n # main loop\n p = mp.Pool(max(1, mp.cpu_count() - 1))\n p.starmap(_process_file_metrics_parallel, parameters)\n p.close()\n p.join()\n\n return file_metrics", "def fileAgglomeration(self, dataset: list):\n result = dict()\n\n startTimeForAgglomeration = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)\n print(\"CPU Model,Index, Filename, Elapsed Time\")\n for idx, e in enumerate(dataset):\n # CPU TIME\n startTime = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)\n result[idx] = self._count_occurrences(filename=e)\n endTime = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)\n\n # CPU Model, Index, Filename, Time Taken Processing File\n fileName = e.split(\"/\")[-1]\n print(f\"{self.cpuModel},{idx + 1},{fileName},{endTime - startTime}\") # Logger ^ Markdown\n\n endTimeForAgglomeration = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)\n print(\n f\"Total Files Aggregated: {len(dataset)} and total {endTimeForAgglomeration - startTimeForAgglomeration} seconds elapsed.\")\n\n return result", "def countDataSize(self,filename):\n \n d = h5py.File(filename,'r')\n features = d['spectrometer/features'][:]\n select = self.selectData(features.astype(float), self.ifeature, d)\n N = len(features[select])\n 
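        # (the rounding just below trims N down to whole offset windows: with
        #  offsetLen = 100 and N = 257 selected samples, (257 // 100) * 100 == 200,
        #  i.e. the trailing partial window is discarded)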
d.close()\n\n N = (N//self.offsetLen) * self.offsetLen\n\n N = N*self.Nfeeds\n\n self.chunks += [[int(self.Nsamples), int(self.Nsamples+N)]]\n self.datasizes += [int(N/self.Nfeeds)]\n self.Nsamples += int(N)", "def run_sc(self,toRun,begin,stop):\n\n po = Pool(processes=cpu_count()-1)\n _results = po.map_async(mp_worker,toRun[begin:stop])\n results = _results.get()\n \n ## write the cluster sizes to file\n for r,sc in enumerate(results):\n k,sigma,dpath,dtype = toRun[begin:stop][r]\n clusterSizes = self.get_cluster_sizes(sc) \n self.writer1.writerow([k,sigma] + [round(sc.avgSilValue,4)])\n self.writer2.writerow([k,sigma] + clusterSizes)\n\n po.close()\n po.join()", "def main():\n\n\n\n skulls_folder = os.listdir(RAW_IMAGE_DIRECTORY)\n\n # fetch and sort the .mnc and .tag files\n mnc_files = [f for f in skulls_folder if 'mnc' in f]\n tag_files = [f for f in skulls_folder if 'tag' in f]\n mnc_names = [i.split('.mnc')[0] for i in mnc_files]\n \n mnc_files.sort()\n tag_files.sort()\n mnc_names.sort()\n\n # Process and package ndarrays as tuples inside npy file\n package_to_npy(RAW_IMAGE_DIRECTORY, mnc_files, tag_files, mnc_names)\n \n print('\\n' * 5)\n\n # Push the npy files to GCP Cloud Storage\n upload_to_gcp(PROCESSED_IMAGE_DIRECTORY, GCP_PROJECT_NAME, GCP_BUCKET_NAME)", "def generate_set(input_path, output_path, size=200, layover=0.1, input_size=1000, thread_count=8):\n\n # Assuming that the files are located in the folders 'labels' and 'examples'\n label_paths = utils.get_file_paths(\"{}/labels\".format(input_path))\n example_paths = utils.get_file_paths(\"{}/examples\".format(input_path))\n\n # Defines the output path based on the size\n output_path = \"{0}/{1}x{1}\".format(output_path, size)\n\n export_path_example = \"{}/examples/\".format(output_path)\n export_path_label = \"{}/labels/\".format(output_path)\n\n # Make the path if it does not exist\n utils.make_path(export_path_example)\n utils.make_path(export_path_label)\n\n path_length = len(label_paths)\n\n q = Queue()\n for i in range(path_length):\n q.put(i)\n\n # Starts n threads\n for i in range(thread_count):\n # Create a new database connection for each thread.\n t = threading.Thread(\n target=work,\n args=(\n q,\n example_paths,\n label_paths,\n path_length,\n export_path_example,\n export_path_label,\n size,\n layover,\n input_size\n )\n )\n\n # Sticks the thread in a list so that it remains accessible\n t.daemon = True\n t.start()\n\n q.join()\n \n # Empty the console after progress print\n print(\"\")", "def main(field_data, height, width, rgb_dir=None, hyperspectral_dir=None, sensor=\"hyperspectral\", savedir=\".\", chunk_size=200, extend_box=0, hyperspectral_savedir=\".\", n_workers=20, saved_model=None, use_dask=True, shuffle=True, classes_file=None):\n #Check sensor type has paths\n if sensor == \"hyperspectral\":\n assert not hyperspectral_dir is None\n if sensor==\"rgb\":\n assert not rgb_dir is None\n \n df = gpd.read_file(field_data)\n plot_names = df.plotID.unique()\n \n hyperspectral_pool = glob.glob(hyperspectral_dir, recursive=True)\n rgb_pool = glob.glob(rgb_dir, recursive=True)\n \n labels = []\n crops = []\n box_indexes = [] \n if use_dask:\n client = start_cluster.start(cpus=n_workers, mem_size=\"10GB\")\n futures = []\n for plot in plot_names:\n future = client.submit(\n run,\n plot=plot,\n df=df,\n rgb_pool=rgb_pool,\n hyperspectral_pool=hyperspectral_pool,\n sensor=sensor,\n extend_box=extend_box,\n hyperspectral_savedir=hyperspectral_savedir,\n saved_model=saved_model\n )\n futures.append(future)\n \n 
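        # the surrounding fan-out is the standard dask.distributed
        # submit/wait/result pattern; a self-contained sketch of the same shape
        # (the local cluster size and the toy task are placeholders):
        #
        #     from dask.distributed import Client, wait
        #     client = Client(n_workers=4)
        #     futures = [client.submit(pow, i, 2) for i in range(10)]
        #     wait(futures)                      # block until all futures resolve
        #     results = [f.result() for f in futures]
        #     client.close()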
wait(futures)\n for x in futures:\n try:\n plot_crops, plot_labels, plot_box_index = x.result()\n print(plot_box_index[0])\n \n #Append to general plot list\n crops.extend(plot_crops)\n labels.extend(plot_labels)\n box_indexes.extend(plot_box_index) \n except Exception as e:\n print(\"Future failed with {}\".format(e)) \n else:\n for plot in plot_names:\n plot_crops, plot_labels, plot_box_index = run(plot=plot, df=df, rgb_pool=rgb_pool, hyperspectral_pool=hyperspectral_pool, \n sensor=sensor, extend_box=extend_box, hyperspectral_savedir=hyperspectral_savedir, saved_model=saved_model) \n \n #Append to general plot list\n crops.extend(plot_crops)\n labels.extend(plot_labels)\n box_indexes.extend(plot_box_index)\n \n if shuffle:\n z = list(zip(crops, box_indexes, labels))\n random.shuffle(z)\n crops, box_indexes, labels = zip(*z)\n \n #Convert labels to numeric\n \n #If passes a label dict\n if classes_file is not None:\n classdf = pd.read_csv(classes_file)\n label_dict = classdf.set_index(\"taxonID\").label.to_dict()\n else:\n #Create and save a label dict\n unique_labels = np.unique(labels)\n label_dict = {}\n \n for index, label in enumerate(unique_labels):\n label_dict[label] = index\n pd.DataFrame(label_dict.items(), columns=[\"taxonID\",\"label\"]).to_csv(\"{}/class_labels.csv\".format(savedir))\n\n numeric_labels = [label_dict[x] for x in labels]\n \n #Write tfrecords\n tfrecords = create_records(crops, numeric_labels, box_indexes, savedir, height, width, chunk_size=chunk_size)\n \n return tfrecords", "def npz_dir_dataset(file_dir_or_list, features, randomize=True, num_parallel=5, shuffle_size=500, filesystem=None):\n\n files = file_dir_or_list\n\n # If dir, then list files\n if isinstance(file_dir_or_list, str):\n if filesystem is None:\n dir_list = os.listdir(file_dir_or_list)\n else:\n dir_list = filesystem.listdir(file_dir_or_list)\n \n files = [os.path.join(file_dir_or_list, f) for f in dir_list]\n\n fields = list(features.keys())\n feature_names = [features[f] for f in features]\n\n # Read one file for shape info\n file = next(iter(files))\n\n if filesystem is None:\n data = np.load(file)\n else:\n data = np.load(filesystem.openbin(file))\n\n np_arrays = [data[f] for f in fields]\n\n # Append norm arrays \n perc99, meanstd_mean, meanstd_std = _construct_norm_arrays(file)\n \n np_arrays.append(perc99)\n np_arrays.append(meanstd_mean)\n np_arrays.append(meanstd_std)\n\n # Read shape and type info\n types = tuple(arr.dtype for arr in np_arrays)\n shapes = tuple(arr.shape[1:] for arr in np_arrays)\n# print(shapes)\n\n # Create datasets\n datasets = [_npz_file_lazy_dataset(file, fields, feature_names, types, shapes, filesystem) for file in files]\n ds = tf.data.Dataset.from_tensor_slices(datasets)\n\n # Shuffle files and interleave multiple files in parallel\n if randomize:\n ds = ds.shuffle(shuffle_size)\n \n ds = ds.interleave(lambda x:x, cycle_length=num_parallel)\n\n return ds", "def splitFileIntoShards(filename, shardsize):\n os.popen('split -a 4 -d --additional-suffix=_shard -l{} {}'.format(shardsize, filename))", "def all_gather_sizes(x: torch.Tensor) -> List[int]:\n dist_rank = torch.distributed.get_rank()\n world_size = torch.distributed.get_world_size()\n current_device = torch.device(\"cuda\", torch.cuda.current_device())\n sizes = torch.zeros(size=(world_size,), device=current_device, dtype=torch.int64)\n sizes[dist_rank] = x.shape[0]\n torch.distributed.all_reduce(sizes)\n return list(sizes.cpu().numpy())", "def split_file(self, input_file):\r\n file_list = [] \r\n with 
open(input_file, 'r', encoding='GB18030', errors='ignore') as f_in:\r\n data = f_in.readlines()\r\n lines_num = len(data)\r\n size = lines_num // self.num_workers # lines splitted in a chunk\r\n start = 0\r\n end = size\r\n w_path = \"../data/\"\r\n for i in range(lines_num//size):\r\n chunk_name = \"chunk_\" + str(i) + \".dat\"\r\n with open(w_path + chunk_name, 'w', encoding='utf-8') as f_out:\r\n f_out.write(''.join(data[start:end]))\r\n start = start + size\r\n end = end + size\r\n file_list.append(\"../data/chunk_\" + str(i) + \".dat\")\r\n \r\n print(f\"File splitted into {self.num_workers} chunks.\")\r\n return file_list, size", "def _iterate_over_files(self):\n stats = Statistics()\n\n args = arguments.Args()\n\n for file in args.files:\n\n if isimage(file):\n before_size = stats.calculate_before_optimization(file)\n\n puts(\"%s %s\" % (\n e(\"==>\"),\n os.path.basename(file))\n )\n\n if \"--lossy\" in args.flags:\n Optimize.lossy(file)\n if \"--lossless\" in args.flags:\n Optimize.lossless(file)\n after_size = stats.calculate_after_optimization(file)\n\n puts(\"%s %s (%s)\" % (\n p(\"<==\"),\n os.path.basename(file),\n s(after_size) if after_size < before_size else after_size\n ))\n\n stats.show_statistics()", "def generate_all_cost_plots(suffix):\n directory_name = \"inputs/\"\n directory = os.fsencode(directory_name)\n outfolder = \"plots/\" + suffix.strip(\".in\") + \"/\"\n try:\n os.makedirs(outfolder)\n except FileExistsError:\n pass\n for file in os.listdir(directory):\n filename = os.fsdecode(file)\n if filename.endswith(suffix):\n print(\"Solving : \", filename)\n inputfile = directory_name + filename\n num_clusters, cost = cost_vs_clusters(inputfile)\n outfile = outfolder + filename.strip(\".in\") + \".png\"\n plot_cost_vs_clusters(cost,num_clusters,outfile)", "def reduce_files_type(target_path=None):\n\n global FILELIST\n i = 0\n for path, subdirs, files in os.walk(target_path):\n for name in files:\n fn = os.path.join(path, name)\n if valid_package(filename=fn):\n f = File(fn)\n FILELIST[f.get_md5()] = os.path.join(path, name)\n i = i + 1\n\n return i", "def scanDirectory(directoryName=\".\"):\n cluster=Cluster()\n\n #reading files and folders \n for path, folders, files in os.walk(directoryName):\n for afile in files:\n cluster.addFile(os.path.join(path,afile))\n \n cluster.clustering()", "def _process_image_files(name, cnts, roots, num_shards): \n \n # Break all images into batches with a [ranges[i][0], ranges[i][1]].\n spacing = np.linspace(0, sum(cnts), FLAGS.num_threads + 1).astype(np.int)\n ranges = []\n for i in range(len(spacing) - 1):\n ranges.append([spacing[i], spacing[i + 1]])\n\n # Launch a thread for each batch.\n print('Launching %d threads for spacings: %s' % (FLAGS.num_threads, ranges))\n sys.stdout.flush()\n\n # Create a mechanism for monitoring when all threads are finished.\n coord = tf.train.Coordinator()\n\n threads = []\n for thread_index in range(len(ranges)):\n args = (thread_index, ranges, name, cnts, roots, num_shards)\n t = threading.Thread(target=_process_image_files_batch, args=args)\n t.start()\n threads.append(t)\n\n # Wait for all the threads to terminate.\n coord.join(threads)\n print('%s: Finished writing all %d images in data set.' 
%\n (datetime.now(), sum(cnts)))\n sys.stdout.flush()", "def transform_folder(source_folder,\n output_folder,\n temp_folder,\n rate_limit=6000.0,\n overwrite=True,\n plot=False,\n image_folder=None,\n multiprocess=False,\n encoder='mpg123',\n step=5.0):\n merged_file = os.path.join(output_folder, 'merged_file.json')\n\n os.makedirs(temp_folder, exist_ok=True)\n os.makedirs(output_folder, exist_ok=True)\n if os.path.isfile(merged_file):\n os.remove(merged_file)\n if plot:\n os.makedirs(image_folder, exist_ok=True)\n\n # Check if mp3 is already transformed into wav. Right\n # now, foucluster doesn't have a direct read from mp3\n logger.info('Checking if songs are in WAV format...')\n if source_folder != temp_folder:\n [check_wav(song=song,\n source_folder=source_folder,\n temp_folder=temp_folder,\n encoder=encoder)\n for song in os.listdir(source_folder)]\n\n if multiprocess is True:\n logger.debug('Fourier is applied in multiprocess')\n songs = [(song, temp_folder, output_folder, rate_limit,\n overwrite, plot, image_folder, step)\n for song in os.listdir(source_folder)]\n\n # with mp.Pool(processes=max(int(mp.cpu_count() / 2.0), 1)) as p:\n with mp.Pool(processes=mp.cpu_count(), maxtasksperchild=1) as p:\n p.starmap(time_to_frequency, songs)\n else:\n logger.debug('Fourier is applied in single core')\n [time_to_frequency(song=song,\n temp_folder=temp_folder,\n output_folder=output_folder,\n rate_limit=rate_limit,\n overwrite=overwrite,\n plot=plot,\n image_folder=image_folder,\n step=step)\n for song in os.listdir(source_folder)]\n\n # read_files = glob.glob(os.path.join(output_folder, '*.json'))\n # with open(merged_file, 'w') as outfile:\n # file_contents = [open(f).read() for f in read_files]\n # outfile.write('[{}]'.format(','.join(file_contents)))", "def processDistribution(number_processes, file_list):\r\n num_proc_list = [] # list with the distribution for each process\r\n\r\n NUM_FILES = len(file_list) # check variables\r\n NUM_PROC_INT = NUM_FILES // number_processes\r\n NUM_PROC_TEST = NUM_FILES % number_processes\r\n number_Processes_perFile_int = number_processes // NUM_FILES\r\n\r\n if number_processes <= NUM_FILES:\r\n\r\n if NUM_PROC_TEST == 0:\r\n for number in range(number_processes):\r\n num_proc_list.append(NUM_PROC_INT)\r\n\r\n else:\r\n for number in range(number_processes):\r\n num_proc_list.append(NUM_PROC_INT)\r\n\r\n if sum(num_proc_list) < NUM_FILES:\r\n for number in range (NUM_FILES - sum(num_proc_list)):\r\n num_proc_list[number] += 1\r\n else:\r\n for number in range(NUM_FILES):\r\n num_proc_list.append(number_Processes_perFile_int)\r\n\r\n if sum(num_proc_list) < number_processes:\r\n for number in range (number_processes - sum(num_proc_list)):\r\n num_proc_list[number] += 1\r\n\r\n return num_proc_list", "def split_start(infiles, outfiles):\n\n # split always runs exactly one job (unlike @subdivide)\n # So it implicitly combines all its inputs before running and generating multiple output\n # @originate generates multiple output so the input for @split is a list...\n infile = infiles[0]\n\n # clean up previous\n for f in outfiles:\n os.unlink(f)\n\n\n #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n #\n # Create more files than the previous invocation\n #\n #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n n_to_produce = len(outfiles) + 1\n for i in range(n_to_produce):\n f = '{}{}.split'.format(tempdir, i)\n open(f, 'a').close()", "def calculate_total_size(apps, schema_editor):\n Data = apps.get_model(\"flow\", \"Data\")\n for data in 
Data.objects.all():\n hydrate_size(data, force=True)\n data.save()", "def main():\n\n \"\"\"\n nodes, hd3 = erdos_rennie_like(100,8333,5)\n export('d3',hd3)\n\n nodes, hd5 = erdos_rennie_like(100,8333,6)\n export('d5',hd5)\n\n nodes, hd6 = erdos_rennie_like(100,8333,7)\n export('d6',hd6)\n \"\"\"\n\n \"\"\"\n nodes, sparse1 = erdos_rennie_like(600, 1200, 3)\n export('sparse_diag1', sparse1)\n\n nodes, sparse2 = erdos_rennie_like(600, 2400, 3)\n export('sparse_diag2',sparse2)\n\n nodes, sparse3 = erdos_rennie_like(600, 5800, 3)\n export('sparse_diag3',sparse3)\n\n nodes, sparse4 = erdos_rennie_like(600,11600, 3)\n export('sparse_diag4',sparse4)\n\n nodes, sparse5 = erdos_rennie_like(600,23200, 3)\n export('sparse_diag5',sparse5)\n \"\"\"\n\n nodes, size1 = erdos_rennie_like(100, 500, 3)\n nodes, size2 = erdos_rennie_like(200,1000,3)\n nodes,size3 = erdos_rennie_like(300,1500,3)\n nodes,size4 = erdos_rennie_like(400,2000,3)\n nodes,size5 = erdos_rennie_like(500,2500,3)\n\n export('size_diag1',size1)\n export('size_diag2',size2)\n export('size_diag3',size3)\n export('size_diag4',size4)\n export('size_diag5',size5)", "def split_large_groups(ctx):\n asyncio.run(split_large_groups_impl(ctx.obj[\"config\"]))", "def sort_collected_data():\n\n def is_from_valid_set(fn):\n return fn.find(\"validation\") != -1\n\n source_dir = \"data\"\n\n x_train_dir = os.path.join(SEG_DATA_FOLDER, \"train\")\n y_train_dir = os.path.join(SEG_DATA_FOLDER, \"train_label\")\n x_valid_dir = os.path.join(SEG_DATA_FOLDER, \"val\")\n y_valid_dir = os.path.join(SEG_DATA_FOLDER, \"val_label\")\n\n for direc in [x_train_dir, y_train_dir, x_valid_dir, y_valid_dir]:\n mkdir_if_not_exist(direc)\n\n images = [x for x in os.listdir(source_dir) if x.find(\"png\") >= 0]\n inputs = [x for x in images if x.find(\"label\") == -1]\n labels = [x for x in images if x.find(\"label\") != -1]\n\n train_x = [x for x in inputs if not is_from_valid_set(x)]\n valid_x = [x for x in inputs if is_from_valid_set(x)]\n train_y = [x for x in labels if not is_from_valid_set(x)]\n valid_y = [x for x in labels if is_from_valid_set(x)]\n\n for f in train_x:\n shutil.copyfile(os.path.join(\"data\", f), os.path.join(x_train_dir, f))\n\n for f in train_y:\n shutil.copyfile(os.path.join(\"data\", f), os.path.join(y_train_dir, f))\n\n for f in valid_x:\n shutil.copyfile(os.path.join(\"data\", f), os.path.join(x_valid_dir, f))\n\n for f in valid_y:\n shutil.copyfile(os.path.join(\"data\", f), os.path.join(y_valid_dir, f))", "def gatherSamplesPerLane(self):\n if self.verbose > 0:\n msg = \"gather samples per lane...\"\n sys.stdout.write(\"%s\\n\" % msg)\n sys.stdout.flush()\n \n groupJobId = self.makeGroupJobId(\"%s_gbs-step4-gather\" % self.project2Id)\n if self.verbose > 0:\n msg = \"groupJobId=%s\" % groupJobId\n sys.stdout.write(\"%s\\n\" % msg)\n sys.stdout.flush()\n iJobGroup = JobGroup(groupJobId, self.queue2, self.lResources)\n self.jobManager.insert(iJobGroup)\n \n for laneId,iLane in self.dLanes.items():\n outDir = \"%s/%s\" % (self.allSamplesDir, iLane.id)\n iLane.gather(self.jvmXms, self.jvmXmx, self.tmpDir,\n outDir, iJobGroup)\n \n self.jobManager.submit(iJobGroup.id)\n self.jobManager.wait(iJobGroup.id, self.rmvBash, self.verbose)", "def sixteen_graphs(the_dir):\n # TODO change to deprecation warning\n warnings.warn(\"Does not call sv_pipeline functoins correctly\", DeprecationWarning)\n\n plb.rcParams['figure.figsize'] = 30, 30\n plt.clf()\n plt.figure(1)\n\n # should look like: read_data/all_files/chr4_124,017,492_124,029,032_merged.txt\n 
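    # nx_helpers.get_communities (used further down) is an external helper; an
    # equivalent step with plain networkx, shown on an arbitrary example graph:
    #
    #     from networkx.algorithms.community import greedy_modularity_communities
    #     g = nx.karate_club_graph()
    #     communities = [set(c) for c in greedy_modularity_communities(g)]
    #     color_of = {n: i for i, c in enumerate(communities) for n in c}
    #     node_colors = [color_of[n] for n in g.nodes()]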
merged_files = glob.glob(the_dir + '*merged.txt')\n print(\"Running for {} regions\".format(len(merged_files)))\n for merged_filename in merged_files:\n # get filenames\n prefix = merged_filename[len(the_dir):-11]\n fasta_filename = the_dir + prefix + \".fa\"\n bed_filename = the_dir + prefix + \"-refcoords.bed\"\n print('Using ' + prefix)\n\n for min_matching_length in range(100, 1700, 100):\n print(min_matching_length)\n # used for ground truth\n preset, postset, spanset, gapset = get_read_classifications(prefix,\\\n bed_filename, merged_filename=merged_filename)\n # Generate and prune graph\n graph = generate_graph(prefix, fasta_filename, min_matching_length)\n graph = nx_helpers.remove_nodes(graph, preset)\n graph = nx_helpers.remove_nodes(graph, postset)\n\n # Plot the graph\n plt.subplot(4, 4, min_matching_length/100)\n communities = nx_helpers.get_communities(graph)\n graph, communities = drop_small_communities(graph, communities)\n node_colors = node_community_colors(graph, communities)\n pos = nx.spring_layout(graph)\n title = \"Chr {0};\\n L={1}; NumCom={2}\\nComQual = {3}, MapQual={4}\"\\\n .format(prefix, min_matching_length, len(communities),\\\n community_quality(communities, spanset, gapset),\\\n mapping_quality(graph, spanset, gapset))\n nx.draw(graph, node_color=node_colors, node_size=100, pos=pos)\n plt.title(title)\n plt.savefig(\"figs/\" + prefix + '-16-communities.pdf')\n plt.clf()", "def __divide_into_batches(self):\n print('Creating batches for parallel execution')\n num_suites = len(self.execution_file_json['suites'])\n full_batches = num_suites // self.max_suites\n print('- Full batches=%s' % full_batches)\n if num_suites % self.max_suites > 0:\n has_partial = True\n else:\n has_partial = False\n print('- Partial batch at end: %s' % has_partial)\n if has_partial:\n total_batches = full_batches + 1\n else:\n total_batches = full_batches\n print('- %s suites will be divided into %s container batches using max suites %s' % (\n num_suites, total_batches, self.max_suites))\n self.suite_batches = []\n # split full batches\n for batch_counter in range(0, full_batches):\n start_index = batch_counter * self.max_suites\n batch = []\n for counter in range(start_index, start_index + self.max_suites):\n batch.append(self.execution_file_json['suites'][counter])\n self.suite_batches.append(batch)\n print('- full batches created', self.suite_batches)\n # add partial batch\n if has_partial:\n start_index = full_batches * self.max_suites\n batch = []\n for counter in range(start_index, num_suites):\n batch.append(self.execution_file_json['suites'][counter])\n self.suite_batches.append(batch)\n print('- partial batch created', self.suite_batches)", "def aggregate_results(output_files, agg_filename):\n\n print(file_marker + \"STARTING AGGREGATION\")\n feather_files = output_files\n\n results = []\n for i in range(len(feather_files)):\n print(file_marker + str(i))\n x = pd.read_feather(feather_files[i])\n results.append(x)\n \n overall_results = pd.concat(results, ignore_index=True, sort=False)\n opt_diff_results = overall_results\n\n opt_diff_results.reset_index(inplace=True, drop=True) \n # drop=True: column 'index' gets removed\n\n opt_diff_results.to_feather(agg_filename)\n print(file_marker + \"Aggregated results saved to: \" + agg_filename)", "def process(self):\n level = self.parameter['level-of-operation']\n assert_file_grp_cardinality(self.input_file_grp, 1)\n assert_file_grp_cardinality(self.output_file_grp, 1)\n\n for (n, input_file) in enumerate(self.input_files):\n 
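            # per page: download the file, parse its PAGE-XML, derive the zoom
            # factor from the image DPI, then dispatch to the page / table /
            # region / line handlers according to level-of-operation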
self.logger.info(\"INPUT FILE %i / %s\", n, input_file.pageId or input_file.ID)\n file_id = make_file_id(input_file, self.output_file_grp)\n\n pcgts = page_from_file(self.workspace.download_file(input_file))\n self.add_metadata(pcgts)\n page_id = pcgts.pcGtsId or input_file.pageId or input_file.ID # (PageType has no id)\n page = pcgts.get_Page()\n \n page_image, page_xywh, page_image_info = self.workspace.image_from_page(\n page, page_id, feature_filter='binarized')\n if self.parameter['dpi'] > 0:\n zoom = 300.0/self.parameter['dpi']\n elif page_image_info.resolution != 1:\n dpi = page_image_info.resolution\n if page_image_info.resolutionUnit == 'cm':\n dpi *= 2.54\n self.logger.info('Page \"%s\" uses %f DPI', page_id, dpi)\n zoom = 300.0/dpi\n else:\n zoom = 1\n \n if level == 'page':\n self.process_page(page, page_image, page_xywh, zoom,\n input_file.pageId, file_id)\n else:\n if level == 'table':\n regions = page.get_TableRegion()\n else: # region\n regions = page.get_AllRegions(classes=['Text'], order='reading-order')\n if not regions:\n self.logger.warning('Page \"%s\" contains no text regions', page_id)\n for region in regions:\n region_image, region_xywh = self.workspace.image_from_segment(\n region, page_image, page_xywh, feature_filter='binarized')\n if level == 'region':\n self.process_region(region, region_image, region_xywh, zoom,\n input_file.pageId, file_id + '_' + region.id)\n continue\n lines = region.get_TextLine()\n if not lines:\n self.logger.warning('Page \"%s\" region \"%s\" contains no text lines',\n page_id, region.id)\n for line in lines:\n line_image, line_xywh = self.workspace.image_from_segment(\n line, region_image, region_xywh, feature_filter='binarized')\n self.process_line(line, line_image, line_xywh, zoom,\n input_file.pageId, region.id,\n file_id + '_' + region.id + '_' + line.id)\n\n # update METS (add the PAGE file):\n file_path = os.path.join(self.output_file_grp, file_id + '.xml')\n pcgts.set_pcGtsId(file_id)\n out = self.workspace.add_file(\n ID=file_id,\n file_grp=self.output_file_grp,\n pageId=input_file.pageId,\n local_filename=file_path,\n mimetype=MIMETYPE_PAGE,\n content=to_xml(pcgts))\n self.logger.info('created file ID: %s, file_grp: %s, path: %s',\n file_id, self.output_file_grp, out.local_filename)", "def split_test_into_chunks(chunk_size):\n syscall_type_dict = dict()\n # Loop over all files\n for syscall_type in SYSCALLS:\n for label in LABELS:\n syscall_positions = []\n syscall_pos = 0\n filename = f\"{TEMP_DIR}/{syscall_type}-{label}\"\n # Create file for the syscall chunks\n syscalls_split_file = open(filename+\"-split.test\", \"w\")\n with open(filename+\".test\") as syscalls_file:\n for syscall in syscalls_file:\n # Generate all n-grams of the current syscall\n n_grams = extract_n_grams(syscall.strip(),chunk_size,unique=False)\n if len(n_grams)==0:\n continue\n # Write n-grams to syscall chunks file\n syscalls_split_file.writelines(n_grams)\n # Keep track of end position in chunks file of current syscall\n syscall_pos += len(n_grams)\n syscall_positions.append(syscall_pos)\n syscall_type_dict[f\"{syscall_type}-{label}\"] = syscall_positions\n syscalls_split_file.close()\n return syscall_type_dict", "def output_mb(self):\n total_output_size = sum([t.shuffle_mb_written for t in self.tasks])\n return total_output_size", "def data_process(self):\n logging.info('Processing the data and split files')\n lines = Utility.file_len(self.fname)\n self.lines_to_be, self.split_files = Utility.split_files(self.fname, lines,\n cpu_count().real)", "def 
_iter_assignments_by_transfer_sizes(self, worker_quotas, input_chunk_metas):\n total_transfers = dict((k, sum(v.chunk_size for v in chunk_to_meta.values()))\n for k, chunk_to_meta in input_chunk_metas.items())\n # operands with largest amount of data will be allocated first\n sorted_chunks = sorted(total_transfers.keys(), reverse=True,\n key=lambda k: total_transfers[k])\n for op_key in sorted_chunks:\n # compute data amounts held in workers\n worker_stores = defaultdict(lambda: 0)\n for meta in input_chunk_metas[op_key].values():\n for w in meta.workers:\n worker_stores[w] += meta.chunk_size\n\n max_size, max_workers = self._get_workers_with_max_size(worker_stores)\n if max_workers and max_size > 0.5 * total_transfers[op_key]:\n max_worker = random.choice(max_workers)\n if worker_quotas.get(max_worker, 0) <= 0:\n continue\n worker_quotas[max_worker] -= 1\n yield op_key, max_worker", "def gather_ps(rank, size, comm, k_allmodels, P21_allmodels, PHII_allmodels,\n first_snap_allmodels, last_snap_allmodels):\n\n def generate_tag(rank):\n tag = int(rank*100)\n\n return tag\n\n # Rank 0 will gather the wavenumber bins/power spectra from all other\n # ranks. \n if rank == 0:\n k_master = []\n P21_master = []\n PHII_master = []\n\n # Go through each model. \n for model_number in range(len(k_allmodels)):\n\n k_master.append([])\n P21_master.append([])\n PHII_master.append([])\n\n model_k = k_allmodels[model_number]\n model_P21 = P21_allmodels[model_number]\n model_PHII = PHII_allmodels[model_number]\n\n num_snaps = last_snap_allmodels[model_number] - \\\n first_snap_allmodels[model_number]\n rank_count = 0\n my_count = 0\n\n # Then go through each snapshot.\n # In the main data loop (``generate_data()``) the snapshots are\n # scatter sequentially. Hence when we gather, we get snap0 from\n # rank 0, snap1 from rank 1 etc. So we increase rank_count for each\n # snapshot and then reset it when we reach `size`.\n for snap_idx in range(num_snaps):\n\n if rank_count == 0:\n this_k = model_k[my_count] \n this_P21 = model_P21[my_count] \n this_PHII = model_PHII[my_count] \n my_count += 1\n else:\n # Each rank will use a unique tag.\n tag = generate_tag(rank_count) \n\n # Then the tag is offset for each data array. 
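                    # (tag layout: rank r signs its messages with base tag 100*r;
                    #  +0 carries k, +1 carries P21, +2 carries PHII — e.g. rank 3
                    #  sends k with tag 300, P21 with 301 and PHII with 302)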
\n this_k = comm.recv(source = rank_count,\n tag = tag)\n this_P21 = comm.recv(source = rank_count,\n tag = tag+1)\n this_PHII = comm.recv(source = rank_count,\n tag = tag+2)\n\n # Now we have the data, append it to the master.\n k_master[model_number].append(this_k)\n P21_master[model_number].append(this_P21)\n PHII_master[model_number].append(this_PHII)\n\n rank_count += 1\n if rank_count == size:\n rank_count = 0\n\n # Snapshot Loop.\n # Model Loop.\n\n return k_master, P21_master, PHII_master\n\n else:\n\n # For all other ranks, go through the power spectra it calculated and\n # send it back to the root rank.\n for model_number in range(len(k_allmodels)):\n for idx in range(len(P21_allmodels[model_number])):\n\n tag = generate_tag(rank) \n\n k_this_idx = k_allmodels[model_number][idx]\n P21_this_idx = P21_allmodels[model_number][idx]\n PHII_this_idx = PHII_allmodels[model_number][idx]\n\n comm.send(k_this_idx, dest = 0, tag = tag)\n comm.send(P21_this_idx, dest = 0, tag = tag+1)\n comm.send(PHII_this_idx, dest = 0, tag = tag+2)\n\n # Non-zero ranks return junk.\n return None, None, None", "def summarize(data, verbal=False, using_files=True):\n\n if using_files:\n for file_name in tqdm(data):\n fill_table(pd.read_csv(file_name))\n else:\n for table in tqdm(data):\n fill_table(table)\n\n for cluster in table_summary:\n #total_genes = sum(table_summary[cluster][\"phylum\"].values) # number of genes\n #total_genes = table_summary[cluster][\"N\"] # number of samples\n total_genes = table_summary[cluster][\"eggNOG\"].eggNOG.sum() # number of genes in COGs with duplicates\n \n phylum_percent = table_summary[cluster][\"phylum\"].apply(lambda x: x/total_genes * 100)\n phylum_percent.columns = [\"percent\"]\n table_summary[cluster][\"phylum\"] = pd.concat([table_summary[cluster][\"phylum\"],phylum_percent],axis=1)\n\n #Read above for fix\n genus_percent = table_summary[cluster][\"genus\"].apply(lambda x: x/total_genes * 100)\n genus_percent.columns = [\"percent\"]\n table_summary[cluster][\"genus\"] = pd.concat([table_summary[cluster][\"genus\"],genus_percent],axis=1)\n\n #read above for fix\n cog_percent = table_summary[cluster][\"eggNOG\"].apply(lambda x: x/table_summary[cluster][\"gene_cog\"] * 100)\n cog_percent.columns = [\"percent\"]\n table_summary[cluster][\"eggNOG\"] = pd.concat([table_summary[cluster][\"eggNOG\"],cog_percent],axis=1)\n\n #Print the data\n if verbal:\n print \"Cluster %s:\\n\" % cluster\n print \"Number of Samples: %d\\n\" % table_summary[cluster][\"N\"]\n print \"Taxonomy:\"\n print table_summary[cluster][\"phylum\"].sort(\"percent\", ascending=False)\n print \"----------------------------------\"\n print table_summary[cluster][\"genus\"].sort(\"percent\", ascending=False)\n print \"-----------------------------------\"\n print \"COGS:\"\n print table_summary[cluster][\"eggNOG\"].sort(\"percent\", ascending=False)\n print \"------------------------------------\"\n print \"End Summary\"", "def batchProcessDirectory(self,baseDir,startTeam=1):\n\n import fnmatch\n\n # find all directories containing the target pattern\n resultDirs = {}\n patientNumbers = {}\n for root, dirnames, filenames in os.walk(baseDir):\n resultDirs[root] = []\n for filename in filenames:\n if fnmatch.fnmatch(filename, 'patient*tract_team*.vtk'):\n resultDirs[root].append(os.path.join(root, filename))\n patientNumbers[root] = filename[len('patient'):filename.index('_')]\n\n distanceMatrix = {}\n # calculate results for each pair of files in each directory\n for dir,files in resultDirs.items():\n if 
len(files) > 0:\n teamCount = len(files) / 2 # left and right per team\n teamRange = range(startTeam,startTeam+teamCount)\n for side in ('left','right'):\n for teamA in teamRange:\n for teamB in teamRange:\n fmt = 'patient%(patient)s_%(side)s_tract_team%(team)d.vtk'\n fileA = fmt % {'patient': patientNumbers[dir], 'side': side, 'team': teamA}\n fileB = fmt % {'patient': patientNumbers[dir], 'side': side, 'team': teamB}\n print (\"Compare %s with %s\" % (fileA, fileB))\n print((os.path.join(dir,fileA),os.path.join(dir,fileB)))\n\n # close the scene and calculate the distance\n slicer.mrmlScene.Clear(0) \n pathA, pathB = os.path.join(dir,fileA),os.path.join(dir,fileB)\n distanceMatrix[dir,side,teamA,teamB] = self.loadAndCalculate(pathA,pathB)\n print('\\n\\n' + str(distanceMatrix.keys()) + '\\n\\n')\n print(distanceMatrix)\n\n # write csv files\n import csv\n header = ['team',]\n for team in teamRange:\n header.append('team_%d' % team)\n for dir in resultDirs.keys():\n print ('checking %s' % dir)\n print (len(resultDirs[dir]))\n if len(resultDirs[dir]) > 0:\n for side in ('left','right'):\n fp = open(os.path.join(dir,\"../distanceMatrix-%s.csv\"%side),'w')\n csvWriter = csv.writer(fp, dialect='excel', quotechar='\"', quoting=csv.QUOTE_ALL)\n csvWriter.writerow(header)\n for teamA in teamRange:\n teamARow = ['team_%d' % teamA,]\n for teamB in teamRange:\n teamARow.append(distanceMatrix[dir,side,teamA,teamB])\n csvWriter.writerow(teamARow)\n fp.close()\n\n return(distanceMatrix)", "def create_output_files(self):\n namenode = self.runner.namenode\n for i in range(self.cnt_reducers):\n fname = '%s.%s' % (self.output_dir, reduce_output(self.id, i))\n namenode.create_file(fname)\n self.result_files.append(fname)\n self.open_files.append(fname)\n\n for j in range(self.cnt_mappers):\n fname = map_output(self.id, j, i)\n namenode.create_file(fname)\n self.open_files.append(fname)", "def merge_pairs_of_procs(self, n_procs):\n assert self.args.action == 'partition'\n assert self.args.smc_particles > 1\n if n_procs > 1:\n groups_to_merge = [[i, i+1] for i in range(0, n_procs-1, 2)] # e.g. for n_procs = 5, we merge the groups [0, 1], [2, 3, 4]\n else:\n groups_to_merge = [[], ]\n if n_procs % 2 != 0: # if it's odd, add the last proc to the last group\n groups_to_merge[-1].append(n_procs-1)\n self.smc_info.append([])\n for group in groups_to_merge:\n if n_procs == 1:\n infnames = [self.hmm_outfname, ]\n else:\n infnames = [self.args.workdir + '/hmm-' + str(iproc) + '/' + os.path.basename(self.hmm_outfname) for iproc in group]\n assert len(self.smc_info[-2]) == n_procs\n previous_info = None\n if len(self.smc_info) > 2:\n previous_info = [self.smc_info[-2][iproc] for iproc in group]\n glomerer = Glomerator(self.reco_info)\n paths = glomerer.read_cached_agglomeration(infnames, self.args.smc_particles, previous_info=previous_info, calc_adj_mi=self.args.debug, debug=self.args.debug) #, outfname=self.hmm_outfname)\n self.smc_info[-1].append(paths)\n\n # ack? self.glomclusters.append(glomerer)\n # boof? 
self.list_of_preclusters.append(glomerer.combined_conservative_best_minus_ten_partitions)\n\n if n_procs > 1: # TODO I don't think this is right any more...\n self.merge_subprocess_files(self.hmm_cachefname, n_procs)\n \n if not self.args.no_clean:\n if n_procs == 1:\n os.remove(self.hmm_outfname)\n else:\n for iproc in range(n_procs):\n subworkdir = self.args.workdir + '/hmm-' + str(iproc)\n os.remove(subworkdir + '/' + os.path.basename(self.hmm_infname))\n os.remove(subworkdir + '/' + os.path.basename(self.hmm_outfname))\n os.rmdir(subworkdir)", "def copyToDFS(address, dfs_path, filename, password, crypto):\n\n # Create a connection to the data server\n\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.connect(address)\n\n #get filesize\n\n filesize = os.path.getsize(filename)\n\n # Create a Put packet with the filename and the length of the data,\n # and sends it to the metadata server\n\n p = Packet()\n p.BuildPutPacket(dfs_path, filesize)\n sendall_with_size(sock, p.getEncodedPacket())\n\n # If no error or file exists\n # Get the list of data nodes.\n # Divide the file in blocks\n # Send the blocks to the data servers\n\n message = recv_with_size(sock)\n #print message\n p.DecodePacket(message)\n\n #Packet.getDataNodes() returns a list of elements\n #They are of the form (address, port)\n data_nodes = p.getDataNodes()\n node_amount = len(data_nodes)\n\n #for distributing the workload\n if(filesize / node_amount == 0):\n partition_size = filesize\n else:\n partition_size = filesize / node_amount\n\n #blocks of about 4K\n block_size = 2 ** 12\n if(filesize > 40 * (10 ** 6)):\n #blocks of size about 4096K\n block_size = 2 ** 22\n\n blocks = [] #for the metadata server\n\n rfile = open(filename, \"r\")\n for IP, PORT in data_nodes:\n temp_load = partition_size\n while(temp_load):\n node_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n node_sock.connect((IP, PORT))\n\n if(temp_load < block_size):\n segment = rfile.read(temp_load)\n temp_load = 0\n else:\n segment = rfile.read(block_size)\n temp_load -= block_size\n\n #variable from the main function to indicate if crypto is needed\n if(crypto):\n crpt_seg = encrypt(password, segment)\n else:\n crpt_seg = segment\n\n #sending a put message to the data node\n p.BuildPutPacket(dfs_path, len(crpt_seg))\n sendall_with_size(node_sock, p.getEncodedPacket())\n\n #the OK message\n OK = recv_with_size(node_sock)\n\n #sends the block to the data node\n sendall_with_size(node_sock, crpt_seg)\n\n #receive the unique block ID\n blockid = recv_with_size(node_sock)\n\n #adding muh blocks\n blocks.append((IP, str(PORT), blockid))\n\n node_sock.close()\n\n #not everything was cleared, the last node gets the load\n #almost the same code as above\n if(filesize % node_amount != 0):\n while(1):\n node_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n node_sock.connect((data_nodes[-1][0], data_nodes[-1][1]))\n\n segment = rfile.read(block_size)\n #encountered an end of file\n if(segment == \"\"):\n break\n\n if(crypto):\n crpt_seg = encrypt(password, segment)\n else:\n crpt_seg = segment\n\n #sending a put message to the data node\n p.BuildPutPacket(dfs_path, len(crpt_seg))\n sendall_with_size(node_sock, p.getEncodedPacket())\n\n #the OK message\n OK = recv_with_size(node_sock)\n\n #sends the block to the data node\n sendall_with_size(node_sock, crpt_seg)\n\n #receive the unique block ID\n blockid = recv_with_size(node_sock)\n\n #adding muh blocks\n blocks.append((IP, str(PORT), blockid))\n\n node_sock.close()\n\n 
rfile.close()\n\n # Notify the metadata server where the blocks are saved.\n\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.connect(address)\n p.BuildDataBlockPacket(dfs_path, blocks)\n sendall_with_size(sock, p.getEncodedPacket())\n meta_message = recv_with_size(sock)\n\n if(meta_message == \"ACK\"):\n #print \"Acknowledged.\"\n pass\n else:\n print \"Something happened.\"\n print meta_message\n\n sock.close()", "def doCollectTask(filename, topK):\n f = open(filename)\n dataDict = json.load(f)\n weirdOutCollect = Counter()\n for key in dataDict:\n if dataDict[key][\"weird\"]:\n # Check direction first to get the inner server.\n srcIP = dataDict[key][\"addr\"][0]\n dstIP = dataDict[key][\"addr\"][2]\n if srcIP.startswith(\"136.159.\"):\n # Which means srcIP is within our campus. it should be an outbound traffic\n weirdOutCollect[getIPCluster(dstIP)] += 1\n else:\n weirdOutCollect[getIPCluster(srcIP)] += 1\n\n return Counter(dict(weirdOutCollect.most_common(topK)))", "def run(input_folder, H5_FILEPATH, precomputed_artist_ranking, precomputed_album_ranking, precomputed_track_ranking):\t\t\n\n\t# def _dump_to_dict(dump_filepath):\n\t# \t\"\"\"\n\t# \tConvert a numpy array in the form (('k1', v1), ('k2', v2), ... , ('kn', vn)) to a dictionary. It also deletes an empty key (''), and the dictionary is converted to a collection and is ordered by value\n\t# \t\"\"\"\n\t# \twith open(dump_filepath, 'rb') as handle:\n\t# \t\tf = cPickle.load(handle)\n\t# \tt0 = time.time()\n\t# \td = {k : v for k, v in f}; del f\t\n\t# \tprint '{0} secs for creating dict from dump {1}'.format(int(time.time() - t0), dump_filepath),\n\t# \t# do not consider empty MBID's\n\t# \tif d.has_key(''): d.pop('', None) \n\t# \t# return sorted ranking by value\n\t# \treturn collections.OrderedDict(sorted(d.items(), key=lambda t: t[1])) \n\n\n\n\tglobal size\n\tglobal rank\n\n\t# Generating ordered dictionaries of the rankings\n\tt0 = time.time()\n\toverall_ranking_artist = GVM_classes.dump_to_dict(precomputed_artist_ranking)\n\t# if rank == 0: print ' size: {0}'. format(sys.getsizeof(overall_ranking_artist))\n\n\toverall_ranking_album = GVM_classes.dump_to_dict(precomputed_album_ranking)\n\t# if rank == 0: print ' size: {0}'. format(sys.getsizeof(overall_ranking_album))\n\n\toverall_ranking_track = GVM_classes.dump_to_dict(precomputed_track_ranking)\n\t# if rank == 0: print ' size: {0}'. 
format(sys.getsizeof(overall_ranking_track))\n\tprint 'Rank', rank, 'features in', str(int(time.time() - t0)), 'secs'\n\n\t# ##########################################################\n\t# Iterate over all files in a TAR, searching for all MBIDs\n\t# ##########################################################\n\n\tfile_list = [] # List of all files in input_folder\n\tfor root, subFolders, files in os.walk(input_folder):\n\t\tfor f in files:\n\t\t\tif f.split('/')[-1].startswith('.'):\n\t\t\t\tcontinue\n\t\t\tfile_list.append('/'.join([root,f]))\n\n\t# print 'RANK:', rank, '\\nFILE_LIST:', file_list\n\t# print 'FILE: ', file_list[size * int(factor) + rank]\n\ttar_object = tarfile.open('/'.join([file_list[size * int(factor) + rank]]))\n\ttar_object.extractall(TEMP_FOLDER)\n\n\t# print size * int(factor) + rank, file_list[size * int(factor) + rank]\n\n\t#list with dictionaries of aggregated features\n\tlist_of_dict_agg_feat= []\n\n\n\n\tfor file_in_tar in GVM_classes.folder_iterator(TEMP_FOLDER)[:]:\n\t\tlistening_features = Features.ListeningFeatures(file_in_tar) \n\t\ttry:\n\t\t\t# Metadata\n\t\t\t\n\t\t\t\n\t\t\t# Feature Extraction\n\t\t\tcollected_features = dict()\n\n\t\t\tcollected_features['metadata'] = listening_features.metadata_dict()\n\t\t\tcollected_features['mainstreamness'] = listening_features.mainstreamness(overall_ranking_artist, overall_ranking_album, overall_ranking_track)\n\n\t\t\t\n\n\t\t\tlist_of_dict_agg_feat.append(collected_features)\n\n\t\t\t# print \"In file {0}, there are {1} extracted users\".format(file_list[size * int(factor) + rank], len(list_of_dict_agg_feat))\n\n\t\texcept:\n\t\t\tprint file_list[size * int(factor) + rank].split('/')[-1], file_in_tar.split('/')[-1], sys.exc_info()\n\n\treturn list_of_dict_agg_feat", "def get_chunks(self,file_size):\n chunk_start = 0\n chunk_size = 0xA00000 # 10485760 bytes, default max ssl buffer size\n while chunk_start + chunk_size <= file_size:\n yield(chunk_start, chunk_size)\n chunk_start += chunk_size\n final_chunk_size = file_size - chunk_start\n yield(chunk_start, final_chunk_size)", "async def get_graph_for_file(\n file_name: str,\n score: int = 0,\n par_length: int = 0,\n co_occ: int = 0,\n target_collection: List[str] = Query([]),\n):\n database = get_db()\n query_graph_result = database.AQLQuery(\n query=main_queries.QUERY_GRAPH_VIEW,\n batchSize=15000,\n bindVars={\n \"filename\": file_name,\n \"score\": score,\n \"parlength\": par_length,\n \"coocc\": co_occ,\n \"targetcollection\": target_collection,\n },\n )\n collection_keys = []\n total_collection_dict = {}\n total_histogram_dict = {}\n\n # extract a dictionary of collection numbers and number of parallels for each\n for parallel in query_graph_result.result:\n count_this_parallel = parallel[\"parlength\"]\n target_filename = re.sub(\"_[0-9][0-9][0-9]\",\"\",parallel[\"textname\"])\n if target_filename in total_histogram_dict.keys():\n total_histogram_dict[target_filename] += count_this_parallel\n else:\n total_histogram_dict[target_filename] = count_this_parallel\n\n collection_key = re.search(COLLECTION_PATTERN, target_filename)\n\n if not collection_key:\n continue\n\n collection = collection_key.group()\n if collection not in total_collection_dict.keys():\n total_collection_dict[collection] = count_this_parallel\n else:\n total_collection_dict[collection] += count_this_parallel\n if collection not in collection_keys:\n collection_keys.append(collection)\n\n # find the proper full names vor each collection\n collections = database.AQLQuery(\n 
query=menu_queries.QUERY_COLLECTION_NAMES,\n bindVars={\n \"collections\": collection_keys,\n \"language\": get_language_from_filename(file_name),\n },\n )\n\n collections_with_full_name = {}\n for collection_result in collections.result[0]:\n collections_with_full_name.update(collection_result)\n\n parallel_graph_name_list = {}\n for key in total_collection_dict:\n parallel_graph_name_list.update(\n {key + \" \" + collections_with_full_name[key]: total_collection_dict[key]}\n )\n\n unsorted_graphdata_list = list(map(list, parallel_graph_name_list.items()))\n\n histogram_data = []\n for name, count in total_histogram_dict.items():\n displayname = name\n query_displayname = database.AQLQuery(\n query=main_queries.QUERY_DISPLAYNAME,\n bindVars={\n \"filename\": name\n },\n rawResults=True\n )\n displayname_results = query_displayname.result\n if displayname_results:\n displayname = displayname_results[0][0] + ' (' + displayname_results[0][1] + ')'\n\n histogram_data.append([displayname, count])\n\n # returns a list of the data as needed by Google Graphs\n return {\n \"piegraphdata\": sorted(\n unsorted_graphdata_list, reverse=True, key=lambda x: x[1]\n ),\n \"histogramgraphdata\": sorted(histogram_data, reverse=True, key=lambda x: x[1]),\n }" ]
[ "0.66015565", "0.6507331", "0.59870154", "0.5919278", "0.5917296", "0.58891445", "0.5861876", "0.5806694", "0.5750406", "0.57269675", "0.57239187", "0.5673634", "0.567278", "0.5663586", "0.56529", "0.5636067", "0.5622266", "0.5600914", "0.5597496", "0.5590672", "0.5590012", "0.556879", "0.552362", "0.55213255", "0.55097604", "0.5509023", "0.54999024", "0.5495231", "0.5487537", "0.54749286", "0.5453796", "0.5448273", "0.5444161", "0.5440766", "0.53979903", "0.5395818", "0.5390886", "0.5363172", "0.5329822", "0.53046626", "0.5299221", "0.5298098", "0.5282316", "0.5250644", "0.5247667", "0.52433443", "0.5234982", "0.52343786", "0.5230256", "0.52188915", "0.5218565", "0.5212385", "0.52071553", "0.519838", "0.5196509", "0.5189632", "0.5179356", "0.51767206", "0.5172773", "0.51677144", "0.5156612", "0.5151955", "0.5151709", "0.5148479", "0.5143015", "0.5140156", "0.5138479", "0.5135406", "0.5134612", "0.51336783", "0.5133189", "0.5117804", "0.5115693", "0.5109762", "0.5108838", "0.5099793", "0.5099512", "0.50960267", "0.5090936", "0.50903803", "0.50895786", "0.50895566", "0.5087269", "0.5083623", "0.50829697", "0.5079446", "0.5074719", "0.5073784", "0.5067186", "0.5059356", "0.5059016", "0.5057516", "0.50528026", "0.50466204", "0.5044824", "0.5041917", "0.50416523", "0.50379837", "0.50338626", "0.5032231" ]
0.6488863
2
Load parameters from SSM Parameter Store.
def load_params(namespace: str, env: str) -> dict: config = {} path = f"/{namespace}/{env}/" ssm = boto3.client("ssm") more = None args = {"Path": path, "Recursive": True, "WithDecryption": True} while more is not False: if more: args["NextToken"] = more params = ssm.get_parameters_by_path(**args) for param in params["Parameters"]: key = param["Name"].split("/")[3] config[key] = param["Value"] more = params.get("NextToken", False) return config
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_parameter_store_config(config):\n parameter_store = config\n ps_keys = list(parameter_store.keys())\n ssm = boto3.client('ssm')\n LOGGER.info(f\"Loading configurations from Parameter Store for {config}\")\n for ps_key in ps_keys:\n ps_name = parameter_store[ps_key]\n result = ssm.get_parameter(\n Name=ps_name,\n WithDecryption=True\n )\n parameter_store[ps_key] = result['Parameter']['Value']\n return parameter_store", "def load_params():\r\n return pickle.load(open('params.p', mode='rb'))", "def load_params(self):\n return self.params", "def load():\n\n global R, P, NP, update, update_available, region_dict\n\n loader = GoSmartParameterLoader(gosmart._prefix)\n loader.initiate()\n\n R = loader.get_regions()\n P, NP = loader.get_parameters()\n\n region_dict = loader.get_region_dict()\n\n update = gosmart.status.StatusUpdater()\n update_available = update.connect()", "def load_params():\n with open('params.p', mode='rb') as in_file:\n return pickle.load(in_file)", "def _load_parameter(self):", "def _load(self, load_dict):\n if self.v_locked:\n raise pex.ParameterLockedException(\n \"Parameter `%s` is locked!\" % self.v_full_name\n )\n\n try:\n serial_string = load_dict[\"data%s\" % SparseParameter.IDENTIFIER]\n self._data = self._reconstruct_matrix(serial_string)\n\n if \"explored_data\" + SparseParameter.IDENTIFIER in load_dict:\n explore_table = load_dict[\"explored_data\" + SparseParameter.IDENTIFIER]\n idx_col = explore_table[\"idx\"]\n explore_list = []\n for irun, name_idx in enumerate(idx_col):\n serial_string = load_dict[\n \"xspm%s%08d\" % (SparseParameter.IDENTIFIER, name_idx)\n ]\n matrix = self._reconstruct_matrix(serial_string)\n explore_list.append(matrix)\n\n self._explored_range = explore_list\n self._explored = True\n\n except KeyError as e:\n super(SparseParameter, self)._load(load_dict)\n\n self._default = self._data\n self._locked = True", "def _get_parameters(cls, *names):\n # Create the ssm boto3 client that will be cached and used throughout this execution\n # if one does not exist already\n if AppConfig.SSM_CLIENT is None:\n boto_config = client.Config(\n connect_timeout=cls.BOTO_TIMEOUT,\n read_timeout=cls.BOTO_TIMEOUT\n )\n AppConfig.SSM_CLIENT = boto3.client('ssm', config=boto_config)\n\n LOGGER.debug('Retrieving values from parameter store with names: %s',\n ', '.join('\\'{}\\''.format(name) for name in names))\n try:\n parameters = AppConfig.SSM_CLIENT.get_parameters(\n Names=list(names),\n WithDecryption=True\n )\n except ClientError as err:\n joined_names = ', '.join('\\'{}\\''.format(name) for name in names)\n raise AppConfigError('Could not get parameter with names {}. Error: '\n '{}'.format(joined_names, err.response['Error']['Message']))\n\n decoded_params = {}\n for param in parameters['Parameters']:\n try:\n decoded_params[param['Name']] = json.loads(param['Value'])\n except ValueError:\n raise AppConfigError('Could not load value for parameter with '\n 'name \\'{}\\'. The value is not valid json: '\n '\\'{}\\''.format(param['Name'], param['Value']))\n\n return decoded_params, parameters['InvalidParameters']", "def load_params(self, path: str):\n DistributedWorker.load_params(self, path)\n\n params = torch.load(path)\n self.dqn.load_state_dict(params[\"dqn_state_dict\"])\n print(\"[INFO] loaded the model and optimizer from\", path)", "def load_parameters():\n\n retval = RP_LIB.rp_LoadLockboxConfig()\n if retval != 0:\n LOG.error(\"Failed to load parameters. 
Error code: %s\", ERROR_CODES[retval])", "def _use_existing_params(self):\n sh = shelve.open(os.path.expanduser('~/.config/scheduler/params'))\n self.params = sh['params']\n sh.close()", "def load_cls_params(self):\n with open('models/Final/linear_svc.p', 'rb') as model_file:\n model = pickle.load(model_file)\n self.svc = model['svc']\n self.X_scaler = model['X_scaler']\n self.parameters = model['parameters']\n\n print(self.parameters)", "def load(self,params):\n self._register.clear()\n for key in params:\n self._register[key] = params[key]", "def load_parameters(self):\n json_data = open(\"param.json\")\n data = json.load(json_data)\n self.items = data[\"items\"]\n self.pollInterval = self.items[0]['poll_interval']", "def load_params(self, params):\n params.cp_latest_filename = \"latest_checkpoint_v\"+params.version\n params.cp_load_latest_filename = \"latest_checkpoint_v\"+params.cp_load_ver\n params.cp_load_dir = params.out_dir + params.cp_load_name+ \"/checkpoints/\"\n if not hasattr(params, \"model_out_dir\"):\n params.model_out_dir = params.out_dir + params.model_name\n params.cp_save_dir = params.model_out_dir + \"/checkpoints/\"\n params.log_dir = params.model_out_dir + \"/logfiles/\"\n params.save_dir = params.model_out_dir + \"/savefiles/\"\n params.disp_dir = params.model_out_dir + \"/vis/\"\n params.num_pixels = int(np.prod(params.data_shape))\n self.params = params\n self.params_loaded = True", "def load_parameters(self, params):\n # load (aka. deep copy) parameters in params into network\n c=0\n self.params = []\n names = ['W_i']\n for n,p in zip(names, params):\n self.params.append(theano.shared(name = p.name,\n value = p.get_value(borrow=True)))\n \n setattr(self, n, self.params[c])\n c+=1\n assert( len(self.params) == c )", "def _load(self, load_dict):\n if self.v_locked:\n raise pex.ParameterLockedException(\n \"Parameter `%s` is locked!\" % self.v_full_name\n )\n\n if \"data\" in load_dict:\n dump = load_dict[\"data\"]\n self._data = pickle.loads(dump)\n else:\n self._logger.warning(\n \"Your parameter `%s` is empty, \"\n \"I did not find any data on disk.\" % self.v_full_name\n )\n\n try:\n self.v_protocol = load_dict[PickleParameter.PROTOCOL]\n except KeyError:\n # For backwards compatibility\n self.v_protocol = PickleParameter._get_protocol(dump)\n\n if \"explored_data\" in load_dict:\n explore_table = load_dict[\"explored_data\"]\n\n name_col = explore_table[\"idx\"]\n\n explore_list = []\n for name_id in name_col:\n arrayname = self._build_name(name_id)\n loaded = pickle.loads(load_dict[arrayname])\n explore_list.append(loaded)\n\n self._explored_range = explore_list\n self._explored = True\n\n self._default = self._data\n self._locked = True", "def _load(self, load_dict):\n if self.v_locked:\n raise pex.ParameterLockedException(\n \"Parameter `%s` is locked!\" % self.v_full_name\n )\n\n if \"data\" in load_dict:\n self._data = load_dict[\"data\"][\"data\"][0]\n self._default = self._data\n else:\n self._logger.warning(\n \"Your parameter `%s` is empty, \"\n \"I did not find any data on disk.\" % self.v_full_name\n )\n\n if \"explored_data\" in load_dict:\n self._explored_range = [\n x for x in load_dict[\"explored_data\"][\"data\"].tolist()\n ]\n self._explored = True\n\n self._locked = True", "def constructor_ssm_parameter(loader, node) -> Any: # type: ignore\n value = loader.construct_scalar(node)\n match = pattern.findall(value) # to find all env variables in line\n if match:\n full_value = value\n for g in match:\n _logger.debug(f\"match: {g}\")\n 
(ssm_param_name, jsonpath) = g.split(\"::\")\n if \"${\" in ssm_param_name:\n ssm_param_name = ssm_param_name.replace(\"$\", \"\").format(os.environ)\n _logger.debug(f\"found injected parameter {(ssm_param_name, jsonpath)}\")\n if ssm_param_name not in SSM_CONTEXT:\n ssm = boto3_client(\"ssm\")\n try:\n SSM_CONTEXT[ssm_param_name] = json.loads(\n ssm.get_parameter(Name=ssm_param_name)[\"Parameter\"][\"Value\"]\n )\n ssm_parameters.add(ssm_param_name)\n except Exception as e:\n _logger.error(f\"Error resolving injected parameter {g}: {e}\")\n\n json_expr = jsonpath_ng.parse(jsonpath)\n json_data = SSM_CONTEXT[ssm_param_name]\n json_match = json_expr.find(json_data)\n\n if len(json_match) > 1:\n raise Exception(f\"Injected parameter {g} is ambiguous\")\n elif len(json_match) == 0:\n raise Exception(f\"Injected parameter {jsonpath} not found in SSM {ssm_param_name}\")\n\n param_value: str = json_match[0].value\n _logger.debug(f\"injected SSM parameter {g} resolved to {param_value}\")\n return param_value\n return full_value\n return value", "def load_params(self):\n\n self.curr_ts_state = None\n\n # Get TS from param\n self.transition_system = import_ts_from_file(rospy.get_param('transition_system_textfile'))\n\n # Get monitored TS state model\n self.state_dimension_name = rospy.get_param(\"~state_dimension_name\", \"load\")\n\n # Get monitored action\n self.monitored_action = rospy.get_param(\"~monitored_action\", \"pick\")\n \n # Create dict to retrieve next state given current state and next action\n self.action_to_state = dict()\n for state in self.transition_system['state_models'][self.state_dimension_name]['nodes']:\n temp_dict = dict()\n for connected_state in self.transition_system['state_models'][self.state_dimension_name]['nodes'][state]['connected_to']:\n temp_dict.update({self.transition_system['state_models'][self.state_dimension_name]['nodes'][state]['connected_to'][connected_state]: connected_state})\n self.action_to_state.update({state: temp_dict})", "def load_params_from_pickle_file(session: tf.Session,\n params_filename: Text) -> None:\n with open(params_filename, 'rb') as f:\n params = pickle.load(f)\n for var in tf.trainable_variables():\n session.run(var.assign(params[var.name]))", "def get_params(param_names, param_store, serialize_param=base.serialize_param,\r\n translate_param=translate_inequality):\r\n return base.get_params(param_names, param_store, serialize_param,\r\n translate_param)", "def load_params(param_file):\n with open(param_file, 'r') as pfile:\n eff_params = json.load(pfile)\n return eff_params", "def _load(self, load_dict):\n if self.v_locked:\n raise pex.ParameterLockedException(\n \"Parameter `%s` is locked!\" % self.v_full_name\n )\n\n try:\n self._data = load_dict[\"data\" + ArrayParameter.IDENTIFIER]\n\n if \"explored_data\" + ArrayParameter.IDENTIFIER in load_dict:\n explore_table = load_dict[\"explored_data\" + ArrayParameter.IDENTIFIER]\n\n idx = explore_table[\"idx\"]\n\n explore_list = []\n\n # Recall the arrays in the order stored in the ObjectTable 'explored_data__rr__'\n for name_idx in idx:\n arrayname = self._build_name(name_idx)\n explore_list.append(load_dict[arrayname])\n\n self._explored_range = [x for x in explore_list]\n self._explored = True\n\n except KeyError:\n super(ArrayParameter, self)._load(load_dict)\n\n self._default = self._data\n self._locked = True", "def inject_params(model_name: str) -> ListenerParams:\n params_file = model_name + '.params'\n try:\n with open(params_file) as f:\n 
pr.__dict__.update(compatibility_params, **json.load(f))\n except (OSError, ValueError, TypeError):\n if isfile(model_name):\n print('Warning: Failed to load parameters from ' + params_file)\n return pr", "def reload(self):\n for name, param in self.components.items():\n param_path = os.path.join(self.model_path, \"%s.mat\" % name)\n param_values = scipy.io.loadmat(param_path)\n if hasattr(param, 'params'):\n for p in param.params:\n set_values(p.name, p, param_values[p.name])\n else:\n set_values(name, param, param_values[name])", "def loadParameters(self, parmfile=''):\n if not parmfile:\n raise IOError(\"You need to specify a parameter filename\")\n parmdir = os.getenv('ATMOSPHERE_PARAMETERS_DIR')\n parmpath = os.join.path(parmdir, parmfile)\n # Read from file\n with open(parmpath, 'r') as parmf:\n data = pickle.load(parmf)\n # Dictionary list\n self.modtran_visits = data[0]\n # Tuple list\n self.aerosol_visits = data[1]\n # seed value\n nruns = len(self.modtran_visits)\n print('Parameters for {1} runs computed with seed = {0}'.format(data[2],\n nruns))\n # Init transmission array\n self.initTransmissionArray(nruns)", "def load_params(params_filename: str) -> Dict:\n \n # If no params filename is specified, return the default parameter setting.\n if not params_filename:\n return RunParams()\n\n return RunParams(**load_json(params_filename))", "def _load(self):\n for k,v in self.parameters.items():\n if isinstance(v,list):\n setattr(self,k,np.array(v,dtype=np.float32))\n else:\n setattr(self,k,v)", "def save_params(self):\n sh = shelve.open(os.path.expanduser('~/.config/scheduler/params'))\n sh['params'] = self.params\n sh.close()", "def load_parameters(self, session, data_dict):\n for layer in self.layers:\n layer.load_parameters(session, data_dict)", "def load_parameters(self):\n with open(INTERNAL_DATA_DIR / self.name_default_params, 'r') as f:\n return yaml.load(f, Loader=yaml.FullLoader)", "def getLocalParameters():\n try:\n config_file = open(\"./meta-files/parameters.yml\")\n params = yaml.load(config_file, Loader=yaml.FullLoader)\n return params\n except:\n raise ValueError(\"Unable to read or parse the system's parameters file\")", "def test_parameters_stored_decrypted_successful_load(self):\n tmpl = template_format.parse('''\n heat_template_version: 2013-05-23\n parameters:\n param1:\n type: string\n description: value1.\n param2:\n type: string\n description: value2.\n hidden: true\n resources:\n a_resource:\n type: GenericResourceType\n ''')\n env1 = environment.Environment({'param1': 'foo', 'param2': 'bar'})\n self.stack = stack.Stack(self.ctx, 'test',\n template.Template(tmpl, env=env1))\n cfg.CONF.set_override('encrypt_parameters_and_properties', False)\n\n # Verify that hidden parameters are stored decrypted\n self.stack.store()\n db_tpl = db_api.raw_template_get(self.ctx, self.stack.t.id)\n db_params = db_tpl.environment['parameters']\n self.assertEqual('foo', db_params['param1'])\n self.assertEqual('bar', db_params['param2'])\n\n # Verify that stack loads without error\n loaded_stack = stack.Stack.load(self.ctx, stack_id=self.stack.id)\n params = loaded_stack.t.env.params\n self.assertEqual('foo', params.get('param1'))\n self.assertEqual('bar', params.get('param2'))", "def load_workflow_params() -> dict:\n\n try:\n workflow_params_file = upsearch(WORKFLOW_PARAMS_FILENAME)\n except FileNotFoundError:\n message = \"Unable to find .params file; ensure that you are in a workflow directory.\"\n raise FileNotFoundError(message)\n\n with workflow_params_file.open() as f:\n 
workflow_params = json.load(f)\n\n return workflow_params", "def _instantiate_parameter_states(self, context=None):\n\n from PsyNeuLink.Components.States.ParameterState import _instantiate_parameter_states\n _instantiate_parameter_states(owner=self, context=context)", "def load_saved_params():\n st = 0\n for f in glob.glob(\"saved_params_*.npy\"):\n iter = int(op.splitext(op.basename(f))[0].split(\"_\")[2])\n if (iter > st):\n st = iter\n if st > 0:\n print \"Loading saved params %d\" % st\n with open(\"saved_params_%d.npy\" % st, \"r\") as f:\n params = pickle.load(f)\n state = pickle.load(f)\n return st, params, state\n else:\n return st, None, None", "def load_params_file(filename):\n with open(filename, 'r') as f:\n params = yaml.safe_load(f)\n return params", "def load_parameters(self, filename=None):\n if not filename:\n filename = os.path.join(self.directory, 'learned_parameters.npy')\n params = numpy.load(filename)\n lasagne.layers.set_all_param_values(self.__network, params)", "def load(self, path):\n parameters = paddle.load(path)\n self.set_dict(parameters)", "def load_params_from_file(self, fn):\n f = file(fn, 'r')\n params = json.load(f)\n return params", "def load_yaml_params(self, params_file):\n self._update_params(params_file)", "def test_params_loading(datadir: Path):\n config_fn = datadir / \"datapane.yaml\"\n initial_vals = dict(p1=\"a\", p3=3)\n\n assert len(dp.Params) == 0\n\n # load some values\n api._reset_runtime(initial_vals)\n assert len(dp.Params) == 2\n assert dp.Params[\"p1\"] == initial_vals[\"p1\"]\n\n # clear and load again\n api._reset_runtime({})\n assert len(dp.Params) == 0\n api._reset_runtime(initial_vals)\n\n # load from file\n dp.Params.load_defaults(config_fn=config_fn)\n # ensure values are merged\n assert len(dp.Params) == 3\n assert dp.Params[\"p1\"] == \"hello\"\n assert dp.Params[\"p2\"] == 4\n assert dp.Params[\"p3\"] == initial_vals[\"p3\"]", "def load_params(param_vector=[]):\n params = {}\n param_vector_default = [-1.43,0.05,7.5,0.05,1.,40.,0.6,1.,5.5]\n\n if len(param_vector) != 0:\n params['alpha'], params['sigma_M'], params['M50'], params['sigma_mpeak'], params['B'], params['A'], params['sigma_r'], params['n'], params['Mhm'] = param_vector\n else:\n params['alpha'], params['sigma_M'], params['M50'], params['sigma_mpeak'], params['B'], params['A'], params['sigma_r'], params['n'], params['Mhm'] = param_vector_default\n\n return params", "def load_params(exe, prog, path, ignore_params=[]):\n if not (os.path.isdir(path) or os.path.exists(path + '.pdparams')):\n raise ValueError(\"Model pretrain path {} does not \"\n \"exists.\".format(path))\n\n logger.info('Loading parameters from {}...'.format(path))\n\n ignore_set = set()\n state = _load_state(path)\n\n # ignore the parameter which mismatch the shape\n # between the model and pretrain weight.\n all_var_shape = {}\n for block in prog.blocks:\n for param in block.all_parameters():\n all_var_shape[param.name] = param.shape\n ignore_set.update([\n name for name, shape in all_var_shape.items()\n if name in state and shape != state[name].shape\n ])\n\n if ignore_params:\n all_var_names = [var.name for var in prog.list_vars()]\n ignore_list = filter(\n lambda var: any([re.match(name, var) for name in ignore_params]),\n all_var_names)\n ignore_set.update(list(ignore_list))\n\n if len(ignore_set) > 0:\n for k in ignore_set:\n if k in state:\n logger.warning('variable {} not used'.format(k))\n del state[k]\n fluid.io.set_program_state(prog, state)", "def load_ps(self):\n self.ps = 
self.read_var(self.psvar)\n self.test_shape(self.psvar, self.ps.shape, 2)", "def loaders_from_params(params,\n distributed=False,\n world_size=1,\n first_epoch='asc'):\n\n sets = datasets_from_params(params)\n\n data_loaders = {}\n for split in ['train', 'val', 'test']:\n if split not in sets:\n continue\n\n loader_params = params.pop(f'{split}_data_loader', None)\n if not loader_params:\n loader_params = params.get('data_loader')\n\n # TODO: put it in a better place\n if distributed:\n logger.info('Using distributed bucketing sampler')\n sampler = samplers.DistributedBucketingSampler\n else:\n logger.info('Using normal bucketing sampler')\n sampler = samplers.BucketingSampler\n\n batch_sampler = sampler(sets[split],\n batch_size=params['trainer']['batch_size'],\n first_epoch=first_epoch)\n\n data_loaders[split] = loaders.from_params(loader_params,\n dataset=sets[split],\n batch_sampler=batch_sampler)\n return data_loaders", "def get_params(param_names, param_store, serialize_param=serialize_param,\r\n translate_param=translate_identity):\r\n if param_names is None:\r\n param_names = [name for name in param_store.keys() if name != 'self']\r\n\r\n return dict((translate_param(name), serialize_param(param_store[name]))\r\n for name in param_names if param_store.get(name) is not None)", "def data_store_parameters_list(self) -> Optional[Sequence['outputs.AzureOperationalStoreParametersResponse']]:\n return pulumi.get(self, \"data_store_parameters_list\")", "def load_sond(parameter_list):\n pass", "def from_params(self, params):\n raise NotImplementedError()", "def load(self, path):\n parameters = torch.load(path)\n\n if \"optimizer\" in parameters:\n parameters = parameters[\"model\"]\n\n self.load_state_dict(parameters)", "def on_load_parameters(self, filename=None):\n if filename is None:\n path, _ = QtWidgets.QFileDialog.getOpenFileName(self, \"Choose a parameter file.\", \"\", \"JSON Files (*.json)\")\n else:\n path = filename\n\n if path == '' or path is None:\n return\n\n self.param_file = path\n\n with open(self.param_file, 'r') as f:\n params = json.loads(f.read())\n\n obj_points = params['object positions']\n cam_pos = params['camera positions']\n dist_coeff = params['distortion coefficients']\n\n for p in obj_points:\n x, y = p['x'], p['y']\n lat, lon, alt = p['lat'], p['lon'], p['alt']\n self.add_known_image_points((x, y), latlonalt=(lat, lon, alt))\n\n self.camera_lat_line.setValue(float(cam_pos['lat']))\n self.camera_lon_line.setValue(float(cam_pos['lon']))\n self.camera_alt_line.setValue(float(cam_pos['alt']))\n self.cx_line.setValue(float(cam_pos['cx']))\n self.cy_line.setValue(float(cam_pos['cy']))\n self.phi_line.setValue(float(cam_pos['phi']))\n self.theta_line.setValue(float(cam_pos['theta']))\n self.psi_line.setValue(float(cam_pos['psi']))\n\n self.k1_line.setValue(float(dist_coeff['k1']))\n self.k2_line.setValue(float(dist_coeff['k2']))\n self.k3_line.setValue(float(dist_coeff['k3']))\n self.p1_line.setValue(float(dist_coeff['p1']))\n self.p2_line.setValue(float(dist_coeff['p2']))\n\n self.statusBar().showMessage(f'Loaded parameters from {self.param_file}')", "def set_model_ps(filepath, dicname='PARAMETERS'):\n psmod = importlib.import_module(path_to_modline(filepath))\n model.PARAMETERS = getattr(psmod, dicname)", "def import_parameters(self, file_name):\n parameters = []\n\n with open(file_name) as in_file:\n parameters = json.load(in_file)\n\n if parameters:\n self.put_parameters(parameters)", "def load(proxy=None, mode=None, parameters=None, json_path=None):\n ps = 
load_params(json_path)\n \n try:\n ps = ps[proxy]\n except:\n raise KeyError(\"`proxy` incorrect. Try one of: ['{}']\".format(\"', '\".join(ps.keys())))\n\n try:\n ps = ps[mode]\n except:\n raise KeyError(\"`mode` incorrect. Try one of: ['{}']\".format(\"', '\".join(ps.keys())))\n \n try:\n ps = ps[parameters]\n except:\n raise KeyError(\"`parameters` incorrect. Try one of: ['{}']\".format(\"', '\".join(ps.keys())))\n \n p = params(values=ps)\n p.param_name = parameters\n \n return p", "def load_by_params(cls, **_params):\n try:\n return cls._prepare_parametrized_queue(**_params).first()\n except SQLAlchemyError:\n cls.s.rollback()\n raise", "def load_parameters(handle):\n\n logging.info('Loading scaling parameters from {}'.format(handle.name))\n\n result = pd.read_csv(handle, index_col=0)\n\n result.index = result.index.astype(str)\n\n logging.info('Result is a table with shape {}'.format(result.shape))\n\n return result", "def get_parameters(self):\n self.parameters = dict()\n ssm = self.session.client('ssm')\n parameters = ssm_utils.get_parameters_by_path(\n ssm,\n Path=self.confetti_path,\n Recursive=self.recursive,\n )\n\n for parameter in parameters:\n name = parameter['Name'].replace(self.confetti_path + '/', '')\n value = parameter['Value']\n attributes = dict()\n\n if parameter['Type'] == 'SecureString':\n attributes['encrypted'] = value\n attributes['decrypted'] = ssm.get_parameter(\n Name=parameter['Name'],\n WithDecryption=True\n ).get('Parameter').get('Value')\n value = attributes['decrypted']\n\n self.parameters[name] = self.Parameter(value, **attributes)", "def _load_parameters(self, default):\n params = {}\n for (key, value) in default:\n params[key] = self._parse_parameter(value)\n \n if not os.path.exists(self._datadir):\n os.makedirs(self._datadir)\n \n # Check if the file already exists, and create a new one, using the \n # passed default values, if necessary\n paramfile = os.path.join(self._datadir, self.id.lower() + '.cfg')\n if (os.path.isfile(paramfile)):\n paramjson = open(paramfile)\n params_var = json.load(paramjson)\n params.update(params_var)\n else:\n params_var = {}\n params_var['eta'] = [params['eta']]*24\n params_var['cov'] = [params['sigma']**2]*24\n params.update(params_var)\n \n with open(paramfile, 'w') as paramjson:\n json.dump(params_var, paramjson)\n \n return params", "def parseBoardParameters(self, parametersFromRegistry):\n self.boardParams = dict(parametersFromRegistry)\n #for key, val in dict(parametersFromRegistry).items():\n # setattr(self, key, val)", "def load(self, filename):\n param_dict = pickle.load(open('%s' % filename, 'rb'))\n self.learningrate = param_dict['learningrate']\n self.verbose = param_dict['verbose']\n self._loadsize = param_dict['loadsize']\n self._batchsize = param_dict['batchsize']\n self.momentum = param_dict['momentum']\n self.epochcount = param_dict['epochcount']\n self._momentum_batchcounter = param_dict['momentum_batchcounter']\n for param_name in param_dict['incs'].keys():\n for p in self._params:\n if p.name == param_name:\n self._incs[p].set_value(param_dict['incs'][param_name])\n if self.rmsprop is not None:\n for param_name in param_dict['avg_grad_sqrs'].keys():\n for p in self._params:\n if p.name == param_name:\n self._avg_grad_sqrs[p].set_value(param_dict['avg_grad_sqrs'][param_name])\n self._numbatches = self._loadsize // self._batchsize\n if self._inputs_type != 'function':\n self._numloads = self._inputs.shape[0] // self._loadsize\n if self._inputs_type == 'h5':\n self._inputs_theano.set_value(\n 
self._inputs.read(stop=self._loadsize))\n else:\n self._inputs_theano.set_value(self._inputs[:self._loadsize])", "def load_standard_parameters(self):\n paradic = {'x':'0',\n 'y':'0',\n 'n_oct':'8',\n 'n_spo':'3',\n 'sigma_min':'0.8',\n 'delta_min':'0.5',\n 'sigma_in':'0.5',\n 'C_DoG':'0.015',\n 'C_edge':'10',\n 'n_bins':'36',\n 'lambda_ori':'1.5',\n 't':'0.8',\n 'n_hist':'4',\n 'n_ori':'8',\n 'lambda_descr':'6',\n 'flag_match':'1',\n 'C_match':'0.6'}\n self.cfg['param']['paradic'] = paradic\n self.cfg.save()", "def data_store_parameters_list(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AzureOperationalStoreParametersArgs']]]]:\n return pulumi.get(self, \"data_store_parameters_list\")", "def data_store_parameters_list(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AzureOperationalStoreParametersArgs']]]]:\n return pulumi.get(self, \"data_store_parameters_list\")", "def _add_ssm_param_injector(tag: str = \"!SSM\") -> Set[str]:\n # pattern for global vars: look for ${word}\n pattern = re.compile(\".*?\\${([^}]+::[^}]*)}.*?\") # noqa: W605\n loader = yaml.SafeLoader\n\n # the tag will be used to mark where to start searching for the pattern\n # e.g. somekey: !SSM somestring${MYENVVAR}blah blah blah\n loader.add_implicit_resolver(tag, pattern, None) # type: ignore\n\n ssm_parameters = set()\n\n def constructor_ssm_parameter(loader, node) -> Any: # type: ignore\n \"\"\"\n Extracts the environment variable from the node's value\n :param yaml.Loader loader: the yaml loader\n :param node: the current node in the yaml\n :return: the parsed string that contains the value of the environment\n variable\n \"\"\"\n value = loader.construct_scalar(node)\n match = pattern.findall(value) # to find all env variables in line\n if match:\n full_value = value\n for g in match:\n _logger.debug(f\"match: {g}\")\n (ssm_param_name, jsonpath) = g.split(\"::\")\n if \"${\" in ssm_param_name:\n ssm_param_name = ssm_param_name.replace(\"$\", \"\").format(os.environ)\n _logger.debug(f\"found injected parameter {(ssm_param_name, jsonpath)}\")\n if ssm_param_name not in SSM_CONTEXT:\n ssm = boto3_client(\"ssm\")\n try:\n SSM_CONTEXT[ssm_param_name] = json.loads(\n ssm.get_parameter(Name=ssm_param_name)[\"Parameter\"][\"Value\"]\n )\n ssm_parameters.add(ssm_param_name)\n except Exception as e:\n _logger.error(f\"Error resolving injected parameter {g}: {e}\")\n\n json_expr = jsonpath_ng.parse(jsonpath)\n json_data = SSM_CONTEXT[ssm_param_name]\n json_match = json_expr.find(json_data)\n\n if len(json_match) > 1:\n raise Exception(f\"Injected parameter {g} is ambiguous\")\n elif len(json_match) == 0:\n raise Exception(f\"Injected parameter {jsonpath} not found in SSM {ssm_param_name}\")\n\n param_value: str = json_match[0].value\n _logger.debug(f\"injected SSM parameter {g} resolved to {param_value}\")\n return param_value\n return full_value\n return value\n\n loader.add_constructor(tag, constructor_ssm_parameter) # type: ignore\n return ssm_parameters", "def load_params_from_file(path):\n save_dict = mx.nd.load(path)\n arg_params = {}\n aux_params = {}\n for k, v in save_dict.items():\n tp, name = k.split(':', 1)\n if tp == 'arg':\n arg_params[name] = v\n if tp == 'aux':\n aux_params[name] = v\n return arg_params, aux_params", "def _Load(self, vm, **kwargs):\n kwargs.setdefault('threads', self._default_preload_threads)\n if FLAGS.ycsb_record_count:\n kwargs.setdefault('recordcount', FLAGS.ycsb_record_count)\n for pv in FLAGS.ycsb_load_parameters:\n param, value = pv.split('=', 1)\n kwargs[param] = value\n 
command = self._BuildCommand('load', **kwargs)\n stdout, stderr = vm.RobustRemoteCommand(command)\n return ycsb_stats.ParseResults(\n str(stderr + stdout), self.measurement_type, _ERROR_RATE_THRESHOLD.value\n )", "def load_params(self, params):\n super(MlpModel, self).load_params(params)\n self.input_shape = [None,] + self.params.data_shape\n self.label_shape = [None, self.params.num_classes]\n self.mlp_act_funcs = [activation_picker(act_func_str)\n for act_func_str in self.params.mlp_activation_functions]", "def initialize(filename='params.yaml'):\n home_path = str(Path.home())\n project_path = 'Documents/SideProjects/sailboatsfactory'\n work_path = 'src/nn-core'\n params_path = join(home_path, join(project_path, work_path))\n yaml_file = join(params_path, filename)\n print(\"Reading parameters from:\", filename)\n with open(yaml_file, 'r') as f:\n my_params = load(f)\n my_params['x_scaler'] = MinMaxScaler(feature_range=(-1, 1))\n my_params['y_scaler'] = MinMaxScaler(feature_range=(-1, 1))\n\n raw = data.read(my_params)\n adjusted = adjust(raw, my_params)\n\n return adjusted, my_params", "def restore(self):\n if os.path.isfile( \\\n os.path.join(self.network_path,'net_parameters.nnprm.index')):\n self.load_network_parameters(\n file_name='net_parameters', file_path=self.network_path)\n else:\n self.log(\"Could not load previous network parameters from:\\n{}\".format(\\\n os.path.join(self.network_path,'net_parameters.nnprm') ))\n self.log(\"Starting with untrained parameters\")", "def GetAllParameters(self, path, service=None):\n\n param_dict = None\n\n _model_container = None\n \n if len(self._loaded_services) == 0:\n\n return None\n \n elif len(self._loaded_services) == 1:\n\n _model_container = self._loaded_services[0].GetObject()\n\n param_dict = _model_container.GetAllParameters(path)\n\n # now that we're done with the service, we check the solvers for\n # values just in case they have changed. Or for solved variables\n # that are only generated in the solvers.\n\n if self._compiled:\n\n _type = _model_container.GetComponentType(path)\n\n if _type == 'SEGMENT':\n \n vm = self.GetSolverParameter(path, 'Vm')\n\n param_dict['Vm'] = vm\n\n\n return param_dict", "def load_parameters(self, session, data_dict):\n for w in self.weights:\n name = w.name.rsplit(':', 1)[0]\n if name in data_dict:\n session.run(w.assign(data_dict[name]))", "def load(self, *args, **kwargs):\r\n for store_attr in self.__store_attrs__:\r\n setattr(self, store_attr, {})", "def load_params(self, model_path: str, with_mask=True) -> None:\n checkpt = torch.load(model_path, map_location=self.device)\n model_utils.initialize_params(\n self.model, checkpt[\"state_dict\"], with_mask=with_mask\n )\n model_utils.initialize_params(\n self.optimizer, checkpt[\"optimizer\"], with_mask=False\n )\n self.best_acc = checkpt[\"test_acc\"]\n logger.info(f\"Loaded parameters from {model_path}\")", "def loadParams(self, paramsFile):\n dataDir = os.path.abspath(os.path.join(radiomics.__path__[0], 'schemas'))\n schemaFile = os.path.join(dataDir, 'paramSchema.yaml')\n schemaFuncs = os.path.join(dataDir, 'schemaFuncs.py')\n c = pykwalify.core.Core(source_file=paramsFile, schema_files=[schemaFile], extensions=[schemaFuncs])\n params = c.validate()\n\n inputImages = params.get('inputImage', {})\n enabledFeatures = params.get('featureClass', {})\n kwargs = params.get('setting', {})\n\n self.logger.debug(\"Parameter file parsed. 
Applying settings\")\n\n if len(inputImages) == 0:\n self.inputImages = {'Original': {}}\n else:\n self.inputImages = inputImages\n\n self.logger.debug(\"Enabled input images: %s\", self.inputImages)\n\n if len(enabledFeatures) == 0:\n self.enabledFeatures = {}\n for featureClassName in self.getFeatureClassNames():\n self.enabledFeatures[featureClassName] = []\n else:\n self.enabledFeatures = enabledFeatures\n\n self.logger.debug(\"Enabled features: %s\", enabledFeatures)\n\n # Set default settings and update with and changed settings contained in kwargs\n self.kwargs = self._getDefaultSettings()\n self.kwargs.update(kwargs)\n\n self.logger.debug(\"Settings: %s\", kwargs)", "def load(self, uri):\r\n self._encoder = load_model(uri+\"_lstm_encoder.hdf5\")\r\n self._autoencoder = load_model(uri+\"_lstm_autoencoder.hdf5\")\r\n\r\n pf = PyFolder(os.path.dirname(os.path.realpath(uri)))\r\n dict_options = pf[os.path.basename(uri)+\"_options.json\"]\r\n\r\n self._latent_space = dict_options['latent_space']\r\n self._input_cells = dict_options['input_cells']", "def load_params_from_file(self, input_file):\n\n ### FILL IN ###", "def load_module_state_dict(model, state_dict):\n import warnings\n from torch.nn import Parameter\n\n own_state = model.state_dict()\n for name, param in state_dict.items():\n if name not in own_state:\n warnings.warn('Skipping unexpected key \"{}\" in state_dict'.format(name))\n continue\n if isinstance(param, Parameter):\n # backwards compatibility for serialized parameters\n param = param.data\n try:\n own_state[name].copy_(param)\n except Exception, msg:\n warnings.warn(\"Error occurs when copying from state_dict['{}']: {}\"\n .format(name, str(msg)))\n\n missing = set(own_state.keys()) - set(state_dict.keys())\n if len(missing) > 0:\n warnings.warn(\n \"Keys not found in state_dict and thus not overwritten: '{}'\"\n .format(missing))", "def load_params(path):\n try:\n with open(path, \"rb\") as f:\n params = yaml.full_load(f)\n return params\n except Exception as e:\n print(e)\n with open(path, \"r\") as f:\n params = yaml.full_load(f, encoding='utf-8')\n return params", "def _load_model_from_trained_params(self):\n self.ent_emb = tf.constant(self.trained_model_params[0])\n self.rel_emb = tf.constant(self.trained_model_params[1])", "def gather_params(self):\n for layer in self.layers:\n for name, value in layer.params.iteritems():\n self.params[name] = value", "def test_requesting_invalid_parameters(self):\n self.parameter_store.client.get_parameters.return_value = {\n \"Parameters\": [{\"Name\": \"/test/foo\", \"Value\": \"foo_ssm_value_1\"}],\n \"InvalidParameters\": [\"/test/bar\"],\n }\n\n with self.assertRaises(\n InvalidParametersError, msg='Invalid parameters [\"/test/bar\"] requested'\n ) as exc_info:\n self.parameter_store.get_parameters([\"/test/foo\", \"/test/bar\"])\n assert exc_info.exception.invalid_parameters == [\"/test/bar\"]", "def get_params(self, paramFile):\n\n with open(paramFile, 'r') as f:\n titleLine = next(f)\n\n for line in f:\n p, i, v = line.split(\",\")\n\n self.params.update(p, v, i)", "def load_model(self,\n model: Union[str, io.IOBase, DM],\n name: Optional[str] = None):\n super().load_model(model, name=name)\n content = self.model[self.modelroot]\n\n self.key = content['key']\n self.id = content['id']\n self.family = content['system-family']\n self.__parameters = []\n for cp in content.aslist('calculation-parameter'):\n self.__parameters.append(dict(cp))", "def loadFromParams(self): \n \n parser = 
argparse.ArgumentParser(description=\"SUMID - Script used for mass items discovering\")\n \n parser.add_argument (\n \"--linklistURL\",\n dest=\"linklistURL\",\n default=argparse.SUPPRESS,\n required=False,\n help=\"Path to linklist. Linklist is the main input of Sumid. It contains all the urls, which gonna be processed.\"\n )\n \n parser.add_argument(\"-w\",\n \"--workDir\",\n dest=\"workDir\",\n default=argparse.SUPPRESS,\n required=False,\n help=\"Place on filesystem where the results gonna be stored.\"\n )\n\n parser.add_argument(\n \"--allowedMIMETypes\",\n dest=\"allowedMIMETypes\",\n default=argparse.SUPPRESS,\n required=False,\n help=\"Comma separated list of MIME type, which will be considered as a successful hit.\"\n )\n\n parser.add_argument(\"--sibsLimit\",\n dest=\"sibsLimit\",\n default=argparse.SUPPRESS,\n required=False,\n type=int,\n help=\"How many misses on each side will be tolerated before moving further.\"\n )\n \n parser.add_argument(\"-v\",\n \"--verbose\",\n dest=\"verbosity\",\n default=argparse.SUPPRESS,\n required=False,\n action='store_const',\n const=42,\n help=\"Tweak to be able to use verbosity in unittests. In future can make Sumid silent.\"\n ) \n \n argparseResults = parser.parse_args()\n return argparseResults", "def __loadParametersAndDefaults(self, dataPath, confFilename, nkeys, nvalues, keyType, valueType):\n params = self.loadConf(dataPath, confFilename=confFilename)\n\n # filter dict to include only recognized field names:\n for k in params.keys():\n if k not in SeriesLoader.BinaryLoadParameters._fields:\n del params[k]\n keywordParams = {'nkeys': nkeys, 'nvalues': nvalues, 'keytype': keyType, 'valuetype': valueType}\n for k, v in keywordParams.items():\n if not v:\n del keywordParams[k]\n params.update(keywordParams)\n return SeriesLoader.BinaryLoadParameters(**params)", "def _setup_from_parameters(self,params):\n\n # SHOULD WE CHECK HERE THAT INPUT PARAMETERS HAVE SAME KP / Z_STAR ?\n\n # copy input dictionary\n self.linP_params=params.copy()\n\n # will add polynomial describing the log power, around kp_kms\n linP_kms_2=0.5*params['alpha_star']\n linP_kms_1=params['n_star']\n A_star=(2*np.pi**2)*params['Delta2_star']/self.kp_kms**3\n linP_kms_0=np.log(A_star)\n linP_kms = np.poly1d([linP_kms_2,linP_kms_1,linP_kms_0])\n # why are we storing this poly1d object? 
When do we actually use it?\n self.linP_params['linP_kms']=linP_kms", "def loadDict(self, sd):\n self.setName(sd.get(\"name\", None))\n self.setDriverName(sd.get(\"driverName\", None))\n self.setOptDriverName(sd.get(\"optDriverName\", None))\n self.setAuxDriverName(sd.get(\"auxDriverName\", None))\n self.setRunType(sd.get(\"runType\", None))\n self.setInputNames(sd.get(\"inputNames\", None))\n self.setOutputNames(sd.get(\"outputNames\", None))\n self.setInputTypes(sd.get(\"inputTypes\", None))\n self.setInputMins(sd.get(\"inputMins\", None))\n self.setInputMaxs(sd.get(\"inputMaxs\", None))\n self.inputDists = []\n if \"inputDists\" in sd:\n for distDict in sd[\"inputDists\"]:\n distr = Distribution(Distribution.UNIFORM)\n distr.loadDict(distDict)\n self.inputDists.append(distr)\n\n if not self.inputDists:\n self.inputDists = None\n self.setInputDefaults(sd.get(\"inputDefaults\", None))\n self.setSelectedOutputs(sd.get(\"outputSelections\", None))\n self.setNamesIncludeNodes(sd.get(\"namesIncludeNodes\", None))\n stats = sd.get(\"emulatorOutputStats\", None)\n for i, stat in enumerate(stats):\n self.setEmulatorOutputStatus(i, stat)\n self.setEmulatorTrainingFile(sd.get(\"emulatorTrainingFile\", None))\n self.inputsFlowsheetFixed = sd.get(\"inputsFlowsheetFixed\", None)", "def load(self, configs, container):\n pass;", "def set_params(self, state_dicts):\n raise NotImplementedError", "def InitParams(ss):\n ss.Params.OpenJSON(\"pat_assoc.params\")", "def _loadDummyModelParameters(self, params):\n\n for key, value in params.iteritems():\n if type(value) == list:\n index = self.modelIndex % len(params[key])\n self._params[key] = params[key][index]\n else:\n self._params[key] = params[key]", "def get_params(name, *optionals):\n name = name.lower()\n optionals = [opt.lower() for opt in optionals]\n\n partial = os.path.join(cfg.param_dir, name+'.json')\n full = os.path.join(cfg.param_dir, '_'.join([name]+optionals)+'.json')\n\n param = {}\n if os.path.isfile(partial):\n with open(partial, 'r') as f:\n tf.logging.info('Loading parameters from %s', partial)\n _update(param, json.load(f))\n if os.path.isfile(full):\n with open(full, 'r') as f:\n tf.logging.info('Loading parameters from %s', full)\n _update(param, json.load(f))\n\n if not param:\n tf.logging.info('No parameter file found')\n return param", "def set_params(self, params):\n for step_id, step_params in _iteritems(params):\n for name, value in _iteritems(step_params):\n self.add_param(step_id, name, value)", "def para_loader(self, stdpath=True):\n if stdpath is True:\n path = self.lineEdit_params.text()\n else:\n path = \"./data/parameters/parameters_blur.pcl\"\n self.lineEdit_params.setText(path)\n\n file = open(path, 'rb')\n loaded_p_list = pickle.load(file)\n file.close()\n counter = 0\n\n radio_is_image = self.radioButton_circle.isChecked()\n\n if radio_is_image:\n self.radioButton_image.setChecked(True)\n\n for clazz in SliderClass.all_sliders:\n\n # ## if you added a new sliderclass and want to load your old parameters, you can use this little hack\n # classname = 'linefinder'\n # #print(loaded_p_list[counter])\n # #print(f\"the latest keyword = {clazz.keyword}\")\n # if clazz.keyword == [classname]:\n # warnings.warn(\"parameters are messed up! 
save new ones\")\n # break\n\n try:\n clazz.settr(loaded_p_list[counter])\n counter += 1\n if clazz.radio_image is not None:\n clazz._params_circle = loaded_p_list[counter]\n counter += 1\n except IndexError:\n # again a small hack for when you add an extra element to the class and this is not yet implemented\n # in your parameter saves.\n warnings.warn(\"parameters are messed up! save new ones\")\n counter += 1\n print(f\"should be morph: {loaded_p_list[counter]}\")\n # in the save, manually set the morph_state as the final thing to load in\n self.morph_state = loaded_p_list[counter]\n self.checkBox_morph.setChecked(self.morph_state[0][0])\n self.textEdit_morph.setText(self.morph_state[0][1])\n\n self.coords = loaded_p_list[counter + 1]\n self.checkBox_segment.setChecked(loaded_p_list[counter + 2])\n self.lineEdit_coords.setText(str(self.coords))\n radio_is_circle = loaded_p_list[counter + 3]\n\n if radio_is_circle:\n self.radioButton_circle.setChecked(True)\n self.radioButton_circle.click()", "def load(cls, load_folder: Path | str) -> \"Parameters\":\n serializer = serializer_factory(fmt=SerializerEnum.NUMPY)\n return serializer.load(class_obj=cls, folder_path=load_folder)", "def load_parameters(file_name='exit_lockdown_parameters.json'):\n with open(file_name) as json_file:\n prm = json.load(json_file)\n return prm", "def load_params(num_sources, fname):\n path_p = '/import/c4dm-04/alvarado/results/sampling_covariance/'\n # path_p = '/home/pa/Desktop/sampling_covariance/'\n pitches = [\"60\", \"64\", \"67\"]\n hparam = []\n lengthscale = []\n variance = []\n frequency = []\n\n for i in range(num_sources):\n hparam.append(pickle.load(open(path_p + fname + \"_M\" + pitches[i] + \"_hyperparams.p\", \"rb\")))\n lengthscale.append(hparam[i][1].copy())\n variance.append(hparam[i][2].copy() / sum(hparam[i][2].copy()))\n frequency.append(hparam[i][3].copy())\n\n return lengthscale, variance, frequency", "def _load(self):\n self.logger.debug(\"Loading from persistence\")\n # load whole item from persistence\n data = self._persistence.load(self.id(), default={})\n if not data:\n return\n\n try:\n self.persistence_deserialize(data)\n except NotImplementedError:\n # allow backwards compatibility or persisted_values way\n for persisted_var in self.persisted_values():\n if persisted_var in data:\n self.logger.debug(\"Loaded value {} for attribute {}\".format(\n data[persisted_var], persisted_var))\n # Set the loaded value to the attribute on this class\n setattr(self, persisted_var, data[persisted_var])\n except:\n # log exception while loading and let it continue\n self.logger.exception(\n \"Failed to deserialize block with data: {}\".format(data))" ]
[ "0.67715704", "0.6456362", "0.63223475", "0.62987643", "0.62267524", "0.6217192", "0.6191533", "0.61013824", "0.59506804", "0.5887682", "0.58793163", "0.587145", "0.5848228", "0.5811435", "0.57710826", "0.57590187", "0.5749164", "0.56775564", "0.56462854", "0.5638049", "0.5561557", "0.55508393", "0.5513005", "0.54603565", "0.54460263", "0.5445359", "0.5417198", "0.537676", "0.535325", "0.53404486", "0.5337445", "0.5333858", "0.5320535", "0.5317396", "0.5316588", "0.5309132", "0.529446", "0.5272812", "0.5271554", "0.5267251", "0.5254618", "0.52459663", "0.52418447", "0.52374494", "0.5233331", "0.5225734", "0.5221745", "0.522031", "0.5193947", "0.5182438", "0.51563466", "0.5147514", "0.5098466", "0.5095161", "0.5081941", "0.50815433", "0.5078942", "0.5075767", "0.506831", "0.50475335", "0.50443727", "0.5043773", "0.50383365", "0.5031239", "0.5031239", "0.5030821", "0.5017982", "0.5017096", "0.5010165", "0.5010047", "0.50047046", "0.50033075", "0.50030285", "0.499651", "0.4987291", "0.4979491", "0.4978998", "0.49595785", "0.49515224", "0.49478698", "0.49459544", "0.49361202", "0.49350598", "0.49327028", "0.49295446", "0.49135682", "0.48908347", "0.48759398", "0.48669308", "0.48649856", "0.4862221", "0.48600453", "0.48491254", "0.48414382", "0.48382354", "0.4835693", "0.48356187", "0.48269144", "0.48257858", "0.48224807" ]
0.51215804
52
Send a request to Slack and validate the response
def slack_request(url: str, headers: dict, data: dict) -> requests.Response:
    logger.debug(f'\nSending request to Slack API using {url}')
    response = requests.post(url=url, headers=headers, data=data)
    if response.status_code != 200:
        logger.error(f'Got status {response.status_code} while trying to post to the slack url {url}.')
    # todo: check for error details; their response format is not always consistent, so converting to json
    # doesn't work all the time.
    #data = response.json()
    #if not data['ok']:
    #    logger.error(f"Got the following errors back from slack: {data}")
    return response
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_slackWH_send_good(get_slackwebhook, capsys):\n s = get_slackwebhook\n s.send()\n out, err = capsys.readouterr()\n assert \"Message sent\" in out", "def __call(self, headers, method, data):\n url = 'https://slack.com/api/'+method\n req = requests.post(\n url=url,\n data=data,\n headers=headers\n )\n return req", "def slack_it(request):\n # Validate the Boon AI JWT\n jwt_valid = True\n encoded_jwt = request.headers.get('X-BoonAI-Signature-256').encode('utf-8')\n try:\n jwt.decode(encoded_jwt, os.environ['SECRET'], algorithms=[\"HS256\"])\n except jwt.InvalidSignatureError:\n jwt_valid = False\n\n # Send a slack message with the payload information.\n body = {\n \"text\": \"Webhook received from Boon AI\",\n \"blocks\": [\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"plain_text\",\n \"text\": \"Webhook received from Boon AI\",\n \"emoji\": True\n }\n },\n {\n \"type\": \"divider\"\n },\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": f\"*JWT Validated*: {jwt_valid}\\n \"\n f\"*JWT*: {request.headers.get('X-BoonAI-Signature-256')}\\n \"\n f\"*Content-Type*: {request.content_type}\\n \"\n f\"*Webhook Payload*\\n```{pprint.pformat(request.get_json(force=True))}```\"\n }\n }\n ]\n }\n requests.post(os.environ['SLACK_URL'], json=body)\n\n return {}", "def post(self):\n send_slack_log('Entered /slack/submit')\n send_slack_log('Request info:')\n send_slack_log(str(request.form))\n if request.form.get('payload') is None:\n send_slack_log('Invalid request: no payload')\n return\n else:\n return handle_interaction(json.loads(request.form['payload']))", "def hears(request):\n\n #Wit makes our responses timeout, so we ignore Slack retries\n if \"HTTP_X_SLACK_RETRY_NUM\" in request.META:\n return HttpResponse(\"OK\", 200)\n\n slack_event = json.loads(request.body)\n\n # ============= Slack URL Verification ============ #\n # In order to verify the url of our endpoint, Slack will send a challenge\n # token in a request and check for this token in the response our endpoint\n # sends back.\n # For more info: https://api.slack.com/events/url_verification\n if \"challenge\" in slack_event:\n return HttpResponse(slack_event[\"challenge\"], 200)\n #removed {\"content_type\":\"application/json\"} from flask response\n\n # ============ Slack Token Verification =========== #\n # We can verify the request is coming from Slack by checking that the\n # verification token in the request matches our app's settings\n if pyBot.verification != slack_event.get(\"token\"):\n print \"Invalid Slack verification token: %s \\npyBot has: \\\n %s\\n\\n\" % (slack_event[\"token\"], pyBot.verification)\n # By adding \"X-Slack-No-Retry\" : 1 to our response headers, we turn off\n # Slack's automatic retries during development.\n return HttpResponse(message, 403)\n\n # ====== Process Incoming Events from Slack ======= #\n # If the incoming request is an Event we've subcribed to\n if \"event\" in slack_event:\n event_type = slack_event[\"event\"][\"type\"]\n # Then handle the event by event_type and have your bot respond\n return _event_handler(event_type, slack_event)\n\n # If our bot hears things that are not events we've subscribed to,\n # send a quirky but helpful error response\n return HttpResponse(\"[NO EVENT IN SLACK REQUEST] These are not the droids\\\n you're looking for.\", 404)", "def request_slack(api_method, params):\n url = BASE_URL + api_method\n response = requests.get(url, params=params)\n if response.status_code != 200:\n raise RuntimeError('Issue connecting to 
Slack API!')\n decoded_response = json.loads(response.text)\n if not decoded_response['ok']:\n raise RuntimeError('Issue pulling data from Slack API!')\n return decoded_response", "def is_request_valid(request: request) -> bool:\n \n key = os.environ.get(\"SLACK_SIGNING_SECRET\")\n basestring = 'v0:' + request.headers['X-Slack-Request-Timestamp'] + ':' + str(request.get_data(), 'utf-8')\n\n # Hash the basestring using the signing secret as the key in order to get the signature\n signature = 'v0=' + hmac.new(\n bytes(key, 'utf-8'),\n bytes(basestring, 'utf-8'),\n hashlib.sha256\n ).hexdigest()\n slacksig = request.headers['X-Slack-Signature']\n\n # If the signature is equal to the signature sent by slack, then it is indeed from slack.\n return hmac.compare_digest(slacksig, signature)", "def save_slack_token(request):\n logger.debug(\"Slack callback just landed\")\n\n error = request.query_params.get('error', False)\n error_description = request.query_params.get('error_description', '')\n if error:\n raise APIException(\"Slack: \" + error_description)\n\n original_payload = request.query_params.get('payload', None)\n payload = request.query_params.get('payload', None)\n if payload is None:\n raise ValidationError(\"No payload specified\")\n else:\n try:\n payload = base64.b64decode(payload).decode(\"utf-8\")\n payload = parse_qs(payload)\n except:\n raise ValidationError(\"Cannot decode payload in base64\")\n\n if \"url\" not in payload:\n logger.exception(payload)\n raise ValidationError(\"No url specified from the slack payload\")\n\n if \"user\" not in payload:\n logger.exception(payload)\n raise ValidationError(\"No user id specified from the slack payload\")\n\n if \"a\" not in payload:\n logger.exception(payload)\n raise ValidationError(\"No academy id specified from the slack payload\")\n\n try:\n academy = Academy.objects.get(id=payload[\"a\"][0])\n except Exception as e:\n raise ValidationError(\"Not exist academy with that id\") from e\n\n user = None\n try:\n user = User.objects.get(id=payload[\"user\"][0])\n except Exception as e:\n raise ValidationError(\"Not exist user with that id\") from e\n\n code = request.query_params.get('code', None)\n if code is None:\n raise ValidationError(\"No slack code specified\")\n\n params = {\n 'client_id': os.getenv('SLACK_CLIENT_ID', \"\"),\n 'client_secret': os.getenv('SLACK_SECRET', \"\"),\n 'redirect_uri': os.getenv('SLACK_REDIRECT_URL', \"\")+\"?payload=\"+original_payload,\n 'code': code,\n }\n # print(\"params\", params)\n resp = requests.post('https://slack.com/api/oauth.v2.access', data=params)\n if resp.status_code == 200:\n\n logger.debug(\"Slack responded with 200\")\n\n slack_data = resp.json()\n if 'access_token' not in slack_data:\n print(\"Slack response body\", slack_data)\n raise APIException(\"Slack error status: \"+slack_data['error'])\n\n slack_data = resp.json()\n logger.debug(slack_data)\n\n # delete all previous credentials for the same team and cohort\n CredentialsSlack.objects.filter(\n app_id=slack_data['app_id'], team_id=slack_data['team']['id'], user__id=user.id).delete()\n credentials = CredentialsSlack(\n user=user,\n app_id=slack_data['app_id'],\n bot_user_id=slack_data['bot_user_id'],\n token=slack_data['access_token'],\n team_id=slack_data['team']['id'],\n team_name=slack_data['team']['name'],\n authed_user=slack_data['authed_user']['id'],\n )\n credentials.save()\n\n team = SlackTeam.objects.filter(\n academy__id=academy.id, slack_id=slack_data['team']['id']).first()\n if team is None:\n team = SlackTeam(\n 
slack_id=slack_data['team']['id'],\n owner=user,\n academy=academy\n )\n\n team.name = slack_data['team']['name']\n team.save()\n\n return HttpResponseRedirect(redirect_to=payload[\"url\"][0])", "def test_validation(self):\n challenge = \"challenge-string\"\n data = {\n 'hub.mode': 'subscribe',\n 'hub.verify_token': settings.VERIFY_TOKEN,\n 'hub.challenge': challenge\n }\n c = Client()\n response = c.get(self.webhook, data=data)\n self.assertEqual(response.status_code, 200)\n self.assertEqual(str(response.content, 'utf-8'), challenge)", "def test_slackP_send(get_slackpost, capsys):\n s = get_slackpost\n s.send()\n out, err = capsys.readouterr()\n assert \"Message sent\" in out", "def slack_callback(request):\n client_id = slack_access_keys[\"client_id\"]\n client_secret = slack_access_keys[\"client_secret\"]\n\n if request.method == 'GET':\n code = request.GET.get('code')\n get_token_url = \"https://slack.com/api/oauth.access?client_id={}&client_secret={}&code={}\".format(client_id,\n client_secret,\n code)\n r = requests.post(get_token_url,\n auth=HTTPBasicAuth(client_id, client_secret),\n headers={\"content-type\": \"application/x-www-form-urlencoded\"},\n params={\"code\": code, \"grant_type\": \"authorization_code\"})\n\n try:\n access_token = r.json()['access_token']\n\n get_activity_url = \"https://slack.com/api/users.identity\"\n r = requests.post(get_activity_url,\n headers={\"Authorization\": \"Bearer \" + access_token})\n return r.json()\n except Exception as e:\n # Authorization failed.\n return None", "def send_message_to_slack(text):\n\n try:\n post = {\n \"text\": \":fire: :sad_parrot: *SSL Certificate BACKUP SCRIPT Status for HTTPD Proxy:* :sad_parrot: :fire:\",\n \"attachments\": [\n {\n \"text\": \"{0}\".format(text),\n \"color\": \"#B22222\",\n \"attachment_type\": \"default\",\n \"fields\": [\n {\n \"title\": \"Priority\",\n \"value\": \"High\",\n \"short\": \"false\"\n }\n ],\n \"footer\": \"AWS HTTPD\",\n \"footer_icon\": \"https://platform.slack-edge.com/img/default_application_icon.png\"\n }\n ]\n }\n\n ssm_param_name = 'slack_notification_webhook'\n ssm = boto3.client('ssm', config=CONFIG, region_name='eu-west-2')\n try:\n response = ssm.get_parameter(\n Name=ssm_param_name, WithDecryption=True)\n except ClientError as e:\n if e.response['Error']['Code'] == 'ParameterNotFound':\n LOGGER.info(\n 'Slack SSM parameter %s not found. No notification sent', ssm_param_name)\n return\n else:\n logging.error(\n \"Unexpected error when attempting to get Slack webhook URL: %s\", e)\n return\n if 'Value' in response['Parameter']:\n url = response['Parameter']['Value']\n\n json_data = json.dumps(post)\n req = urllib.request.Request(\n url,\n data=json_data.encode('ascii'),\n headers={'Content-Type': 'application/json'})\n LOGGER.info('Sending notification to Slack')\n response = urllib.request.urlopen(req)\n\n else:\n LOGGER.info(\n 'Value for Slack SSM parameter %s not found. 
No notification sent', ssm_param_name)\n return\n\n except Exception as err:\n logging.error(\n 'The following error has occurred on line: %s',\n sys.exc_info()[2].tb_lineno)\n logging.error(str(err))", "def slack_me(msg):\n # sanitise.\n msg = unicodedata.normalize('NFKD',msg).encode('ascii','ignore').decode('ascii')\n msg = re.sub('[^\\w\\s\\-.,;?!@#()\\[\\]]','', msg)\n r = requests.post(url=os.environ['SLACK_WEBHOOK'],\n headers={'Content-type': 'application/json'},\n data=f\"{{'text': '{msg}'}}\")\n if r.status_code == 200 and r.content == b'ok':\n return True\n else:\n return False", "def send(self):\n payload = self.format_payload()\n\n # Makes sure that the required fields are provided before\n # sending the payload.\n if not self.webhook_url:\n print ('Error: Webhook URL is required.')\n\n elif not payload:\n print ('Error: Message payload cannot be empty.')\n\n else:\n try:\n request = requests.post(self.webhook_url,\n data=json.dumps(payload),\n headers={'Content-Type': 'application/json'})\n\n request.raise_for_status()\n\n except requests.exceptions.RequestException as error:\n print('Error: %s' % error)", "def _perform_http_request(\n self, *, body: Dict[str, any], headers: Dict[str, str]\n ) -> WebhookResponse:\n body = json.dumps(body)\n headers[\"Content-Type\"] = \"application/json;charset=utf-8\"\n\n if self.logger.level <= logging.DEBUG:\n self.logger.debug(\n f\"Sending a request - url: {self.url}, body: {body}, headers: {headers}\"\n )\n try:\n url = self.url\n opener: Optional[OpenerDirector] = None\n # for security (BAN-B310)\n if url.lower().startswith(\"http\"):\n req = Request(\n method=\"POST\", url=url, data=body.encode(\"utf-8\"), headers=headers\n )\n if self.proxy is not None:\n if isinstance(self.proxy, str):\n opener = urllib.request.build_opener(\n ProxyHandler({\"http\": self.proxy, \"https\": self.proxy}),\n HTTPSHandler(context=self.ssl),\n )\n else:\n raise SlackRequestError(\n f\"Invalid proxy detected: {self.proxy} must be a str value\"\n )\n else:\n raise SlackRequestError(f\"Invalid URL detected: {url}\")\n\n # NOTE: BAN-B310 is already checked above\n resp: Optional[HTTPResponse] = None\n if opener:\n resp = opener.open(req, timeout=self.timeout) # skipcq: BAN-B310\n else:\n resp = urlopen( # skipcq: BAN-B310\n req, context=self.ssl, timeout=self.timeout\n )\n charset: str = resp.headers.get_content_charset() or \"utf-8\"\n response_body: str = resp.read().decode(charset)\n resp = WebhookResponse(\n url=url,\n status_code=resp.status,\n body=response_body,\n headers=resp.headers,\n )\n _debug_log_response(self.logger, resp)\n return resp\n\n except HTTPError as e:\n # read the response body here\n charset = e.headers.get_content_charset() or \"utf-8\"\n body: str = e.read().decode(charset)\n resp = WebhookResponse(\n url=url,\n status_code=e.code,\n body=body,\n headers=e.headers,\n )\n if e.code == 429:\n # for backward-compatibility with WebClient (v.2.5.0 or older)\n resp.headers[\"Retry-After\"] = resp.headers[\"retry-after\"]\n _debug_log_response(self.logger, resp)\n return resp\n\n except Exception as err:\n self.logger.error(f\"Failed to send a request to Slack API server: {err}\")\n raise err", "def __notify_slack(self):\n\t\ttry:\n\t\t\tprint(\"[+] Sending Slack notifications...\")\n\t\t\tslack_http_headers = {\n\t\t\t\t'User-Agent': 'GitHubScrap',\n\t\t\t\t'Content-type': 'application/json',\n\t\t\t}\n\t\t\tslack_http_data = {}\n\t\t\tfor ix in range(0,len(self.final_results[\"results\"]),SLACK_CHUNK_SIZE):\n\t\t\t\tdata_to_send = 
\"\"\n\t\t\t\tchunk_results = self.final_results[\"results\"][ix:ix+SLACK_CHUNK_SIZE]\n\t\t\t\tfor url in chunk_results:\n\t\t\t\t\tdata_to_send += \"{} ({})\\n\".format(url[\"query\"], url[\"link\"])\n\n\t\t\t\tslack_http_data.update({\n\t\t\t\t\t'text': data_to_send,\n\t\t\t\t})\n\t\t\t\trequests.post(\n\t\t\t\t\tself.slack_webhook,\n\t\t\t\t\theaders = slack_http_headers,\n\t\t\t\t\tdata = json.dumps(slack_http_data),\n\t\t\t\t)\n\t\t\t\tsleep(SLACK_HTTP_DELAY)\n\n\t\texcept Exception as exception:\n\t\t\traise MsgException('Slack notifications could not be sent', exception)", "def slash_command():\n form_text = request.form[\"text\"]\n \n if len(form_text) > 0:\n data = {\n \"response_type\": \"in_channel\",\n \"text\": \"My response\",\n }\n else:\n \"\"\"\n If the user didn't type a message send a note that only\n they see about typing a message\n \"\"\"\n data = {\n \"response_type\": \"ephemeral\",\n \"text\": \"Error: No status message entered. Please try again.\",\n }\n\n \"\"\"\n Create the response object to send to Mattermost with the\n data object written as json, 200 status, and proper mimetype\n \"\"\"\n response = app.response_class(\n response=json.dumps(data),\n status=200,\n mimetype='application/json'\n )\n return response", "def test_bot_message():\n send_json_message_to_bot(request.get_json())\n return \"ok\"", "def flask_slack_test():\n _log('@channel: slack is working?')\n return 'slack test'", "def do_GET(self): # pylint: disable=invalid-name\n parsed_url = urlparse(self.path)\n parsed_query = parse_qs(parsed_url.query)\n\n helper.log_info(f'Incoming request from {self.client_address[0]} - {self.path}')\n\n # Strava webhook expects a reply with the hub.challenge parameter\n challenge = parsed_query['hub.challenge'][0]\n request_verify_token = parsed_query['hub.verify_token'][0]\n\n # Respond with hub.challenge parameter if verify_token is correct\n if request_verify_token == verify_token:\n self.write_response(200, {\"hub.challenge\": challenge})\n else:\n self.write_empty_response(400)", "async def send_response(\n self, response_url: Optional[str] = None, **kwargs: Optional[Any]\n ):\n req_args = dict(\n # contents of messenger[UserDict]\n **self,\n # any other API fields\n **kwargs,\n )\n\n api_url = response_url or self.response_url\n\n res = await self.client._request( # noqa\n http_verb=\"POST\", api_url=api_url, req_args=dict(json=req_args)\n )\n\n status = res[\"status_code\"]\n\n if status != 200:\n raise SlackApiError(\n message=\"Failed to send response_url: {}: status={}\".format(\n api_url, status\n ),\n response=res,\n )\n\n return True", "def send_slack_notification(url: str, title: str, message: str):\n\n content = {\n \"text\": f\"{title}\",\n \"blocks\": [\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": f\"{message}\",\n },\n }\n ],\n }\n\n response = requests.post(url, json=content)\n\n # Raise exception if response is not 200\n response.raise_for_status()", "def webhook_sender(url=WEBHOOK_URL):\n data = runner()\n print(json.dumps(data))\n try:\n r = requests.post(url,json=data)\n print(r)\n except requests.exceptions.RequestException as e:\n raise SystemExit(e)", "def get_slack_token(request):\n url = request.query_params.get('url', None)\n if url is None:\n raise ValidationError(\"No callback URL specified\")\n\n user_id = request.query_params.get('user', None)\n if user_id is None:\n raise ValidationError(\"No user specified on the URL\")\n\n academy = request.query_params.get('a', None)\n if academy is None:\n 
raise ValidationError(\"No academy specified on the URL\")\n\n url = base64.b64decode(url).decode(\"utf-8\")\n # Missing scopes!! admin.invites:write, identify\n scopes = (\"app_mentions:read\", \"channels:history\", \"channels:join\", \"channels:read\",\n \"chat:write\", \"chat:write.customize\", \"commands\", \"files:read\", \"files:write\",\n \"groups:history\", \"groups:read\", \"groups:write\", \"incoming-webhook\", \"team:read\",\n \"users:read\", \"users:read.email\", \"users.profile:read\", \"users:read\")\n\n query_string = f'a={academy}&url={url}&user={user_id}'.encode(\"utf-8\")\n payload = str(base64.urlsafe_b64encode(query_string), \"utf-8\")\n params = {\n \"client_id\": os.getenv('SLACK_CLIENT_ID', \"\"),\n \"redirect_uri\": os.getenv('SLACK_REDIRECT_URL', \"\")+\"?payload=\"+payload,\n \"scope\": \",\".join(scopes)\n }\n redirect = \"https://slack.com/oauth/v2/authorize?\"\n for key in params:\n redirect += f\"{key}={params[key]}&\"\n\n if settings.DEBUG:\n return HttpResponse(f\"Redirect to: <a href='{redirect}'>{redirect}</a>\")\n else:\n return HttpResponseRedirect(redirect_to=redirect)", "def send(data, webhook_url):\n dis_data = data\n url = webhook_url\n headers = {\"Content-Type\": \"application/json\"}\n discord_request = requests.post(url, data=json.dumps(dis_data), headers=headers)\n\n try:\n discord_request.raise_for_status()\n except requests.exceptions.HTTPError as err:\n print(err)\n else:\n print(\"Payload delivered successfully, code {}.\".format(discord_request.status_code))", "def _send_request(self, url, text=None, params=None):\n if params is not None:\n for k, v in params.items():\n params[k] = v.encode(\"utf-8\")\n else:\n params = {}\n\n params['email'] = self._username\n\n if self._password:\n params['pass'] = self._password\n\n if self._hash:\n params['hash'] = self._hash\n\n if text is not None:\n params['s'] = self._stripslashes(text)\n\n\n try:\n response = requests.post(url, data=params)\n except Exception as e:\n print(str(e))\n\n result = response.content.decode('utf-8')\n \n\n try:\n json_data = json.loads(result)\n except ValueError as e:\n print(str(e))\n\n if json_data['status'] == \"Success\":\n return json_data\n elif json_data['status'] == \"Failure\":\n if json_data['error'].startswith(\"Error Authenticating.\"):\n print(json_data['error'])\n else:\n print(json_data['error'])\n else:\n print(json_data)", "def slack(message):\n slack_hook = 'https://hooks.slack.com/services/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'\n headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}\n requests.post(slack_hook, json.dumps({'text': message}), headers=headers)", "def slackMessage(binState):\n log = logging.getLogger('iob')\n\n if binState:\n location = \"Out\"\n else:\n location = \"In\"\n \n url = \"https://hooks.slack.com/services/{}\"\n \n payload = {\"text\": \"Bin is: {}\".format(location)}\n\n headers = {\"Content-Type\": \"application/json\"}\n\n response = requests.request(\n \"POST\",\n url,\n data=json.dumps(payload),\n headers=headers\n )\n\n log.debug(response.text)\n return", "def send_request(url, user, passwd, payload):\n response = requests.post(url,\n data=json.dumps(payload),\n auth=(user, passwd),\n verify=False,\n timeout=30)\n\n if response.status_code != 200:\n print(\"Status code {}\".format(response.status_code))\n return ERR_STATUS_CODE\n\n try:\n print(json.dumps(response.json(), indent = 4, sort_keys=True))\n except ValueError:\n print(\"{}\".format(response.text))\n return ERR_WRONG_JSON\n\n return 
SUCCESS", "def slackbuild_webhook(req: Request):\n global config\n global slack\n global cloudbuild\n\n # slack submits a POST\n if req.method != \"POST\":\n return abort(405)\n\n # not a true request from slack\n verified, err = slack.verify_webhook(req)\n if not verified:\n print(err)\n return abort(403)\n\n body = Slack.parse_request(req)\n argv = Slack.parse_command(body)\n msg = \"\"\n\n output, success = Command.run(argv, cloudbuild, config)\n\n if output is None:\n if success:\n # intentionaly not responding with a slack message\n return ('', 200)\n else:\n return abort(500)\n elif Slack.is_interactive_message(body):\n msg = slack.render_interactive_message(body, success, output)\n else:\n color = Colors.SUCCESS if success else Colors.FAILURE\n msg = slack.render_message({\"result\": output, \"color\": color}, \"command.json\")\n\n msg = json.dumps(msg)\n print(msg)\n return Response(response=msg, content_type=\"application/json\")", "def call(self, api_method: str, **kwargs) -> SlackResponse:\n return self.client.api_call(api_method, **kwargs)", "def test_slackWH_send_badAuth(get_slackwebhook):\n s = get_slackwebhook\n s.url = 'https://hooks.slack.com/services/badAuthCreds'\n with pytest.raises(MessageSendError):\n s.send()", "def test_returns_200(self):\r\n c = Client()\r\n response = c.post('/sg/send', {\r\n 'username': 'foo',\r\n 'password': 'bar',\r\n 'from': 'from@from.com',\r\n 'to': 'to@to.com',\r\n 'subj': 'subj text',\r\n 'body': 'body text',\r\n })\r\n\r\n self.assertEqual(200,response.status_code)", "def test_connection(self):\n try:\n response = self.call(\"auth.test\")\n response.validate()\n except SlackApiError as e:\n return False, str(e)\n except Exception as e:\n return False, f\"Unknown error occurred while testing connection: {e}\"\n\n if isinstance(response.data, bytes):\n # If response data binary then return simple message\n return True, f\"Connection successfully tested (url: {response.api_url}).\"\n\n try:\n return True, json.dumps(response.data)\n except TypeError:\n return True, str(response)", "def send(self, request: Request, **requests_kwargs) -> Response:", "def _api_call(self, url, response_checker):\n self.request_compare(url)", "def post(self):\n send_slack_log('Entered /slack/get_msg')\n send_slack_log('Request info:')\n send_slack_log(str(request.form))\n msg_id = request.form['text']\n channel_id = request.form['channel_id']\n try:\n id = int(msg_id)\n except ValueError:\n return \"Invalid Msg ID: \" + str(msg_id)\n text = read_msg(id)\n formated_msg = slack_format_msg(text)\n response = send_json_to_slack_channel(formated_msg, channel_id)\n send_slack_log('Response info:')\n send_slack_log(response)\n return \"Msg \" + str(id) + \" fetched\"", "def call_slack(*args, **kwargs):\n no_exeception = kwargs.pop('no_exeception', False)\n\n try:\n slack = Slack(token=kwargs.pop('token', None))\n return slack(*args, **kwargs)\n except Exception as e:\n if no_exeception:\n return e.message\n else:\n raise", "def test_standup_send_invalid_channel (url, _pre_setup):\n\n token = _pre_setup[0]['token']\n\n standup_send_data = {\n 'token': token,\n 'channel_id': 99999,\n 'message': \"message\"\n }\n\n response = requests.post(url + \"standup/send\", json=standup_send_data)\n assert response.status_code == 400", "def _send_in_request(self):\n try:\n req_params = urllib.urlencode(self._params)\n except Exception as ex:\n raise ProxyError('Error signing request string') \n \n try:\n self.logger.debug('Send api request to: %s' % self._api_url)\n 
self.logger.debug('Request params: %s' % req_params)\n self.logger.debug('Request timeout: %s' % self._timeout)\n if len(self._params) > 0:\n f = urllib2.urlopen(self._api_url, req_params, self._timeout)\n response = f.read()\n self.logger.debug('Response length: %s' % len(response))\n f.close() \n return response\n else:\n return \"{'command':'ping', 'message':'ok'}\" \n except (urllib2.URLError) as ex:\n self._error = json.loads(ex.fp.readline()).values()\n raise ProxyResponseError()\n except (IOError) as ex:\n raise ProxyError(ex)", "async def test_receive_post_ok(self):\n await self.webhook_connection.connect()\n assert self.webhook_connection.is_connected is True\n payload = {\"hello\": \"world\"}\n call_task = self.loop.create_task(self.call_webhook(\"test_topic\", json=payload))\n envelope = await asyncio.wait_for(self.webhook_connection.receive(), timeout=10)\n\n assert envelope\n\n message = cast(HttpMessage, envelope.message)\n dialogue = self.skill_dialogues.update(message)\n assert dialogue is not None\n assert message.method.upper() == \"POST\"\n assert message.body.decode(\"utf-8\") == json.dumps(payload)\n await call_task", "async def matrix_webhook(request):\n LOGGER.debug(f\"Handling {request=}\")\n\n # healthcheck\n if request.rel_url.path == \"/health\":\n return utils.create_json_response(HTTPStatus.OK, \"OK\")\n\n data_b = await request.read()\n\n try:\n data = json.loads(data_b.decode())\n except json.decoder.JSONDecodeError:\n return utils.create_json_response(HTTPStatus.BAD_REQUEST, \"Invalid JSON\")\n\n # legacy naming\n if \"text\" in data and \"body\" not in data:\n data[\"body\"] = data[\"text\"]\n\n # allow key to be passed as a parameter\n if \"key\" in request.rel_url.query and \"key\" not in data:\n data[\"key\"] = request.rel_url.query[\"key\"]\n\n if \"formatter\" in request.rel_url.query:\n try:\n data = getattr(formatters, request.rel_url.query[\"formatter\"])(\n data,\n request.headers,\n )\n except AttributeError:\n return utils.create_json_response(\n HTTPStatus.BAD_REQUEST,\n \"Unknown formatter\",\n )\n\n if \"room_id\" in request.rel_url.query and \"room_id\" not in data:\n data[\"room_id\"] = request.rel_url.query[\"room_id\"]\n if \"room_id\" not in data:\n data[\"room_id\"] = request.path.lstrip(\"/\")\n\n # If we get a good SHA-256 HMAC digest,\n # we can consider that the sender has the right API key\n if \"digest\" in data:\n if data[\"digest\"] == HMAC(conf.API_KEY.encode(), data_b, \"sha256\").hexdigest():\n data[\"key\"] = conf.API_KEY\n else: # but if there is a wrong digest, an informative error should be provided\n return utils.create_json_response(\n HTTPStatus.UNAUTHORIZED,\n \"Invalid SHA-256 HMAC digest\",\n )\n\n missing = []\n for key in [\"body\", \"key\", \"room_id\"]:\n if key not in data or not data[key]:\n missing.append(key)\n if missing:\n return utils.create_json_response(\n HTTPStatus.BAD_REQUEST,\n f\"Missing {', '.join(missing)}\",\n )\n\n if data[\"key\"] != conf.API_KEY:\n return utils.create_json_response(HTTPStatus.UNAUTHORIZED, \"Invalid API key\")\n\n if \"formatted_body\" in data:\n formatted_body = data[\"formatted_body\"]\n else:\n formatted_body = markdown(str(data[\"body\"]), extensions=[\"extra\"])\n\n # try to join room first -> non none response means error\n resp = await utils.join_room(data[\"room_id\"])\n if resp is not None:\n return resp\n\n content = {\n \"msgtype\": \"m.text\",\n \"body\": data[\"body\"],\n \"format\": \"org.matrix.custom.html\",\n \"formatted_body\": formatted_body,\n }\n return 
await utils.send_room_message(data[\"room_id\"], content)", "def create_request(self):\n try:\n stock_name, request = self.text.split(\" \")\n print(stock_name)\n print(request)\n if request in run_commands.keys():\n endpoint: str = run_commands.get(request).get('endpoint')\n url_maker: str = f\"stocks/{stock_name}{endpoint}\"\n print(url_maker)\n response: Optional[str, Dict[str, str]] = self.make_request(method=\"GET\", endpoint=url_maker)\n if isinstance(response, dict):\n print(response)\n api_response: Dict[str, str] = slack_response.stock_info(stock_name, response.get(\"message\"))\n print(api_response)\n return api_response\n else:\n return \"release in progress\"\n except ValueError as e:\n return slack_response.help_response()\n except KeyError as e:\n return slack_response.help_response()", "def test_chat_send_message(self):\n body = SendMessageRequest()\n response = self.client.open(\n '/api/chat/send_message',\n method='POST',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def send_to_slack(message):\n\n if debug:\n print(message)\n return True\n else:\n slack_data = {'text': message, 'channel': slack_channel, \"username\": slack_username,\n \"icon_emoji\": slack_icon_emoji}\n\n response = requests.post(\n slack_webhook_url, data=json.dumps(slack_data),\n headers={'Content-Type': 'application/json'},\n proxies=proxyDict\n )\n if response.status_code != 200:\n raise ValueError(\n 'Request to slack returned an error %s, the response is:\\n%s'\n % (response.status_code, response.text)\n )", "def test_chat_poll_messages(self):\n response = self.client.open(\n '/api/chat/messages',\n method='POST')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def notify_via_slack(webhook_url, msg):\n slack_data = {\"text\": msg}\n post(webhook_url, json=slack_data)", "def send_slack_message(hook, summary, cmd, label=None):\n py_version = '%d.%d' % (sys.version_info.major, sys.version_info.minor)\n label_sec = \" of _%s_\" % label if label else ''\n message = \"Nosetests%s failed in *Python %s* when running:\\n`%s`\"\\\n % (label_sec, py_version, cmd)\n data_json = {'text': message,\n 'attachments': [{'color': 'danger',\n 'text': '```%s```' % summary,\n 'mrkdwn_in': ['text', 'pretext']}]}\n resp = requests.post(hook, data=json.dumps(data_json),\n headers={'Content-type': 'application/json'})\n if resp.status_code is not 200:\n print(\"Message failed to send.\")\n print(resp.reason)\n return", "def call_chatbot():\n\t# prase the given data\n\tdata = flask.json.loads(flask.request.data.decode('utf-8'))\n\tuse_tts, text, current_id= data[\"useTTS\"], data[\"text\"], data[\"id\"]\n\tapp.logger.info(\"User Query: \"+ text)\n\t\n\tresult = []\n\t# call rasa: some requests get lost, loop till you get a response\n\twhile(True):\n\t\ttry:\n\t\t\t# rasa rest API\n\t\t\turl = \"http://localhost:5005/webhooks/rest/webhook\"\n\t\t\t# rasa request must have a \"sender\" and \"message\" keys\n\t\t\tres = requests.post(\n\t\t\t\t\turl=url,\n\t\t\t\t\tdata=flask.json.dumps({\"sender\": \"Rasa\", \"message\": text}),\n\t\t\t\t\ttimeout=5).json()\n\t\texcept:\n\t\t\t# mimic the rasa response when something wrong happens\n\t\t\tres = [{'recipient_id': 'Rasa',\n\t\t\t\t\t'text': \"[SOMETHING WENT WRONG!!]\"}]\n\t\tif res: break\n\tapp.logger.info(\"Rasa Response:\")\n\tapp.logger.info(\"\\t\"+ str(res))\n\tfor item in res:\n\t\td = {}\n\t\tcurrent_id += 1\n\t\td[\"id\"] = 
current_id\n\t\td[\"type\"] = \"text\" if \"text\" in item.keys() else \"image\"\n\t\td[\"body\"] = item[d[\"type\"]]\n\t\tif use_tts and d[\"type\"] == \"text\":\n\t\t\ttic = time.time()\n\t\t\twav, sr = tts_model.synthesize(d[\"body\"])\n\t\t\tduration = len(wav) / sr\n\t\t\td[\"snd\"] = {\n\t\t\t\t\"audio\": wav.tolist(),\n\t\t\t\t\"sample_rate\": sr,\n\t\t\t}\n\t\t\ttoc = time.time()\n\t\t\tapp.logger.info( \"TTS Duration: {} seconds\".format(toc-tic) )\n\t\tresult.append(d)\n\n\t# get back the result\n\tflask_response = app.response_class(response=flask.json.dumps(result),\n\t\t\t\t\t\t\t\t\t\tstatus=200,\n\t\t\t\t\t\t\t\t\t\tmimetype='application/json' )\n\treturn flask_response", "def webhook():\n if request.method == 'POST':\n data = request.get_json(force=True)\n # For logging and debugging, print incoming requests\n print(request, '\\n', data)\n # The two LNbits extensions used return data in\n # different formats. This try-except handles both.\n try:\n sats = int(data['amount'] / 1000)\n comment = data['comment']\n except KeyError:\n sats = int(data['amount'])\n comment = data['description']\n if not comment:\n comment = \"No message!\"\n amount = convert_to_fiat(sats, 'usd')\n url = \"https://streamlabs.com/api/v1.0/donations\"\n data = {\n \"name\": \"bitcoin\",\n \"message\": f\"{str(sats)} sats: {comment}\",\n \"identifier\": \"bitcoin_donos\",\n \"amount\": amount,\n \"currency\": fiat.upper(),\n \"access_token\": access_token,\n }\n response = requests.post(url, data=data)\n # For logging/debugging purposes\n print(response.json())\n return \"Success!\", 200\n else:\n abort(400)", "def check(url, body_check_re=r'', timeout=10.0, producer=None):\n timestamp = time.time()\n code = 0\n body = None\n response_time = timeout\n\n try:\n response = requests.get(\n url,\n timeout=timeout,\n allow_redirects=False,\n headers={\n 'UserAgent': 'Aiven health checker/0.1'\n }\n )\n except requests.Timeout:\n code = 499 # client closed request\n except requests.RequestException:\n pass\n else:\n code = response.status_code\n response_time = response.elapsed.total_seconds()\n body = response.content\n\n body_check_valid = check_body(body, body_check_re)\n\n message = {\n 'timestamp': timestamp,\n 'code': code,\n 'body_check_valid': body_check_valid,\n 'time': response_time,\n 'url': url,\n }\n if producer:\n producer.send(message)\n return message", "def slackbot(self, *args, **kwargs):\n while True:\n self.slack.api_connect()", "def send_request(self, request):\n json_results = requests.get(request).json()\n\n status = json_results['status']\n\n if status == const.STATUS_OK:\n return json_results['results']\n\n self.log.warning(self.get_status_code(status))", "async def test_send(self):\n await self.webhook_connection.connect()\n assert self.webhook_connection.is_connected is True\n\n http_message = HttpMessage(\n dialogue_reference=(\"\", \"\"),\n target=0,\n message_id=1,\n performative=HttpMessage.Performative.REQUEST,\n method=\"get\",\n url=\"/\",\n headers=\"\",\n body=\"\",\n version=\"\",\n )\n envelope = Envelope(\n to=\"addr\",\n sender=\"my_id\",\n message=http_message,\n )\n with patch.object(self.webhook_connection.logger, \"warning\") as mock_logger:\n await self.webhook_connection.send(envelope)\n await asyncio.sleep(0.01)\n mock_logger.assert_any_call(\n RegexComparator(\n \"Dropping envelope=.* as sending via the webhook is not possible!\"\n )\n )", "def slackbot_alert():\n\n func_name = inspect.stack()[0][3]\n logging.info('[{}] -> Starting Job'.format(func_name))\n if 
slack_client.rtm_connect(with_team_state=False):\n\n # load the plant definition file\n with open(PLANT_DEF, 'r') as src:\n plant_def = json.load(src)\n\n # iterate through plants\n for p in plant_def['plants']:\n logging.info('[{}] -> Checking {}'.format(func_name, p['name']))\n\n # query latest plant information\n data = latest_data(p['name'], num=1)[0]\n\n # logic based on moisture\n if data['moisture'] < p['min_moisture']:\n logging.info('[{}] -> Need to water {} [{}%]!!!'.format(func_name, p['name'], data['moisture']))\n\n # get url for search term for slack\n url = giphy_grabber('water')\n\n message = '\\n:potable_water: *{}* needs water!:potable_water:'.format(p['name'])\n message += '\\n\\n*Moisture* = {} %'.format(data['moisture'])\n message += '\\n*Temperature* = {} °C'.format(data['temperature'])\n message += '\\n*Light* = {} lux'.format(data['light'])\n message += '\\n*Conductivity* = {} uS/cm'.format(data['conductivity'])\n message += '\\n\\n{}'.format(url)\n\n # post message\n resp = slack_client.api_call(\"chat.postMessage\", text=message, channel=CHANNEL)\n\n for e in EMOJI_LIST:\n slack_client.api_call(\"reactions.add\", name=e, timestamp=resp['ts'], channel=resp['channel'])\n else:\n logging.info('[{}] -> Healthy moisture ({} %)!'.format(func_name, data['moisture']))\n else:\n logging.info('[{}] -> Connection failed :('.format(func_name))", "def send_to_checking_server(enquiry: str) -> str:\n method = \"АМОЖНА? РКСОК/1.0\"\n try:\n conn = socket.create_connection((\"vragi-vezde.to.digital\", 51624))\n request = f\"{method}\\r\\n {enquiry}\\r\\n\\r\\n\".encode()\n conn.send(request)\n official_response = conn.recv(1024).decode()\n if parse_response_check_server(official_response):\n return True\n else:\n return official_response\n\n except:\n print(\"Партия занята и не может ответить на твои глупые запросы\")", "def verify_slack_token(request_token):\n encrypted_token = os.environ['kmsEncryptedToken']\n\n if encrypted_token == 'local':\n return True\n\n kms = boto3.client('kms')\n expected_token = kms.decrypt(CiphertextBlob=b64decode(encrypted_token))['Plaintext']\n\n return request_token == expected_token", "def __send_msg_by_webex__(self, text):\n\n print(\"NotifyManager __send_msg_by_webex__ enters\")\n\n # http header\n http_header = {\"Content-type\": \"application/json\",\n \"Authorization\": \"Bearer \" + self.__webex_token__}\n\n # http body\n body = {\"roomId\": self.__webex_space__,\n \"text\": text}\n\n try:\n response = requests.post(\n url=self.__webex_url__,\n json=body,\n headers=http_header)\n print(response.status_code)\n except Exception as e:\n print(\"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\")\n print(\"Error! 
Exception is found to post message over webex teams\")\n print(\"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\")\n print(e)\n return\n\n if response.status_code == 200:\n pass\n elif response.status_code == 401:\n print(\"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\")\n print(\"Webex Team Token Expired\")\n print(\"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\")\n else:\n print(\"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\")\n print(\"Fail to send message=%s\" % text)\n print(\"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\")\n print(response.status_code)\n print(response.text)\n\n return", "def ping_response():\n\n return Response(\"ok\", status=200)", "def respond(response, callback_url):\n \n # Basecamp requires the message to be 'content=stuff'\n requestbody = \"\".join([\"content=\", response])\n \n r = requests.post(callback_url, data = requestbody.encode('utf-8'))\n \n #TODO: Maybe replace this with a Try: statement?\n if r.status_code == 200:\n result=True\n else:\n result=False\n \n return result", "def ping(request):\r\n rdict = request.matchdict\r\n params = request.params\r\n username = rdict.get('username', None)\r\n api_key = params.get('api_key', None)\r\n user = UserMgr.get(username=username)\r\n # Check if user provided the correct api_key\r\n if api_key == user.api_key:\r\n return _api_response(request, {\r\n 'success': True,\r\n 'message': 'Looks good'\r\n })\r\n else:\r\n return _api_response(request, {\r\n 'success': False,\r\n 'message': 'API key is invalid.'\r\n })", "def on_bot_message():\n handle_bot_message(request.get_json())\n return \"ok\"", "def request(parameters):\n resp = requests.post(WEBHOOK_URL, json.dumps(parameters),\n headers={\"Content-Type\": \"application/json\", \"Accept\": \"application/json\"})\n print(parameters, resp)", "def get(self, request, *args, **kwargs):\n token = request.GET.get('hub.verify_token')\n challenge = request.GET.get('hub.challenge')\n if token == VERIFY_TOKEN:\n if challenge:\n return HttpResponse(challenge)\n else:\n HttpResponse('Error, invalid challenge')\n else:\n return HttpResponse('Error, invalid token')", "def sentiment():\n\n request_json = request.json\n power = request_json['power']\n angle = request_json['angle']\n\n print(power, angle)\n\n resp_dict = dict()\n resp_dict['kick'] = 'ok'\n\n resp = Response(json.dumps(resp_dict), status=200)\n\n return resp", "def valid_connection():\n sms = YesssSMS.YesssSMS(\"\", \"\", provider=\"yesss\")\n with requests_mock.Mocker() as m:\n m.register_uri(\n \"POST\",\n # pylint: disable=protected-access\n sms._login_url,\n status_code=302,\n # pylint: disable=protected-access\n headers={\"location\": sms._kontomanager},\n )\n m.register_uri(\n \"GET\",\n # pylint: disable=protected-access\n sms._kontomanager,\n status_code=200,\n text=\"test...\" + LOGIN + \"</a>\",\n )\n m.register_uri(\n \"GET\",\n # pylint: disable=protected-access\n sms._sms_form_url,\n status_code=200,\n text=TEST_FORM_TOKEN_SAMPLE,\n )\n m.register_uri(\n \"POST\",\n # pylint: disable=protected-access\n sms._send_sms_url,\n status_code=200,\n text=\"<h1>Ihre SMS wurde erfolgreich \" + \"verschickt!</h1>\",\n )\n m.register_uri(\n \"GET\",\n # pylint: disable=protected-access\n sms._logout_url,\n status_code=200,\n )\n yield", "def api_sendessage():\n message = request.args.get('message')\n send_lyrics(message)\n # stash_lyrics(message)\n return jsonify(status=\"success\")", "def health_check(request):\n return Response(\"OK\",\n status=status.HTTP_200_OK)", "async def test_url(\n self,\n resp: 
httpx.Response,\n data: Dict[str, Any],\n query: Dict[str, Any], # pylint: disable=unused-argument\n ):\n code: str = \"\"\n if resp.url.path == \"/void/callback\":\n code = resp.url.query.get(\"code\")\n if resp.url.path == \"/static/404.html\":\n code = URL(str(resp.history[-1].url)).query.get(\"code\")\n if code:\n username = data.get(\"identity\")\n self._callback_url = self.init_query.get(\"callback_url\")\n self.waf_retry = 0\n _LOGGER.debug(\"Success! Oauth code %s for %s captured.\", code, username)\n await self.session.aclose()\n # 302 redirect\n return URL(self._callback_url).update_query(\n {\"code\": code, \"username\": username, \"domain\": self._host_url.host}\n )\n if get_content_type(resp) == \"text/html\":\n text = resp.text\n if \"<noscript>Please enable JavaScript to view the page content.\" in text:\n _LOGGER.debug(\"WAF discovered %s times in a row.\", self.waf_retry)\n self.waf_retry += 1\n return return_timer_countdown_refresh_html(\n max(30 * (self.waf_retry - self.waf_limit), 120)\n if self.waf_retry > self.waf_limit\n else random.random() * self.waf_retry + 10,\n f\"Detected Tesla web application firewall block #{self.waf_retry}. \"\n f\"Please wait and then reload the page or wait for the auto reload.\",\n False,\n )\n self.waf_retry = 0\n if get_content_type(resp) == \"application/json\":\n text = orjson.loads(resp.text) # pylint: disable=no-member\n _LOGGER.debug(\"Json response: %s\", text)", "def webhook():\n\n json_request = request.get_json(silent=True, force=True)\n\n\n # Extract the data from the json-request (first get the result section of the json)\n result = json_request.get(\"result\")\n\n # Then get the parameters and action_name from the result\n parameters = result.get(\"parameters\")\n\n # Get the action name\n action_name = result.get(\"action\")\n\n facebook_id = json_request.get(\"originalRequest\").get(\"data\").get(\"sender\").get(\"id\")\n\n # Retreives the username by looking up with the unique facebook id\n username = None\n try:\n username = DatabaseConnector.get_values(\"Select username from user where facebook_id = \\\"\" + facebook_id + \"\\\"\")[0][0]\n except:\n username = None\n\n\n # Retrieve the course code\n course_code = parameters.get(\"course_code\")\n parameter = [username, course_code, facebook_id]\n\n # Creates the string that should be sent back to the user\n print(action_name, facebook_id, course_code, parameter[0], parameter[1])\n speech = process_actions(parameter, action_name)\n\n # Create a response to API.AI and return it\n response = json.dumps(speech, indent=4)\n created_response = make_response(response)\n created_response.headers['Content-Type'] = 'application/json'\n\n return created_response", "def health_check():\n app.logger.info(\"Health Check!\")\n return Response(\"All Good!\", status=200)", "def user_should_get_an_ok_response():\n assert web_app.validate_reponse()", "def api_github_message():\n if request.headers['Content-Type'] == 'application/json':\n print('inside server ')\n my_info = json.dumps(request.json)\n payload = json.loads(my_info)\n if not payload['action'] == 'closed':\n model = StoreModel().loadData()\n tdf = TestData()\n tdf1 = TestData1()\n parameter_dict = tdf.fetcher(my_info)\n extension_file = tdf1.file_fetcher(my_info)\n feature_dict = parameter_dict['feature_dict']\n comment_url = parameter_dict['comment_url']\n comment_body = tdf.test_feeder(feature_dict, model)\n file_comment_body = tdf1.file_test_feeder(extension_file[0], extension_file[1])\n Comment.post_comment(comment_url, 
comment_body)\n Comment.post_comment(comment_url, str(file_comment_body))\n app.logger.info(comment_body)\n prediction_response = json.dumps({\"state\": comment_body})\n app.logger.info(comment_body)\n res = Response(prediction_response, status=200, mimetype='application.json')\n return res\n prediction_response = json.dumps({\"state\": \"closed pull request\"})\n app.logger.info(\"closed pull request\")\n res = Response(prediction_response, status=200, mimetype='application.json')\n return res", "def respond():\n\n # Retrieve message in JSON and transform it to Telegram object\n update = telegram.Update.de_json(request.get_json(force=True), bot)\n chat_id = update.effective_message.chat.id\n msg_id = update.effective_message.message_id\n\n # UTF-8 formatting\n text = update.message.text.encode('utf-8').decode()\n # for debugging\n print(\"Got text message: \", text)\n\n ### Slash Command\n # Welcome message / Start Message\n if text == \"/start\":\n bot_welcome = \"\"\"\n Hello. This is rSlashBot. Give me a subreddit from which you wish to receive updates from.\n \"\"\"\n bot.sendMessage(chat_id=chat_id, text=bot_welcome,\n reply_to_message_id=msg_id)\n\n else:\n try:\n # Clear non-alphabets from message\n #text = re.sub(r\"\\W\", \"_\", text)\n # create the api link for the avatar based on http://avatars.adorable.io/\n if text[0:2] == \"r/\":\n text = text[2:]\n bot.sendMessage(chat_id=chat_id,\n text=\"Hold a sec. Fetching from r/{}\".format(text))\n url = \"https://www.reddit.com/r/{}.json\".format(text)\n r = requests.get(\n url, headers={'User-agent': 'rSlashBot'}, allow_redirects=True)\n with open('reddit.json', 'wb') as f:\n f.write(r.content)\n with open('reddit.json', 'r') as f:\n l = json.load(f)\n\n i = 0\n while l[\"data\"][\"children\"][i][\"data\"][\"stickied\"] == True:\n i += 1\n\n # Sends text content\n title = l[\"data\"][\"children\"][i][\"data\"][\"title\"]\n content = l[\"data\"][\"children\"][i][\"data\"][\"selftext\"]\n msg_text = \"*\"+title+\"*\"+\"\\n\\n\"+content\n if len(msg_text) > 4096:\n for x in range(0, len(msg_text), 4096):\n if x == 0:\n bot.sendMessage(\n chat_id=chat_id, text=msg_text[x:x+4096], parse_mode='Markdown', reply_to_message_id=msg_id)\n else:\n bot.sendMessage(\n chat_id=chat_id, text=msg_text[x:x+4096], parse_mode='Markdown')\n else:\n bot.sendMessage(chat_id=chat_id, text=msg_text,\n parse_mode='Markdown', reply_to_message_id=msg_id)\n\n # For sending images or gifs\n latest = l[\"data\"][\"children\"][i][\"data\"]\n format = latest[\"url\"][-3:]\n formats = ['jpg', 'png']\n if format in formats:\n bot.sendPhoto(\n chat_id=chat_id, photo=latest[\"url\"])\n elif format == 'gif':\n bot.sendAnimation(chat_id=chat_id, animation=latest[\"url\"])\n\n bot.sendMessage(chat_id, \"http://www.reddit.com\" +\n latest[\"permalink\"])\n\n except Exception as e:\n # if things went wrong\n bot.sendMessage(\n chat_id=chat_id, text=\"I am sorry. 
There was an error fetching from that subreddit.\", reply_to_message_id=msg_id)\n print(e)\n\n return 'ok'", "def test_default_success(self) -> None:\n\n channel = self.make_request(\n \"GET\",\n self.url,\n access_token=self.admin_user_tok,\n )\n\n self.assertEqual(200, channel.code, msg=channel.json_body)\n self._check_fields(channel.json_body)", "def test_message_send(url):\n test_clear(url)\n admin_tk = channel_user_create_0(url)[0]\n \n test_channels = {\n 'token': admin_tk,\n 'name': 'channel_1',\n 'is_public': True,\n }\n ch_id_resp = requests.post(url + \"channels/create\", json=test_channels)\n ch_id = ch_id_resp.json()\n\n resp = requests.get(url + \"channel/messages\", params={\n 'token': admin_tk,\n 'channel_id': 1,\n 'start': 0 \n })\n channel_msgs_resp = resp.json()\n assert channel_msgs_resp == {\n 'messages': [], \n 'start': 0, \n 'end': 50,\n }\n \n test_message = {\n 'token': admin_tk,\n 'channel_id': ch_id['channel_id'],\n 'message': 'Hello'\n }\n resp = requests.post(url + \"message/send\", json=test_message)\n message_send_resp = resp.json()\n assert message_send_resp['message_id'] == 1\n \n resp = requests.get(url + \"channel/messages\", params={\n 'token': admin_tk,\n 'channel_id': 1,\n 'start': 0, \n })\n channel_msgs_resp = resp.json()\n assert channel_msgs_resp['messages'][0]['message_id'] == 1\n assert channel_msgs_resp['messages'][0]['u_id'] == 1\n assert channel_msgs_resp['messages'][0]['message'] == 'Hello'", "def healthcheck():\n return make_response(jsonify(status=200, message='Healthy'), status.HTTP_200_OK)", "def health_check():\n # TODO: implement any other checking logic.\n return '', 200", "def send_validation_request(self):\r\n self.send_request(send_function=self._assemble_and_send_validation_request)", "def response():\n\n granpy = GrandPy(request.form[\"text\"])\n return granpy.bot_response", "async def call_webhook(self, topic: str, **kwargs) -> ClientResponse:\n path = self.path.format(topic=topic)\n method = kwargs.get(\"method\", \"post\")\n url = f\"http://{self.host}:{self.port}{path}\"\n\n try:\n async with aiohttp.ClientSession() as session:\n async with session.request(method, url, **kwargs) as resp:\n await resp.read()\n return resp\n except Exception:\n print_exc()\n raise", "def validate_response(response: json):\n if \"error\" in response:\n print(\"ERROR: Request returned error\")\n print_request_response(response)\n exit(1)", "def slack(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"slack\")", "def command_webhook(request):\n print(json.dumps(request.POST.copy(), indent=2))\n\n return JsonResponse({\"text\": \"ChangeTip services have been discontinued. 
See https://www.reddit.com/r/changetip/comments/5dn3rc/changetip_shutting_down/ Please close your account and disconnect ChangeTip from Slack.\"})\n\n if request.POST.get(\"noop\"):\n return JsonResponse({\"text\": \"Hi!\"})\n\n # Separated so we can still support the legacy webhook integration\n if 'command' in request.POST.keys():\n return slash_command(request)\n else:\n return outgoing_webhook(request)", "def send(msg):\n r = \"No response\"\n try:\n r = requests.post(\"http://127.0.0.1:8000\", data=msg)\n except:\n pass\n #print(r.status_code, r.reason)\n print(r.text[:300] + '...')", "def post(self):\n send_slack_log('Entered /slack/update_msg')\n send_slack_log('Request info:')\n send_slack_log(str(request.form))\n trigger_id = request.form['trigger_id']\n channel_id = request.form['channel_id']\n response = open_form(channel_id,\n trigger_id,\n config['slack_update_form_path'])\n send_slack_log('Response info:')\n send_slack_log(str(response))\n return 'Please enter the updated msg information in the form'", "def check_response(response):\n status = response.get('status')\n ret = status and status == 'OK'\n if not ret:\n logging.error('Received unexpected failure response from polyswarmd: %s', response)\n return ret", "def rhevPost(url,data):\n conn = rhevConnect()\n conn.request(\"POST\", url, body = data.encode('utf-8'), headers = getHeaders())\n print url\n r = conn.getresponse()\n ## DEBUG \n ## TODO: check status \n status = r.read()\n print r.status,r.reason\n if int(r.status)>=400:\n print r.reason\n print status\n return status\n ## return r.read()", "def postea(channel, response):\n slack_client.api_call('chat.postMessage', channel=channel,\n text=response, as_user=True)", "def get_slackpost():\n return SlackPost(\n channel='#tester',\n subject='[Messages] Integration Test',\n body='Conducting Integration Testing',\n profile='integration_tester',\n attachments='https://imgs.xkcd.com/comics/python.png',\n save=False)", "def test_valid_webhook(self, mock_send):\n send_notification(\"valid_webhook\", self.message)\n mock_send.assert_called()", "def index():\n # curl -k -X POST https://127.0.0.1:43210/api/v1.0 -H 'content-type: application/json' -d '{\"data\": \"exhaust\"}'\n return jsonify({'meta': {'success': True, 'code': 200}, 'result': {\"message\": request.get_json()}}), 200", "def webhook():\n if request.headers.get('content-type') == 'application/json':\n\n json_string = request.get_data().decode('utf-8')\n update = Update.de_json(json_string)\n bot.process_new_updates([update])\n return ''\n\n else:\n abort(403)", "def thanks(request):\n # Let's grab that temporary authorization code Slack's sent us from\n # the request's parameters.\n code_arg = request.GET['code']\n # The bot's auth method to handles exchanging the code for an OAuth token\n pyBot.auth(code_arg)\n return render(request, \"thanks.html\")", "async def callback(request):\n\tsignature = request.headers['X-Line-Signature']\n\tbody = await request.text()\n\ttry:\n\t\tevents = line_parser.parse(body, signature)\n\t\tfor event in events:\n\t\t\tawait _line_to_discord(event)\n\texcept LineBotApiError as e:\n\t\tprint(\"got exception from LINE Message API: {0}\".format(e.message))\n\t\tfor m in e.error.details:\n\t\t\tprint(\" {0} : {1}\".format(m.property, m.message))\n\t\treturn web.Response(status=200)\n\texcept InvalidSignatureError:\n\t\treturn web.Response(status=400)\n\n\treturn web.Response()", "def incoming_sms():\n txt = request.form['Body']\n\n # remove leading and trailing white space and make 
lowercase\n txt = txt.strip()\n txt = txt.lower()\n\n # handle random searches differently than breed searches\n if txt == 'random' or txt == 'dog':\n url = get_dogs.get_random_dog()\n else:\n url = get_dogs.request_breed(txt)\n \n resp = MessagingResponse()\n if url:\n resp.message(url)\n else:\n resp.message(\"Sorry! We couldn't find a dog matching that query. Please try \\\n a more general search term.\")\n return str(resp)", "def check_action_status(payload):\n response = requests.post(url, data=payload)\n return response.json()", "def verify(self, response):", "def request_command(tv_command: TVCommand, data_dict=None):\n\n if tv_command is None:\n return\n\n url = command_url(tv_command)\n logger.debug('url: {}, data_dict: {}'.format(url, data_dict))\n\n # in call to requests.post, supplying json= automatically sets content type\n # headers = {'Content-Type': 'application/json'}\n\n # https://stackoverflow.com/questions/16511337/correct-way-to-try-except-using-python-requests-module\n try:\n # https://stackoverflow.com/questions/20001229/how-to-get-posted-json-in-flask\n response = requests.post(url, json=data_dict)\n # https://2.python-requests.org//en/latest/api/#requests.Response.raise_for_status\n response.raise_for_status()\n except requests.exceptions.RequestException as error:\n logger.debug(error)\n return\n\n if response.status_code != 200:\n # http error\n logger.debug('status_code should be 200, but is {}'.format(str(response.status_code)))\n return\n\n try:\n # response_dict is a python dictionary\n response_dict = response.json()\n # logger.debug('response_dict: {}'.format(response_dict))\n # 2019-04-22 17:25:41 DEBUG request_command line:94 response_dict:\n # {'api_name': 'tv', 'response': 'transmitted command mute', 'version': '1.0'}\n\n except ValueError:\n logger.debug('Could not convert response json')\n return\n\n # convert json dictionary to string\n response_json_string = json.dumps(response_dict)\n logger.debug('response_json_string: {}'.format(response_json_string))\n # 2019-04-22 17:25:41 DEBUG request_command line:101 response_json_string:\n # {\"api_name\": \"tv\", \"response\": \"transmitted command mute\", \"version\": \"1.0\"}", "async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:\n client = MatterClient(data[CONF_URL], aiohttp_client.async_get_clientsession(hass))\n await client.connect()" ]
[ "0.67223775", "0.66398156", "0.6534951", "0.6358252", "0.6333655", "0.62352383", "0.6171377", "0.6148408", "0.6085096", "0.6081135", "0.60650325", "0.6040874", "0.6038498", "0.60302174", "0.59917426", "0.59649956", "0.59637076", "0.5943963", "0.59014314", "0.58998", "0.5889386", "0.5874803", "0.5823254", "0.57881576", "0.5778226", "0.57659745", "0.5765907", "0.5757712", "0.57409817", "0.573228", "0.5718245", "0.56783", "0.5668476", "0.5643535", "0.56348914", "0.56112623", "0.5590938", "0.556975", "0.5562231", "0.55577695", "0.55566525", "0.5542054", "0.55408055", "0.55340916", "0.5512237", "0.5511268", "0.55037534", "0.5497976", "0.5486176", "0.5485792", "0.54810035", "0.5469051", "0.5455828", "0.5451973", "0.54417294", "0.54299664", "0.5418375", "0.54161173", "0.5410429", "0.54067856", "0.5403243", "0.538713", "0.5382026", "0.53809214", "0.5367336", "0.53566", "0.5352286", "0.53465664", "0.53394365", "0.5325731", "0.5323781", "0.53077155", "0.5301321", "0.52984685", "0.5297417", "0.5290551", "0.5285617", "0.52853", "0.5283861", "0.52771026", "0.5274187", "0.5272734", "0.5271768", "0.52683085", "0.5267042", "0.5253616", "0.5249703", "0.52480793", "0.524539", "0.52431476", "0.5232407", "0.522465", "0.52224505", "0.5221159", "0.5213759", "0.52103883", "0.5209894", "0.52094924", "0.52039546", "0.52016" ]
0.72009796
0
Create a report about stale branches for a list of repositories.
def check_stale_branches(event: dict, context) -> dict:
    """Create a report about stale branches for a list of repositories."""
    ssm_parameters = load_params('dev_tools', 'dev')
    if ssm_parameters.get('jira_statuses_for_task_completion'):
        jira_statuses_for_task_completion = ssm_parameters['jira_statuses_for_task_completion']
    else:
        jira_statuses_for_task_completion = ('Resolved', 'Closed')
    repository_names = ssm_parameters['github_repository_names']
    github_repository_names = repository_names.split(',')

    # Jira authentication setup
    jira_oauth_dict = {
        'access_token': ssm_parameters['jira_access_token'],
        'access_token_secret': ssm_parameters['jira_access_token_secret'],
        'consumer_key': ssm_parameters['jira_consumer_key'],
        'key_cert': ssm_parameters['jira_private_key']
    }
    auth_jira = JIRA(ssm_parameters['jira_url'], oauth=jira_oauth_dict)

    # Github authentication setup
    g = Github(ssm_parameters['github_access_token'])

    # Look for stale branches in all the specified repos
    total_stale_branches = 0
    general_report = ''
    author_count = defaultdict(int)
    for repo_name in github_repository_names:
        logger.debug(f'\nChecking repo: {repo_name}')
        try:
            repo = g.get_repo(f"{ssm_parameters['github_account']}/{repo_name}")
        except GithubException:
            logger.error(f"Github repository '{ssm_parameters['github_account']}/{repo_name}' not found!")
            continue

        repo_report = ''

        # confirm the name for the main develop branch; fall back to master
        # instead of skipping the repo when develop does not exist
        main_develop_branch = 'develop'
        try:
            _ = repo.get_branch('develop')
        except GithubException:
            main_develop_branch = 'master'
            logger.debug('Develop branch not found, using master as the main develop branch.')

        branches = repo.get_branches()
        for branch in branches:
            # only check feature and hotfix branches
            if not branch.name.startswith('feature/') and not branch.name.startswith('hotfix/'):
                continue

            # compare the branch against the main develop branch; skip the
            # branch on failure so `comparison` is never used undefined
            try:
                comparison = repo.compare(main_develop_branch, branch.name)
            except GithubException as error:
                logger.error(f'GithubException: Error while trying to compare {main_develop_branch} and {branch.name}.')
                logger.error(f'GithubException: {error}.')
                continue

            if comparison.behind_by == 0:
                # the branch is up to date, nothing to do
                continue

            # try to get the jira ticket number from the branch name;
            # reset both per branch so stale values never leak between iterations
            ticket = None
            issue = None
            result = re.search(r'feature/(?P<ticket>[a-zA-Z]+-[0-9]+).*', branch.name)
            if result:
                ticket = result.groupdict()['ticket'].upper()
                try:
                    issue = auth_jira.issue(ticket)
                except jira_exceptions.JIRAError:
                    logger.debug(f"The ticket {ticket} specified in the branch name doesn't exist in Jira.")

            if issue and issue.fields.status.name not in jira_statuses_for_task_completion:
                # the issue hasn't been marked as resolved in jira, so the branch may still be needed
                continue

            author = branch.commit.author.login if branch.commit.author else 'unknown'
            author_count[author] += 1
            repo_report += f'Branch: {branch.name}\nComparison status: {comparison.status}\nAuthor: {author}\n'
            if issue:
                repo_report += f'Ticket status: {issue.fields.status.name}\n'
            repo_report += '\n'
            total_stale_branches += 1

        if repo_report:
            general_report += f'Repo: {repo_name}, develop branch name: {main_develop_branch}\n{repo_report}'

    if total_stale_branches:
        count_by_author = ''
        for author, count in sorted(author_count.items(), key=operator.itemgetter(1), reverse=True):
            count_by_author += f'{author}: {count}\n'
        report_overview = f'Current number of stale branches: {total_stale_branches}\n\n'\
                          f'Count by author:\n{count_by_author}\n'
        report_details = f'Details:\n\n{general_report}'
        _ = slack_request(url=ssm_parameters['slack_webhook_url'],
                          headers={'Content-type': 'application/json',
                                   'Authorization': f"Bearer {ssm_parameters['slack_access_token']}"},
                          data=json.dumps({'text': report_overview}))
        _ = slack_request(url='https://slack.com/api/files.upload',
                          headers={'Content-type': 'application/x-www-form-urlencoded'},
                          data={'token': ssm_parameters['slack_access_token'],
                                'channels': 'GE8NS0FT5',
                                'content': report_details,
                                'title': 'Stale branches details'})

    # return a small summary so the "-> dict" annotation holds
    return {'total_stale_branches': total_stale_branches}
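The function above relies on two helpers, load_params and slack_request, that are not defined anywhere in this record. A minimal sketch of plausible stand-ins, assuming SSM-backed parameters and a plain requests POST; the original implementations are unknown and these are illustrative only:

import json
import boto3
import requests

def load_params(app: str, env: str) -> dict:
    # Assumed helper: fetch all SSM parameters under /<app>/<env>/ into a dict,
    # keyed by the parameter name with the path prefix stripped.
    ssm = boto3.client('ssm')
    prefix = f'/{app}/{env}/'
    params, token = {}, None
    while True:
        kwargs = {'Path': prefix, 'WithDecryption': True}
        if token:
            kwargs['NextToken'] = token
        page = ssm.get_parameters_by_path(**kwargs)
        for p in page['Parameters']:
            params[p['Name'][len(prefix):]] = p['Value']
        token = page.get('NextToken')
        if not token:
            return params

def slack_request(url: str, headers: dict, data) -> requests.Response:
    # Assumed helper: thin wrapper around requests.post with basic error logging.
    response = requests.post(url, headers=headers, data=data)
    if not response.ok:
        print(f'Slack request to {url} failed: {response.status_code} {response.text[:200]}')
    return response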
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stale_pr_branches(config, args):\n repo = config.repo\n for pr in repo.pull_requests(state=\"closed\"):\n if pr.head.repo == pr.base.repo and repo.branch(pr.head.ref):\n yield {\n \"html_url\": pr.html_url,\n \"base_branch\": pr.base.ref,\n \"head_branch\": pr.head.ref,\n }", "def ensure_tracking_branches(args):\n man = load_manifest()\n for (name, project) in man.projects.iteritems():\n repo = GitRepo(workdir_for_project(project))\n branch_missing = repo.command(\n [\"rev-parse\", \"--verify\", \"-q\", project.refspec],\n capture_stdout=True)\n \n if branch_missing:\n logging.warn(\"Branch %s does not exist in project %s. checking out.\" %\n (project.refspec, name))\n repo.command([\"branch\", \"--track\",\n project.tracking_branch, project.remote_refspec])", "def get_updated_all_commits(self, old_commits): # pylint: disable=too-many-locals\n\n # get new list of branches\n newest_branches_names = [branch_info['name'] for branch_info in self.get_branches()]\n\n # get old list of branches from old metadata\n old_branches_names = list(old_commits['metadata'].keys())\n\n # get old metadata\n old_commits_metadata = old_commits['metadata']\n result = {}\n\n # delete all items in old metadata where branch name is not exist in new list of branches\n for old_branch_name in old_branches_names:\n if not newest_branches_names.count(old_branch_name):\n old_commits_metadata.pop(old_branch_name)\n\n checked_commits_metadata = old_commits_metadata\n # add to checked_commits_metadata all metadata that is not exist in old_commits_metadata\n for branch in newest_branches_names:\n if not old_branches_names.count(branch):\n checked_commits_metadata[branch] = None\n\n # get dict of old commits with key - hash of commit for further mapping by branch\n repo_commits = {commit['hash']: commit for commit in old_commits['data']}\n\n # get list of new commits from all branches in repository\n for branch_name, newest_commit in checked_commits_metadata.copy().items():\n updated_list_of_branch_commits = \\\n self.get_updated_commits_by_branch(branch_name, newest_commit, only_new=True)\n if updated_list_of_branch_commits is None:\n return None\n\n # adds key 'branches' with branch name in list to every commit in branch,\n # or if key 'branches' is existing add branch name to existing branches list\n for commit_in_branch in updated_list_of_branch_commits:\n commit = repo_commits.get(commit_in_branch['hash'])\n if commit:\n commit['branches'].append(branch_name)\n else:\n commit_in_branch['branches'] = [branch_name]\n repo_commits[commit_in_branch['hash']] = commit_in_branch\n\n # add new metadata to method response for further updates by get_updated_all_commits\n if updated_list_of_branch_commits:\n checked_commits_metadata[branch_name] = updated_list_of_branch_commits[0]\n else:\n # if given old commit is the newest - add it to new metadata. 
P.S unnecessary ???\n checked_commits_metadata[branch_name] = newest_commit[0]\n\n updated_list_of_branch_commits.clear()\n\n # sorts all commits in repository by date in reverse order\n updated_sorted_commits = sorted(list(repo_commits.values()), key=lambda x: x['date'],\n reverse=True)\n\n result['data'] = updated_sorted_commits\n result['metadata'] = checked_commits_metadata\n\n return result", "def get_commits(self):\n\n repo_commits = {}\n\n # gets all branches in repository\n branches = self.get_branches()\n if branches is None:\n return None\n\n # get list of commits pages from all branches in repository\n for branch in branches:\n list_of_branch_commits = self.get_commits_by_branch(branch['name'])\n if list_of_branch_commits is None:\n return None\n\n # adds key 'branches' with branch name in list to every commit in branch,\n # or if key 'branches' is existing add branch name to existing branches list\n for commit_in_branch in list_of_branch_commits:\n commit = repo_commits.get(commit_in_branch['hash'])\n if commit:\n commit['branches'].append(branch['name'])\n else:\n commit_in_branch['branches'] = [branch['name']]\n repo_commits[commit_in_branch['hash']] = commit_in_branch\n list_of_branch_commits.clear()\n\n # sorts all commits in repository by date in reverse order\n sorted_commits = sorted(list(repo_commits.values()), key=lambda x: x['date'], reverse=True)\n\n # forms a list of commits as an 'get commits API' response\n commits_amount = 30 if len(sorted_commits) >= 30 else len(sorted_commits)\n result_list = sorted_commits[:commits_amount]\n\n return result_list", "def force_pr_branch_stale_review_dismissal(org: Organization, repo: Repository,\n branches: Dict[str, Branch]) -> List[Change[str]]:\n prb = get_pr_branch(repo, branches)\n if prb:\n return _set_dismiss_stale_approvals(prb, True)\n else:\n return []", "def ticket_branches(self, msrp, cred_hash):\n branches = []\n repos = self.get_repos()\n if not repos['status']:\n return repos\n\n for repo in repos['data']:\n response = self.find_branch(repo_name=repo['name'], msrp=msrp, cred_hash=cred_hash)\n if response['status']:\n branches.append({'repo': repo['name'], 'branches': response['data'], 'all': response['all']})\n\n if len(branches) > 0:\n return {'status': True, 'data': branches}\n else:\n return {'status': False, 'data': f'No branches found with MSRP {msrp}'}", "def get_commits(self):\n\n repo_commits = {}\n\n # gets all branches in repository\n branches = self.get_branches()\n if branches is None:\n raise BitbucketRequestSenderExc('Can\\'t get branches for get_commits method')\n\n # get list of commits pages from all branches in repository\n for branch in branches:\n list_of_branch_commits = self.get_commits_by_branch(branch['name'])\n if list_of_branch_commits is None:\n raise BitbucketRequestSenderExc(\n 'Can\\'t get commits by branch for get_commits method')\n\n # adds key 'branches' with branch name in list to every commit in branch,\n # or if key 'branches' is existing add branch name to existing branches list\n for commit_in_branch in list_of_branch_commits:\n commit = repo_commits.get(commit_in_branch['hash'])\n if commit:\n commit['branches'].append(branch['name'])\n else:\n commit_in_branch['branches'] = [branch['name']]\n repo_commits[commit_in_branch['hash']] = commit_in_branch\n list_of_branch_commits.clear()\n\n # sorts all commits in repository by date in reverse order\n sorted_commits = sorted(list(repo_commits.values()), key=lambda x: x['date'], reverse=True)\n\n # forms a list of commits as an 
'get commits API' response\n commits_amount = 30 if len(sorted_commits) >= 30 else len(sorted_commits)\n result_list = sorted_commits[:commits_amount]\n\n return result_list", "def fetch_branches(self):\n for jrepo in self.json_repos['repos']:\n title = str(jrepo[\"title\"])\n self.branches[title] = str(jrepo['current'])", "async def fetch_commits(self):\n for repo in self.config['repos'].split(','):\n since = datetime.min\n async for msg in self.channel.history(limit=None):\n if not msg.embeds:\n continue\n e = msg.embeds[0]\n if e.title == 'github commit' and e.timestamp and repo in e.description: # type: ignore\n since = e.timestamp\n break\n \n await self.update_commit_activity(repo, since)", "def get_all_commits(self):\n\n repo_commits = {}\n metadata = {}\n\n # gets all branches in repository\n branches = self.get_branches()\n if branches is None:\n return None\n\n # get list of commits pages from all branches in repository\n for branch in branches:\n list_of_branch_commits = self.get_all_commits_by_branch(branch['name'])\n\n if list_of_branch_commits is None:\n return None\n\n # adds key 'branches' with branch name in list to every commit in branch,\n # or if key 'branches' is existing add branch name to existing branches list\n for commit_in_branch in list_of_branch_commits:\n commit = repo_commits.get(commit_in_branch['hash'])\n if commit:\n commit['branches'].append(branch['name'])\n else:\n commit_in_branch['branches'] = [branch['name']]\n repo_commits[commit_in_branch['hash']] = commit_in_branch\n\n # add metadata to method response for further updates by get_updated_all_commits\n metadata[branch['name']] = list_of_branch_commits[0]\n\n list_of_branch_commits.clear()\n\n # sorts all commits in repository by date in reverse order\n sorted_commits = sorted(list(repo_commits.values()), key=lambda x: x['date'], reverse=True)\n\n return {'data': sorted_commits, 'metadata': metadata}", "def test_sort_bzr_latest(self):\n identifiers = [\"master\", \"1.0\", \"2.0\", \"1.1\", \"1.9\", \"1.10\"]\n self.project.repo_type = REPO_TYPE_BZR\n self.project.save()\n self.project.versions.get(slug=LATEST).delete()\n\n for identifier in identifiers:\n get(\n Version,\n project=self.project,\n type=BRANCH,\n identifier=identifier,\n verbose_name=identifier,\n slug=identifier,\n )\n\n versions = list(Version.objects.filter(project=self.project))\n self.assertEqual(\n [\"2.0\", \"1.10\", \"1.9\", \"1.1\", \"1.0\", \"master\"],\n [v.slug for v in sort_version_aware(versions)],\n )", "def get_branches(self, repo_name, cred_hash):\n branch_names = []\n\n url = f'{self.code_cloud_api.branch_api}/{repo_name}/branches?start=0&limit=30'\n response = self.code_cloud_api.get(url=url, cred_hash=cred_hash)\n if not response['status']:\n return response\n \n for item in response.get('data', {}).get('values', {}):\n branch_names.append(item.get('displayId', ''))\n\n return {'status': True, 'data': branch_names}", "def branches_full(config, args):\n for b in config.repo.branches():\n yield config.repo.branch(b.name)", "def checkout_branches(args):\n\n ensure_tracking_branches([])\n if check_dirty([]) and '-f' not in args:\n raise Exception(\"Cannot checkout new branches with dirty projects.\")\n \n man = load_manifest()\n for (name, project) in man.projects.iteritems():\n print >>sys.stderr, \"Checking out tracking branch in project: %s\" % name\n repo = GitRepo(workdir_for_project(project))\n # Check that sucker out\n repo.check_command([\"checkout\", project.tracking_branch])", "def reap_broken_repos():\n token 
= os.environ.get('GITHUB_TOKEN', None)\n if token is None:\n print('MISSING GITHUB_TOKEN')\n return\n\n # Do graphql nonsense\n query = '''\n query{\n organization(login:\"os3224\"){\n repositories(first:100,orderBy:{field:CREATED_AT,direction:DESC}){\n nodes{\n ref(qualifiedName:\"master\") {\n target {\n ... on Commit {\n history(first: 20) {\n edges { node { oid } }\n }\n }\n }\n }\n name \n url\n }\n }\n }\n }\n '''\n url = 'https://api.github.com/graphql'\n json = {'query': query}\n headers = {'Authorization': 'token %s' % token}\n\n # Make the graph request over http\n try:\n r = requests.post(url=url, json=json, headers=headers)\n data = r.json()['data']\n organization = data['organization']\n repositories = organization['repositories']['nodes']\n except Exception as e:\n print(traceback.format_exc())\n print(f'Request to github api Failed {e}')\n return\n\n # Running map of unique_code -> assignment objects\n assignments = dict()\n\n # Parse out repo name and url from graphql response\n repos = map(lambda node: (node['name'], node['url'], node['ref']), repositories)\n for repo_name, repo_url, ref in repos:\n assignment = None\n\n # Try to get the assignment object from running map\n for code in repo_name.split('-'):\n assignment = assignments.get(code, None)\n\n # If not in the map, then try to get from the database\n if assignment is None:\n assignment = Assignment.query.filter(\n Assignment.unique_code.in_(repo_name.split('-'))\n ).first()\n\n if assignment is not None:\n assignments[assignment.unique_code] = assignment\n\n # If not in database or map, then eject\n if assignment is None:\n print(f'Could not find assignment for {repo_name}')\n continue\n\n # Guess github username, then create the repo if it doesn't yet exist\n user, github_username = guess_github_username(assignment, repo_name)\n repo = check_repo(assignment, repo_url, github_username, user)\n\n if user is None:\n continue\n\n # Check for broken submissions\n submissions = []\n for submission in Submission.query.filter(Submission.assignment_repo_id == repo.id).all():\n if submission is None:\n continue\n if submission.owner_id != user.id:\n print(f'found broken submission {submission.id}')\n submission.owner_id = repo.owner_id\n submissions.append(submission.id)\n db.session.commit()\n for sid in submissions:\n enqueue_autograde_pipeline(sid)\n\n # Check for missing submissions\n for commit in map(lambda x: x['node']['oid'], ref['target']['history']['edges']):\n submission = Submission.query.filter(\n Submission.commit == commit\n ).first()\n if submission is None:\n print(f'found missing submission {github_username} {commit}')\n submission = Submission(\n commit=commit,\n owner=user,\n assignment=assignment,\n repo=repo,\n state=\"Waiting for resources...\",\n )\n db.session.add(submission)\n db.session.commit()\n init_submission(submission)\n enqueue_autograde_pipeline(submission.id)\n\n r = AssignmentRepo.query.filter(AssignmentRepo.repo_url == repo_url).first()\n if r is not None:\n if r.owner_id != user.id:\n print(f'fixing broken repo owner {r.id}')\n r.owner_id = user.id\n submissions = []\n for submission in Submission.query.filter(\n Submission.assignment_repo_id == r.id\n ).all():\n submission.owner_id = user.id\n submissions.append(submission.id)\n\n db.session.commit()\n for sid in submissions:\n enqueue_autograde_pipeline(sid)\n\n if repo:\n print(f'checked repo: {repo_name} {github_username} {user} {repo.id}')", "def repo_refresh_for_unfinished():\n project_list = Project.objects()\n crawl_list = 
[]\n for repo in project_list:\n if repo.analyser_progress != \"100%\":\n crawl_list.append(repo.project_name)\n analyser.add_repos(current_user.username, crawl_list)\n flash('Refresh for unfinished successfully!', 'success')\n return redirect(url_for('main.admin_manage'))", "def list_all_repos_info():\n repos = ALL_REPOS\n for repo_name, repo in zip(repos, _repos(repos)):\n repo_name = shorten_path(repo_name)\n print(repo_name)\n try:\n nbr_ahead, nbr_behind = _nbr_commits_ahead_and_behind(repo)\n except git.exc.GitCommandError:\n print(f\" {repo.active_branch.name}\")\n except DetachedHeadError:\n print(f\" HEAD --> {repo.head.commit}\")\n else:\n nb_tabul = 3 if len(repo.active_branch.name) < 6 else 2\n tabuls = \"\\t\" * nb_tabul\n print(f\" {repo.active_branch.name}{tabuls}↓ {nbr_behind} ↑ {nbr_ahead}\")\n if repo.index.diff(None):\n print(\" !!! With unstaged changes !!!\")\n if repo.index.diff(\"HEAD\"):\n print(\" !!! With uncommited changes !!!\")", "def protect_pr_branch_with_tests_if_any_exist(org: Organization, repo: Repository,\n branches: Dict[str, Branch]) -> List[Change[str]]:\n def execute_test_protection(change: Change[str], branch: Branch, existing_checks: Set[str],\n known_status_checks: Set[str], known_checkruns: Set[str]) -> Change[str]:\n\n all_known_checks = known_status_checks | known_checkruns # For convenience later to treat them as a single set\n\n print_debug(\"[%s] Changing status checks on branch '%s' to [%s]\" %\n (highlight(repo.name), highlight(branch.name),\n highlight(\", \".join(list(all_known_checks)))))\n try:\n if existing_checks:\n branch.edit_required_status_checks(strict=True, contexts=list(all_known_checks))\n else:\n safe_branch_edit_protection(\n branch,\n strict=True,\n contexts=list(all_known_checks),\n )\n except GithubException as e:\n print_error(\"Can't edit required status checks on repo %s branch %s: %s\" %\n (repo.name, branch.name, str(e)))\n return change.failure()\n return change.success()\n\n prb = get_pr_branch(repo, branches)\n if not prb:\n return []\n\n existing_checks = set() # type: Set[str]\n try:\n rqs = prb.get_required_status_checks()\n except GithubException:\n # the repository has currently no status checks\n pass\n else:\n if len(rqs.contexts) > 0:\n # The repository already has some status checks\n existing_checks = set(rqs.contexts)\n print_debug(\"Branch %s on repo %s already has status checks [%s]\" %\n (highlight(prb.name), highlight(repo.name), highlight(\", \".join(existing_checks))))\n\n # the repository currently has no status checks, let's see if any came in within the last 7 days\n sevendaysago = datetime.now() - timedelta(days=7)\n commits = repo.get_commits(prb.name, since=sevendaysago)\n known_status_checks = set() # type: Set[str]\n known_checkruns = set() # type: Set[str]\n for commit in commits:\n for status in commit.get_statuses(): # type: CommitStatus\n if status.context not in known_status_checks:\n print_debug(\"New status check [%s]: %s %s '%s'\" %\n (commit.sha, status.updated_at,\n status.context, status.description))\n known_status_checks.add(status.context)\n for checkrun in commit.get_check_runs(): # type: CheckRun\n if checkrun.name not in known_checkruns:\n print_debug(\"New check run [%s]: %s %s %s\" %\n (commit.sha, checkrun.completed_at, checkrun.name, checkrun.app))\n known_checkruns.add(checkrun.name)\n\n all_known_checks = known_status_checks | known_checkruns # For convenience later to treat them as a single set\n print_debug(\"Found status checks [%s]\" % \", 
\".join(all_known_checks))\n\n if all_known_checks and all_known_checks != existing_checks:\n # add all known checks as required checks\n print_debug('Adding checks [%s] to branch %s on repo %s' %\n (highlight(\", \".join((all_known_checks) - existing_checks)),\n highlight(prb.name), highlight(repo.name)))\n return [Change(\n meta=ChangeMetadata(\n executor=execute_test_protection,\n params=[prb, existing_checks, known_status_checks, known_checkruns]\n ),\n action=ChangeActions.REPLACE if existing_checks else ChangeActions.ADD,\n before=\"%s checks\" % len(existing_checks) if existing_checks else \"No checks\",\n after=\"%s checks\" % len(all_known_checks),\n )]\n return []", "def list_all_branches(self) -> dict:\n try:\n branches_response = self.repo.get_branches()\n branches_list = []\n for branch in branches_response:\n branches_list.append(branch.raw_data.get('name'))\n return make_success_response(200, branches_list)\n except GithubException as github_exc:\n return make_error_response(github_exc.status, github_exc.data)", "def check_branch(\n comp_name: str, branch_name: str, branch: Dict[str, defs.ComponentVersion]\n ) -> None:\n uptodate_files: Dict[pathlib.Path, Tuple[pathlib.Path, defs.ComponentFile]] = {}\n\n if not RE_BRANCH_NAME.match(branch_name):\n res.append(f\"{comp_name}: Invalid branch name: {branch_name}\")\n\n for ver, version in sorted(branch.items()):\n if not RE_VERSION_STRING.match(ver):\n res.append(f\"{comp_name}/{branch_name}: Invalid version string: {ver}\")\n\n other_cksums, driver_cksums = _split_by_existence(comp_name, branch_name, version.files)\n if version.outdated:\n update_to = [\n o_version\n for o_version in branch.values()\n if not o_version.outdated\n and _split_by_existence(comp_name, branch_name, o_version.files)[0]\n == other_cksums\n ]\n if len(update_to) != 1:\n res.append(\n f\"{comp_name}/{branch_name}/{ver}: Got {len(update_to)} possible \"\n f\"versions to update to instead of exactly one\"\n )\n else:\n bad_files = sorted(\n relpath\n for relpath, (path, fdata) in driver_cksums.items()\n if util.file_sha256sum(path) != fdata.sha256\n )\n if bad_files:\n res.append(f\"{comp_name}/{branch_name}/{ver}: Bad checksum for {bad_files}\")\n\n if not uptodate_files:\n uptodate_files = driver_cksums\n elif uptodate_files != driver_cksums:\n res.append(\n f\"{comp_name}/{branch_name}: All the up-to-date versions should \"\n f\"define the same set of files with the same checksums\"\n )\n\n if not any(not version.outdated for version in branch.values()):\n res.append(f\"{comp_name}/{branch_name}: No non-outdated versions\")", "def repos(request):\n # Clean up garbage created by buggy edits\n bad_branch_keys = models.Branch.query(models.Branch.owner == None).fetch(\n 100, keys_only=True)\n if bad_branch_keys:\n ndb.delete_multi(bad_branch_keys)\n repo_map = {}\n for repo in models.Repository.query().fetch(1000, batch_size=100):\n repo_map[repo.key] = repo\n branches = []\n for branch in models.Branch.query().fetch(2000, batch_size=100):\n repo_key = branch.repo_key\n if repo_key in repo_map:\n branch.repository = repo_map[repo_key]\n branches.append(branch)\n branches.sort(key=lambda b: map(\n unicode.lower, (b.repository.name, b.category, b.name)))\n return respond(request, 'repos.html', {'branches': branches})", "def _computeobsoleteset(repo):\n obs = set()\n getrev = repo.changelog.nodemap.get\n getphase = repo._phasecache.phase\n for node in repo.obsstore.successors:\n rev = getrev(node)\n if rev is not None and getphase(repo, rev):\n obs.add(rev)\n 
return obs", "def new_commits(repo, sha):\n from datetime import datetime\n\n dateformat = \"%a, %d %b %Y %H:%M:%S GMT\"\n release_commit = repo.get_commit(sha)\n since = datetime.strptime(release_commit.last_modified, dateformat)\n commits = repo.get_commits(since=since)\n if len(list(commits)) == 1:\n return False\n return reversed(list(commits)[:-1])", "def get_changes(access_token, organization_url, target_repo_name, source_branches, target_branch_name, pull_quantity, ignore_words=[]) -> dict:\n print('\\nConnecting to API\\n')\n try:\n # Create a connection to the org\n credentials = BasicAuthentication('', access_token)\n connection = Connection(base_url=organization_url, creds=credentials)\n\n # Get git Client\n # See azure.devops.v5_0.models for models\n # azure.devops.git.git_client_base for git_client methods\n git_client = connection.clients.get_git_client()\n\n # Get the repo\n repositories = git_client.get_repositories()\n\n except MSExceptions.ClientRequestError as err:\n print('Client Request Error:', str(err))\n return None\n except MSExceptions.AuthenticationError as err:\n print('Authentication Error: ', str(err))\n\n target_repo = None\n for repo in repositories:\n if repo.name == target_repo_name:\n target_repo = repo\n\n if not target_repo:\n print(f'Repository {target_repo_name} not found.')\n return None\n\n all_changes = {}\n\n ignored_commits = []\n processed_commits = []\n\n for branch in source_branches:\n\n # Find commits for the specific branch combination\n search_criteria = GitPullRequestSearchCriteria (\n source_ref_name = f'refs/heads/{branch}',\n target_ref_name = f'refs/heads/{target_branch_name}',\n status = 'Completed'\n )\n\n pull_requests = git_client.get_pull_requests(target_repo.id, search_criteria, top=9999)\n\n\n print(f\"Proccesing PR commits for {branch}...\")\n with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:\n future_prs = { executor.submit(process_pull_requests, git_client, target_repo.id, pull, ignore_words): pull for pull in pull_requests}\n for future in tqdm(futures.as_completed(future_prs), unit=' PRs'):\n data, processed, ignored = future.result()\n for change in data.keys():\n if all_changes.get(change):\n all_changes[change] = all_changes[change] + data[change]\n else:\n all_changes[change] = data[change]\n for commit in processed:\n processed_commits.append(commit)\n for commit in ignored:\n ignored_commits.append(commit)\n print()\n\n return all_changes, processed_commits, ignored_commits", "def base_branches() -> list[str]:\n branches = []\n\n default = sh(\"git rev-parse --abbrev-ref origin/HEAD\").removeprefix(\"origin/\")\n branches.append(default)\n\n releases = sh(\n \"git branch --all --sort=-committerdate --list *release/* | head -10\"\n ).splitlines()\n releases = [b.removeprefix(\"*\").strip() for b in releases]\n branches.extend(releases)\n\n return branches", "def load_commits(db, repo_name):\n\n SEP = \"-=:=-=:=-=:=-=:=-=:=-=:=-=:=-\"\n GITLOG = f\"git log --no-merges --format='format:date: %aI%nhash: %H%nauth: %aE%nname: %aN%nsubj: %s%n%b%n{SEP}'\"\n SHORT_LINES = 5\n\n # $ git log --format=\"format:---------------------%ndate: %aI%nhash: %H%nauth: %aE%nname: %aN%nsubj: %s%n%b\"\n # ---------------------\n # date: 2021-04-21T16:13:23-04:00\n # hash: efa13ff1d2fb3d8b2ddee8be0868ae60f9bc35a6\n # auth: julia.eskew@edx.org\n # name: Julia Eskew\n # subj: fix: TNL-8233: Change exception raised at problem creation failure from generic exception to LoncapaProblemError. 
(#27361)\n # Raising this specific exception will cause the failure to be handled more gracefully by problem rescoring code.\n # ---------------------\n # date: 2021-04-15T21:36:47-04:00\n # hash: a1fe3d58dc112bd975f1237baaee787ba22929f1\n # auth: astaubin@edx.org\n # name: Albert (AJ) St. Aubin\n # subj: [bug] Corrected issue where program dash showed incorrect completed count\n # [MICROBA-1163]\n # \n # This change will correct an issue in the Program Dashboard where a user\n # would see a course as completed, but not see their Certificate because\n # it was not available to them yet.\n # ---------------------\n\n with db:\n commit_table = db[\"commits\"]\n\n log = get_cmd_output(GITLOG)\n for i, commit in enumerate(log.split(SEP + \"\\n\")):\n if commit:\n lines = commit.split(\"\\n\", maxsplit=SHORT_LINES)\n row = {\"repo\": repo_name}\n for line in lines[:SHORT_LINES]:\n key, val = line.split(\": \", maxsplit=1)\n row[key] = val\n row[\"body\"] = lines[SHORT_LINES].strip()\n analyze_commit(row)\n commit_table.insert(row)", "def test_get_latest_results_for_repo(self):\n for x in xrange(98, 103):\n self.db.insert_single_result(generate_mock_result(project='TEST', repository='test-repo', run_id=x))\n self.db.insert_single_result(generate_mock_result(project='NEWTEST', repository='newtest-repo', run_id=x))\n testlatest = self.db.get_latest_results_for_project('TEST')\n self.assertEqual(len(testlatest), 1)\n self.assertEqual(testlatest[0].repository, 'test-repo')\n self.assertEqual(testlatest[0].run_id, '102')", "def make_branches(self, api_json=None):\n if api_json is None:\n return []\n\n obj = simplejson.loads(api_json)\n branches = [item[\"commit\"][\"sha\"] for item in obj]\n\n print branches\n\n return branches", "def branches(self):\n return sorted([\n br[20:] for br in self.repo.refs.keys() if (\n br.startswith('refs/remotes/origin/') and\n br[20:] != 'HEAD'\n )\n ])", "def get_github_commits():\n utcnow = datetime.datetime.utcnow()\n yesterday = utcnow - datetime.timedelta(hours=24)\n yesterday = yesterday.replace(hour=12, minute=0, second=0)\n iso = yesterday.strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n\n txt = [\"> IEM Code Pushes <to branch> on Github\\n\"]\n html = [\"<h3>IEM Code Pushes &lt;to branch&gt; on Github</h3>\"]\n\n # get branches, main is first!\n branches = [\"main\"]\n req = exponential_backoff(requests.get, IEM_BRANCHES, timeout=30)\n for branch in req.json():\n if branch[\"name\"] == \"main\":\n continue\n branches.append(branch[\"name\"])\n\n hashes = []\n links = []\n for branch in branches:\n uri = (\n f\"https://api.github.com/repos/akrherz/iem/commits?since={iso}&\"\n f\"sha={branch}\"\n )\n req2 = exponential_backoff(requests.get, uri, timeout=30)\n # commits are in reverse order\n for commit in req2.json()[::-1]:\n if commit[\"sha\"] in hashes:\n continue\n hashes.append(commit[\"sha\"])\n timestring = commit[\"commit\"][\"author\"][\"date\"]\n utcvalid = datetime.datetime.strptime(\n timestring, \"%Y-%m-%dT%H:%M:%SZ\"\n )\n valid = utcvalid.replace(tzinfo=pytz.utc).astimezone(\n pytz.timezone(\"America/Chicago\")\n )\n data = {\n \"stamp\": valid.strftime(\"%b %-d %-2I:%M %p\"),\n \"msg\": commit[\"commit\"][\"message\"],\n \"htmlmsg\": htmlize(commit[\"commit\"][\"message\"])\n .replace(\"\\n\\n\", \"\\n\")\n .replace(\"\\n\", \"<br />\\n\"),\n \"branch\": branch,\n \"url\": commit[\"html_url\"][:-20], # chomp to make shorter\n \"i\": len(links) + 1,\n }\n links.append(\"[%(i)s] %(url)s\" % data)\n txt.append(\n mywrap(\" %(stamp)s[%(i)s] <%(branch)s> %(msg)s\" 
% data)\n )\n html.append(\n (\n '<li><a href=\"%(url)s\">%(stamp)s</a> '\n \"&lt;%(branch)s&gt; %(htmlmsg)s</li>\\n\"\n )\n % data\n )\n\n if len(txt) == 1:\n txt = txt[0] + \" No code commits found in previous 24 Hours\"\n html = html[0] + (\n \"<strong>No code commits found \" \"in previous 24 Hours</strong>\"\n )\n else:\n txt = \"\\n\".join(txt) + \"\\n\\n\" + \"\\n\".join(links)\n html = html[0] + \"<ul>\" + \"\\n\".join(html[1:]) + \"</ul>\"\n\n return txt + \"\\n\\n\", html + \"<br /><br />\"", "def clone_all_repo(self):\n repo_path = self.cfg['paths']['repo']\n since = datetime.datetime.strptime(self.cfg['filters']['since'], self.cfg['filters']['date_format'])\n threads = []\n # INITIALIZE ALL REPOS\n for repo in self.cfg[\"repos\"]:\n if (not self.cfg['replace_existing_repo']) and (os.path.exists(repo_path + repo['name'])):\n print(self.repo_exist_msg.format(repo_path + repo['name']))\n else:\n if os.path.exists(repo_path + repo['name']):\n # delete directory and contents\n shutil.rmtree(repo_path + repo['name'])\n # creat directory\n os.makedirs(repo_path + repo['name'])\n thread = threading.Thread(target=self.clone_repo, args=(repo_path, since, repo,),\n name=f\"task_{repo['name']}\")\n thread.start()\n threads.append(thread)\n\n # Wait all finished cloning\n for t in threads:\n t.join()\n print(\"Done cloning!\")", "def get_details(self, repo=None):\n api_json = []\n\n #get all branches from this repo\n branches = self.make_branches(self.getBranch(repo))\n\n today = datetime.date.today()\n yesterday = today - datetime.timedelta(2)\n\n for branch in branches:\n args = {\"per_page\": \"100\",\n \"sha\": branch,\n \"author\": self.username,\n \"since\": yesterday.isoformat()}\n args = self.make_args(args)\n repo_url = \"/\".join([self.url, \"repos\", repo, \"commits\"])\n repo_url = repo_url + args\n\n request = urllib2.Request(repo_url, headers=self.headers)\n response = urllib2.urlopen(request)\n raw_data = response.read()\n commits_info = self.process_factory(simplejson.loads(raw_data))\n api_json = api_json + commits_info\n\n print repo_url\n\n print api_json\n return api_json", "def cli(ctx, paths, **kwds):\n\n # In a little bit of cheating, we're defining this variable here to collect\n # a \"report\" on our shed_diff\n collected_data = {\n 'results': {\n 'total': 0,\n 'errors': 0,\n 'failures': 0,\n 'skips': 0,\n },\n 'tests': [],\n }\n\n def diff(realized_repository):\n result = shed.diff_repo(ctx, realized_repository, **kwds)\n # Collect data about what happened\n collected_data['results']['total'] += 1\n if result >= 200:\n collected_data['results']['errors'] += 1\n elif result > 0:\n collected_data['results']['failures'] += 1\n collected_data['tests'].append({\n 'classname': realized_repository.name,\n 'result': result,\n })\n return result\n\n exit_code = shed.for_each_repository(ctx, diff, paths, **kwds)\n\n if kwds.get('report_xunit', False):\n with open(kwds['report_xunit'], 'w') as handle:\n handle.write(build_report.template_data(\n collected_data, template_name='diff_xunit.tpl'))\n\n sys.exit(exit_code)", "def hunks_from_last_commits(n):\n \n assert n > 0\n \n diff_output = subprocess.check_output([\"git\", \"diff\", \"HEAD~\"+str(n) , \"--diff-filter=d\", \"--unified=0\"]\n ).decode(\"utf-8\").splitlines()\n \n return _hunks_from_diff(diff_output)", "def fetch_feedstock(repo_dir):\n repo = Repo(repo_dir)\n for remote in repo.remotes:\n try:\n remote.fetch()\n except GitCommandError:\n print(\"Failed to fetch {} from {}.\".format(remote.name, remote.url))", "def 
schedule_pulls():\n for repo in Repository.objects.all():\n # distribute pulls by delaying evenly across an hour\n pk_hash = hashlib.sha256(str(repo.pk).encode())\n delay_s = int(pk_hash.hexdigest(), 16) % (60 * 60)\n\n pull.apply_async(\n args=(repo.remote_id, repo.provider),\n countdown=delay_s,\n expires=delay_s + (60 * 60),\n )", "def get_failed_builds(builder, horizon):\n builder.builds.cache()\n return [\n builder.builds[i] for i in xrange(-horizon, 0)\n if not builder.builds[i].simplified_result\n ]", "def clean_stale_issues():\n from security_monkey.common.audit_issue_cleanup import clean_stale_issues\n clean_stale_issues()", "def remove_all_status_checks_on_pr_branch(org: Organization, repo: Repository,\n branches: Dict[str, Branch]) -> List[Change[str]]:\n def execute_remove_all_status_checks(change: Change[str], branch: Branch, existing_checks: Set[str]) -> Change[str]:\n print_debug(\"Removing all status checks from branch %s\" % highlight(branch.name))\n try:\n if existing_checks:\n branch.remove_required_status_checks()\n except GithubException as e:\n print_error(str(e))\n return change.failure()\n else:\n return change.success()\n\n prb = get_pr_branch(repo, branches)\n if not prb:\n return []\n\n try:\n rqs = prb.get_required_status_checks()\n except GithubException:\n # the repository has currently no status checks\n pass\n else:\n if len(rqs.contexts) > 0:\n existing_checks = set(rqs.contexts) # type: Set[str]\n return [Change(\n meta=ChangeMetadata(\n executor=execute_remove_all_status_checks,\n params=[prb, existing_checks]\n ),\n action=ChangeActions.REPLACE,\n before=\"%s checks\" % len(existing_checks),\n after=None,\n )]\n return []", "def buildList(self,list_all=False,push_all=False):\n print \"=== Loading\"\n\n ## Walk in repo and get GitRepo\n for repo in self.repos:\n if repo not in self.ignore or list_all:\n a = GitRepo(repo)\n if not self.animate:\n a.branchStatus()\n if a.globalStatus():\n if a.forward and push_all:\n a.push()\n self.gitrepos.append(a)\n\n print \"\\r=== \"+str(len(self.gitrepos))+\" repos scanned\"", "def find_branches(self, commit, repo):\n ref_dict = repo.repo.refs.as_dict()\n branches = []\n for branch, branch_id in [(b, ref_dict[b]) for b in repo.branches]:\n obj = repo.repo[branch_id]\n if commit.id == obj.id:\n branches.append((branch, obj))\n return branches", "def get_deploy_revs_and_diffs(environment, args):\n default_branch = environment.fab_settings_config.default_branch\n branches = [\n ('commcare', 'commcare_rev', default_branch),\n ]\n for repo in environment.meta_config.git_repositories:\n branches.append((repo.name, '{}_rev'.format(repo.name), repo.version))\n\n diffs = []\n actuals = {}\n for repo_name, arg_name, default in branches:\n actual = getattr(args, arg_name, None)\n actuals[repo_name] = actual or default\n if actual and actual != default:\n diffs.append(\"'{}' repo: {} != {}\".format(repo_name, default, actual))\n\n return actuals, diffs", "def branches(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'branches')\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def test_single_repo_disconnected(self):\n self.Mokes.add_repo_to_pi()\n self.Mokes.make_new_latinLit_test(coverage=55.0, session=self.db.session)\n response = self.client.get(\"/repo/PerseusDl/canonical-latinLit\").data.decode()\n\n self.assertNotIn(\n \"Settings\", response, \"There is no Settings for unlogged people\"\n )\n self.assertNotIn(\"$('#state')\", response, \"We have the switch script\")\n index = BeautifulSoup(response, 
'html.parser')\n\n tests = index.select(\"#body tbody tr\")\n self.assertEqual(len(tests), 3, \"There should be 3 tests\")\n last_test = tests[0]\n self.assertEqual(\n len(last_test.select('a[href=\"/repo/PerseusDl/canonical-latinLit/3\"]')), 1,\n \"There should be a link to the last test\"\n )\n self.assertEqual(\n len(last_test.select('a[href=\"https://github.com/PerseusDL/canonical-latinLit/commit/'\n '7d3d6a0b62f0d244b684843c7546906d742013fd#all_commit_comments\"]')),\n 1,\n \"There should be a link to the commit on GitHub\"\n )\n self.assertIn(\"<td>55.0</td>\", str(last_test), \"There should be the coverage shown\")\n second_test = tests[1]\n self.assertEqual(\n len(second_test.select('a[href=\"/repo/PerseusDl/canonical-latinLit/2\"]')), 1,\n \"There should be a link to the second test\"\n )\n self.assertIn(\"<td>99.85</td>\", str(second_test), \"There should be the coverage shown\")", "def list_branches(repository: Repository):\n for branch in repository.branches:\n if branch == repository.head:\n print(\" * {}\".format(branch))\n else:\n print(\" {}\".format(branch))", "def compare_commits(self, repo, start_sha, end_sha):\n installation = self.get_installation(repo.integration_id,\n repo.organization_id)\n client = installation.get_client()\n try:\n if start_sha is None:\n res = client.get_last_commits(repo.config['project_id'], end_sha)\n return self._format_commits(client, repo, res)\n else:\n res = client.compare_commits(repo.config['project_id'], start_sha, end_sha)\n return self._format_commits(client, repo, res['commits'])\n except Exception as e:\n installation.raise_error(e)", "def test_sort_git_master_and_latest(self):\n identifiers = [\"latest\", \"master\", \"1.0\", \"2.0\", \"1.1\", \"1.9\", \"1.10\"]\n self.project.repo_type = REPO_TYPE_GIT\n self.project.save()\n self.project.versions.get(slug=LATEST).delete()\n\n for identifier in identifiers:\n get(\n Version,\n project=self.project,\n type=BRANCH,\n identifier=identifier,\n verbose_name=identifier,\n slug=identifier,\n )\n\n versions = list(Version.objects.filter(project=self.project))\n self.assertEqual(\n [\"master\", \"latest\", \"2.0\", \"1.10\", \"1.9\", \"1.1\", \"1.0\"],\n [v.slug for v in sort_version_aware(versions)],\n )", "def test_repo_built():\n\n count = BehavioralUtils.count_repos_updated('builds')\n # If 1 repo Siteupdates in report repo built successfully.\n assert count == 1", "def all_status():\n print(\"Getting repo status.\\n\\nYou may be prompted for credentials...\")\n\n os.chdir(STATUS_DIR)\n attention = \"\"\n messages = []\n TIME_STAMP = datetime.now().strftime(\"%a_%d_%b_%Y_%H_%M_%S_%p\")\n\n fname = \"REPO_STATUS_@_{}.md\".format(TIME_STAMP)\n with open(fname, 'w+') as f:\n f.write(\"# Repository status as at {}\\n\\n\".format(TIME_STAMP))\n \n for each in load_multiple(_all=True):\n name = each.name\n status = each.status()\n\n messages.append(\"## {}\\n\\n```cmd\\n{}```\\n\".format(name, status))\n\n if need_attention(status):\n attention += \"1. {}\\n\".format(name)\n\n f.write(\"## REPOS NEEDING ATTENTION\\n\\n\")\n f.write(attention)\n f.write(\"\\n-------\\n\\n\")\n f.write(\"## STATUS MESSAGES\\n\\n\")\n f.write(\"\\n\".join(messages))\n\n print(\"\\n\\nDone. 
Status file saved in \", STATUS_DIR)\n os.chdir(BASE_DIR)\n return", "def hard_reset_branches(args):\n checkout_branches(args)\n man = load_manifest()\n for (name, project) in man.projects.iteritems():\n print >>sys.stderr, \"Hard resetting tracking branch in project: %s\" % name\n repo = GitRepo(workdir_for_project(project))\n repo.check_command([\"reset\", \"--hard\", project.remote_refspec])", "def fetch_all_repos_info():\n\n def fetch(*args, **kwargs):\n kwargs[\"remote\"].fetch()\n\n repos = ALL_REPOS\n for repo_name, repo in zip(repos, _repos(repos)):\n repo_name = shorten_path(repo_name)\n print(f\"fetching {repo_name}\")\n _try_for_all_remotes(\n repo, fetch, raise_on_exception=False, stop_on_success=False, verbose=True\n )", "def list_branches(self, msg, args):\n trans = self._translation_util(msg)\n yield \"\\n\".join(trans.list_branches(REPOSITORY_NAME))", "def generateFinishedBuilds(branches=[],\n num_builds=None,\n max_buildnum=None, finished_before=None,\n max_search=200,\n ):", "def dirty_branches(self):\n # If no course index has been set, then no branches have changed\n if self.index is None:\n return []\n\n # If there was no index in the database to start with, then all branches\n # are dirty by definition\n if self.initial_index is None:\n return list(self.index.get('versions', {}).keys())\n\n # Return branches whose ids differ between self.index and self.initial_index\n return [\n branch\n for branch, _id\n in self.index.get('versions', {}).items()\n if self.initial_index.get('versions', {}).get(branch) != _id\n ]", "def get_branches(self, *, refs=[\"refs/heads\", \"refs/remotes\"]):\n # type: (Sequence[str]) -> List[Branch]\n stdout = self.git(\n \"for-each-ref\",\n (\n \"--format=\"\n \"%(HEAD)%00\"\n \"%(refname)%00\"\n \"%(upstream)%00\"\n \"%(upstream:remotename)%00\"\n \"%(upstream:track,nobracket)%00\"\n \"%(committerdate:unix)%00\"\n \"%(objectname)%00\"\n \"%(contents:subject)\"\n ),\n *refs\n ) # type: str\n branches = [\n branch\n for branch in (\n self._parse_branch_line(line)\n for line in filter_(stdout.splitlines())\n )\n if branch.name != \"HEAD\"\n ]\n store.update_state(self.repo_path, {\"branches\": branches})\n return branches", "def update_repos():\n with open(repolist_file, \"r\") as repofile:\n repolist = repofile.readlines()\n for idx in xrange(len(repolist)):\n l = repolist[idx].strip()\n if re.match('^[i]',l):\n repodir = clone_dir + \"/\" + os.path.basename(l)\n git(\"fetch\", \"--all\", cwd = repodir)\n pass", "def _get_pdc_branches(session, repo):\n branches_url = '{0}component-branches/'.format(PDCURL)\n params = dict(\n global_component=repo['name'],\n type=PDC_TYPES[repo['namespace']]\n )\n if VERBOSE:\n print('Querying {0} {1}'.format(branches_url, params))\n rv = session.get(branches_url, params=params, timeout=60)\n\n # If the project's branches can't be reported, just return no branches and\n # it will be skipped later on\n if not rv.ok:\n print(('The connection to \"{0}\" failed with the status code {1} and '\n 'output \"{2}\"'.format(branches_url, rv.status_code, rv.text)),\n file=sys.stderr)\n return []\n\n data = rv.json()\n return [branch['name'] for branch in data['results']]", "def pre_backup_check(repos):\n for repo in 'local', 'remote':\n repos[repo].check()\n\n # TODO: Check the ordering of this is deterministic\n most_recent_archive = repos[repo].list_archives()[-1]\n repos[repo].check_archive(most_recent_archive)", "def validate(cfg: defs.Config) -> List[str]: # noqa: C901\n res: List[str] = []\n\n def check_branch(\n 
comp_name: str, branch_name: str, branch: Dict[str, defs.ComponentVersion]\n ) -> None:\n \"\"\"Validate versions within a single branch.\"\"\"\n uptodate_files: Dict[pathlib.Path, Tuple[pathlib.Path, defs.ComponentFile]] = {}\n\n if not RE_BRANCH_NAME.match(branch_name):\n res.append(f\"{comp_name}: Invalid branch name: {branch_name}\")\n\n for ver, version in sorted(branch.items()):\n if not RE_VERSION_STRING.match(ver):\n res.append(f\"{comp_name}/{branch_name}: Invalid version string: {ver}\")\n\n other_cksums, driver_cksums = _split_by_existence(comp_name, branch_name, version.files)\n if version.outdated:\n update_to = [\n o_version\n for o_version in branch.values()\n if not o_version.outdated\n and _split_by_existence(comp_name, branch_name, o_version.files)[0]\n == other_cksums\n ]\n if len(update_to) != 1:\n res.append(\n f\"{comp_name}/{branch_name}/{ver}: Got {len(update_to)} possible \"\n f\"versions to update to instead of exactly one\"\n )\n else:\n bad_files = sorted(\n relpath\n for relpath, (path, fdata) in driver_cksums.items()\n if util.file_sha256sum(path) != fdata.sha256\n )\n if bad_files:\n res.append(f\"{comp_name}/{branch_name}/{ver}: Bad checksum for {bad_files}\")\n\n if not uptodate_files:\n uptodate_files = driver_cksums\n elif uptodate_files != driver_cksums:\n res.append(\n f\"{comp_name}/{branch_name}: All the up-to-date versions should \"\n f\"define the same set of files with the same checksums\"\n )\n\n if not any(not version.outdated for version in branch.values()):\n res.append(f\"{comp_name}/{branch_name}: No non-outdated versions\")\n\n def check_component(comp_name: str, comp: defs.Component) -> None:\n \"\"\"Validate the definition of a single component.\"\"\"\n if not RE_COMP_NAME.match(comp_name):\n res.append(f\"Invalid component name: {comp_name}\")\n\n for branch_name, branch in sorted(comp.branches.items()):\n check_branch(comp_name, branch_name, branch)\n\n for comp_name, comp in sorted(cfg.all_components.components.items()):\n check_component(comp_name, comp)\n\n return res", "async def main(\n *,\n ruff1: Path,\n ruff2: Path,\n projects_jsonl: Path | None,\n checkouts: Path | None = None,\n) -> None:\n if projects_jsonl:\n repositories = read_projects_jsonl(projects_jsonl)\n else:\n repositories = {(repo.org, repo.repo): repo for repo in REPOSITORIES}\n\n logger.debug(f\"Checking {len(repositories)} projects\")\n\n # https://stackoverflow.com/a/61478547/3549270\n # Otherwise doing 3k repositories can take >8GB RAM\n semaphore = asyncio.Semaphore(50)\n\n async def limited_parallelism(coroutine: T) -> T:\n async with semaphore:\n return await coroutine\n\n results = await asyncio.gather(\n *[\n limited_parallelism(compare(ruff1, ruff2, repo, checkouts))\n for repo in repositories.values()\n ],\n return_exceptions=True,\n )\n\n diffs = dict(zip(repositories, results, strict=True))\n\n total_removed = total_added = 0\n errors = 0\n\n for diff in diffs.values():\n if isinstance(diff, Exception):\n errors += 1\n else:\n total_removed += len(diff.removed)\n total_added += len(diff.added)\n\n if total_removed == 0 and total_added == 0 and errors == 0:\n print(\"\\u2705 ecosystem check detected no changes.\")\n else:\n rule_changes: dict[str, tuple[int, int]] = {}\n changes = f\"(+{total_added}, -{total_removed}, {errors} error(s))\"\n\n print(f\"\\u2139\\ufe0f ecosystem check **detected changes**. 
{changes}\")\n print()\n\n for (org, repo), diff in diffs.items():\n if isinstance(diff, Exception):\n changes = \"error\"\n print(f\"<details><summary>{repo} ({changes})</summary>\")\n repo = repositories[(org, repo)]\n print(\n f\"https://github.com/{repo.org}/{repo.repo} ref {repo.ref} \"\n f\"select {repo.select} ignore {repo.ignore} exclude {repo.exclude}\",\n )\n print(\"<p>\")\n print()\n\n print(\"```\")\n print(str(diff))\n print(\"```\")\n\n print()\n print(\"</p>\")\n print(\"</details>\")\n elif diff:\n changes = f\"+{len(diff.added)}, -{len(diff.removed)}\"\n print(f\"<details><summary>{repo} ({changes})</summary>\")\n print(\"<p>\")\n print()\n\n repo = repositories[(org, repo)]\n diff_lines = list(diff)\n\n print(\"<pre>\")\n for line in diff_lines:\n match = DIFF_LINE_RE.match(line)\n if match is None:\n print(line)\n continue\n\n pre, inner, path, lnum, post = match.groups()\n url = repo.url_for(diff.source_sha, path, int(lnum))\n print(f\"{pre} <a href='{url}'>{inner}</a> {post}\")\n print(\"</pre>\")\n\n print()\n print(\"</p>\")\n print(\"</details>\")\n\n # Count rule changes\n for line in diff_lines:\n # Find rule change for current line or construction\n # + <rule>/<path>:<line>:<column>: <rule_code> <message>\n matches = re.search(r\": ([A-Z]{1,4}[0-9]{3,4})\", line)\n\n if matches is None:\n # Handle case where there are no regex matches e.g.\n # + \"?application=AIRFLOW&authenticator=TEST_AUTH&role=TEST_ROLE&warehouse=TEST_WAREHOUSE\" # noqa: E501, ERA001\n # Which was found in local testing\n continue\n\n rule_code = matches.group(1)\n\n # Get current additions and removals for this rule\n current_changes = rule_changes.get(rule_code, (0, 0))\n\n # Check if addition or removal depending on the first character\n if line[0] == \"+\":\n current_changes = (current_changes[0] + 1, current_changes[1])\n elif line[0] == \"-\":\n current_changes = (current_changes[0], current_changes[1] + 1)\n\n rule_changes[rule_code] = current_changes\n\n else:\n continue\n\n if len(rule_changes.keys()) > 0:\n print(f\"Rules changed: {len(rule_changes.keys())}\")\n print()\n print(\"| Rule | Changes | Additions | Removals |\")\n print(\"| ---- | ------- | --------- | -------- |\")\n for rule, (additions, removals) in sorted(\n rule_changes.items(),\n key=lambda x: (x[1][0] + x[1][1]),\n reverse=True,\n ):\n print(f\"| {rule} | {additions + removals} | {additions} | {removals} |\")\n\n logger.debug(f\"Finished {len(repositories)} repositories\")", "def check_for_translation_updates_with_repo_and_legalcodes(\n self, repo: git.Repo, legalcodes: Iterable[\"licenses.models.LegalCode\"]\n ):\n self.say(3, \"check if repo is dirty\")\n if repo.is_dirty():\n raise Exception(\n f\"Git repo at {settings.TRANSLATION_REPOSITORY_DIRECTORY} is\"\n \" dirty. We cannot continue.\"\n )\n self.say(2, \"Fetch to update repo\")\n repo.remotes.origin.fetch()\n resource_slugs_on_transifex = self.stats.keys()\n\n # We only have the BY* 4.0 licenses in our database so far.\n # We'd like to process one potential translation branch at a time.\n # For the BY* 4.0 licenses, there's a single translation branch for\n # each language. 
So identify all the languages and iterate over those.\n # (Except English)\n\n # Gather the files we need to update in git.\n # This is a dict with keys = branch names, and values dictionaries\n # mapping relative paths of files to update, to their contents\n # (bytes).\n self.branches_to_update = defaultdict(_empty_branch_object)\n self.legalcodes_to_update = []\n self.branch_objects_to_update = []\n\n legalcodes_with_updated_translations = []\n\n for legalcode in legalcodes:\n language_code = legalcode.language_code\n resource_slug = legalcode.license.resource_slug\n if resource_slug not in resource_slugs_on_transifex:\n self.say(\n 2, f\"Transifex has no resource {resource_slug}. Creating.\"\n )\n\n # Create the resource\n english_pofile = legalcode.get_english_pofile()\n pofile_content = get_pofile_content(english_pofile)\n self.create_resource(\n resource_slug=resource_slug,\n resource_name=legalcode.license.fat_code(),\n pofilename=os.path.basename(\n legalcode.translation_filename()\n ),\n pofile_content=pofile_content,\n )\n self.clear_transifex_stats()\n\n if language_code not in self.stats[resource_slug]:\n self.say(\n 2,\n f\"Transifex has no {language_code} translation for\"\n f\" {resource_slug}\",\n ) # pragma: no cover\n\n # Upload the language\n self.upload_messages_to_transifex(legalcode)\n self.clear_transifex_stats()\n\n # We have a translation in this language for this license on\n # Transifex.\n # When was it last updated?\n last_activity = self.stats[resource_slug][language_code][\n \"translated\"\n ][\"last_activity\"]\n last_tx_update = (\n iso8601.parse_date(last_activity) if last_activity else None\n )\n\n if legalcode.translation_last_update is None:\n # First time: initialize, don't create branch\n legalcode.translation_last_update = last_tx_update\n legalcode.save()\n self.say(2, f\"Initialized last update time for {legalcode}\")\n continue\n\n if last_tx_update <= legalcode.translation_last_update:\n # No change\n self.say(3, f\"No changes for {legalcode}\")\n continue\n\n # Translation has changed!\n self.say(2, f\"Translation has changed for {legalcode}\")\n legalcodes_with_updated_translations.append(legalcode)\n\n return self.handle_legalcodes_with_updated_translations(\n repo, legalcodes_with_updated_translations\n )", "def check_heads(repo, their_heads, context):\n heads = repo.heads()\n heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()\n if not (\n their_heads == [b'force']\n or their_heads == heads\n or their_heads == [b'hashed', heads_hash]\n ):\n # someone else committed/pushed/unbundled while we\n # were transferring data\n raise error.PushRaced(\n b'repository changed while %s - please try again' % context\n )", "def extract_commits(self, repo_obj):\n url = REPO_COMMIT_LIST.format(full_name=repo_obj['full_name'])\n url = self.get_full_url(url)\n json_data = loads(self.get_from_net(url))\n commits = []\n for i in json_data:\n committer = i['committer']\n #stats = self.get_commit_change_stats(full_name=repo_obj['full_name'], commit_sha=i['sha'])\n stats = self.get_commit_change_stats(commit_url=i['url'])\n comm = {#TODO Fetch user's location in USER_URL\n 'date': self.get_commit_datetimezone(i['html_url']),\n 'user': i['commit']['committer']['name'],\n 'login': '',\n 'additions': stats['additions'],\n 'deletions': stats['deletions']\n }\n if committer is not None:\n comm['login'] = committer['login']\n\n commits.append(comm)\n return commits", "def get_build_infos(self, start_rev, end_rev, range=60*60*4):\n pushlogs_finder = 
self._create_pushlog_finder(start_rev, end_rev)\n\n pushlogs = pushlogs_finder.get_pushlogs()\n\n if not pushlogs:\n return []\n\n start_time = pushlogs[0]['date']\n end_time = pushlogs[-1]['date']\n \n build_urls = [(\"%s%s/\" % (self.build_base_url, path), timestamp)\n for path, timestamp in self._extract_paths()]\n\n build_urls_in_range = filter(lambda (u, t): t > (start_time - range)\n and t < (end_time + range), build_urls)\n\n raw_revisions = [push['changesets'][-1] for push in pushlogs]\n all_builds = []\n with futures.ThreadPoolExecutor(max_workers=8) as executor:\n futures_results = {}\n for build_url, timestamp in build_urls_in_range:\n future = executor.submit(self._get_valid_builds,\n build_url,\n timestamp,\n raw_revisions)\n futures_results[future] = build_url\n for future in futures.as_completed(futures_results):\n if future.exception() is not None:\n sys.exit(\"Retrieving valid builds from %r generated an\"\n \" exception: %s\" % (futures_results[future],\n future.exception()))\n all_builds.extend(future.result())\n\n return self._sort_builds(all_builds)", "def ping_leads_of_idle_issues():\n for repo_id in github_api.get_all_repos():\n issues = github_api.get_issues_with_label(repo_id, \"idle\")\n if not issues:\n continue\n\n idle_issue_numbers = [(i[\"number\"], i[\"html_url\"]) for i in issues]\n\n message = ('The following issue(s) in <{repo_url}|{repo}> are '\n 'idle: {issue_links}. (cc: {leads})').format(\n repo_url=\"https://github.com/{}/{}\".format(repo_id.owner, repo_id.name),\n repo=\"{}/{}\".format(repo_id.owner, repo_id.name),\n issue_links=\", \".join(\n \"<{link}|#{number}>\".format(link=link, number=number)\n for (number, link) in idle_issue_numbers),\n leads=\", \".join(github_api.get_leads_for_repo(repo_id)))\n \n slack_api.send_message(message)", "def check_manifests(self):\n # Fetch image repositories\n repos = self.fetch_repos()\n\n # Create an empty dataframe\n df = pd.DataFrame(columns=[\"image_name\", \"age_days\", \"size_gb\"])\n\n # Loop over the repositories\n logging.info(\"Checking repository manifests\")\n for repo in repos:\n # Get the manifest for the current repository\n logging.info(\"Pulling manifests for: %s\" % repo)\n show_cmd = [\n \"az\",\n \"acr\",\n \"repository\",\n \"show-manifests\",\n \"-n\",\n self.name,\n \"--repository\",\n repo,\n ]\n\n result = run_cmd(show_cmd)\n\n if result[\"returncode\"] != 0:\n logging.error(result[\"err_msg\"])\n raise AzureError(result[\"err_msg\"])\n\n logging.info(\"Successfully pulled manifests\")\n outputs = (\n result[\"output\"]\n .replace(\"\\n\", \"\")\n .replace(\" \", \"\")[1:-1]\n .split(\"},\")\n )\n logging.info(\n \"Total number of manifests in %s: %d\" % (repo, len(outputs))\n )\n\n # Loop over the manifests for each repository\n for j, output in enumerate(outputs):\n if j < (len(outputs) - 1):\n output += \"}\"\n\n # Convert the manifest to a dict and extract timestamp\n manifest = json.loads(output)\n timestamp = pd.to_datetime(manifest[\"timestamp\"]).tz_localize(\n None\n )\n\n # Get time difference between now and the manifest timestamp\n diff = (pd.Timestamp.now() - timestamp).days\n logging.info(\n \"%s@%s is %d days old.\" % (repo, manifest[\"digest\"], diff)\n )\n\n # Check the size of each image\n image_size_cmd = [\n \"az\",\n \"acr\",\n \"repository\",\n \"show\",\n \"-n\",\n self.name,\n \"--image\",\n f\"{repo}@{manifest['digest']}\",\n \"--query\",\n \"imageSize\",\n \"-o\",\n \"tsv\",\n ]\n\n result = run_cmd(image_size_cmd)\n\n if result[\"returncode\"] != 0:\n 
logging.error(result[\"err_msg\"])\n raise AzureError(result[\"err_msg\"])\n\n image_size = int(result[\"output\"]) * 1.0e-9\n\n # Append to dataframe\n df = df.append(\n {\n \"image_name\": f\"{repo}@{manifest['digest']}\",\n \"age_days\": diff,\n \"size_gb\": image_size,\n },\n ignore_index=True,\n )\n\n return df", "def _listBranches(self):\n assert self.wc.exists('branches')\n branches = self.wc.ls('branches')\n\n # Some early release branches used a different naming scheme\n # that doesn't sort properly with new-style release names. We\n # filter those out here, along with empty lines.\n branches = [b.strip('/') for b in branches\n if MELANGE_RELEASE_RE.match(b.strip('/'))]\n\n return sorted(branches)", "def missing_branches(self):\n upstream_tags = self.upstream_model.tags_from_semver_point(\"1.19.0\")\n deb_branches = self.deb_model.base.branches_from_semver_point(\"1.19.0\")\n return list(set(upstream_tags) - set(deb_branches))", "def test_heads_contains_false(repository: Repository) -> None:\n assert \"branch\" not in repository.heads", "def find_commits(self, project, shas, manifest_info):\n\n # Temporarily set for other methods to access\n self.project = project\n\n new_commits = list()\n invalid_shas = list()\n commit_cache = list()\n\n remote, project_url = manifest_info.get_project_remote_info(project)\n project_shas = [sha.replace(f'{project}-', '') for sha in shas]\n commit_walker = cbutil_git.CommitWalker(\n project, self.repo_base_path / project, remote, project_url,\n self.repo_cache\n )\n\n for project_sha in project_shas:\n try:\n new_commits.extend(commit_walker.walk(\n project_sha.encode('utf-8'), commit_cache,\n self.is_new_commit, self.update_commit_cache\n ))\n except cbutil_git.MissingCommitError:\n invalid_shas.append(f'{project}-{project_sha}')\n\n # Reset to ensure not accidentally re-used by another run\n # of the method or other methods\n self.project = None\n\n return new_commits, invalid_shas", "def iter_commits(init_source_repo):\n source_repo_path, _, _ = init_source_repo\n repo = Repo(source_repo_path)\n return repo.iter_commits(\"master\")", "def branches(self):\r\n url = '{0}/branches/'.format(self.get_url())\r\n request = http.Request('GET', url)\r\n\r\n return request, parsers.parse_json", "def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):\n # Check for each named branch if we're creating new remote heads.\n # To be a remote head after push, node must be either:\n # - unknown locally\n # - a local outgoing head descended from update\n # - a remote head that's known locally and not\n # ancestral to an outgoing head\n if remoteheads == [nullid]:\n # remote is empty, nothing to check.\n return\n\n if remote.capable('branchmap'):\n headssum = _headssummary(repo, remote, outgoing)\n else:\n headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)\n newbranches = [branch for branch, heads in headssum.iteritems()\n if heads[0] is None]\n # 1. Check for new branches on the remote.\n if newbranches and not newbranch: # new branch requires --new-branch\n branchnames = ', '.join(sorted(newbranches))\n raise util.Abort(_(\"push creates new remote branches: %s!\")\n % branchnames,\n hint=_(\"use 'hg push --new-branch' to create\"\n \" new remote branches\"))\n\n # 2 compute newly pushed bookmarks. 
We\n # we don't warned about bookmarked heads.\n localbookmarks = repo._bookmarks\n remotebookmarks = remote.listkeys('bookmarks')\n bookmarkedheads = set()\n for bm in localbookmarks:\n rnode = remotebookmarks.get(bm)\n if rnode and rnode in repo:\n lctx, rctx = repo[bm], repo[rnode]\n if bookmarks.validdest(repo, rctx, lctx):\n bookmarkedheads.add(lctx.node())\n\n # 3. Check for new heads.\n # If there are more heads after the push than before, a suitable\n # error message, depending on unsynced status, is displayed.\n error = None\n unsynced = False\n allmissing = set(outgoing.missing)\n allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))\n allfuturecommon.update(allmissing)\n for branch, heads in sorted(headssum.iteritems()):\n if heads[0] is None:\n # Maybe we should abort if we push more that one head\n # for new branches ?\n continue\n candidate_newhs = set(heads[1])\n # add unsynced data\n oldhs = set(heads[0])\n oldhs.update(heads[2])\n candidate_newhs.update(heads[2])\n dhs = None\n discardedheads = set()\n if repo.obsstore:\n # remove future heads which are actually obsolete by another\n # pushed element:\n #\n # XXX as above, There are several cases this case does not handle\n # XXX properly\n #\n # (1) if <nh> is public, it won't be affected by obsolete marker\n # and a new is created\n #\n # (2) if the new heads have ancestors which are not obsolete and\n # not ancestors of any other heads we will have a new head too.\n #\n # This two case will be easy to handle for know changeset but much\n # more tricky for unsynced changes.\n newhs = set()\n for nh in candidate_newhs:\n if nh in repo and repo[nh].phase() <= phases.public:\n newhs.add(nh)\n else:\n for suc in obsolete.allsuccessors(repo.obsstore, [nh]):\n if suc != nh and suc in allfuturecommon:\n discardedheads.add(nh)\n break\n else:\n newhs.add(nh)\n else:\n newhs = candidate_newhs\n if [h for h in heads[2] if h not in discardedheads]:\n unsynced = True\n if len(newhs) > len(oldhs):\n # strip updates to existing remote heads from the new heads list\n dhs = sorted(newhs - bookmarkedheads - oldhs)\n if dhs:\n if error is None:\n if branch not in ('default', None):\n error = _(\"push creates new remote head %s \"\n \"on branch '%s'!\") % (short(dhs[0]), branch)\n else:\n error = _(\"push creates new remote head %s!\"\n ) % short(dhs[0])\n if heads[2]: # unsynced\n hint = _(\"you should pull and merge or \"\n \"use push -f to force\")\n else:\n hint = _(\"did you forget to merge? \"\n \"use push -f to force\")\n if branch is not None:\n repo.ui.note(_(\"new remote heads on branch '%s'\\n\") % branch)\n for h in dhs:\n repo.ui.note(_(\"new remote head %s\\n\") % short(h))\n if error:\n raise util.Abort(error, hint=hint)\n\n # 6. 
Check for unsynced changes on involved branches.\n if unsynced:\n repo.ui.warn(_(\"note: unsynced remote changes!\\n\"))", "def generateFinishedBuilds(builders=[], branches=[],\n num_builds=None, finished_before=None,\n max_search=200):", "def all_commits(change_id):\n commits = []\n manifest = ET.ElementTree(file='.repo/manifest.xml')\n url = (GERRIT_ROOT + 'changes/?o=CURRENT_REVISION&q=status:open+' +\n change_id)\n changes = request.urlopen(url)\n for change in parse_response(changes):\n project = change['project']\n fetch = change['revisions'][change['current_revision']]['fetch']\n # The `ref` is the same for every download scheme, hence we can use\n # the first one that is there\n ref = fetch.values()[0]['ref']\n path = project_path(manifest, project)\n commits.append((project, path, ref))\n return commits", "def _get_branches(self):\n logging.info('--- Get Branches ---')\n self.local_branches = set(self.find_branches())\n self.remote_branches = set(self.find_branches(remote=True))\n # Tags are remote branches that start with \"tags/\".\n self.tags = {\n single_branch for single_branch in self.remote_branches\n if PRX_SVNTAGS_PREFIX.match(single_branch)}", "def list_repo_state(repo):\n return {'HEAD': repo.head.commit.hexsha,\n 'branch': repo.active_branch.name,\n 'modified_files': [f.a_path for f in repo.head.commit.diff(None)],\n 'untracked_files': repo.untracked_files,\n 'diff_vs_head': repo.git.diff(repo.head.commit.tree)}", "def force_branch_stale_review_dismissal(branch_name: str) -> repoproc_t:\n def _force_branch_stale_review_dismissal(org: Organization, repo: Repository,\n branches: Dict[str, Branch]) -> List[Change[str]]:\n if branch_name in branches:\n return _set_dismiss_stale_approvals(branches[branch_name])\n else:\n print_warning(\"Requested to dismiss stale reviews on branch %s on repo %s, but the branch does not exist.\" %\n (highlight(branch_name), highlight(repo.name)))\n return []\n return _force_branch_stale_review_dismissal", "def test_get_latest_results(self):\n for x in xrange(98, 103):\n self.db.insert_single_result(generate_mock_result(repository='test-repo', run_id=x))\n self.db.insert_single_result(generate_mock_result(repository='newtest-repo', run_id=x + 1))\n results = self.db.get_all_results()\n self.assertEqual(len(results), 10)\n latest = self.db.get_latest_results()\n self.assertEqual(len(latest), 2)\n for result in latest:\n if result.repository == 'test-repo':\n self.assertEqual(result.run_id, '102')\n elif result.repository == 'newtest-repo':\n self.assertEqual(result.run_id, '103')", "def get_branches(self):\n\n # gets all branches in repository\n branches_endpoint = f'/repositories/{self.owner}/{self.repo}/refs/branches'\n filter_param = {'fields': 'values.name'}\n response = self._get_request(branches_endpoint, filter_param)\n # guard condition\n if response.status_code != STATUS_CODE_OK:\n raise BitbucketRequestSenderExc(\n f'Invalid parameter(s) in: owner: {self.owner},'\n f' repo: {self.repo}')\n # deserialize\n branches_page = response.json()\n\n return [\n {\n 'name': branch['name']\n } for branch in branches_page['values']\n ]", "def _knownrevs(repo, nodes):\n torev = repo.changelog.nodemap.get\n for n in nodes:\n rev = torev(n)\n if rev is not None:\n yield rev", "def collect(self):\n repos = requests.get('http://{}:{}/api/unstable/dosocs/repos'.format(\n self.config['broker_host'],self.config['broker_port'])).json()\n\n for repo in repos:\n try:\n logger.info(f'Adding Repo Labor Data for Repo: {repo}')\n 
self.generate_value_data(repo['repo_id'], repo['path'])\n except Exception as e:\n logger.error(f'Error occured for Repo: {repo}')\n logger.exception(e)\n\n self.register_task_completion('value')\n\n # while True:\n # time.sleep(2)\n # logger.info(f'Maintain Queue Empty: {self._maintain_queue.empty()}')\n # logger.info(f'Queue Empty: {self._queue.empty()}')\n # if not self._queue.empty():\n # message = self._queue.get()\n # logger.info(f\"Popped off message from Queue: {message.entry_info}\")\n # self.working_on = \"UPDATE\"\n # elif not self._maintain_queue.empty():\n # message = self._maintain_queue.get()\n # logger.info(f\"Popped off message from Maintain Queue: {message.entry_info}\")\n # self.working_on = \"MAINTAIN\"\n # else:\n # break\n\n # if message.type == 'EXIT':\n # break\n\n # if message.type != 'TASK':\n # raise ValueError(f'{message.type} is not a recognized task type')\n\n # if message.type == 'TASK':\n # try:\n # repos = requests.get('http://{}:{}/api/unstable/dosocs/repos'.format(\n # self.config['broker_host'],self.config['broker_port'])).json()\n\n # for repo in repos:\n # self.generate_value_data(repo['repo_id'], repo['path'])\n\n # self.register_task_completion('value')\n\n # except Exception:\n # # logger.error(\"Worker ran into an error for task: {}\\n\".format(message.entry_info['task']))\n # # logger.error(\"Error encountered: \" + str(e) + \"\\n\")\n # # # traceback.format_exc()\n # # logger.info(\"Notifying broker and logging task failure in database...\\n\")\n\n # logger.exception(f'Worker ran into an error for task {message.entry_info}')\n # self.register_task_failure(message.entry_info['repo_id'],\n # message.entry_info['task']['given']['git_url'])\n\n # # Add to history table\n # task_history = {\n # \"repo_id\": message.entry_info['repo_id'],\n # \"worker\": self.config['id'],\n # \"job_model\": message.entry_info['task']['models'][0],\n # \"oauth_id\": self.config['zombie_id'],\n # \"timestamp\": datetime.datetime.now(),\n # \"status\": \"Error\",\n # \"total_results\": self.results_counter\n # }\n\n # if self.history_id:\n # self.helper_db.execute(self.history_table.update().where(self.history_table.c.history_id==self.history_id).values(task_history))\n # else:\n # r = self.helper_db.execute(self.history_table.insert().values(task_history))\n # self.history_id = r.inserted_primary_key[0]\n\n # logger.info(f\"Recorded job error for: {message.entry_info['task']}\")\n\n # # Update job process table\n # updated_job = {\n # \"since_id_str\": message.entry_info['repo_id'],\n # \"last_count\": self.results_counter,\n # \"last_run\": datetime.datetime.now(),\n # \"analysis_state\": 0\n # }\n # self.helper_db.execute(self.job_table.update().where(self.job_table.c.job_model==message.entry_info['task']['models'][0]).values(updated_job))\n # logger.info(\"Updated job process for model: \" + message.entry_info['task']['models'][0] + \"\\n\")\n\n # # Reset results counter for next task\n # self.results_counter = 0\n # pass", "def gen_history(\n conn_list: List[DBConn],\n isolation_level: str,\n abort_rate: float = 0.15,\n write_rate: float = 0.33,\n predicate_read_rate: float = 0.10,\n n_objs: Optional[int] = 16,\n n_tables: Optional[int] = 3,\n seed: Optional[int] = None,\n transaction_limit: Optional[int] = 100,\n time_limit_sec: Optional[int] = None,\n db_name: Optional[str] = None,\n table_names: Optional[List[str]] = None,\n nemesis: Optional[Nemesis] = None,\n for_update: bool = False,\n teardown: bool = True,\n) -> History:\n\n def check_args(\n conn_list: 
List[DBConn],\n isolation_level: str,\n abort_rate: float,\n write_rate: float,\n predicate_read_rate: float,\n n_objs: Optional[int],\n n_tables: Optional[int],\n seed: Optional[int],\n transaction_limit: Optional[int],\n time_limit_sec: Optional[int],\n db_name: Optional[str],\n table_names: Optional[List[str]],\n nemesis: Optional[Nemesis],\n teardown: bool,\n ) -> None:\n \"\"\"\n Verify integrity of the arguments\n \"\"\"\n\n if n_tables is not None and table_names is not None:\n if n_tables != len(table_names if table_names else []):\n raise ValueError(\n \"Provided table names ({}) and number of tables ({}) are not compatible\".format(\n table_names, n_tables\n )\n )\n\n if not n_tables and not table_names:\n raise ValueError(\"One of <table_names> and <n_tables> need to be non-None\")\n\n if n_tables and n_tables < 1:\n raise ValueError(\"Need at least one table: {}\".format(n_tables))\n\n if table_names:\n if not all(table_names): # check that all elements are non None and not ''\n raise ValueError(\"A table name needs at least one character: {}\".format(table_names))\n\n if transaction_limit is None and time_limit_sec is None:\n raise ValueError(\"One of <transaction_limit> and <time_limit_sec> need to be non-None\")\n\n if n_objs is not None and n_objs < 1:\n raise ValueError(\"Need at least one object\")\n\n if not (0.0 <= abort_rate <= 1.0):\n raise ValueError(\"The abort rate needs to be in the [0.0, 1.0] range: {}\".format(abort_rate))\n\n if not (0.0 <= write_rate <= 1.0):\n raise ValueError(\"The write rate needs to be in the [0.0, 1.0] range: {}\".format(write_rate))\n\n if not (0.0 <= predicate_read_rate <= 1.0):\n raise ValueError(\n \"The predicate read rate needs to be in the [0.0, 1.0] range: {}\".format(predicate_read_rate)\n )\n\n if not (0.0 <= write_rate + predicate_read_rate <= 1.0):\n raise ValueError(\n \"The write rate + the predicate read rate needs to be in the [0.0, 1.0] range: {}\".format(\n write_rate + predicate_read_rate\n )\n )\n\n if len(conn_list) < 1:\n raise ValueError(\"Need at least one valid connection\")\n\n def setup(\n conn_list: List[DBConn],\n n_objs: Optional[int],\n seed: Optional[int],\n n_tables: Optional[int],\n db_name: Optional[str],\n table_names: Optional[List[str]],\n ) -> Tuple[int, str, List[str]]:\n \"\"\"\n Sets up the environment for the test\n Returns n_objs, db_name, table_names\n \"\"\"\n\n if not seed:\n seed = int(time.time() * 1e6)\n\n random.seed(seed)\n\n if not db_name:\n db_name = \"db_\" + \"\".join(random.choices(string.ascii_lowercase, k=32))\n\n if not table_names:\n table_names = [\n \"table_\" + \"\".join(random.choices(string.ascii_lowercase, k=32))\n for _ in range(n_tables if n_tables else 0)\n ]\n\n if not n_objs:\n n_objs = random.randint(min(len(table_names), 4), 64)\n\n for obj_id in range(n_objs):\n obj_ver[obj_id] = 0\n\n conn_list[0].execute(\"set global lock_wait_timeout=7\")\n conn_list[0].execute(\"create database if not exists {}\".format(db_name))\n for conn in conn_list:\n conn.execute(\"use {}\".format(db_name))\n\n for table in table_names:\n conn_list[0].execute(\n \"create table if not exists {}(id int not null, value text, primary key (id))\".format(table)\n )\n\n logger.info(\n \"[+] starting history generation (seed: {}, n_objs: {} DB: {}, tables: {})\".format(\n seed, n_objs, db_name, \", \".join(table_names)\n )\n )\n\n return n_objs, db_name, table_names\n\n def partition_ids(n_objs: int, table_names: List[str]) -> List[DBObject]:\n \"\"\"\n (Randomly) partition the object ids 
between the different tables\n \"\"\"\n\n rev_map: Dict[str, int] = {name: 1 for name in table_names}\n while sum(rev_map.values()) < n_objs:\n rev_map[random.choice(table_names)] += 1\n\n obj_list: List[DBObject] = list()\n for table, n in rev_map.items():\n for _ in range(n):\n obj_list.append(DBObject(len(obj_list), table))\n\n return obj_list\n\n def gen_transaction(\n txn_id: int,\n obj_list: List[DBObject],\n table_names: List[str],\n isolation_level: str,\n min_size: int,\n max_size: int,\n abort_rate: float,\n write_rate: float,\n predicate_read_rate: float,\n for_update: bool,\n ) -> Transaction:\n \"\"\"\n Generates a list of SQL statemtents for a transaction\n\n <obj_list>: list of objects\n <isolation_level>: isolation level for the transaction\n <min_size>: minimum size of the transaction (in number of operations)\n <max_size>: maximum size of the transaction (in number of operations)\n <abort_rate>: abort rate (domain = [0.0, 1.0])\n <write_rate>: write rate (domain = [0.0, 1.0])\n <predicate_read_rate>: predicate read rate (domain = [0.0, 1.0])\n \"\"\"\n\n def gen_op(\n obj_list: List[DBObject],\n table_names: List[str],\n write_rate: float,\n predicate_read_rate: float,\n for_update: bool,\n chosen_len: int,\n ) -> List[Operation]:\n \"\"\"\n Generate a single operation\n\n By fixing a chosen len across a transaction, it makes it more likely for there to be conflicts\n \"\"\"\n rnd: float = random.random()\n if rnd < write_rate:\n obj: DBObject = random.choice(obj_list)\n # This creates an object if it doesn't exist\n # Note that we cannot rely on the object being created if obj_ver[obj_id] > 0.\n # This is because obj_ver denotes the order in which the statements are *generated* not executed\n # It is incremental to ensure *uniqueness*, not *order*\n # For instance, \"1,2,0,4,3\" is a valid value for an object, but \"1,2,1,4,3\" is not\n #\n obj_ver[obj.id] += 1\n return [\n Operation(Operation.Type.READ, obj=obj, for_update=for_update),\n Operation(Operation.Type.WRITE, obj=obj, value=obj_ver[obj.id]),\n ]\n elif write_rate <= rnd < write_rate + predicate_read_rate:\n return [\n Operation(\n Operation.Type.PREDICATE_READ,\n tables=table_names,\n value=chosen_len,\n for_update=for_update,\n )\n ]\n else:\n return [\n Operation(\n Operation.Type.READ,\n obj=random.choice(obj_list),\n for_update=for_update,\n )\n ]\n\n size: int = random.randint(min_size, max_size)\n # How many times, on average, each txn will write to an object\n #\n AVG_WRITE_PER_OBJECT_PER_TXN: float = (write_rate * 0.5 * (min_size + max_size)) / len(obj_list)\n\n # This is a bit hacky, but multiplying AVG_WRITE_PER_OBJECT_PER_TXN by\n # the transaction id gives the approximate average size of each object at this point\n # since it approximates sum([AVG_WRITE_PER_OBJECT_PER_TXN] * N_TXN_UNTIL_THIS_POINT)\n #\n AVG_OBJECT_SIZE: int = int(AVG_WRITE_PER_OBJECT_PER_TXN * txn_id)\n\n ops: List[Operation] = [\n Operation(Operation.Type.SET_ISOLATION, isolation_level=isolation_level),\n Operation(Operation.Type.BEGIN),\n ]\n\n for _ in range(size):\n # Using this hacky math makes the predicate reads more likely to return\n # interesting queries\n #\n # We intentionally skew in favour of returning less values, which\n # makes this more prone to returning less values, and consequently\n # generating more anti-dependencies\n #\n for op in gen_op(\n obj_list,\n table_names,\n write_rate,\n predicate_read_rate,\n for_update,\n random.randint(int(AVG_OBJECT_SIZE * 0.85), int(AVG_OBJECT_SIZE * 1.35)),\n ):\n 
ops.append(op)\n\n if random.random() < abort_rate:\n ops.append(Operation(Operation.Type.ROLLBACK))\n else:\n ops.append(Operation(Operation.Type.COMMIT))\n\n return Transaction(txn_id, ops)\n\n def gen_init_txn(txn_id: int, obj_list: List[DBObject]) -> Transaction:\n \"\"\"\n Generate initial transaction to set initial value for objects\n This transaction should happend without concurrency\n \"\"\"\n ops: List[Operation] = [\n Operation(Operation.Type.SET_ISOLATION, isolation_level=\"serializable\"),\n Operation(Operation.Type.BEGIN),\n ]\n for obj in obj_list:\n ops.append(Operation(Operation.Type.WRITE, obj=obj, value=obj_ver[obj.id]))\n\n ops.append(Operation(Operation.Type.COMMIT))\n return Transaction(txn_id, ops)\n\n def gen_final_txn(txn_id: int, obj_list: List[DBObject]) -> Transaction:\n \"\"\"\n Generate final transaction to reads all objects\n This transaction should happend without concurrency\n \"\"\"\n ops: List[Operation] = [\n Operation(Operation.Type.SET_ISOLATION, isolation_level=\"serializable\"),\n Operation(Operation.Type.BEGIN),\n ]\n for obj in obj_list:\n ops.append(Operation(Operation.Type.READ, obj=obj))\n\n ops.append(Operation(Operation.Type.COMMIT))\n return Transaction(txn_id, ops)\n\n def do_teardown(conn: DBConn, db_name: str) -> None:\n conn.execute(\"drop database {}\".format(db_name))\n\n def process_txn(obj_list: List[DBObject], conn: DBConn, conn_id: int, txn: Transaction) -> Iterator[HistoryElem]:\n \"\"\"\n Process a transaction as an iterator\n \"\"\"\n object_versions: Dict[int, List[int]] = dict()\n try:\n for op in txn.ops:\n invoc: float = time.time()\n ret: Optional[List[Tuple[Any, ...]]]\n if op.type == Operation.Type.WRITE:\n prev_version = object_versions[op.obj.id] if op.obj.id in object_versions else list()\n ret = conn.execute(op.stmt(prev_version))\n else:\n ret = conn.execute(op.stmt())\n\n if op.type == Operation.Type.PREDICATE_READ:\n resp: float = time.time()\n yield HistoryElem(op, Result(value=ret), conn_id, txn.id, invoc, resp)\n for tup in ret:\n object_versions[tup[0]] = [int(v) for v in tup[1].strip().split(\",\")]\n yield HistoryElem(\n Operation(Operation.Type.READ, obj=obj_list[tup[0]]),\n Result(value=[(tup[1],)]),\n conn_id,\n txn.id,\n invoc,\n resp,\n )\n else:\n res: Result = Result(value=ret) if ret is not None else Result()\n if res.is_ok() and res.is_value():\n object_versions[op.obj.id] = res.value()\n yield HistoryElem(op, res, conn_id, txn.id, invoc)\n except Exception as e:\n conn.process_exception(e)\n yield HistoryElem(op, Result(exception=e), conn_id, txn.id, invoc)\n\n def connection_work(\n conn: DBConn,\n conn_id: int,\n obj_list: List[DBObject],\n in_queue: multiprocessing.Queue, # type: ignore\n out_queue: multiprocessing.Queue, # type: ignore\n time_limit_sec: Optional[int],\n done_ctr: multiprocessing.Value,\n ) -> None:\n \"\"\"\n Main function of the connection \"thread\"\n Pops values from the input queue and places hitory elements in the outgoing queue\n \"\"\"\n\n time.sleep(3)\n begin_ts = time.time()\n logger.info(\"[{}]: started thread\".format(conn_id))\n\n empty_cnt: int = 0\n\n while time_limit_sec is None or time.time() < begin_ts < time_limit_sec:\n try:\n txn = in_queue.get(timeout=2)\n except queue.Empty:\n empty_cnt += 1\n if empty_cnt > 3:\n break\n else:\n continue\n\n logger.info(\"[{}]: poped transaction {} (size = {})\".format(conn_id, txn.id, in_queue.qsize()))\n for hist_elem in process_txn(obj_list, conn, conn_id, txn):\n out_queue.put(hist_elem)\n logger.info(\"[{}]: finished 
transaction {}\".format(conn_id, txn.id))\n\n logger.info(\"[{}]: closing queue (size = {})\".format(conn_id, out_queue.qsize()))\n out_queue.close()\n with done_ctr.get_lock():\n done_ctr.value += 1\n logger.info(\"[{}]: finished thread (done ctr at {})\".format(conn_id, done_ctr.value))\n time.sleep(3)\n\n def drive_nemesis(nemesis: Nemesis, done_ctr: multiprocessing.Value, n_conns: int) -> None:\n logger.info(\"[nemesis]: started\")\n while done_ctr.value < n_conns:\n nemesis.inject()\n\n logger.info(\"[nemesis]: begin system healing\")\n nemesis.heal()\n logger.info(\"[nemesis]: system healed\")\n\n # setup phase\n check_args(\n conn_list,\n isolation_level,\n abort_rate,\n write_rate,\n predicate_read_rate,\n n_objs,\n n_tables,\n seed,\n transaction_limit,\n time_limit_sec,\n db_name,\n table_names,\n nemesis,\n teardown,\n )\n n_objs, db_name, table_names = setup(conn_list, n_objs, seed, n_tables, db_name, table_names)\n obj_list: List[DBObject] = partition_ids(n_objs, table_names)\n\n # init values for objects\n history: List[HistoryElem] = list()\n txn_id: int = 0\n init_txn: Transaction = gen_init_txn(txn_id, obj_list)\n txn_id += 1\n\n logger.info(\"[{}]: started initial transaction {}\".format(0, init_txn.id))\n for hist_elem in process_txn(obj_list, conn_list[0], 0, init_txn):\n history.append(hist_elem)\n logger.info(\"[{}]: finished transaction {}\".format(0, init_txn.id))\n\n txn_queue: multiprocessing.Queue[Transaction] = multiprocessing.Queue()\n hist_queue: multiprocessing.Queue[HistoryElem] = multiprocessing.Queue()\n done_ctr: multiprocessing.Value = multiprocessing.Value(\"i\", 0)\n\n for _ in range(transaction_limit if transaction_limit else 10 * len(conn_list)):\n # Here, if there is no limit we add 10 times the number of connections to make sure\n # they don't starve before the time limit ends\n #\n txn_queue.put(\n gen_transaction(\n txn_id,\n obj_list,\n table_names,\n isolation_level,\n MIN_TXN_SIZE,\n MAX_TXN_SIZE,\n abort_rate,\n write_rate,\n predicate_read_rate,\n for_update,\n )\n )\n txn_id += 1\n\n procs: List[multiprocessing.Process] = [\n multiprocessing.Process(\n target=connection_work,\n args=(\n conn,\n conn_id,\n obj_list,\n txn_queue,\n hist_queue,\n time_limit_sec,\n done_ctr,\n ),\n )\n for conn_id, conn in enumerate(conn_list)\n ]\n if nemesis is not None:\n procs += [multiprocessing.Process(target=drive_nemesis, args=(nemesis, done_ctr, len(conn_list)))]\n\n for p in procs:\n p.start()\n\n logger.info(\"started {} procs\".format(len(procs)))\n\n while done_ctr.value < len(conn_list):\n if transaction_limit is None:\n # We ensure that the queue is always confortably full\n #\n qsize: int = txn_queue.qsize()\n if qsize < 10 * len(conn_list):\n for _ in range(10 * len(conn_list)):\n txn_queue.put(\n gen_transaction(\n txn_id,\n obj_list,\n table_names,\n isolation_level,\n MIN_TXN_SIZE,\n MAX_TXN_SIZE,\n abort_rate,\n write_rate,\n predicate_read_rate,\n for_update,\n )\n )\n txn_id += 1\n\n txn_queue.close()\n\n while not hist_queue.empty():\n history.append(hist_queue.get())\n\n hist_queue.close()\n\n for idx, p in enumerate(procs):\n p.join()\n logger.info(\"joined proc {}\".format(idx))\n\n logger.info(\"joined all {} procs\".format(len(procs)))\n\n final_txn: Transaction = gen_final_txn(txn_id, obj_list)\n logger.info(\"[{}]: started final transaction {}\".format(0, final_txn.id))\n for hist_elem in process_txn(obj_list, conn_list[0], 0, final_txn):\n history.append(hist_elem)\n logger.info(\"[{}]: finished transaction {}\".format(0, 
final_txn.id))\n\n if teardown:\n do_teardown(conn_list[0], db_name)\n\n return History(history)", "def getCommitsSinceLastRelease(self):\n f = open(self.last_released, 'r')\n old_rev = f.read().replace('\\n', '')\n f.close()\n new_rev = commands.getoutput('cd '+self.proj_dir+' && git log -1 --format=%H')\n cmd = 'cd '+self.proj_dir+' && git log --no-merges --pretty=format:\"%s\" '+old_rev+'..'+new_rev\n unreleased_commits = commands.getoutput(cmd) \n print 'Commits since last release:'\n print unreleased_commits\n unreleased_commits = unreleased_commits.split('\\n')\n self.commit_msgs = unreleased_commits\n self.new_rev = new_rev", "def branches(self) -> list[str]:\n _args: list[Arg] = []\n _ctx = self._select(\"branches\", _args)\n return _ctx.execute_sync(list[str])", "def branches(self):\r\n url = self.base_url + 'branches/'\r\n return json.loads(self.bb.load_url(url))", "def bamboo_builds(ctx, from_date, to_date, use_cache):\r\n\r\n if from_date is None:\r\n from_date, to_date = previous_month_range()\r\n\r\n log.info('Getting Bamboo builds between {} and {}'.format(from_date, to_date))\r\n report = BambooBuildsReport(\r\n ctx.obj,\r\n from_date=from_date,\r\n to_date=to_date\r\n )\r\n report.run_report(use_cache=use_cache)", "def get_dirty_paths_by_status(self) -> Dict[str, List[Path]]:\n output = zsplit(git.status(\"--porcelain\", \"-z\").stdout.decode())\n return bucketize(\n output,\n key=lambda line: line[0],\n value_transform=lambda line: Path(line[3:]),\n )", "def _computeunstableset(repo):\n # revset is not efficient enough here\n # we do (obsolete()::) - obsolete() by hand\n obs = getrevs(repo, 'obsolete')\n if not obs:\n return set()\n cl = repo.changelog\n return set(r for r in cl.descendants(obs) if r not in obs)", "def _assign_branches(ctx, prl):\n heads = prl.set_heads\n if not heads:\n return None\n branch_dict = ctx.branch_dict()\n LOG.debug2('allowing branch creation: %s', ctx.branch_creation)\n # Assign branches to each of the received commits for pushed branches\n assigner = Assigner(branch_dict, heads, ctx)\n assigner.assign()\n return assigner", "def initializeBuildArea(self):\r\n\r\n repo_map = self.getRepoStatus()\r\n\r\n for obj in self.config[\"repos\"]:\r\n if obj[\"name\"] not in repo_map:\r\n if \"url\" in obj:\r\n print(\"Checking out code to {} for {}\".format(obj[\"path\"], obj[\"name\"]))\r\n if \"branch\" in obj:\r\n self.cloneGitRepo(obj[\"url\"], obj[\"path\"], obj[\"branch\"])\r\n else:\r\n self.cloneGitRepo(obj[\"url\"], obj[\"path\"])\r\n\r\n else:\r\n print(\"Creating directory : {} for repo : {}\".format(obj[\"path\"], obj[\"name\"]))\r\n makedirs(obj[\"path\"])\r\n\r\n else:\r\n if self.verbose:\r\n print(\"Repo : {}, already exists skipping!!\".format(obj[\"name\"]))", "def branches_by_name(self, username, repository_name, access_token=None):\n return self._complete_request_by_name(\n username, repository_name, \"branches\", access_token)", "def __gitBranchList(self):\n self.vcs.gitListTagBranch(self.project.getProjectPath(), False)", "def get_branches(local_only=False, directory=None):\n cmd = 'git branch --no-color'\n if not local_only:\n cmd += ' -a'\n out = check_output(cmd, shell=True, cwd=directory)\n branches = []\n for line in out.splitlines():\n if line.count('HEAD -> ') > 0:\n continue\n if line.count('(no branch)') > 0:\n continue\n line = line.strip('*').strip()\n branches.append(line)\n return branches", "def missing_branches(self):\n upstream_tags = self.upstream_model.tags_from_semver_point(\"0.8.7\")\n deb_branches = 
self.deb_model.base.branches_from_semver_point(\"0.8.7\")\n return list(set(upstream_tags) - set(deb_branches))", "def test_create_repositories_by_username_by_repo_slug_commit_by_node_statuses_build(self):\n pass", "def update_stable(path, sha_list, origin):\n\n conn = sqlite3.connect(rebasedb)\n c = conn.cursor()\n\n cmd = ['git', '-C', path, 'log', '--no-merges', '--abbrev=12', '--oneline',\n '--reverse', sha_list]\n commits = subprocess.check_output(cmd, encoding='utf-8', errors='ignore')\n\n for commit in commits.splitlines():\n if commit != '':\n elem = commit.split(' ')[:1]\n sha = elem[0]\n c.execute(\"select sha from stable where sha is '%s'\" % sha)\n found = c.fetchall()\n if found == []:\n c.execute('INSERT INTO stable(sha, origin) VALUES (?, ?)', (\n sha,\n origin,\n ))\n\n conn.commit()\n conn.close()", "def branch_list(repo, remote_name, pattern=None):\n # The return string for a remote reference is a single line with two\n # fields separated by a tab string. The first field is a commit hash.\n # The second field is the reference path. The unique part of the path\n # is the last field.\n #\n # 423f434cd877926ff47f3a710a7b0c414785515e\trefs/heads/enterprise-3.0\n\n lines = repo.git.ls_remote(remote_name, pattern, heads=True).split(\"\\n\")\n return [str(line.split('/')[-1]) for line in lines]", "def test_eligible_includes_expired_syncing(initialized_db):\n\n disable_existing_mirrors()\n mirror_first, repo_first = create_mirror_repo_robot([\"updated\", \"created\"], repo_name=\"first\")\n mirror_second, repo_second = create_mirror_repo_robot(\n [\"updated\", \"created\"], repo_name=\"second\"\n )\n mirror_third, repo_third = create_mirror_repo_robot([\"updated\", \"created\"], repo_name=\"third\")\n mirror_fourth, repo_third = create_mirror_repo_robot([\"updated\", \"created\"], repo_name=\"fourth\")\n\n mirror_second.sync_expiration_date = datetime.utcnow() - timedelta(hours=1)\n mirror_second.sync_status = RepoMirrorStatus.SYNCING\n mirror_second.save()\n\n mirror_fourth.sync_expiration_date = datetime.utcnow() + timedelta(hours=1)\n mirror_fourth.sync_status = RepoMirrorStatus.SYNCING\n mirror_fourth.save()\n\n candidates = get_eligible_mirrors()\n\n assert len(candidates) == 3\n assert candidates[0] == mirror_first\n assert candidates[1] == mirror_second\n assert candidates[2] == mirror_third", "def history_diff(env_name: str, env, env_reader) -> ListLike:\n version_diff_pkges: ListLike = []\n new_pkges: ListLike = []\n missing_pkges: ListLike = []\n\n history_conda_pkges = env_reader.get_environment()[\"dependencies\"]\n history_conda_pkges_dict = {}\n for spec in history_conda_pkges:\n name, package = Package.from_spec(spec)\n history_conda_pkges_dict[name] = package\n local_conda_pkges = get_dependencies(name=env_name)[\"conda\"]\n for name, package in local_conda_pkges.items():\n if name in history_conda_pkges_dict:\n if package.version != history_conda_pkges_dict[name].version:\n version_diff_pkges.append(\n \"-\" + name + \"=\" + history_conda_pkges_dict[name]\n )\n version_diff_pkges.append(\"+\" + name + \"=\" + package.version)\n else:\n new_pkges.append(\"+\" + name + \"=\" + package.version)\n for package in env.history.packages[\"conda\"]:\n if package not in local_conda_pkges.keys():\n missing_pkges.append(\"-\" + package)\n\n return version_diff_pkges, new_pkges, missing_pkges" ]
[ "0.71542406", "0.57429", "0.5712632", "0.5642177", "0.55806303", "0.5520008", "0.54752177", "0.54629207", "0.5433732", "0.5424115", "0.53311217", "0.5299445", "0.5283825", "0.52417576", "0.5239561", "0.5191586", "0.5190831", "0.51559097", "0.5126341", "0.512589", "0.50958616", "0.506375", "0.5058959", "0.4998779", "0.4994151", "0.49883273", "0.4988065", "0.4966503", "0.49611512", "0.4944596", "0.49445644", "0.49351722", "0.49318823", "0.49264133", "0.49155533", "0.49107328", "0.4905751", "0.49041823", "0.4894089", "0.48937488", "0.4892339", "0.48859754", "0.4873592", "0.48718634", "0.48646113", "0.4862932", "0.4858983", "0.4848835", "0.48447248", "0.48446128", "0.48430213", "0.48246217", "0.48040274", "0.4800334", "0.47858325", "0.47848955", "0.47812834", "0.4774983", "0.47700137", "0.47662765", "0.4756871", "0.47439352", "0.47432196", "0.4737466", "0.47370782", "0.47239706", "0.47201928", "0.4714042", "0.47118592", "0.4711583", "0.47105992", "0.47090656", "0.47026262", "0.4697617", "0.46893272", "0.46829313", "0.46796784", "0.46787122", "0.46759978", "0.46757326", "0.4656431", "0.46438256", "0.4632928", "0.46301267", "0.4613181", "0.4611939", "0.46095043", "0.46042088", "0.4581332", "0.4580464", "0.45778352", "0.45776302", "0.45729148", "0.45726714", "0.45664898", "0.45533985", "0.45525345", "0.45430744", "0.45367604", "0.45343417" ]
0.72896576
0
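
The first record ends here (document_score 0.72896576, document_rank 0). Its first, highest-scored negative is a diff-reporting routine that tallies lint-rule changes by matching codes like `E501` with the regex `": ([A-Z]{1,4}[0-9]{3,4})"`. Below is a minimal, self-contained sketch of just that tallying step; the sample diff lines are invented purely for illustration.

```python
import re
from collections import defaultdict

# Same pattern as in the negative above: a lint rule code such as
# "E501" or "ERA001" appearing after ": " in a unified-diff line.
RULE_RE = re.compile(r": ([A-Z]{1,4}[0-9]{3,4})")

def tally_rule_changes(diff_lines):
    """Count [additions, removals] per rule code in unified-diff lines."""
    changes = defaultdict(lambda: [0, 0])
    for line in diff_lines:
        match = RULE_RE.search(line)
        if match is None:
            # e.g. context lines, or suppressed lines carrying no rule code
            continue
        rule = match.group(1)
        if line.startswith("+"):
            changes[rule][0] += 1
        elif line.startswith("-"):
            changes[rule][1] += 1
    return dict(changes)

# Invented sample input, for illustration only:
sample = [
    "+ pkg/mod.py:10:80: E501 line too long",
    "- pkg/mod.py:12:1: F401 'os' imported but unused",
]
print(tally_rule_changes(sample))  # {'E501': [1, 0], 'F401': [0, 1]}
```
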
Wrap function in setter
def evaluate_wrapper(func): @wraps(func) def wrapper(self, blackboard): if self.state == EvaluationState.ready: self.on_enter() state = func(self, blackboard) self.state = state if state != EvaluationState.running: self.on_exit() return state return wrapper
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setter(self):\n def decorator(func):\n self._setf = func\n return self\n return decorator", "def setter(self, func):\n if not isinstance(func, (classmethod, staticmethod)):\n func = classmethod(func)\n self.fset = func\n return self", "def chainable(fn: Callable):\n # @validate_arguments\n @functools.wraps(fn)\n def setter_wrapper(self, *args: Any, **kwargs: Any) -> Any:\n fn(self, *args, **kwargs)\n return self\n\n return setter_wrapper", "def setter(self, func):\n self.fset = func\n self.set_setattr_mode(SetAttr.CallObject_ObjectValue, func)\n return self", "def set_handler(key):\n def wrapper(func):\n func.set_key = key\n return func\n\n return wrapper", "def proxy_set(self, value):\n setter = getattr(self, self.proxy_setter)\n return setter(value)", "def _setter_decor(self, fset):\n\n def fdec(obj):\n def _decor(fun):\n fset(obj, fun)\n return fun\n\n return _decor\n\n return self._init_inherit(fset=fset, fdec=fdec)", "def mutates(func):\n @wraps(func)\n def inner(self, *args, **kwargs):\n self.mutate()\n return inner.__wrapped__(self, *args, **kwargs)\n inner.__wrapped__ = func\n return inner", "def tokensetter(self, f):\n self._tokensetter = f\n return f", "def forced(setter):\n @wraps(setter)\n def __set__(desc, instance, value, forced=False):\n if forced:\n return setter(desc, instance, value)\n else:\n raise AttributeError(\"Cannot set a read-only attribute\")\n return __set__", "def __setattr__ (self, attr, value):\n self.set_value (attr, value)", "def _set_func(self, func):\n if callable(func):\n self._func = func\n else:\n raise TypeError(\"'func should be callable'\")", "def __set__(self, obj, value):\n\n return setattr(obj, '_' + self.name, value)", "def ts_setter(func):\n\n @wraps(func)\n def inner(self, value):\n \"\"\" Parse input value as ISO8601 date \"\"\"\n if value is None:\n return func(self, None)\n elif isinstance(value, datetime.datetime):\n return func(self, value)\n else:\n value = TS_SETTER_TRANSFORM_RE.sub(TS_SETTER_TRANSFORM_REPL, value)\n return func(self, iso8601.parse_date(value))\n\n return inner", "def __set__(self, obj, value):\r\n pass", "def __set__(self,obj,val): \n val = self.set_hook(obj,val)\n\n if not callable(val): self._check_value(val)\n super(Number,self).__set__(obj,val)", "def _preserve_settings(method: T.Callable) -> T.Callable:\n\n @functools.wraps(method)\n def _wrapper(\n old: \"ObservableProperty\", handler: T.Callable\n ) -> \"ObservableProperty\":\n new = method(old, handler) # type: ObservableProperty\n new.event = old.event\n new.observable = old.observable\n return new\n\n return _wrapper", "def __setattr__(self, attr, value):\n super().__setattr__(attr, value)", "def _setPropertyValue(self, name, value, typeString = ''):\n method = getattr(self.__class__, \"_setPropertyValue\" + getTypeString(value))\n return method(self, name, value, typeString)", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def set_property(key, value):\n return impl.set_property(**locals())", "def __set__(self, instance, value):\n if self.deprecated:\n cls_name = type(instance).__name__\n 
warnings.warn('%s.%s is deprecated. Please set %s.%s instead.'\n % (cls_name, self.get_attr_name(instance),\n cls_name, self.prop_name),\n self.deprecation_warning,\n stacklevel=2)\n\n if value is not None and self._convert_to_func is not None:\n value = self._convert_to_func(value)\n\n setattr(instance, self.prop_name, value)", "def setter_override(attribute=None, # type: str\n f=DECORATED\n ):\n return autoprops_override_decorate(f, attribute=attribute, is_getter=False)", "def __set__(self, instance, val):\n raise AttributeError(\"Can't set attribute\")", "def setter(self, setter):\n self.accessor.setter = setter", "def __call__(self, func):\n self.__name__ = func.__name__\n if self.__doc__ is None:\n self.__doc__ = func.__doc__\n if self._dbus_property is None:\n self._dbus_property = func.__name__\n if self._implicit_setter:\n return self.setter(func)\n else:\n return self.getter(func)", "def changes_data(f):\n @functools.wraps(f)\n def wrapper(self, *args, **kwargs):\n self.modified = True\n return f(self, *args, **kwargs)\n return wrapper", "def applyFuncOnValues(self, func):\r\n self._value = func(self._value)", "def __call__(self, func): # for decorator usage\n self.func = func\n return self", "def set_once(setter):\n set_instances = DescDict()\n @wraps(setter)\n def __set__(desc, instance, value):\n if instance in set_instances:\n raise AttributeError(\"Cannot set a read-only attribute\")\n else:\n set_instances[instance] = True\n setter(desc, instance, value)\n return __set__", "def __setattr__(self, name, value):\n self.set(**{name: value})", "def __setattr__(self, attr, value):", "def __set__(self, instance, value):\n instance._values[self.name] = self.process(value)", "def grantsetter(self, f):\n self._grantsetter = f\n return f", "def _mock_function(self, obj, func):\n setattr(obj, func.__name__, MethodType(func, self.breaker))", "def _mock_function(self, obj, func):\n setattr(obj, func.__name__, MethodType(func, self.breaker))", "def _writer(func):\n\n name = func.__name__\n return property(fget=lambda self: getattr(self, '_%s' % name), fset=func)", "def frozen(setfun):\n def set_attr(self,name,value):\n if hasattr(self,name): #If attribute already exists, simply set it\n setfun(self,name,value)\n return\n elif sys._getframe(1).f_code.co_name == '__init__': #Allow __setattr__ calls in __init__ calls of proper object types\n for k,v in sys._getframe(1).f_locals.items():\n if k==\"self\" and isinstance(v, self.__class__):\n setfun(self,name,value)\n return\n raise AttributeError(\"You cannot add attributes to %s\" % self)\n return set_attr", "def decorate(self, func):\n super(Chameleon, self).decorate(func)\n functools.update_wrapper(self, func,\n assigned=self.assigned,\n updated=self.updated)\n return self", "def __setattr__(self, key, value):\n return setattr(self.__dict__['_obj'], key, value)", "def _add_contract_to_setter(setter_fun, var_name, property_contract, property_name):\n\n # 0. check that we can import contracts\n try:\n # noinspection PyUnresolvedReferences\n from contracts import ContractNotRespected, contract\n except ImportError as e:\n raise Exception('Use of _add_contract_to_setter requires that PyContract library is installed. 
Check that you '\n 'can \\'import contracts\\'')\n\n try:\n # python 2\n setter_fun = setter_fun.im_func\n except AttributeError:\n pass\n\n # -- check if a contract already exists on the function\n if hasattr(setter_fun, '__contracts__'):\n try:\n qname = str(setter_fun.__qualname__)\n except AttributeError:\n qname = setter_fun.__name__\n msg = \"overridden setter for attribute %s implemented by function %s has a contract while there is a \" \\\n \"contract already defined for this property in the __init__ constructor. This will lead to \" \\\n \"double-contract in the final setter, please remove the one on the overridden setter.\" \\\n \"\" % (property_name, qname)\n warn(msg)\n\n # -- add the generated contract\n setter_fun_with_possible_contract = contract(setter_fun, **{var_name: property_contract})\n\n # the only thing we can't do is to replace the function's parameter name dynamically in the error messages\n # so we wrap the function again to catch the potential pycontracts error :(\n @wraps(setter_fun_with_possible_contract)\n def _contracts_parser_interceptor(self, *args, **kwargs):\n try:\n return setter_fun_with_possible_contract(self, *args, **kwargs)\n except ContractNotRespected as er:\n er.error = er.error.replace('\\'val\\'', '\\'' + property_name + '\\'')\n raise er\n\n return _contracts_parser_interceptor", "def put_value(self, value):\n # Field: replace value\n if self.nargs is None:\n try:\n setattr(obj, self.attr, value)\n except:\n return\n # Function of zero arguments: call function\n elif self.nargs == 0:\n try:\n getattr(obj, self.attr)()\n except:\n return\n # Function of one argument: call function with value\n elif self.nargs == 1:\n try:\n getattr(obj, self.attr)(value)\n except:\n return\n # Function with many arguments: distribute arguments to args\n elif self.nargs > 1:\n try:\n getattr(obj, self.attr)(*value)\n except:\n return\n else:\n return\n # If we set a value, update now.\n self.update()", "def set(self, obj, value):\n raise NotImplementedError", "def set(self, **kwargs):\n raise NotImplementedError", "def __setattr__(*args, **kwargs): # real signature unknown\n pass", "def __setattr__(*args, **kwargs): # real signature unknown\n pass", "def __setattr__(*args, **kwargs): # real signature unknown\n pass", "def __setattr__(*args, **kwargs): # real signature unknown\n pass", "def __setattr__(*args, **kwargs): # real signature unknown\n pass", "def __setattr__(*args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, name: str, value: Any) -> None:\n super().__setattr__(name, value)", "def set(self, key, value):", "def set(self, key, value):", "def set(self, obj, value):\n pass", "def resetter(self, func):\n def __wrapper(instance, *args, **kwargs):\n func(instance, *args, **kwargs)\n self.__flags[instance] = True\n return __wrapper", "def __setattr__(self, name, value): # real signature unknown; restored from __doc__\n pass", "def __setattr__(self, name, value): # real signature unknown; restored from __doc__\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", 
"def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass", "def __setattr__(self, *args, **kwargs): # real signature unknown\n pass" ]
[ "0.84098685", "0.7812984", "0.77397096", "0.7689164", "0.69597083", "0.6959586", "0.69246805", "0.6809191", "0.67956316", "0.6794793", "0.67877066", "0.67604524", "0.6759456", "0.6755358", "0.67432415", "0.6720239", "0.66899675", "0.66600424", "0.6644266", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.6640938", "0.66217756", "0.66172373", "0.66128814", "0.6606453", "0.6538733", "0.65300465", "0.64814013", "0.6476309", "0.6473857", "0.64682096", "0.6451179", "0.6441155", "0.6408212", "0.6402218", "0.6397405", "0.6397405", "0.63723207", "0.6372171", "0.6351155", "0.6329043", "0.63195527", "0.6304028", "0.63004386", "0.6281667", "0.6276947", "0.6276947", "0.6276947", "0.6276947", "0.6276947", "0.6276947", "0.6274099", "0.62637675", "0.62637675", "0.6259304", "0.62524074", "0.6252349", "0.6252349", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889", "0.6215889" ]
0.0
-1
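
The record above pairs the query "Wrap function in setter" with a state-transition wrapper (scored 0.0, rank -1). A minimal runnable version of that wrapper pattern follows; the `EvaluationState` enum and `Node` class are assumptions added only so the example is self-contained, while the `evaluate_wrapper` body mirrors the row's document.

```python
from enum import Enum
from functools import wraps

class EvaluationState(Enum):
    # Toy stand-in for the state enum the wrapped method is assumed to use.
    ready = 0
    running = 1
    success = 2

def evaluate_wrapper(func):
    """Run enter/exit hooks around an evaluation method and store its result as state."""
    @wraps(func)
    def wrapper(self, blackboard):
        if self.state == EvaluationState.ready:
            self.on_enter()
        state = func(self, blackboard)
        self.state = state
        if state != EvaluationState.running:
            self.on_exit()
        return state
    return wrapper

class Node:
    # Hypothetical node class, only to exercise the wrapper.
    def __init__(self):
        self.state = EvaluationState.ready

    def on_enter(self):
        print("enter")

    def on_exit(self):
        print("exit")

    @evaluate_wrapper
    def evaluate(self, blackboard):
        return EvaluationState.success

node = Node()
node.evaluate({})  # prints "enter" then "exit"; node.state is now success
```
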
Evaluate the node state
def evaluate(self, blackboard): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def evaluate(self, state):\n abstract", "def evaluate(self) :\n for inp in self.inStates :\n if inp.getState() == 1 : return 1\n return 0", "def evaluate_node(self):\n # p, v = np.random.random(225).astype(np.float16), np.random.random()\n socket = zmq.Context().socket(zmq.DEALER)\n socket.setsockopt_string(zmq.IDENTITY, self.player_id)\n socket.connect('ipc://./tmp/oracle_%s' % self.tree.model_name)\n print('start to evaluate', self.tree.model_name)\n while True:\n # print(self.tree.to_evaluate.qsize())\n batch = []\n states = []\n colors = []\n size = self.tree.to_evaluate.qsize()\n if size > config.INFERENCE_BATCHSIZE:\n size = config.INFERENCE_BATCHSIZE\n elif size == 0:\n time.sleep(0.001)\n continue\n for _ in range(size):\n t, black, white = self.tree.to_evaluate.get()\n mine, yours = posswap(t, black, white)\n batch.append((str(mine), str(yours), t % 2))\n states.append((black, white))\n colors.append(t % 2)\n socket.send(msgpack.dumps((batch, self.player_id)))\n result = msgpack.loads(socket.recv())\n assert len(states) == len(result[0])\n assert len(states) == len(result[1])\n for ind, state in enumerate(states):\n with self.lock:\n self.tree.nodes[state].p = result[0][ind]\n if colors[ind] == 0:\n self.tree.nodes[state].v = result[1][ind]\n else:\n self.tree.nodes[state].v = -result[1][ind]\n self.tree.nodes[state].updated = True", "def evaluate(self) :\n if self.inStates[0].getState() == self.inStates[1].getState(): return 0\n return 1", "def evaluate(self) :\n for inp in self.inStates :\n if inp.getState() == 0 : return 0\n return 1", "def test_get_node_state(self):\n pass", "def state(self):\n return self._node._state", "def evaluate(self, state):\n\n if self.is_coords and len(state) != len(self.coords):\n raise Exception(\"\"\"state must have the same length as coords.\"\"\")\n\n if not len(state) == len(set(state)):\n raise Exception(\"\"\"Each node must appear exactly once in state.\"\"\")\n\n if min(state) < 0:\n raise Exception(\"\"\"All elements of state must be non-negative\"\"\"\n + \"\"\" integers.\"\"\")\n\n if max(state) >= len(state):\n raise Exception(\"\"\"All elements of state must be less than\"\"\"\n + \"\"\" len(state).\"\"\")\n\n return self.calculate_fitness(state)", "def evaluate(self, state, player, random_state):\n raise NotImplementedError", "def evaluate(self, tree):\n\t\tpass", "def evaluate(self, state):\n\n fitness = np.sum(state)\n self.num_evals += 1\n #print(self.num_evals)\n return fitness", "def get_power_state(self, node):", "def leafEvaluation(self, state):\n\n \"\"\"\n Use random generated values for now\n \"\"\"\n z = np.random.randint(2)\n v = random.uniform(0, 1) \n return (1-LAMBDA) * v + LAMBDA * z", "def state(params1):\n variational_circuit(params1)\n return qml.state()", "def evaluate(self) :\n pass", "def Eval(state):\n\n# H1 = htest2(state)\n# return H1\n H2 = h1(state)*monotonic(state)\n return H2", "def evaluate(self):\n pass", "def evaluate(self):\n pass", "def calculateState (self):\r\n newState = 0\r\n # print (\"Inside state function the states DNs are: \\n\")\r\n # print (\"Before starting \\n\")\r\n self.stateDanglingNodes()\r\n #for i in range(len(self.metaSpikes)):\r\n # if self.metaSpikes[i].typeSpike == 1:\r\n # print (\"Meta atom number is: \" + str(self.atomNumber) + \"\\n\")\r\n \r\n insideMetState = []\r\n # To calculate the state we need to update every atom the metaatom consistrs off then see\r\n # the states of every dangling node in the metaspikes\r\n for i in range(len(self.metaSpikes)):\r\n if 
self.metaSpikes[i].typeSpike == 1:\r\n #print (\"Inside type 1 \\n\")\r\n #print (\"Number of type 1 nodes: \" + str(len(self.metaSpikes[i].danglingNodeList)) + \"\\n\")\r\n for j in range(len(self.metaSpikes[i].danglingNodeList)):\r\n insideMetState.append(self.metaSpikes[i].danglingNodeList[j].state)\r\n if self.metaSpikes[i].danglingNodeList[j].state == 1:\r\n # print (\"Adding one \\n\" )\r\n newState += 1\r\n else:\r\n # print (\"Subracting one \\n\")\r\n newState -= 1\r\n else:\r\n \r\n # print (\"Inside type 2 \\n\")\r\n # print (\"Number od type 1 tales: \" + str(len(self.metaSpikes[i].danglingTailList)) + \"\\n\")\r\n for j in range(len(self.metaSpikes[i].danglingTailList)):\r\n #print (\"Size of tail: \" + str(len(self.metaSpikes[i].danglingTailList[j].nodeList)) + \"\\n\")\r\n for k in range(len(self.metaSpikes[i].danglingTailList[j].nodeList)):\r\n insideMetState.append(self.metaSpikes[i].danglingTailList[j].nodeList[k].state)\r\n if self.metaSpikes[i].danglingTailList[j].nodeList[k].state == 1:\r\n newState += 1\r\n else:\r\n newState -= 1 \r\n \r\n # print (\"The state of analysed nodes: \\n\" + str(insideMetState) + \"\\n\")\r\n # print (\"The length of analysed nodes: \\n\" + str(len(insideMetState)) + \"\\n\")\r\n # print (\"The new state is: \" + str(newState) + \"\\n\") \r\n self.state = newState", "def act(self, state, eps=0.):", "def evaluate(self):\n eval_list = nx.topological_sort(self.graph)\n for n in eval_list:\n n.evaluate()\n print(\"evaluating type\", type(n))\n\n # Notify observers of finished calculation\n self.notifyObservers(\"EVALUATION DONE\")\n return \"FINISHED\"", "def compute(self, *args, **kwargs):\n for node in self.evaluation_sequence:\n node.evaluate()", "def evaluate(self, state):\n _n = len(state)\n _t = np.ceil(self.t_pct*_n)\n\n # Calculate head and tail values\n tail_0 = self.tail(0, state)\n head_1 = self.head(1, state)\n\n # Calculate R(X, T)\n if (tail_0 > _t and head_1 > _t):\n _r = _n\n else:\n _r = 0\n\n # Evaluate function\n fitness = max(tail_0, head_1) + _r\n self.num_evals += 1\n return fitness", "def evaluateBoardState(self, board):\n\n \"\"\"\n These are the variables and functions for board objects which may be helpful when creating your Agent.\n Look into board.py for more information/descriptions of each, or to look for any other definitions which may help you.\n\n Board Variables:\n board.width \n board.height\n board.last_move\n board.num_to_connect\n board.winning_zones\n board.score_array \n board.current_player_score\n\n Board Functions:\n get_cell_value(row, col)\n try_move(col)\n valid_move(row, col)\n valid_moves()\n terminal(self)\n legal_moves()\n next_state(turn)\n winner()\n \"\"\"\n\n # print the valid moves on board for current player\n move = board.last_move\n\n # enemy agent's id\n enemy = self.id % 2 + 1\n\n value = self.evaluateRows(board, enemy) + self.evaluateCols(board, enemy) + self.evaluateBackwardDiagonals(board, enemy) + self.evaluateForwardDiagonals(board, enemy)\n return value", "def update(self):\r\n\r\n self.target.load_state_dict(self.model.state_dict())\r\n self.target.eval()", "def _eval_state(hass):\n state_str = ''.join(['1' if val else '0' for val in PERSIST['states']])\n state = int(state_str, 2)\n mode = PERSIST['mode']\n output = state in SCHEDULES[mode][0]\n _LOGGER.debug('Eval: %s %s = %s',\n PERSIST['mode'], str(PERSIST['states']), repr(output))\n\n if output != PERSIST['last_cmd']:\n PERSIST['last_cmd'] = output\n if output:\n _call_service(hass, SCHEDULES[mode][1], 'turn_on')\n else:\n 
_call_service(hass, SCHEDULES[mode][1], 'turn_off')", "def eval_state(self,t,endBehavior='halt'):\n return Trajectory.eval_state(self,t,endBehavior)", "def eval_state(self,t,endBehavior='halt'):\n return Trajectory.eval_state(self,t,endBehavior)", "def act(self, state, eps=0., train=True):\n state = torch.from_numpy(state).float().unsqueeze(0).to(self.device)\n\n # Put model into evaluation mode & get action values\n self.qnetwork_local.eval()\n with torch.no_grad():\n action_values = self.qnetwork_local(state)\n\n # Put model back in training mode\n if train:\n self.qnetwork_local.train()\n\n # Epsilon-greedy action selection\n if self.rng.uniform() > eps:\n return np.argmax(action_values.cpu().data.numpy()).astype(int)\n else:\n return self.rng.choice(np.arange(self.action_size))\n \n return", "def test_get_node_state_readonly(self):\n pass", "def evaluate(self, stochastic=False):\n ob = self._convert_state(self._env.reset())\n done = False\n actions = []\n sum_rew = 0\n while not done:\n ac, _ = self._act(ob, stochastic=stochastic)\n actions.append(ac)\n ob, rew, done, _ = self._env.step(ac)\n ob = self._convert_state(ob)\n sum_rew += rew\n self.ob = self._convert_state(self._env.reset())\n self.new = True\n return sum_rew, actions", "def eval_node(node, env):\n global genv\n global result\n node_type = node_name(node)\n\n if node_type == 'Expr':\n return eval_node(node.value, env)\n elif node_type == 'Assign':\n val = eval_node(node.value, env)\n\n while type(val) is tuple and len(val) == 2 and (type(val[1]) == GlobalEnv or type(val[1]) == LocalEnv):\n val = val[0]\n\n # extract the variable name, evaluate the RHS, then extend the environment.\n return 0, env.extend([node.targets[0].id], [val])\n elif node_type == 'BinOp':\n # get the left and right operands (we use only single operands) and the operator.\n # evaluate the operands and apply the operator. return the number, env.\n\n left = eval_node(node.left, env)[0]\n right = eval_node(node.right, env)[0]\n\n left = left[0] if type(left) is tuple else left\n right = right[0] if type(right) is tuple else right\n\n op = node_name(node.op)\n\n if op == \"Add\":\n return (left + right), env\n elif op == \"Sub\":\n return (left - right), env\n elif op == \"Mult\":\n return (left * right), env\n elif op == \"Div\":\n return (left / right), env\n elif op == \"Mod\":\n return (left % right), env\n return 0, env\n elif node_type == 'FunctionDef':\n # need the function id (name), args, and body. 
Extend the environment.\n # you can leave the args wrapped in the ast class and the body and unpack them\n # when the function is called.\n\n return 0, env.extend([node.name], [(node.args, node.body)])\n elif node_type == 'Call':\n # get any values passed in to the function from the Call object.\n # get the fxn name and look up its parameters, if any, and body from the env.\n # get lists for parameter names and values and extend a LocalEnv with those bindings.\n # evaluate the body in the local env, return the value, env.\n\n func = eval_node(node.func, env)[0]\n local_env = LocalEnv(None, env)\n\n args = func[0].args\n body = func[1]\n\n index = 0\n for val in node.args:\n local_env = local_env.extend([args[index].arg], [eval_node(val, local_env)[0]])\n index += 1\n\n for node in body:\n val = eval_node(node, local_env)\n\n if node_name(node) == \"Return\":\n output_val = val[0]\n local_env = val[1]\n return output_val, env\n elif node_type == 'Return':\n # evaluate the node, return the value, env.\n return eval_node(node.value, env)\n elif node_type == 'Name':\n # Name(identifier id)- lookup the value binding in the env\n # return the value, env\n return env.lookup(node.id), env\n # Num(object n) -- a number, return the number, env.\n elif node_type == 'Num':\n return node.n, env", "def act(self, state, eps=0.):\n # torch expects input (state in this case) to be defined in batches\n # so use unsqueeze to add a batch dimension of 1\n state = torch.from_numpy(state).float().unsqueeze(0).to(device)\n # TODO this is for states that are images\n #state = torch.from_numpy(state).float().to(device)\n\n #print('act state.shape', state.shape)\n self.qnetwork_local.eval()\n with torch.no_grad():\n action_values = self.qnetwork_local(state)\n if self.train_mode:\n self.qnetwork_local.train()\n\n # Epsilon-greedy action selection\n # Epsilon should still be > 0 even when not training\n if random.random() > eps:\n return np.argmax(action_values.cpu().data.numpy())\n else:\n return random.choice(np.arange(self.action_size))", "def computeNextState(self):\n aliveNeighbors = self.numOfLiveNeighbors()\n if aliveNeighbors < 2 or aliveNeighbors > 3:\n self.setNextToDead()\n\n if not self.isAlive() and aliveNeighbors == 3:\n self.setNextToAlive()", "def evaluate(self, state):\n\n fitness = 0\n\n for i in range(1, len(state)):\n if state[i] != state[i - 1]:\n fitness += 1\n\n return fitness", "def solution(self) -> State:", "def state_update(self, p, state):\n node = self._validate(p)\n node._state = state", "def act(self, state, eps=0.):\n state = torch.from_numpy(state).float().unsqueeze(0).to(device)\n self.qnetwork_local.eval()\n with torch.no_grad():\n action_values = self.qnetwork_local(state)\n self.qnetwork_local.train()\n\n # Epsilon-greedy action selection\n if random.random() > eps:\n return np.argmax(action_values.cpu().data.numpy())\n else:\n return random.choice(np.arange(self.action_size))", "def act(self, state, eps=0.):\n state = torch.from_numpy(state).float().unsqueeze(0).to(device)\n self.qnetwork_local.eval()\n with torch.no_grad():\n action_values = self.qnetwork_local(state)\n self.qnetwork_local.train()\n\n # Epsilon-greedy action selection\n if random.random() > eps:\n return np.argmax(action_values.cpu().data.numpy())\n else:\n return random.choice(np.arange(self.action_size))", "def changeState(self, node, name, state):", "def act(self, state_and_prev_recurrent, eps=0.):\n state_and_prev_recurrent = torch.from_numpy(state_and_prev_recurrent).float().unsqueeze(0).to(device)\n 
self.qnetwork_local.eval()\n with torch.no_grad():\n action_values = self.qnetwork_local(state_and_prev_recurrent)[:, :4]\n self.qnetwork_local.train()\n\n # Epsilon-greedy action selection\n if random.random() > eps:\n return np.argmax(action_values.cpu().data.numpy())\n else:\n return random.choice(np.arange(self.action_size))", "def get_state(self):\n curr_state = self.lidar_ranges[self.indices]\n # curr_state = (self.lidar_ranges[self.indices] - 5.) / 5.\n # print(curr_state)\n\n return curr_state", "def value(self, state):\n\t\traise NotImplementedError", "def test(state, agent):\n\n device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n if device == \"cuda:0\":\n agent = agent.cuda()\n\n state = torch.from_numpy(state).float().to(device)\n agent.eval()\n with torch.no_grad():\n action = agent(state).cpu().data.numpy()\n\n return np.clip(action, -1, 1)", "def value(self, state):\n raise NotImplementedError", "def value(self, state):\n raise NotImplementedError", "def value(self, state):\n return 0", "def evaluate(self) -> int:", "def value(self):\n\n if self.state == Node.State.VALID:\n return self._value\n else:\n with _NodeStackFrame(self):\n self.state = Node.State.PENDING\n self.value = self.compute_value(*self.args, **self.kwargs)\n return self._value", "def move(self, state):\n result = None\n self.currentDepthLimit = 0\t\n\tself.transposition = {}\n\tself.counter = 0\n\n\twhile True:\n u = float(\"inf\")\n\t v = float(\"-inf\")\n\t self.counter = 0\n\t result = None\n\t self.transposition = {}\n\t for a in state.actions():\n new = self.min_value(state.result(a), float(\"-inf\"), float(\"inf\"),self.currentDepthLimit)\n\t if new > v:\n\t v = new\n\t result = a\n\n\t elif new == v:\n\t if a.index < result.index:\n\t result = a\n\t if self.is_time_up():\n\t return result\n\t \n\t self.currentDepthLimit += 1\n\t \"\"\"If we never use evaluate function, it means all state are terminated, so return whatever the result is\"\"\"\n\t if self.counter == 0:\n\t break\n\t if self.is_time_up():\n \t return result\n\treturn result", "def eval_action(self, state):\n means, _ = self.__call__(state)\n action = self.action_scale * means + self.action_bias\n\n return action.detach().cpu().numpy()", "def __call__(self, state):\n return self.v(state)", "def state(self) -> TState:\n pass", "def get_value(self):\n if not self.visited:\n # first visit at node\n self.visited = True\n\n # value calculation\n for node, weight in self.predecessors:\n self.value += (node.get_value() * weight)\n\n # applying activation function\n if self.activation is not None:\n self.activation()\n\n self.calculated = True\n\n return self.value\n else:\n # visited node\n if self.calculated:\n # calculated in this computation\n return self.value\n else:\n # recurrent connection\n return self.past_value", "def mctsEvalFunction(state):\n return 1 if state.isWin() else 0", "def get_state():\n\tif node.id < 0:\n\t\treactor.callLater(0, get_state)\n\t\treturn\n\t\n\tnode.send(node.id, generate_start_graph, None)\n\tnode.target_filename = target_filename\n\tnode.roots = {}\n\tif DO_PROBLEMS:\n\t\ttarget_msg = msg_get_dirty_connections\n\telse:\n\t\ttarget_msg = msg_get_connections\n\tfor i in range(0, node.id):\n\t\tnode.send(i, target_msg, node.id)\n\tnode.send(node.id, wait_full_state, 0)", "def _get_state(self):", "def run(self, state: State) -> State:", "def state(self) -> nx.Graph:\n return self._state", "def getstate(self,name):\n state = self.states[name]\n debug('kfnode.getstate ',(name,state))\n 
return state", "def evaluate(self):\n return self._evaluate_recur(self.root())", "def evaluate(self):\n return self._evaluate_recur(self.root())", "def getState():\n # TODO: this isn't nearly as meaningful as it used to be", "def evaluateBoardState(self, board):\n\n \"\"\"\n These are the variables and functions for board objects which may be helpful when creating your Agent.\n Look into board.py for more information/descriptions of each, or to look for any other definitions which may help you.\n\n Board Variables:\n board.width \n board.height\n board.last_move\n board.num_to_connect\n board.winning_zones\n board.score_array \n board.current_player_score\n\n Board Functions:\n get_cell_value(row, col)\n try_move(col)\n valid_move(row, col)\n valid_moves()\n terminal(self)\n legal_moves()\n next_state(turn)\n winner()\n \"\"\"\n if self.id == 1:\n opponent_id = 2\n else:\n opponent_id = 1\n\n maxvalue = 100000\n minvalue = -maxvalue\n winner = board.winner()\n if winner == self.id:\n return maxvalue\n elif winner == opponent_id:\n return minvalue\n size_y = board.height\n size_x = board.width\n map_ = []\n num_to_connect = board.num_to_connect\n total_points = 0\n\n multiply_reachable = 1\n multiply_oddeven = 1\n # basically this function is calculating all the possible win positions\n # more pieces in a possible win position will be counted with more weights\n # a win position with X pieces in it will be counted as X^2 points\n # initialise the zones maps\n for i in range(size_y):\n map_.append([])\n for j in range(size_x):\n map_[i].append([])\n\n # Fill in the horizontal win positions\n for i in range(size_y):\n for j in range(size_x - num_to_connect + 1):\n points = 0\n self_pieces_count = 0\n opponent_pieces_count = 0\n for k in range(num_to_connect):\n if board.board[i][j + k] == opponent_id:\n opponent_pieces_count += 1\n elif board.board[i][j + k] == self.id:\n points += len(board.winning_zones[j+k][i])\n if (self.id == 1 and i % 2 == 1) or (self.id == 2 and i%2 == 0):\n points *= multiply_oddeven\n self_pieces_count += 1\n if self_pieces_count == 3 and opponent_pieces_count == 0:\n if j - 1 >= 0 and board.board[i][j + 3] == 0 and board.board[i][j - 1] == 0 \\\n and board.try_move(j + 3) == i and board.try_move(j - 1) == i:\n return maxvalue\n elif j + 4 < size_y and board.board[i][j + 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j + 4) == i and board.try_move(j) == i:\n return maxvalue\n else:\n for k in range(num_to_connect):\n if board.board[i][j + k] == 0 and board.try_move(j + k) == i:\n points *= multiply_reachable\n elif opponent_pieces_count == 3 and self_pieces_count == 0:\n if j - 1 >= 0 and board.board[i][j + 3] == 0 and board.board[i][j - 1] == 0 \\\n and board.try_move(j + 3) == i and board.try_move(j - 1) == i:\n return minvalue\n elif j + 4 < size_y and board.board[i][j + 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j + 4) == i and board.try_move(j) == i:\n return minvalue\n # else:\n # for k in range(num_to_connect):\n # if board.board[i][j + k] == 0 and board.try_move(j + k) == i:\n # points *= -multiply_reachable\n if (opponent_pieces_count == 3 and self_pieces_count == 0) or opponent_pieces_count == 0:\n total_points += points\n\n # Fill in the vertical win positions\n for i in range(size_x):\n for j in range(size_y - num_to_connect + 1):\n points = 0\n self_pieces_count = 0\n for k in range(num_to_connect):\n if board.board[j + k][i] == opponent_id:\n opponent_pieces_count += 1\n elif board.board[j + k][i] == self.id:\n points += 
len(board.winning_zones[i][j+k])\n if (self.id == 1 and (j+k) % 2 == 1) or (self.id == 2 and (j+k)%2 == 0):\n points *= multiply_oddeven\n self_pieces_count += 1\n points *= multiply_reachable\n # if opponent_pieces_count == 3 and self_pieces_count == 0:\n # points *= -1\n if (opponent_pieces_count == 3 and self_pieces_count == 0) or opponent_pieces_count == 0:\n total_points += points\n\n # Fill in the forward diagonal win positions\n for i in range(size_y - num_to_connect + 1):\n for j in range(size_x - num_to_connect + 1):\n points = 0\n self_pieces_count = 0\n opponent_pieces_count = 0\n for k in range(num_to_connect):\n if board.board[i + k][j + k] == opponent_id:\n opponent_pieces_count += 1\n elif board.board[i + k][j + k] == self.id:\n points += len(board.winning_zones[j+k][i+k])\n if (self.id == 1 and (i+k) % 2 == 1) or (self.id == 2 and (i+k)%2 == 0):\n points *= multiply_oddeven\n self_pieces_count += 1\n if self_pieces_count == 3 and opponent_pieces_count == 0:\n if i - 1 >= 0 and j - 1 >= 0 and board.board[i + 3][j + 3] == 0 and board.board[i - 1][j - 1] == 0 \\\n and board.try_move(j + 3) == i + 3 and board.try_move(j - 1) == i - 1:\n return maxvalue\n elif i + 4 < size_y and j + 4 < size_x and board.board[i + 4][j + 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j + 4) == i + 4 and board.try_move(j) == i:\n return maxvalue\n else:\n for k in range(num_to_connect):\n if board.board[i + k][j + k] == 0 and board.try_move(j + k) == i + k:\n points *= multiply_reachable\n elif opponent_pieces_count == 3 and self_pieces_count == 0:\n if i - 1 >= 0 and j - 1 >= 0 and board.board[i + 3][j + 3] == 0 and board.board[i - 1][j - 1] == 0 \\\n and board.try_move(j + 3) == i + 3 and board.try_move(j - 1) == i - 1:\n return minvalue\n elif i + 4 < size_y and j + 4 < size_x and board.board[i + 4][j + 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j + 4) == i + 4 and board.try_move(j) == i:\n return minvalue\n # else:\n # for k in range(num_to_connect):\n # if board.board[i + k][j + k] == 0 and board.try_move(j + k) == i + k:\n # points *= -multiply_reachable\n if (opponent_pieces_count == 3 and self_pieces_count == 0) or opponent_pieces_count == 0:\n total_points += points\n\n # Fill in the backward diagonal win positions\n for i in range(size_y - num_to_connect + 1):\n for j in range(size_x - 1, num_to_connect - 1 - 1, -1):\n points = 0\n self_pieces_count = 0\n opponent_pieces_count = 0\n for k in range(num_to_connect):\n if board.board[i + k][j - k] == opponent_id:\n opponent_pieces_count += 1\n elif board.board[i + k][j - k] == self.id:\n points += len(board.winning_zones[j-k][i+k])\n if (self.id == 1 and (i+k) % 2 == 1) or (self.id == 2 and (i+k)%2 == 0):\n points *= multiply_oddeven\n self_pieces_count += 1\n if self_pieces_count == 3 and self_pieces_count == 0:\n if board.board[i + 3][j - 3] == 0 and board.board[i - 1][j + 1] == 0 \\\n and board.try_move(j - 3) == i + 3 and board.try_move(j + 1) == i - 1:\n return maxvalue\n elif i + 4 < size_y and j - 4 >= 0 and board.board[i + 4][j - 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j - 4) == i + 4 and board.try_move(j) == i:\n return maxvalue\n else:\n for k in range(num_to_connect):\n if board.board[i + k][j - k] == 0 and board.try_move(j - k) == i + k:\n points *= multiply_reachable\n\n elif opponent_pieces_count == 3 and self_pieces_count == 0:\n if board.board[i + 3][j - 3] == 0 and board.board[i - 1][j + 1] == 0 \\\n and board.try_move(j - 3) == i + 3 and board.try_move(j + 1) == i - 1:\n 
return minvalue\n elif i + 4 < size_y and j - 4 >= 0 and board.board[i + 4][j - 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j - 4) == i + 4 and board.try_move(j) == i:\n return minvalue\n # else:\n # for k in range(num_to_connect):\n # if board.board[i + k][j - k] == 0 and board.try_move(j - k) == i + k:\n # points *= -multiply_reachable\n if (opponent_pieces_count == 3 and self_pieces_count == 0) or opponent_pieces_count == 0:\n total_points += points\n return total_points", "def evaluate(state):\r\n if wins(state, COMP):\r\n score = +1\r\n elif wins(state, HUMAN):\r\n score = -1\r\n else:\r\n score = 0\r\n\r\n return score", "def get(self, node_id):\n # NOTE(lucasagomes): All these state values come from the\n # DB. Ironic counts with a periodic task that verify the current\n # power states of the nodes and update the DB accordingly.\n rpc_node = objects.Node.get_by_uuid(pecan.request.context, node_id)\n return NodeStates.convert(rpc_node)", "def evaluate(self):\n raise NotImplementedError()", "def test_get_node_state_smartfail(self):\n pass", "def state(self):\n print(\"getter of variable state called\")\n return self._state", "def compute(self, node, input_vals):\n assert False, \"Implemented in subclass\"", "def eval(self, node):\n\n return None", "def state(self):\n result = self.getResult()\n return result.state", "def compute(self, node, input_vals):\r\n raise NotImplementedError", "def act(self, state, eps=0.0):\n state = torch.from_numpy(state).float().unsqueeze(0).to(self.device)\n self.dqn_local.eval()\n with torch.no_grad():\n action_values = self.dqn_local(state)\n self.dqn_local.train()\n\n # Epsilon-greedy action selection\n if random.random() > eps:\n return np.int32(np.argmax(action_values.cpu().data.numpy()))\n else:\n return np.int32(random.choice(np.arange(self.action_size)))", "def next(self):\n self.current_state = self.next_state\n self.next_state = self.clear_screen() # set values to 0\n for x in range(1, 101):\n for y in range(1, 101):\n # calculate the number of alive neighbours at given coordinates\n self.neighbours_alive = self.check_neighbours_alive(x, y)\n\n # assign the result value from rule sets\n self.next_state[x][y] = self.rule_sets[self.selected_rule][ # selected rule name\n str(self.current_state[x][y])][ # 0 or 1 (dead or alive)\n self.neighbours_alive] # number between 0 to 8\n return self.next_state", "def calculate_state(self):\n\t\tif self.state_type == 'Queues':\n\t\t\t#self.queue_state =\\\n\t\t\t#[0. if movement.AttValue('QLen(Current, Last)') is None else movement.AttValue('QLen(Current, Last)') for movement in self.lanes_movement]\n\n\t\t\tself.queue_state =\\\n\t\t\t[0. if queue.AttValue('QLen(Current, Last)') is None else queue.AttValue('QLen(Current, Last)') for queue in self.queues_counters]\n\n\t\t\tstate = np.array(self.queue_state)[np.newaxis,:]\n\n\t\tif self.state_type == \"QueuesSig\":\n\n\t\t\tself.queue_state =\\\n\t\t\t[0. 
if queue.AttValue('QLen(Current, Last)') is None else queue.AttValue('QLen(Current, Last)') for queue in self.queues_counters]\n\n\t\t\tstate = np.array(self.queue_state+[self.next_action_key])[np.newaxis,:]\n\t\n\t\treturn(state)", "def _on_policy_state_value_estimate(self, state, next_action, nonterminal_mask, non_final_states):\n next_state_values = Variable(to_cuda(torch.zeros(state[0].size(0)).float(), self.gpu_device))\n predictions = self.old_model(to_cuda(non_final_states, self.gpu_device))\n next_state_values[nonterminal_mask] = predictions.gather(1, Variable(\n to_cuda(next_action, self.gpu_device)[nonterminal_mask].view(-1, 1)))\n next_state_values.volatile = False\n return next_state_values", "def evaluate_state(\n trajectory: Trajectory, time_index: int, actual_position: Position\n):\n active_goal = get_active_goal(trajectory, time_index)\n\n # wrap positions in Pose objects\n actual_pose = move_cube.Pose(position=actual_position)\n goal_pose = move_cube.Pose(position=active_goal)\n\n return move_cube.evaluate_state(goal_pose, actual_pose, GOAL_DIFFICULTY)", "def evaluate(self, blackboard):\n success = EvaluationState.success\n\n state = success\n for child in self.children:\n state = child.__call__(blackboard)\n\n if state != success:\n break\n\n return state", "def state_nodes(self) -> np.ndarray:\n return np.array([[nd[c] for c in [\"alive\", \"infected\", \"immune\", \"isolated\", \"masked\"]]\n for nv, nd in self.g_.nodes.data()])", "def evaluate_state(state):\n\n my_score = get_action_score(state.action[0], state.action[1], state.action_player, state.occupied)\n other_score = get_action_score(state.action[0], state.action[1], state.player, state.occupied)\n \n return max(my_score, other_score)", "def __get_state(self, G):\n x = np.zeros(len(G))\n for node in self.infected_node_set:\n x[node] = 1\n\n # Random activation\n if self.self_activation>0:\n rdm_act = np.random.choice([0,1], size=len(x), p=[1-self.self_activation, self.self_activation])\n x = np.minimum(x+rdm_act, 1)\n return x", "def state(self):\n return self.var_state", "def evaluate(state):\n if wins(state, COMP):\n score = -1\n elif wins(state, HUMAN):\n score = 1\n else:\n score = 0\n\n return score", "def evaluate(self):\n raise NotImplementedError(\"Abstract method\")", "def evaluate(self, board):", "def evaluate(self, states, actions, tasks):\n self._assert_is_batched(states, actions, tasks)\n return self._tf_call(self._evaluate, states, actions, tasks)", "def h(self, node):\n\n return sum(s != g for (s, g) in zip(node.state, self.goal))", "def policy_eval():\r\n \r\n action_prob = [0.125, 0.625, 0.125, 0.125]# actions with probabilities\r\n data = grid_world()\r\n state_axis = np.zeros((9, 9))#initialize states\r\n threshold = .1\r\n prior_state = np.ones((9, 9))\r\n \r\n while np.abs(state_axis - prior_state).max() > threshold:\r\n for x, y in product(range(9), repeat=2):\r\n prior_state = state_axis.copy()\r\n if data.array[x, y] == 'X':\r\n continue\r\n updated_values = [data.next_direction(np.array([x, y]), next_move)\r\n for next_move in data.directions]#Updating states with directions\r\n Sum_Expectation = np.dot(action_prob,\r\n [points_val + 0.9 * state_axis[position[0], position[1]]\r\n for position, points_val in updated_values])\r\n state_axis[x, y] = Sum_Expectation\r\n print(\"\\nExercise 3.1 Shows Value functions for the policy\\n\")\r\n print(state_axis)\r\n build_grid(state_axis, \"Shows Value functions for the policy\")", "def session_state():\n\n return state.summary()", "def 
evaluate(self, state) -> float:\n evaluater = Evaluater(state)\n result = evaluater.transform(self.expression)\n\n try:\n value = sympy.sympify(\"\".join(tree_to_string(result)), locals=evaluater.locals)\n if value == sympy.nan:\n return None\n return value\n except TypeError:\n return None", "def run_simulation(self, state):\n \"*** YOUR CODE HERE ***\"\n player = 0\n visited_states = [(player, state)]\n depth_limited = self.depth != -1\n depth = self.depth\n expand = True\n while not visited_states[-1][1].isWin() and not visited_states[-1][1].isLose():\n if depth_limited and depth == 0: break\n state = self.UCB1(state, player) # Selection & Simulation\n if expand and state not in self.plays: # Expansion\n expand = False\n self.plays[state] = 0\n self.wins[state] = 0\n visited_states.append((player, state))\n player = (player + 1) % state.getNumAgents()\n if not expand and depth_limited and player == 0: depth -= 1\n \n for player, state in visited_states:\n if state in self.plays: # Not simulated nodes\n self.plays[state] += 1\n eval = self.evaluationFunction(visited_states[-1][1])\n if depth_limited:\n if player == 0: self.wins[state] += eval\n if player != 0: self.wins[state] -= eval\n else:\n if player == 0: self.wins[state] += eval\n if player != 0: self.wins[state] += (1 - eval)", "def UpdateNode(self, result, actions):\n self.visits += 1\n self.wins += (result > 0)\n self.losses += (result < 0)\n self.draws += (result == 0)\n self.Q = self.Q + (result - self.Q)/self.visits\n \n # update rave values\n for a in actions:\n self.N_AMAF[a] += 1\n if not a in self.Q_AMAF:\n self.Q_AMAF[a] = 0.5\n self.Q_AMAF[a] = self.Q_AMAF[a] + (result - self.Q_AMAF[a])/self.N_AMAF[a]\n else:\n self.Q_AMAF[a] = self.Q_AMAF[a] + (result - self.Q_AMAF[a])/self.N_AMAF[a]", "def do(self, state):\r\n\r\n # action is [0,1,2,3]: left, right, up, down\r\n _state = json.loads(state)\r\n if _state[0] == 0:\r\n self.Q[state][2] = 0\r\n if _state[0] == self.max_row:\r\n self.Q[state][3] = 0\r\n if _state[1] == 0:\r\n self.Q[state][0] = 0\r\n if _state[1] == self.max_col:\r\n self.Q[state][1] = 0\r\n action_map = {0: [0, -1], # left\r\n 1: [0, 1], # right\r\n 2: [-1, 0], # up\r\n 3: [1, 0], # down\r\n }\r\n chance = np.random.rand()\r\n if self.eps > chance:\r\n while True:\r\n action = np.random.randint(0,4)\r\n next = np.array(_state) + np.array(action_map[action])\r\n if all(next>=0) and next[0]<=self.max_row and next[1]<=self.max_col:\r\n break\r\n else:\r\n action = np.argmax(self.Q[state])\r\n #print(_state,action,self.Q[state])\r\n\r\n \"\"\"Please Fill Your Code Here.\r\n \"\"\"\r\n return action", "def evaluate(\n self, nodes, derivatives=np.array([0, 0, 0]), modes=None, unique=False\n ):", "def state(self):\n return self._current_value", "def updateState(self):\n self.state = self.microgridPolicy.computeState();", "def evaluation( self ) :\n\n return( self.__evaluation )", "def eval(self, state):\n valueOfPlayers = 0\n valueOfRebelAdvancments = 0\n valueOfLocations = 0\n\n\n\n for coordinate in state.gameState:\n if state.gameState[coordinate]==state.blank:\n continue\n elif state.gameState[coordinate]==state.rebel:\n valueOfRebelAdvancments = -coordinate[0]\n elif state.gameState[coordinate]==state.jedi:\n continue\n elif state.gameState[coordinate]==state.sith:\n continue\n \n valueOfLocations += valueOfRebelAdvancments\n\n \n valueOfPlayers = state.numRebels + 4*state.numJedi - 4*state.numSith\n \n return valueOfPlayers*4 + valueOfLocations", "def decision(self, state: np.ndarray):\n pass", "def 
evaluate(self):\n raise Exception(\"Not implemented.\")" ]
[ "0.7793834", "0.7214056", "0.71922576", "0.70654464", "0.698469", "0.6897534", "0.6870111", "0.67943263", "0.6491411", "0.6490257", "0.6447222", "0.62859714", "0.62780833", "0.62673026", "0.6194209", "0.6178528", "0.61736", "0.61736", "0.615123", "0.6134387", "0.6047522", "0.602193", "0.6016211", "0.6013917", "0.5992925", "0.59513664", "0.5946907", "0.5946907", "0.5933959", "0.5883981", "0.58670497", "0.58544356", "0.58354783", "0.5832884", "0.5827224", "0.5816047", "0.58140993", "0.58120334", "0.58120334", "0.5811658", "0.58070254", "0.57852745", "0.5759387", "0.5759032", "0.5736059", "0.5736059", "0.5724156", "0.5721841", "0.5721811", "0.5721437", "0.57193995", "0.5714518", "0.5691186", "0.5690524", "0.56846523", "0.5677462", "0.5676444", "0.56711465", "0.5671074", "0.56710637", "0.5665837", "0.5665837", "0.56611395", "0.56529367", "0.5651915", "0.5648985", "0.56452763", "0.56427246", "0.5638242", "0.5636766", "0.56365883", "0.56316084", "0.5629113", "0.56280077", "0.5627442", "0.5619451", "0.56153286", "0.5613121", "0.5610558", "0.5609689", "0.5608161", "0.5606878", "0.5604295", "0.5602622", "0.55939853", "0.558992", "0.5589719", "0.5584785", "0.5583469", "0.55827373", "0.55621165", "0.55602586", "0.55577934", "0.55577785", "0.5549153", "0.5538862", "0.5536679", "0.5532054", "0.55302405", "0.55281377", "0.55279" ]
0.0
-1
Reset this node's (and its children's) state to ready
def reset(self):
    self.state = EvaluationState.ready

    for child in self.children:
        if hasattr(child, "reset"):
            child.reset()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset(self):\n for c in self.children:\n c.reset()\n self.marked = False", "def reset_tree(self):\n self.root = None\n self.action = None\n self.dist_probability = None", "def reset(self):\n self.children.clear()", "def reset(self):\r\n self.key = None\r\n self.value = None\r\n self.parent = None\r\n self.left_child = None\r\n self.right_child = None\r\n self.color = BLACK\r\n self.size_tree = 0", "def _re_init(self):\n self._child_index = 0", "def _clear_node(self):\n self._element = None\n self._parent = None\n self._leftchild = None\n self._rightchild = None\n self._height = None", "def reset(self) -> None:\r\n self.tree.delete(*self.tree.get_children())", "def reset(self):\r\n self.tree = KDTree()\r\n self.paint()", "def _reset_traversal_state(self):\n for n in self.nodes.values():\n n.reset_traversal_state()", "def reset(self):\n self.state.fill(EMPTY)", "def updateTree(self):\n self.reset()\n self.resetTree() \n self.read()", "def reset_state(self):\n self._events = None # We'll have to grab the event handlers again in case they changed.\n self._elements.clear() # Clear any cached elements in case they changed or disappeared.", "def clear(self):\n self.tree = Tree()", "def reset(self):\n for parent in self.GetParents():\n parent.reset()", "def _reset_cache(self):\n self._cache = None\n for child in self.children: # pylint: disable=E1101\n child._reset_cache()", "def clear(self):\n self.root = None", "def reset(self):\n self._set_init()", "def reset_states(self) -> None:\n self._metric.reset_states()\n # for each child log\n for child in self.children_real_fake:\n child[0].reset_states()\n child[1].reset_states()", "def reset(self):\n SGMLParser.reset(self)\n self.__depth = 0\n self.__inobject = False\n self.__param = {}\n\n # this a critical data structure,\n self.__nodeTree = [[], ]", "def reset(self):\n self.set_state(self._initial_state)", "def clear(self) -> None:\n self.node.prev = self.node.next = self.node", "def reset_state(self):\n for row in range(len(self.state)):\n for column in range(len(self.state[row])):\n self.state[row][column] = None", "def clear(self):\n while len(self.nodes) > 0:\n self.nodes[0].remove()\n\n self.has_been_modified = False", "def reset_graph(self):\n self.nodes = {}\n self.add_node(self.initial_state)\n self.add_node(self.final_state)", "def resetTree(self):\n for fila in self.verDatos.get_children():\n self.verDatos.delete(fila)", "def restore(self):\n self.nodes.restore()", "def reset(self):\n self.__init__()", "def reset(self):\n self.__init__()", "def reset(self):\n self.__init__()", "def on_ResetNode_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def prepare(self):\n if self._ready_nodes is not None:\n raise ValueError(\"cannot prepare() more than once\")\n\n self._ready_nodes = [\n i.node for i in self._node2info.values() if i.npredecessors == 0\n ]\n # ready_nodes is set before we look for cycles on purpose:\n # if the user wants to catch the CycleError, that's fine,\n # they can continue using the instance to grab as many\n # nodes as possible before cycles block more progress\n cycle = self._find_cycle()\n if cycle:\n raise CycleError(f\"nodes are in a cycle\", cycle)", "def _Restore(self) -> None:\n self._SetNodes(self._nodes)", "def ready(self):\n self.update({self.STATE: self.STATE_READY})", "def reset(self):\n self.bbox = None\n self.true = None\n self.meta = None", "def _reset_state(self):\n self.state = self.start_state.copy()", "def reset(self):\n debug('resetting')\n self.marked = False", "def 
reset(self):\n self.nodes = []\n self.start = self.start\n self.end = self.end\n\n for row in self.charMap:\n for c in row:\n if c == \"2\":\n c.c = \"0\"\n self.n_checked = 0", "def initialize(self):\n assert not self.children()", "def _reset_state(self):\n\n self.state = None\n self.use_count = 0\n\n # Guards both state and use_count\n self.cond = threading.Condition()\n\n # Incremented each time we initialise a new mount state. Aids\n # debugging.\n self.generation = 0", "def reset(self):\n self.obstacles = []\n self._tick = 0", "def _Freeze(self) -> None:\n self._SetNodes(_FROZEN_NODE_COUNT)", "def clear(self):\n self._nodes = { }\n self._arcs = set()", "def clear(self):\n\t\tself._root = None\n\t\tself._size = 0\n\t\tself._depth = 0\n\t\tself._max_chars = 1\n\t\treturn", "def reset_visited(self):\n self.__visited = False", "def clear(self):\n self.nodes = list()\n self.inputs = list()\n self.nodes += [self]", "def reset(self):\n self.elements = [0] * len(self)", "def reset(self):\n self.steps = 0\n self.state = 0\n self.trajectory = []", "def updatetree(self):\n if self.node:\n self.node.update()\n self.draw()", "def reset(self):\n super(CheckMayaAbstract, self).reset()\n self.errorNodes = list()\n self._errorDict = {}", "def reset_tree() -> None:\n global task_tree\n task_tree = TaskTreeNode(NoOperation())\n task_tree.start_time = datetime.datetime.now()\n task_tree.status = TaskStatus.RUNNING", "def free_borrowed(self):\n self.xmlnode=None", "def clear(self):\n del self.__tree\n self.__tree = AVLTree()\n print(\"Set is empty now\")", "def reset(self):\n # Because it's a queue no need for reset..\n pass", "def reset(self):\n # Because it's a queue no need for reset..\n pass", "def reset(self):\n # Because it's a queue no need for reset..\n pass", "def make_looped(self) -> None:\n self.most_right.right_node = self.most_left\n self.most_left.left_node = self.most_right", "def reset(self):\n self._varstate = None\n self.frozen = False", "def reset(self):\n self.state = [\n ['R', 'N', 'B', 'Q', 'K', 'B', 'N', 'R'],\n ['P'] * 8,\n [' '] * 8,\n [' '] * 8,\n [' '] * 8,\n [' '] * 8,\n ['p'] * 8,\n ['r', 'n', 'b', 'q', 'k', 'b', 'n', 'r']\n ]", "def reset(self):\n self.state = self.resolve_et({NFA.START})", "def reset(self):\n self._setupObjects()", "def clear(self):\n \n self.node_set.clear()\n self.prefix.clear()\n self.suffix.clear()\n self.num_node = 0\n self.edges = 0", "def reset(self) -> None:\n self[-1].reset()", "def reset(self):\n ...", "def reset(self):\n ...", "def set_gamestate(self, state: GameState) -> None:\n self.root_state = deepcopy(state)\n self.root = Node()", "def reset_progress(self):\n self.state = \"\"", "def reset(self):\n\t\tself.pos = self.start\n\n\t\tself.weighted_n_left = 0.0\n\t\tself.weighted_n_right = self.weighted_n_node_samples\n\n\t\tself.label_count_left \t= np.zeros(self.n_classes)\n\t\tself.label_count_right \t= np.copy(self.label_count_total)", "def reset(self):\n self.visited = False\n self.calculated = False\n self.past_value = self.value\n self.value = 0", "def reset(self):\n self.doc = xml.dom.minidom.Document()", "def reset(self) -> None:\n ...", "def reset(self) -> None:\n ...", "def reset(self) -> None:\n ...", "def reset(self) -> None:\n ...", "def reset(self) -> None:\n ...", "def reset(self) -> None:\n ...", "def reset(self) -> None:\n ...", "def reset(self) -> None:\n ...", "def _reset_fuzz_state(self):\n self.total_mutant_index = 0\n if self.fuzz_node:\n self.fuzz_node.reset()", "def reset(self):\n self.st = 
segment_tree.SegmentTreeSampler(self.n, np.ones(self.n) * self.reg, self.random_state)", "def clear_state(self):\n super().clear_state()\n self.pid = 0", "def _refresh_tree_ref(self):\n self._tree_ref = RedBlackNodeRef(\n address=self._storage.get_root_address())", "def _reset(self) -> None:", "def _reset(self) -> None:", "def reset_if_ready(self):\n ready = self.ready\n if ready:\n self.reset()\n return ready", "def _reset_state(self):\n # Directed graph, (u, v) => v depends on u. u, v are pairs of (rule_name, rule_dir_abs)\n # Used for generating Topological Sort\n self._rule_to_dependency_graph_adjlist = {}\n self._topologically_sorted_build_rule_names = []\n\n # List of (dependency_name, dependency_dir_abs) for each build rule\n self._rule_to_dependency_list = {}\n\n # Space for rough work :P\n self._unresolved_commands = set()", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self):", "def _reset(self):", "def clear(self):\r\n self.nodes = collections.defaultdict(list)\r\n self.nodes_mapping = collections.defaultdict(list)\r\n self.edges = 0\r\n #self.children_length={}\r\n self.parents_length = collections.defaultdict(lambda : collections.defaultdict(int))", "def reset(self):\n self.previous = None\n self.state = None\n self.args = None\n self.context = None", "def _reset(self):\n pass", "def __shutdown(self):\n if self.__saveExpandedNodes(self.bookmarksTree.rootIndex()):\n self.__bookmarksManager.changeExpanded()", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self, state: nx.Graph = None):\n if state is None:\n self._state = self.init_mol\n else:\n self._state = state\n\n self.action_space.update_actions(self._state, self.observation_space)\n if self.record_path:\n self._path = [self._state]\n self._counter = 0", "def clear(self):\n self._ll_tree.clear()", "def reset(self):\n # The apply(f) method recursively calls f on itself and all children\n self.apply(self._reset_module)" ]
[ "0.7023482", "0.6967508", "0.6964375", "0.670918", "0.67040575", "0.6657348", "0.65749437", "0.65516037", "0.65334755", "0.64151716", "0.6284906", "0.62482226", "0.62395614", "0.61377215", "0.61206025", "0.6064591", "0.604821", "0.6045733", "0.60326046", "0.6019434", "0.5980827", "0.59577423", "0.5953604", "0.59498775", "0.5944229", "0.5898186", "0.5897568", "0.5897568", "0.5897568", "0.5891227", "0.5889133", "0.58664775", "0.5854126", "0.5851134", "0.58091414", "0.5800271", "0.5796151", "0.57958937", "0.5792835", "0.57661706", "0.572604", "0.57247734", "0.57159466", "0.5710393", "0.5699852", "0.5699808", "0.5696882", "0.56960666", "0.5690669", "0.56887394", "0.5688179", "0.5679739", "0.56423646", "0.56423646", "0.56423646", "0.5630337", "0.56276", "0.5610578", "0.5609584", "0.560902", "0.56031835", "0.558662", "0.55864424", "0.55864424", "0.5580201", "0.5570834", "0.55687934", "0.5561186", "0.5560757", "0.5556788", "0.5556788", "0.5556788", "0.5556788", "0.5556788", "0.5556788", "0.5556788", "0.5556788", "0.55567425", "0.55518514", "0.5547579", "0.5540265", "0.5534171", "0.5534171", "0.55290604", "0.552691", "0.5513388", "0.5513388", "0.5513388", "0.5513388", "0.55071336", "0.5505092", "0.5502186", "0.55020964", "0.54976", "0.54970306", "0.54970306", "0.54970306", "0.54940754", "0.548593", "0.5472865" ]
0.75797325
0
Evaluates the node's (and its children's) state.
def evaluate(self, blackboard):
    success = EvaluationState.success

    state = success
    for child in self.children:
        state = child.__call__(blackboard)

        if state != success:
            break

    return state
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def evaluate(self, tree):\n\t\tpass", "def evaluate(self, state):\n abstract", "def evaluate_node(self):\n # p, v = np.random.random(225).astype(np.float16), np.random.random()\n socket = zmq.Context().socket(zmq.DEALER)\n socket.setsockopt_string(zmq.IDENTITY, self.player_id)\n socket.connect('ipc://./tmp/oracle_%s' % self.tree.model_name)\n print('start to evaluate', self.tree.model_name)\n while True:\n # print(self.tree.to_evaluate.qsize())\n batch = []\n states = []\n colors = []\n size = self.tree.to_evaluate.qsize()\n if size > config.INFERENCE_BATCHSIZE:\n size = config.INFERENCE_BATCHSIZE\n elif size == 0:\n time.sleep(0.001)\n continue\n for _ in range(size):\n t, black, white = self.tree.to_evaluate.get()\n mine, yours = posswap(t, black, white)\n batch.append((str(mine), str(yours), t % 2))\n states.append((black, white))\n colors.append(t % 2)\n socket.send(msgpack.dumps((batch, self.player_id)))\n result = msgpack.loads(socket.recv())\n assert len(states) == len(result[0])\n assert len(states) == len(result[1])\n for ind, state in enumerate(states):\n with self.lock:\n self.tree.nodes[state].p = result[0][ind]\n if colors[ind] == 0:\n self.tree.nodes[state].v = result[1][ind]\n else:\n self.tree.nodes[state].v = -result[1][ind]\n self.tree.nodes[state].updated = True", "def evaluate(self) :\n for inp in self.inStates :\n if inp.getState() == 1 : return 1\n return 0", "def evaluate(self) :\n for inp in self.inStates :\n if inp.getState() == 0 : return 0\n return 1", "def evaluate(self) :\n if self.inStates[0].getState() == self.inStates[1].getState(): return 0\n return 1", "def evaluate(self):\n return self._evaluate_recur(self.root())", "def evaluate(self):\n return self._evaluate_recur(self.root())", "def evaluate(self, state):\n\n if self.is_coords and len(state) != len(self.coords):\n raise Exception(\"\"\"state must have the same length as coords.\"\"\")\n\n if not len(state) == len(set(state)):\n raise Exception(\"\"\"Each node must appear exactly once in state.\"\"\")\n\n if min(state) < 0:\n raise Exception(\"\"\"All elements of state must be non-negative\"\"\"\n + \"\"\" integers.\"\"\")\n\n if max(state) >= len(state):\n raise Exception(\"\"\"All elements of state must be less than\"\"\"\n + \"\"\" len(state).\"\"\")\n\n return self.calculate_fitness(state)", "def test_get_node_state(self):\n pass", "def state(self):\n return self._node._state", "def compute(self, *args, **kwargs):\n for node in self.evaluation_sequence:\n node.evaluate()", "def evaluate(self, blackboard):\n success = EvaluationState.success\n\n for child in self.children:\n state = child.__call__(blackboard)\n\n if state == success:\n return success\n\n return EvaluationState.failure", "def leafEvaluation(self, state):\n\n \"\"\"\n Use random generated values for now\n \"\"\"\n z = np.random.randint(2)\n v = random.uniform(0, 1) \n return (1-LAMBDA) * v + LAMBDA * z", "def _playout(self, state):\n node = self._root\n while (1):\n if node.is_leaf():\n break\n # Greedily select next move.\n action, node = node.select(self._c_puct)\n state.do_move(action)\n\n # Evaluate the leaf using a network which outputs a list of\n # (action, probability) tuples p and also a score v in [-1, 1]\n # for the current player.\n action_probs, leaf_value = self._policy(state)\n # Check for end of game.\n end, winner = state.game_end()\n if not end:\n node.expand(action_probs)\n else:\n # for end state,return the \"true\" leaf_value\n if winner == -1: # tie\n leaf_value = 0.0\n else:\n leaf_value = (\n 1.0 if winner == 
state.get_current_player() else -1.0\n )\n\n # Update value and visit count of nodes in this traversal.\n node.update_recursive(-leaf_value)", "def _playout(self, state):\n node = self._root\n while(1):\n if node.is_leaf():\n break\n # Greedily select next move.\n action, node = node.select(self._c_puct)\n state.do_move(action)\n\n # Evaluate the leaf using a network which outputs a list of\n # (action, probability) tuples p and also a score v in [-1, 1]\n # for the current player.\n action_probs, leaf_value = self._policy(state)\n # Check for end of game.\n end, winner = state.game_end()\n if not end:\n node.expand(action_probs)\n else:\n # for end state,return the \"true\" leaf_value\n if winner == -1: # tie\n leaf_value = 0.0\n else:\n leaf_value = (\n 1.0 if winner == state.get_current_player() else -1.0\n )\n\n # Update value and visit count of nodes in this traversal.\n node.update_recursive(-leaf_value)", "def test_ChangeValueTree():\n Tree = graph.oval_graph.OvalNode(1, 'operator', 'and', False, [\n graph.oval_graph.OvalNode(2, 'value', \"true\", False),\n graph.oval_graph.OvalNode(3, 'value', \"false\", False),\n graph.oval_graph.OvalNode(4, 'operator', 'or', False, [\n graph.oval_graph.OvalNode(5, 'value', \"false\", False),\n graph.oval_graph.OvalNode(6, 'value', \"true\", False)\n ]\n )\n ]\n )\n\n Tree.change_tree_value(3, \"true\")\n tests.any_test_help.any_test_treeEvaluation_with_tree(Tree, \"true\")", "def evaluate(self):\n eval_list = nx.topological_sort(self.graph)\n for n in eval_list:\n n.evaluate()\n print(\"evaluating type\", type(n))\n\n # Notify observers of finished calculation\n self.notifyObservers(\"EVALUATION DONE\")\n return \"FINISHED\"", "def evaluate(self, state):\n\n fitness = np.sum(state)\n self.num_evals += 1\n #print(self.num_evals)\n return fitness", "def _isthisapropertree(self):\n ok = True\n if self._leftchild:\n if self._leftchild._parent != self:\n ok = False\n if self._leftchild._isthisapropertree() == False:\n ok = False\n if self._rightchild:\n if self._rightchild._parent != self:\n ok = False\n if self._rightchild._isthisapropertree() == False:\n ok = False\n if self._parent:\n if (self._parent._leftchild != self\n and self._parent._rightchild != self):\n ok = False\n return ok", "def evaluate(self):\n pass", "def evaluate(self):\n pass", "def evaluate(self) :\n pass", "def update_tree(root, executed_acts, total_rew):\n root.value = max(total_rew, root.value)\n root.visits += 1\n new_nodes = 0\n\n node = root\n for step, act in enumerate(executed_acts):\n if act not in node.children:\n node.children[act] = Node()\n new_nodes += 1\n node = node.children[act]\n node.value = max(total_rew, node.value)\n node.visits += 1\n\n return new_nodes", "def explore_expr(expr, value, is_child):\n referenced_value = value.referenced_value()\n Explorer.explore_expr(expr, referenced_value, is_child)\n return False", "def eval_logic(self, checkDict):\n result = True\n #gets individual evaluations from children\n passList = []\n for child in self.children:\n myVal = child.eval_comparison(checkDict)\n passList.append(child.eval_comparison(checkDict))\n\n #if only one child returns the only boolean available\n if(len(passList) == 1):\n result = passList[0]\n\n #TODO: Combine following cases possibly\n #print(passList)\n #gets resutl if only 2 simple logics\n elif(len(passList) == 2 and len(self.operators) == 1):\n\n result = self.operators[0](passList[0], passList[1])\n else:\n #combines all children logic using the operators\n firstCheck = True\n opIndex = 0\n 
for i in range(0,len(passList)):\n if(firstCheck):\n firstCheck = False\n result = self.operators[opIndex](passList[0], passList[1])\n i+=1\n else:\n result = self.operators[opIndex](result,passList[i])\n opIndex += 1\n \"\"\"\n print('----------------------')\n print(result)\n \"\"\"\n return result", "def visit(self, node):", "def visit(self, node):", "def test_get_node_state_readonly(self):\n pass", "def changeState(self, node, name, state):", "def evaluate(self):\n max_decomp = 0\n max_value = 0.0\n v = 0.0\n # find the largest log-value in children\n # for computing the log-exponentials\n for d in self.__children:\n # should be Product Node\n node = self.__children[d]\n value = node.getLogValue()\n if value == Node.ZERO:\n continue\n if max_decomp == 0 or value > max_value:\n max_decomp = d\n max_value = value\n\n if max_decomp == 0:\n self.setLogValue(Node.ZERO)\n\n for d in self.__children:\n if d not in self.__children_counts:\n continue\n cnts = self.__children_counts[d]\n node = self.__children[d]\n value = node.getLogValue()\n if value == Node.ZERO:\n continue\n v += cnts * np.exp(value - max_value)\n self.setLogValue(np.log(v / self.__counts) + max_value)", "def _isthisapropertree(self):\n ok = True\n if self._leftchild is not None:\n if self._leftchild._parent != self:\n ok = False\n if self._leftchild._isthisapropertree() is False:\n ok = False\n if self._rightchild is not None:\n if self._rightchild._parent != self:\n ok = False\n if self._rightchild._isthisapropertree() is False:\n ok = False\n if self._parent is not None:\n if self not in (self._parent._leftchild, self._parent._rightchild):\n ok = False\n return ok", "def eval(self, node):\n\n return None", "def evaluate(self, state) -> float:\n evaluater = Evaluater(state)\n result = evaluater.transform(self.expression)\n\n try:\n value = sympy.sympify(\"\".join(tree_to_string(result)), locals=evaluater.locals)\n if value == sympy.nan:\n return None\n return value\n except TypeError:\n return None", "def compute(self, node, input_vals):\n assert False, \"Implemented in subclass\"", "def get_power_state(self, node):", "def evaluate(\n self, nodes, derivatives=np.array([0, 0, 0]), modes=None, unique=False\n ):", "def evaluate(self, state):\n _n = len(state)\n _t = np.ceil(self.t_pct*_n)\n\n # Calculate head and tail values\n tail_0 = self.tail(0, state)\n head_1 = self.head(1, state)\n\n # Calculate R(X, T)\n if (tail_0 > _t and head_1 > _t):\n _r = _n\n else:\n _r = 0\n\n # Evaluate function\n fitness = max(tail_0, head_1) + _r\n self.num_evals += 1\n return fitness", "def solveOneStep(self):\n ### Student code goes here\n state = self.currentState\n #print (type(state))\n self.visited[state] = True\n #print (type(self.gm.getGameState()))\n moves = self.gm.getMovables()\n print (\"CURRENTSTATE\" + str(self.currentState.state))\n print (\"MOVABLES:\")\n if moves:\n for m in moves:\n print (str(m))\n print (\"CHILDINDEX:\")\n print (state.nextChildToVisit)\n print (\"*********\")\n if state.state == self.victoryCondition:\n return True\n #if no child to expand then go back\n if not moves or state.nextChildToVisit >= len(moves):\n self.currentState = state.parent\n if state.requiredMovable is not None:\n self.gm.reverseMove(state.requiredMovable)\n # expand\n else:\n\n next_move = moves[state.nextChildToVisit]\n self.gm.makeMove(next_move)\n state.nextChildToVisit += 1\n\n #if to parent or if visited then skip\n while (((state.parent is not None) and (self.gm.getGameState() == state.parent.state))) or 
GameState(self.gm.getGameState(), 0, None) in self.visited:\n print (\"PARENT FOUND!\")\n self.gm.reverseMove(next_move)\n if state.nextChildToVisit >= len(moves):\n self.currentState = state.parent\n return False\n else:\n next_move = moves[state.nextChildToVisit]\n self.gm.makeMove(next_move)\n state.nextChildToVisit += 1\n\n next_state = GameState(self.gm.getGameState(), state.depth + 1, next_move)\n next_state.parent = state\n #next_state.requiredMovable = next_move\n state.children.append(next_state)\n self.currentState = next_state\n print (state.nextChildToVisit)\n return False", "def _apply_tree_policy(self, root, state):\n visit_path = [root]\n working_state = state.clone()\n current_node = root\n while not working_state.is_terminal() and current_node.explore_count > 0:\n if not current_node.children:\n # For a new node, initialize its state, then choose a child as normal.\n legal_actions = working_state.legal_actions()\n # Reduce bias from move generation order.\n self._random_state.shuffle(legal_actions)\n player_sign = -1 if working_state.current_player() != self.player else 1\n current_node.children = [SearchNode(action, player_sign)\n for action in legal_actions]\n\n if working_state.is_chance_node():\n # For chance nodes, rollout according to chance node's probability\n # distribution\n outcomes = working_state.chance_outcomes()\n action_list, prob_list = zip(*outcomes)\n action = self._random_state.choice(action_list, p=prob_list)\n chosen_child = next(c for c in current_node.children\n if c.action == action)\n else:\n # Otherwise choose node with largest UCT value\n chosen_child = max(\n current_node.children,\n key=lambda c: c.uct_value(current_node.explore_count, self.uct_c, # pylint: disable=g-long-lambda\n self.child_default_value))\n\n working_state.apply_action(chosen_child.action)\n current_node = chosen_child\n visit_path.append(current_node)\n\n return visit_path, working_state", "def grow_tree(self):\n\n decision_node = self.root\n internal_env = copy.copy(self.env)\n\n while (not decision_node.is_final) and decision_node.visits > 1:\n\n a = self.select(decision_node)\n\n new_random_node = decision_node.next_random_node(a, self._hash_action)\n\n (new_decision_node, r) = self.select_outcome(internal_env, new_random_node)\n\n new_decision_node = self.update_decision_node(new_decision_node, new_random_node, self._hash_space)\n\n new_decision_node.reward = r\n new_random_node.reward = r\n\n decision_node = new_decision_node\n\n decision_node.visits += 1\n cumulative_reward = self.evaluate(internal_env)\n\n while not decision_node.is_root:\n random_node = decision_node.father\n cumulative_reward += random_node.reward\n random_node.cumulative_reward += cumulative_reward\n random_node.visits += 1\n decision_node = random_node.father\n decision_node.visits += 1", "def evalOnSubTreeEnd(self, node):\n\n return None", "def evaluate(self, state):\n\n fitness = 0\n\n for i in range(1, len(state)):\n if state[i] != state[i - 1]:\n fitness += 1\n\n return fitness", "def _playout(self, state):\n node = self._root\n\n while (1):\n if node.is_leaf():\n break\n # Greedily select next move.\n action, node = node.select(self._c_puct)\n state.do_move(action)\n\n action_probs, _ = self._policy(state)\n # Check for end of game\n end, winner = state.game_end()\n if not end:\n node.expand(action_probs)\n # Evaluate the leaf node by random rollout\n leaf_value = self._evaluate_rollout(state)\n # Update value and visit count of nodes in this traversal.\n node.update_recursive(-leaf_value)", 
"def _playout(self, state):\n node = self._root\n while(1):\n is_leaf, action_node = node.select(state, self._c_puct)\n if is_leaf: break\n state.do_move(action_node[0])\n node = action_node[1]\n\n # value is useless here\n action_probs, _ = self._policy(state)\n # Check for end of game\n end, winner = state.if_win()\n if not end:\n node.expand(action_probs, state.point)\n # Evaluate the leaf node by random rollout\n leaf_value = self._evaluate_rollout(state)\n # Update value and visit count of nodes in this traversal.\n node.update_recursive(-leaf_value)", "def evalBoolean(tree):\n # check if children the children is a \"or\" or a \"and\" tokken\n if (tree.children[0].data == \"or\"):\n return evalBoolean(tree.children[0].children[0]) or evalBoolean(tree.children[0].children[1])\n if (tree.children[0].data) == \"and\":\n return evalBoolean(tree.children[0].children[0]) and evalBoolean(tree.children[0].children[1])\n \n # set var1\n if(tree.children[0].data == \"integer\"):\n var1 = evalInteger(tree.children[0])\n elif(tree.children[0].data == \"variable\"):\n var1 = getValue(tree.children[0].children[0].value)\n\n # set var2\n if(tree.children[2].data == \"integer\"):\n var2 = evalInteger(tree.children[2])\n elif(tree.children[2].data == \"variable\"):\n var2 = getValue(tree.children[2].children[0].value)\n\n if(tree.children[1].children[0].data == \"greater\"):\n return var1 > var2\n if(tree.children[1].children[0].data == \"less\"):\n return var1 < var2\n if(tree.children[1].children[0].data == \"equals\"):\n return var1 == var2\n if(tree.children[1].children[0].data == \"nequal\"):\n return var1 != var2\n\n print(\"ERROR : UNEXPECTED TOKKEN\")\n return False", "def evaluate(self):\n raise NotImplementedError(\"Abstract method\")", "def DFS(self, nDepth, treenode, state):\n \n visited = []\n visited.insert(0, (state, treenode))\n \n for index in range(0, nDepth-1): \n actions = self.priorProb(state)\n treenode.expansion(actions)\n treenode.updateU_value(actions)\n treenode, action = treenode.selection() \n state = state.do_move(action).copy()\n visited.insert(0, (state, treenode)) \n \n for index in range(0, len(visited)-1): \n if(visited[index][1].isLeaf() == True):\n value = self.leafEvaluation(visited[index][0])\n else: \n value = visited[index][1].backUp(value)\n visited[-1][1].updateQ_value(value)\n visited[-1][1].updateVisits()\n return visited[-1][1]", "def compute_tree(self, x):\n if (self.body in operators):\n try:\n return self.body(self.left.compute_tree(x), self.right.compute_tree(x))\n except:\n return float(\"inf\")\n elif self.body == 'x': return x\n else: return self.body", "def run_all(self):\n # print(\"running all nodes\")\n executed = set()\n node_update_states = {node: node.block_updates for node in self.flow_view.node_items}\n\n def traverse_upwards(node):\n # Traverse upwards to the top of data flow graph\n if node in executed:\n return\n for port in node.inputs:\n for connection in port.connections:\n traverse_upwards(connection.out.node)\n # print(\"executing\", node)\n node.update_event()\n executed.add(node)\n\n for node in self.flow_view.node_items:\n node.block_updates = True\n\n for node in self.flow_view.node_items:\n traverse_upwards(node)\n\n for node in self.flow_view.node_items:\n node.block_updates = node_update_states[node]\n # print(\"All nodes executed\")", "def evaluate(self, state, player, random_state):\n raise NotImplementedError", "def visit_Node(self, node):\n pass", "def updatetree(self):\n if self.node:\n self.node.update()\n self.draw()", 
"def mutate(self):\n if self.mutator.evaluate(self):\n self.evaluated = 0\n return 1\n return 0", "def solveOneStep(self):\n ### Student code goes here\n if self.currentState not in self.visited:\n self.visited[self.currentState]=True\n return self.currentState.state == self.victoryCondition\n\n if self.currentState.state == self.victoryCondition:\n self.visited[self.currentState]=True\n return True\n\n if not self.currentState.children:\n for move in self.gm.getMovables():\n self.gm.makeMove(move)\n childrenState = GameState(self.gm.getGameState(), self.currentState.depth+1, move)\n if childrenState not in self.visited:\n childrenState.parent = self.currentState\n self.currentState.children.append(childrenState)\n self.gm.reverseMove(move)\n\n if self.currentState.nextChildToVisit<len(self.currentState.children):\n nextState = self.currentState.children[self.currentState.nextChildToVisit]\n self.currentState.nextChildToVisit += 1\n self.gm.makeMove(nextState.requiredMovable)\n self.currentState = nextState\n return self.solveOneStep()\n else:\n self.gm.reverseMove(self.currentState.requiredMovable)\n self.currentState = self.currentState.parent\n return self.solveOneStep()", "def evaluate(self):\n raise NotImplementedError()", "def _evaluate(self, tree: nltk.tree.Tree):\n if tree.label() == \"Root\":\n if len(tree) == 1:\n func = self._evaluate(tree[0])\n func()\n else:\n func = self._evaluate(tree[0])\n result = self._evaluate(tree[1])\n func(result)\n return\n elif tree.label() == \"Result\":\n if tree[0].label() == \"Entity\":\n return self._evaluate(tree[0])\n if tree[0].label() == \"Unary_Command\":\n func = self._evaluate(tree[0])\n result = self._evaluate(tree[1])\n return func(result)\n if tree[1].label() == \"Binary_Command\":\n result_left = self._evaluate(tree[0])\n func = self._evaluate(tree[1])\n result_right = self._evaluate(tree[2])\n return func(result_left, result_right)\n elif tree.label() == \"Unary_Command\":\n func = self.unary_commands.get(tree[0])[1]\n return func\n elif tree.label() == \"Terminal_Command\":\n func = self.terminal_commands.get(tree[0])[1]\n return func\n elif tree.label() == \"Binary_Command\":\n func = self.binary_commands.get(tree[0])[1]\n return func\n elif tree.label() == \"Entity\":\n return [tree[0]]\n\n print(\"Error: CFG label rule not defined in \"\n \"evaluateEngine#self._evaluate\",\n file=sys.stderr)", "def Eval(state):\n\n# H1 = htest2(state)\n# return H1\n H2 = h1(state)*monotonic(state)\n return H2", "def get_value(self):\n if not self.visited:\n # first visit at node\n self.visited = True\n\n # value calculation\n for node, weight in self.predecessors:\n self.value += (node.get_value() * weight)\n\n # applying activation function\n if self.activation is not None:\n self.activation()\n\n self.calculated = True\n\n return self.value\n else:\n # visited node\n if self.calculated:\n # calculated in this computation\n return self.value\n else:\n # recurrent connection\n return self.past_value", "def check(self, node):\n # do the necessary setup/arguments and call self.visit (node, args)\n self.visit(node, defined=set())", "def _evaluate_branch(self, branch, remaining_nodes):\n current_eq = None\n next_nodes = []\n\n # Get the player that we need to evaluate\n for n in remaining_nodes:\n # Node are both equations and variables. 
We just want the\n # equations.\n if current_eq is None:\n if isinstance(n, Equation):\n current_eq = n\n else:\n # Leave whatever is left for the next layer of evaluation.\n next_nodes.append(n)\n\n # No more equations! We're done\n if current_eq is None:\n return\n\n # Go through each of the branches and evaluate the state.\n for b in branch.branches:\n # Let the player assign output\n outputs = current_eq.calculate(b.assignments)\n\n # Construct a distribution of these outputs\n distn = JointDist(outputs)\n\n # Add the branches, and then evaluate using the next set of\n # remaining nodes.\n b.add_branches(distn)\n self._evaluate_branch(b, next_nodes)", "def expand_and_evaluate(self, env) -> (np.ndarray, float):\n\t\tstate_planes = env.canonical_input_planes()\n\n\t\tleaf_p, leaf_v = self.predict(state_planes)\n\t\t# these are canonical policy and value (i.e. side to move is \"white\")\n\n\t\tif not env.white_to_move:\n\t\t\tleaf_p = Config.flip_policy(leaf_p) # get it back to python-chess form\n\t\t#np.testing.assert_array_equal(Config.flip_policy(Config.flip_policy(leaf_p)), leaf_p)\n\n\t\treturn leaf_p, leaf_v", "def __call__(self, node):\n return True;\n predcount = self.CountPred(node);\n if predcount == 0: return True;\n return len(node.predicates) != 0;", "def _numconditions(self, node_p):\n node_p = self.getnodenamed(node_p) # Verify pointer.\n parents = self.getnodeparents(node_p) # nl_p\n nparents = self.lengthnodelist(parents) # integer\n # Get the number of states in each parent node, multiply them\n npstates = 1\n for idx in range(nparents):\n node_i = self.nthnode(parents, idx)\n nstates_i = self.getnodenumberstates(node_i)\n npstates *= nstates_i\n\n return npstates", "def solveOneStep(self):\n ### Student code goes here\n # Mark this move as explored\n self.visited[self.currentState] = True\n self.visited_states.append(self.currentState.state)\n\n # Get move to make\n movables = self.gm.getMovables()\n # print(\"EXPLORING GAME STATE \" + str(self.gm.getGameState()) + \"---------------------------------------------------------\")\n to_move = self.currentState.nextChildToVisit # movables index\n # print(\"depth \", self.currentState.depth)\n\n # Return if done\n if self.currentState.state == self.victoryCondition:\n # print(\"DONE\")\n return True\n\n # If current state has no children, make children\n if not self.currentState.children:\n for movable_statement in movables:\n # Make the move\n # print(\"implementing move \", movable_statement)\n self.gm.makeMove(movable_statement)\n\n # Create a new state with this move made\n new_state = self.gm.getGameState()\n # print (\"new state \", new_state)\n\n # If the new state hasn't been visited and isn't in the queue then add it as a child and to the queue\n if (new_state not in self.visited_states):\n new_gs = GameState(new_state, self.currentState.depth + 1, movable_statement)\n new_gs.parent = self.currentState\n self.currentState.children.append(new_gs)\n self.currentState.nextChildToVisit = to_move + 1\n self.visited[new_gs] = True\n self.visited_states.append(new_state)\n self.gs_queue.append(new_gs)\n\n self.gm.reverseMove(movable_statement)\n\n # Return false if no more to explore\n if not self.gs_queue:\n return False\n\n # Revert to state at when current and next start to change\n root_curr = self.currentState\n self.currentState = self.gs_queue.popleft()\n root_new = self.currentState\n\n # Backtrack to when current node and new node start to diverge\n if root_new.depth == root_curr.depth:\n while root_curr.state != 
root_new.state:\n self.gm.reverseMove(root_curr.requiredMovable)\n root_curr = root_curr.parent\n root_new = root_new.parent\n else:\n while root_curr.requiredMovable:\n self.gm.reverseMove(root_curr.requiredMovable)\n root_curr = root_curr.parent\n\n # Return game master to state that we are exploring\n # Find path between root and current state\n path = []\n currNode = self.currentState\n while currNode != root_curr:\n path.append(currNode.requiredMovable)\n currNode = currNode.parent\n\n # Created backwards path, now make moves from root to current state\n path.reverse()\n for movable_statement in path:\n self.gm.makeMove(movable_statement)\n\n return False", "def test_update_node_state_readonly(self):\n pass", "def evaluate(self, payload, level=0, verbose=True):\n\n # if children are joined by AND, evaluate every child until all children\n # are evaluated or until a False breaks the loop (Need all True for AND)\n if self.conjunction_ in ['AND', 'NAND']:\n result = True\n i = 0\n while result and (i < len(self.children_)):\n \n if verbose:\n tabs = \"\\t\" * level\n if i > 0: print(\"\\n\" + tabs + f\"{self.conjunction_} \\n\")\n child_print = 'Composite' if isinstance(self.children_[i], Composite) else 'Evaluation'\n print(tabs + f\"Evaluating Child {i + 1}, Level {level + 1} - {child_print}\")\n \n result = self.children_[i].evaluate(payload, level + 1, verbose=verbose)\n i += 1\n if self.conjunction_ == 'NAND':\n result = not result\n\n\n # if children are joined by OR, evaluate every child until all children\n # are evaluated or until a True breaks the loop (only need 1 True for OR)\n elif self.conjunction_ in ['OR', 'NOR']:\n result = False\n i = 0\n while result == False and (i < len(self.children_)):\n \n if verbose:\n tabs = \"\\t\" * level\n if i > 0: print(\"\\n\" + tabs + f\"{self.conjunction_} \\n\")\n child_print = 'Composite' if isinstance(self.children_[i], Composite) else 'Evaluation'\n print(tabs + f\"Evaluating Child {i + 1}, Level {level + 1} - {child_print}\")\n \n result = self.children_[i].evaluate(payload, level + 1, verbose=verbose)\n i += 1\n if self.conjunction_ == 'NOR':\n result = not result\n\n # XOR evaluation - 1 and only 1 can be True. 
Have to iterate over all children unless the number of trues becomes greater than 1\n else:\n i = 0\n true_count = 0\n while true_count < 2 and (i < len(self.children_)):\n if verbose:\n tabs = \"\\t\" * level\n if i > 0: print(\"\\n\" + tabs + f\"{self.conjunction_} \\n\")\n child_print = 'Composite' if isinstance(self.children_[i], Composite) else 'Evaluation'\n print(tabs + f\"Evaluating Child {i + 1}, Level {level + 1} - {child_print}\")\n \n # += a boolean is equivalent to += 1 for T and += 0 for False\n true_count += self.children_[i].evaluate(payload, level + 1, verbose=verbose)\n i += 1\n\n if true_count == 1:\n result = True\n else:\n result = False\n\n if verbose: \n tabs = \"\\t\" * level\n print(\"\\n\" + tabs + f\"Composite Result: {result}\")\n\n return result", "def valid_tree(phi):\n q = deque([phi]) # queue of nodes to check\n visited = list() # already checked\n\n # save the indim of the root node, and make sure all the indims\n # of the children are the same\n indim = phi.indim\n retval = True\n varsfound = 0\n\n while len(q) > 0:\n # node to check\n node = q.popleft()\n\n # check outdim\n if isinstance(node, amnet.Variable):\n retval &= (node.outdim == node.indim)\n varsfound += 1\n elif isinstance(node, amnet.Linear):\n m, n = node.w.shape\n retval &= (node.outdim == m)\n retval &= (node.x.outdim == n)\n retval &= (all([bi == 0 for bi in node.b])) # check value\n elif isinstance(node, amnet.Constant):\n retval &= (node.outdim == len(node.b))\n retval &= (all([wij == 0 for wij in np.nditer(node.w)])) # check value\n elif isinstance(node, amnet.Affine):\n m, n = node.w.shape\n retval &= (node.outdim == m)\n retval &= (node.x.outdim == n)\n retval &= (m == len(node.b))\n elif isinstance(node, amnet.Mu):\n retval &= (node.outdim == node.x.outdim)\n retval &= (node.outdim == node.y.outdim)\n retval &= (node.z.outdim == 1)\n elif isinstance(node, amnet.Stack):\n retval &= (node.outdim == node.x.outdim + node.y.outdim)\n else:\n retval = False # unknown node type\n\n # check indim\n retval &= (node.indim == indim)\n\n # short-circuit if an inconsistency has been found\n if not retval:\n return False\n\n # add children to queue\n if not(any(node is e for e in visited)):\n visited.append(node)\n #q.extend(children(node))\n q.extend([c for c in children(node) if c not in visited])\n\n # finished iterating\n # TODO: also check if graph is cyclic\n return (varsfound == 1)", "def visit(self):\n self.tree = self.recursive_visit(self.tree)\n # assert self.current_line == self.tree.absolute_bounding_box.bottom_right.line", "def move(self, state):\n result = None\n self.currentDepthLimit = 0\t\n\tself.transposition = {}\n\tself.counter = 0\n\n\twhile True:\n u = float(\"inf\")\n\t v = float(\"-inf\")\n\t self.counter = 0\n\t result = None\n\t self.transposition = {}\n\t for a in state.actions():\n new = self.min_value(state.result(a), float(\"-inf\"), float(\"inf\"),self.currentDepthLimit)\n\t if new > v:\n\t v = new\n\t result = a\n\n\t elif new == v:\n\t if a.index < result.index:\n\t result = a\n\t if self.is_time_up():\n\t return result\n\t \n\t self.currentDepthLimit += 1\n\t \"\"\"If we never use evaluate function, it means all state are terminated, so return whatever the result is\"\"\"\n\t if self.counter == 0:\n\t break\n\t if self.is_time_up():\n \t return result\n\treturn result", "def evaluate(self, stochastic=False):\n ob = self._convert_state(self._env.reset())\n done = False\n actions = []\n sum_rew = 0\n while not done:\n ac, _ = self._act(ob, stochastic=stochastic)\n 
actions.append(ac)\n ob, rew, done, _ = self._env.step(ac)\n ob = self._convert_state(ob)\n sum_rew += rew\n self.ob = self._convert_state(self._env.reset())\n self.new = True\n return sum_rew, actions", "def h(self, node):\n\n return sum(s != g for (s, g) in zip(node.state, self.goal))", "def get_child_states(self, tree):\n # add extra singleton dimension in middle...\n # because pytorch needs mini batches... :sad:\n if tree.num_children==0:\n child_c = Var(torch.zeros(1,1,self.mem_dim))\n child_h = Var(torch.zeros(1,1,self.mem_dim))\n if self.cudaFlag:\n child_c, child_h = child_c.cuda(), child_h.cuda()\n else:\n child_c = Var(torch.Tensor(tree.num_children,1,self.mem_dim))\n child_h = Var(torch.Tensor(tree.num_children,1,self.mem_dim))\n if self.cudaFlag:\n child_c, child_h = child_c.cuda(), child_h.cuda()\n for idx in range(tree.num_children):\n child_c[idx] = tree.children[idx].state[0]\n child_h[idx] = tree.children[idx].state[1]\n # child_c[idx], child_h[idx] = tree.children[idx].state\n return child_c, child_h", "def state_update(self, p, state):\n node = self._validate(p)\n node._state = state", "def eval_node(node, env):\n global genv\n global result\n node_type = node_name(node)\n\n if node_type == 'Expr':\n return eval_node(node.value, env)\n elif node_type == 'Assign':\n val = eval_node(node.value, env)\n\n while type(val) is tuple and len(val) == 2 and (type(val[1]) == GlobalEnv or type(val[1]) == LocalEnv):\n val = val[0]\n\n # extract the variable name, evaluate the RHS, then extend the environment.\n return 0, env.extend([node.targets[0].id], [val])\n elif node_type == 'BinOp':\n # get the left and right operands (we use only single operands) and the operator.\n # evaluate the operands and apply the operator. return the number, env.\n\n left = eval_node(node.left, env)[0]\n right = eval_node(node.right, env)[0]\n\n left = left[0] if type(left) is tuple else left\n right = right[0] if type(right) is tuple else right\n\n op = node_name(node.op)\n\n if op == \"Add\":\n return (left + right), env\n elif op == \"Sub\":\n return (left - right), env\n elif op == \"Mult\":\n return (left * right), env\n elif op == \"Div\":\n return (left / right), env\n elif op == \"Mod\":\n return (left % right), env\n return 0, env\n elif node_type == 'FunctionDef':\n # need the function id (name), args, and body. 
Extend the environment.\n # you can leave the args wrapped in the ast class and the body and unpack them\n # when the function is called.\n\n return 0, env.extend([node.name], [(node.args, node.body)])\n elif node_type == 'Call':\n # get any values passed in to the function from the Call object.\n # get the fxn name and look up its parameters, if any, and body from the env.\n # get lists for parameter names and values and extend a LocalEnv with those bindings.\n # evaluate the body in the local env, return the value, env.\n\n func = eval_node(node.func, env)[0]\n local_env = LocalEnv(None, env)\n\n args = func[0].args\n body = func[1]\n\n index = 0\n for val in node.args:\n local_env = local_env.extend([args[index].arg], [eval_node(val, local_env)[0]])\n index += 1\n\n for node in body:\n val = eval_node(node, local_env)\n\n if node_name(node) == \"Return\":\n output_val = val[0]\n local_env = val[1]\n return output_val, env\n elif node_type == 'Return':\n # evaluate the node, return the value, env.\n return eval_node(node.value, env)\n elif node_type == 'Name':\n # Name(identifier id)- lookup the value binding in the env\n # return the value, env\n return env.lookup(node.id), env\n # Num(object n) -- a number, return the number, env.\n elif node_type == 'Num':\n return node.n, env", "def calculateState (self):\r\n newState = 0\r\n # print (\"Inside state function the states DNs are: \\n\")\r\n # print (\"Before starting \\n\")\r\n self.stateDanglingNodes()\r\n #for i in range(len(self.metaSpikes)):\r\n # if self.metaSpikes[i].typeSpike == 1:\r\n # print (\"Meta atom number is: \" + str(self.atomNumber) + \"\\n\")\r\n \r\n insideMetState = []\r\n # To calculate the state we need to update every atom the metaatom consistrs off then see\r\n # the states of every dangling node in the metaspikes\r\n for i in range(len(self.metaSpikes)):\r\n if self.metaSpikes[i].typeSpike == 1:\r\n #print (\"Inside type 1 \\n\")\r\n #print (\"Number of type 1 nodes: \" + str(len(self.metaSpikes[i].danglingNodeList)) + \"\\n\")\r\n for j in range(len(self.metaSpikes[i].danglingNodeList)):\r\n insideMetState.append(self.metaSpikes[i].danglingNodeList[j].state)\r\n if self.metaSpikes[i].danglingNodeList[j].state == 1:\r\n # print (\"Adding one \\n\" )\r\n newState += 1\r\n else:\r\n # print (\"Subracting one \\n\")\r\n newState -= 1\r\n else:\r\n \r\n # print (\"Inside type 2 \\n\")\r\n # print (\"Number od type 1 tales: \" + str(len(self.metaSpikes[i].danglingTailList)) + \"\\n\")\r\n for j in range(len(self.metaSpikes[i].danglingTailList)):\r\n #print (\"Size of tail: \" + str(len(self.metaSpikes[i].danglingTailList[j].nodeList)) + \"\\n\")\r\n for k in range(len(self.metaSpikes[i].danglingTailList[j].nodeList)):\r\n insideMetState.append(self.metaSpikes[i].danglingTailList[j].nodeList[k].state)\r\n if self.metaSpikes[i].danglingTailList[j].nodeList[k].state == 1:\r\n newState += 1\r\n else:\r\n newState -= 1 \r\n \r\n # print (\"The state of analysed nodes: \\n\" + str(insideMetState) + \"\\n\")\r\n # print (\"The length of analysed nodes: \\n\" + str(len(insideMetState)) + \"\\n\")\r\n # print (\"The new state is: \" + str(newState) + \"\\n\") \r\n self.state = newState", "def successor(self, state):\n pass # abstract", "def iter_nodes(self):", "def ismcts(rootstate, itermax, verbose=False, belief=[]):\n\tprint(belief)\n\trootnode = Node()\n\n\tfor i in range(itermax):\n\t\tnode = rootnode\n\n\t\t# Determinize\n\t\tstate = rootstate.clone_and_randomize(rootstate.player_to_move)\n\n\t\t# Select\n\t\twhile 
state.get_moves() != [] and node.get_untried_moves(state.get_moves()) == []:\n\t\t\t# node is fully expanded and non-terminal\n\t\t\tnode = node.ucb_select_child(state.get_moves())\n\t\t\tstate.do_move(node.move)\n\n\t\t# Expand\n\t\tuntried_moves = node.get_untried_moves(state.get_moves())\n\t\tif untried_moves: # if we can expand (i.e. state/node is non-terminal)\n\t\t\tm = random.choice(untried_moves)\n\t\t\tplayer = state.player_to_move\n\t\t\tstate.do_move(m)\n\t\t\tnode = node.add_child(m, player) # add child and descend tree\n\n\t\t# Simulate\n\t\twhile state.get_moves(): # while state is non-terminal\n\t\t\tstate.do_move(random.choice(state.get_moves()))\n\n\t\t# Backpropagate\n\t\twhile node is not None: # backpropagate from the expanded node and work back to the root node\n\t\t\tnode.update(state)\n\t\t\tnode = node.parentNode\n\n\t# Output some information about the tree - can be omitted\n\tif verbose:\n\t\tprint(rootnode.tree_to_string(0))\n\telse:\n\t\tprint(rootnode.children_to_string())\n\n\treturn max(rootnode.child_nodes, key=lambda c: c.visits).move # return the move that was most visited", "def compute(self, node, input_vals):\r\n raise NotImplementedError", "def do_check(self):\n res = self.entity.do_check(self.context)\n if res:\n return self.RES_OK, 'Node check succeeded.'\n else:\n return self.RES_ERROR, 'Node check failed.'", "def solveOneStep(self):\n ### Student code goes here\n if self.currentState.state == self.victoryCondition:\n self.visited[self.currentState]=True\n return True\n return self.BFS()", "def evaluate(self):\n raise Exception(\"Not implemented.\")", "def goalTest(node, goal):\r\n if node.state == goal:\r\n return node", "def expand_and_evaluate(self, env: Chess) -> (np.ndarray, float):\n state_planes = env.canon_input_planes()\n\n leaf_p, leaf_v = self.predict(state_planes)\n\n if not env.whites_turn():\n leaf_p = Config.flip_policy(leaf_p)\n\n return leaf_p, leaf_v", "def evaluation( self ) :\n\n return( self.__evaluation )", "def performBacktrackSearch(self, rootNode, node):\r\n \r\n print (\"-- proc --\", node.state.assignment)\r\n \r\n #check if we have reached goal state\r\n if node.state.checkGoalState():\r\n print (\"reached goal state\")\r\n return True\r\n \r\n else:\r\n \r\n #check if there is a case of early failure\r\n #if node.state.forwardCheck(): \r\n if node.state.arcConsistency():\r\n \r\n #find an unassigned variable \r\n variable = node.state.selectUnassignedVariable()\r\n \r\n #for all values in the domain\r\n for value in node.state.orderDomainValues():\r\n \r\n #check if constraints are satisfied\r\n if CSP.checkConstraints(node.state.assignment,\r\n variable, value):\r\n \r\n #create child node\r\n childNode = Node(State(node.state.assignment, \r\n node.state.possibleValues, variable, value))\r\n \r\n node.addChild(childNode)\r\n \r\n #show the search tree explored so far\r\n treeplot = TreePlot()\r\n treeplot.generateDiagram(rootNode, childNode)\r\n \r\n result = self.performBacktrackSearch(rootNode, childNode)\r\n if result == True:\r\n return True\r\n return False", "def value(self):\n\n if self.state == Node.State.VALID:\n return self._value\n else:\n with _NodeStackFrame(self):\n self.state = Node.State.PENDING\n self.value = self.compute_value(*self.args, **self.kwargs)\n return self._value", "def __call__(self, s, n=1000):\n\n root = StateNode(None, s, self.game)\n \n if root.parent is not None:\n raise ValueError(\"Root's parent must be None.\")\n \n for _ in range(n):\n #selection\n node = _get_next_node(root, 
self.tree_policy)\n #simulation\n node.reward = self.default_policy(node)\n #print(node.reward)\n #back\n self.backup(node)\n \n root.reset(copy.deepcopy(self.game_bak))\n \n #for i in root.children:\n # print(root.children[i].__dict__)\n # for j in root.children[i].children:\n # print(root.children[i].children[j].__dict__)\n # print(\"=======\")\n return rand_max(root.children.values(), key=lambda x: x.q).action, rand_max(root.children.values(), key=lambda x: x.q).q", "def compute_tree(self, tree):\n g_list_val, g_list_h = self._build_graph(tree) # return theano variable of each node\n list_val = self._traversal_tree(tree) #\n f = theano.function(g_list_val, g_list_h, allow_input_downcast=True)\n result = f(*list_val)\n return result", "def makeBranchEval(first_branch):\n\n def score(game, player):\n if not first_branch:\n first_branch.append(game.root)\n if game.root in first_branch:\n return 1.\n return 0.\n\n return score", "def forward(self, tree, embs, training = False):\n\n # add singleton dimension for future call to node_forward\n # embs = F.torch.unsqueeze(self.emb(inputs),1)\n\n loss = Var(torch.zeros(1)) # init zero loss\n if self.cudaFlag:\n loss = loss.cuda()\n\n for idx in range(tree.num_children):\n _, child_loss = self.forward(tree.children[idx], embs, training)\n loss = loss + child_loss\n child_c, child_h = self.get_child_states(tree)\n tree.state = self.node_forward(embs[tree.idx-1], child_c, child_h)\n\n if self.output_module != None:\n output = self.output_module.forward(tree.state[1], training)\n tree.output = output\n if training and tree.gold_label != None:\n target = Var(utils.map_label_to_target_sentiment(tree.gold_label))\n if self.cudaFlag:\n target = target.cuda()\n loss = loss + self.criterion(output, target)\n return tree.state, loss", "def fn(node):\n if not node: return 0 \n ans = node.val + fn(node.left) + fn(node.right)\n vals.append(ans)\n return ans", "def evaluate(self):\r\n raise Exception(\"Not implemented.\")", "def get_move(self, state):\n # this method should only be called when self is real root.,so that's here where we can should use mutiprocess\n if self._root.is_leaf(): # no expanded children yet\n action_probs, _ = self._policy(state)\n self._root.expand(action_probs)\n\n the_children = self._root._children\n i = 0\n sorted_children = sorted(the_children.items(), key=lambda act_node: act_node[1].get_value(self._c_puct))\n for child_node in sorted_children:\n i += 1\n child_tree = MCTS(policy_value_fn,root=child_node[1])\n state_copy = copy.deepcopy(state)\n state_copy.do_move(child_node[0])\n visits_count = 0\n for j in range(0,relu(1200-i*20),10): # at least run one time\n child_tree._playout(copy.deepcopy(state_copy))\n visits_count += 1\n self._root.update(-child_tree.get_root_node().last_leafvalue,visits_count=visits_count) # update real root\n child_tree.get_root_node().set_parent(self._root) # to link the sub tree\n\n '''\n for n in range(self._n_playout):\n # get top n (assumed to be 6) nodes from children\n # step1 let all children of root have chance to run in parallel\n # adjust the round count of children by value\n if n%6 == 0:\n the_children = self._root._children\n top_n = sorted(the_children.items(),key=lambda act_node: act_node[1].get_value(self._c_puct))[:6]\n for child_node in top_n:\n # child_tree = MCTS(policy_value_fn,copy.deepcopy(child_node)) # use copy because we will use it in multiprocess\n child_tree = MCTS(policy_value_fn,\n child_node) \n state_copy = copy.deepcopy(state)\n state_copy.do_move(child_node[0])\n 
child_tree._playout(state_copy)\n self._root.update(-child_tree.get_root_node().last_leafvalue) # update real root\n child_tree.get_root_node().set_parent(self._root) # to link the sub tree\n # self._root.get_children()[child_node[0]] = child_tree.get_root_node() # copy sub tree\n '''\n\n '''\n return max(self._root._children.items(),\n # key=lambda act_node: act_node[1].get_visits())[0]\n key=lambda act_node: act_node[1].get_value(self._c_puct))[0]\n '''\n\n for n in range(300):\n state_copy = copy.deepcopy(state)\n self._playout(state_copy)\n return max(self._root._children.items(),\n key=lambda act_node: act_node[1].get_value(self._c_puct))[0]", "def eval_tree(tree):\n global genv\n global result\n # Here, get the list of children nodes. Iterate over that list, calling eval_node on each node.\n for node in tree.body:\n val = eval_node(node, genv)\n result = val[0]\n genv = val[1]\n return result", "def solveOneStep(self):\n ### Student code goes here\n\n if self.currentState.state == self.victoryCondition:\n return True\n\n current_depth = self.currentState.depth\n found_move = False\n while self.currentState.parent:\n self.gm.reverseMove(self.currentState.requiredMovable)\n self.currentState = self.currentState.parent\n count = self.currentState.nextChildToVisit\n if len(self.currentState.children) > count:\n found_move = True\n break\n if not found_move:\n for all_visited in self.visited.keys():\n all_visited.nextChildToVisit = 0\n current_depth += 1\n if len(self.visited) == 1:\n all_possible_moves = self.gm.getMovables()\n for every_move in all_possible_moves:\n self.gm.makeMove(every_move)\n new_game_state = GameState(self.gm.getGameState(), current_depth, every_move)\n new_game_state.parent = self.currentState\n self.visited[new_game_state] = False\n self.currentState.children.append(new_game_state)\n self.gm.reverseMove(every_move)\n while current_depth != self.currentState.depth:\n count = self.currentState.nextChildToVisit\n self.currentState.nextChildToVisit += 1\n if len(self.currentState.children) > count:\n self.currentState = self.currentState.children[count]\n next_move = self.currentState.requiredMovable\n self.gm.makeMove(next_move)\n else:\n found_move = False\n while self.currentState.parent:\n self.gm.reverseMove(self.currentState.requiredMovable)\n self.currentState = self.currentState.parent\n if len(self.currentState.children) > self.currentState.nextChildToVisit:\n found_move = True\n break\n if not found_move:\n return False\n\n if self.currentState.state != self.victoryCondition:\n self.visited[self.currentState] = True\n all_possible_moves = self.gm.getMovables()\n next_depth = current_depth + 1\n for every_move in all_possible_moves:\n self.gm.makeMove(every_move)\n new_game_state = GameState(self.gm.getGameState(), next_depth, every_move)\n if new_game_state not in self.visited:\n self.visited[new_game_state] = False\n new_game_state.parent = self.currentState\n self.currentState.children.append(new_game_state)\n self.gm.reverseMove(every_move)\n return False\n else:\n return True", "def evaluate(self, values):\r\n return self.left.evaluate(values) - self.right.evaluate(values)", "def _check_for_value(self):\n self.node.get_value()", "def validate(self, node):" ]
[ "0.7153151", "0.6905958", "0.65348285", "0.6446776", "0.63490057", "0.62762874", "0.6272428", "0.6272428", "0.62597114", "0.62265426", "0.6199221", "0.61362064", "0.60104877", "0.59859467", "0.5944751", "0.5939889", "0.5925841", "0.58773786", "0.57595444", "0.57158136", "0.5695773", "0.5695773", "0.56425226", "0.56212497", "0.56172675", "0.5597603", "0.5586485", "0.5586485", "0.5579974", "0.5573298", "0.555366", "0.55438936", "0.5509078", "0.550388", "0.5498936", "0.5496936", "0.5491546", "0.54910946", "0.5488864", "0.54633296", "0.5459165", "0.54575145", "0.54546934", "0.5440132", "0.54269856", "0.5384739", "0.53811586", "0.5376643", "0.53689706", "0.53669006", "0.5364824", "0.5363648", "0.5350352", "0.5348807", "0.5344157", "0.5339258", "0.53269047", "0.53138494", "0.530817", "0.53069055", "0.5305388", "0.52980167", "0.5289257", "0.5287994", "0.5285554", "0.52829427", "0.52815217", "0.5280691", "0.5275567", "0.52754974", "0.52748996", "0.5271332", "0.526053", "0.52585465", "0.52563137", "0.5253954", "0.5243425", "0.5241059", "0.5227612", "0.5224219", "0.52190286", "0.52179736", "0.52173644", "0.52170014", "0.5208288", "0.5208165", "0.51990294", "0.5189529", "0.5186248", "0.5183801", "0.518298", "0.51754224", "0.5163553", "0.51597977", "0.5153003", "0.5149297", "0.5146687", "0.5145035", "0.51408994", "0.51339996" ]
0.6596801
2
Evaluates the node's (and its children's) state. Returns success if any node succeeds, else failure.
def evaluate(self, blackboard):
    success = EvaluationState.success

    for child in self.children:
        state = child.__call__(blackboard)

        if state == success:
            return success

    return EvaluationState.failure
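The document above is a behavior-tree style "selector" composite: it polls each child in order and short-circuits with success as soon as any child reports success, falling through to failure only when every child fails. Below is a minimal runnable sketch of that pattern for illustration; the EvaluationState enum, the Selector wrapper class, and the leaf callables are assumed scaffolding invented here, not part of this dataset record.

from enum import Enum


class EvaluationState(Enum):
    success = 1
    failure = 2


class Selector:
    """Composite node: succeeds as soon as any child succeeds."""

    def __init__(self, *children):
        self.children = children

    def __call__(self, blackboard):
        return self.evaluate(blackboard)

    def evaluate(self, blackboard):
        success = EvaluationState.success
        for child in self.children:
            state = child.__call__(blackboard)
            if state == success:
                return success
        return EvaluationState.failure


# Leaf nodes: plain callables that read a shared blackboard dict.
def has_key(blackboard):
    return EvaluationState.success if blackboard.get("has_key") else EvaluationState.failure


def can_pick_lock(blackboard):
    return EvaluationState.success if blackboard.get("lockpicks", 0) > 0 else EvaluationState.failure


open_door = Selector(has_key, can_pick_lock)
print(open_door({"has_key": False, "lockpicks": 1}))  # EvaluationState.success
print(open_door({"has_key": False, "lockpicks": 0}))  # EvaluationState.failure

For contrast, the first entry in the negatives list below is the "sequence" variant of the same composite: it keeps evaluating children while they succeed and returns the first non-success state, so every child must pass for the composite to pass.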
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def evaluate(self, blackboard):\n success = EvaluationState.success\n\n state = success\n for child in self.children:\n state = child.__call__(blackboard)\n\n if state != success:\n break\n\n return state", "def evaluate(self, tree):\n\t\tpass", "def do_check(self):\n res = self.entity.do_check(self.context)\n if res:\n return self.RES_OK, 'Node check succeeded.'\n else:\n return self.RES_ERROR, 'Node check failed.'", "def evaluate(self):\n return self._evaluate_recur(self.root())", "def evaluate(self):\n return self._evaluate_recur(self.root())", "def evaluate(self) :\n for inp in self.inStates :\n if inp.getState() == 1 : return 1\n return 0", "def evaluate(self, state):\n\n if self.is_coords and len(state) != len(self.coords):\n raise Exception(\"\"\"state must have the same length as coords.\"\"\")\n\n if not len(state) == len(set(state)):\n raise Exception(\"\"\"Each node must appear exactly once in state.\"\"\")\n\n if min(state) < 0:\n raise Exception(\"\"\"All elements of state must be non-negative\"\"\"\n + \"\"\" integers.\"\"\")\n\n if max(state) >= len(state):\n raise Exception(\"\"\"All elements of state must be less than\"\"\"\n + \"\"\" len(state).\"\"\")\n\n return self.calculate_fitness(state)", "def evaluate(self) :\n for inp in self.inStates :\n if inp.getState() == 0 : return 0\n return 1", "def evaluate(self) :\n if self.inStates[0].getState() == self.inStates[1].getState(): return 0\n return 1", "def evaluate(self, state):\n abstract", "def test_get_node_state(self):\n pass", "def testTree(self, valid):\n return testTreeF(self, valid)", "def _isthisapropertree(self):\n ok = True\n if self._leftchild:\n if self._leftchild._parent != self:\n ok = False\n if self._leftchild._isthisapropertree() == False:\n ok = False\n if self._rightchild:\n if self._rightchild._parent != self:\n ok = False\n if self._rightchild._isthisapropertree() == False:\n ok = False\n if self._parent:\n if (self._parent._leftchild != self\n and self._parent._rightchild != self):\n ok = False\n return ok", "def test_ChangeValueTree():\n Tree = graph.oval_graph.OvalNode(1, 'operator', 'and', False, [\n graph.oval_graph.OvalNode(2, 'value', \"true\", False),\n graph.oval_graph.OvalNode(3, 'value', \"false\", False),\n graph.oval_graph.OvalNode(4, 'operator', 'or', False, [\n graph.oval_graph.OvalNode(5, 'value', \"false\", False),\n graph.oval_graph.OvalNode(6, 'value', \"true\", False)\n ]\n )\n ]\n )\n\n Tree.change_tree_value(3, \"true\")\n tests.any_test_help.any_test_treeEvaluation_with_tree(Tree, \"true\")", "def evaluate(self):\n eval_list = nx.topological_sort(self.graph)\n for n in eval_list:\n n.evaluate()\n print(\"evaluating type\", type(n))\n\n # Notify observers of finished calculation\n self.notifyObservers(\"EVALUATION DONE\")\n return \"FINISHED\"", "def eval_tree(tree: GPTree, dataset: Iterable) -> list:\n results = []\n for data in zip(*dataset):\n try:\n output = tree.compute_tree(data[0])\n results.append(\n 0 if output == data[1] else 1\n ) # right or wrong, but no error.\n except Exception:\n results.append(2) # Fails to run.\n\n return results", "def __call__(self, node):\n return True;\n predcount = self.CountPred(node);\n if predcount == 0: return True;\n return len(node.predicates) != 0;", "def evaluate_node(self):\n # p, v = np.random.random(225).astype(np.float16), np.random.random()\n socket = zmq.Context().socket(zmq.DEALER)\n socket.setsockopt_string(zmq.IDENTITY, self.player_id)\n socket.connect('ipc://./tmp/oracle_%s' % self.tree.model_name)\n print('start to 
evaluate', self.tree.model_name)\n while True:\n # print(self.tree.to_evaluate.qsize())\n batch = []\n states = []\n colors = []\n size = self.tree.to_evaluate.qsize()\n if size > config.INFERENCE_BATCHSIZE:\n size = config.INFERENCE_BATCHSIZE\n elif size == 0:\n time.sleep(0.001)\n continue\n for _ in range(size):\n t, black, white = self.tree.to_evaluate.get()\n mine, yours = posswap(t, black, white)\n batch.append((str(mine), str(yours), t % 2))\n states.append((black, white))\n colors.append(t % 2)\n socket.send(msgpack.dumps((batch, self.player_id)))\n result = msgpack.loads(socket.recv())\n assert len(states) == len(result[0])\n assert len(states) == len(result[1])\n for ind, state in enumerate(states):\n with self.lock:\n self.tree.nodes[state].p = result[0][ind]\n if colors[ind] == 0:\n self.tree.nodes[state].v = result[1][ind]\n else:\n self.tree.nodes[state].v = -result[1][ind]\n self.tree.nodes[state].updated = True", "def check(self, node):\n # do the necessary setup/arguments and call self.visit (node, args)\n self.visit(node, defined=set())", "def _isthisapropertree(self):\n ok = True\n if self._leftchild is not None:\n if self._leftchild._parent != self:\n ok = False\n if self._leftchild._isthisapropertree() is False:\n ok = False\n if self._rightchild is not None:\n if self._rightchild._parent != self:\n ok = False\n if self._rightchild._isthisapropertree() is False:\n ok = False\n if self._parent is not None:\n if self not in (self._parent._leftchild, self._parent._rightchild):\n ok = False\n return ok", "def compute(self, *args, **kwargs):\n for node in self.evaluation_sequence:\n node.evaluate()", "def eval_logic(self, checkDict):\n result = True\n #gets individual evaluations from children\n passList = []\n for child in self.children:\n myVal = child.eval_comparison(checkDict)\n passList.append(child.eval_comparison(checkDict))\n\n #if only one child returns the only boolean available\n if(len(passList) == 1):\n result = passList[0]\n\n #TODO: Combine following cases possibly\n #print(passList)\n #gets resutl if only 2 simple logics\n elif(len(passList) == 2 and len(self.operators) == 1):\n\n result = self.operators[0](passList[0], passList[1])\n else:\n #combines all children logic using the operators\n firstCheck = True\n opIndex = 0\n for i in range(0,len(passList)):\n if(firstCheck):\n firstCheck = False\n result = self.operators[opIndex](passList[0], passList[1])\n i+=1\n else:\n result = self.operators[opIndex](result,passList[i])\n opIndex += 1\n \"\"\"\n print('----------------------')\n print(result)\n \"\"\"\n return result", "def goalTest(node, goal):\r\n if node.state == goal:\r\n return node", "def validate(self, node):", "def performBacktrackSearch(self, rootNode, node):\r\n \r\n print (\"-- proc --\", node.state.assignment)\r\n \r\n #check if we have reached goal state\r\n if node.state.checkGoalState():\r\n print (\"reached goal state\")\r\n return True\r\n \r\n else:\r\n \r\n #check if there is a case of early failure\r\n #if node.state.forwardCheck(): \r\n if node.state.arcConsistency():\r\n \r\n #find an unassigned variable \r\n variable = node.state.selectUnassignedVariable()\r\n \r\n #for all values in the domain\r\n for value in node.state.orderDomainValues():\r\n \r\n #check if constraints are satisfied\r\n if CSP.checkConstraints(node.state.assignment,\r\n variable, value):\r\n \r\n #create child node\r\n childNode = Node(State(node.state.assignment, \r\n node.state.possibleValues, variable, value))\r\n \r\n node.addChild(childNode)\r\n \r\n 
#show the search tree explored so far\r\n treeplot = TreePlot()\r\n treeplot.generateDiagram(rootNode, childNode)\r\n \r\n result = self.performBacktrackSearch(rootNode, childNode)\r\n if result == True:\r\n return True\r\n return False", "def evalBoolean(tree):\n # check if children the children is a \"or\" or a \"and\" tokken\n if (tree.children[0].data == \"or\"):\n return evalBoolean(tree.children[0].children[0]) or evalBoolean(tree.children[0].children[1])\n if (tree.children[0].data) == \"and\":\n return evalBoolean(tree.children[0].children[0]) and evalBoolean(tree.children[0].children[1])\n \n # set var1\n if(tree.children[0].data == \"integer\"):\n var1 = evalInteger(tree.children[0])\n elif(tree.children[0].data == \"variable\"):\n var1 = getValue(tree.children[0].children[0].value)\n\n # set var2\n if(tree.children[2].data == \"integer\"):\n var2 = evalInteger(tree.children[2])\n elif(tree.children[2].data == \"variable\"):\n var2 = getValue(tree.children[2].children[0].value)\n\n if(tree.children[1].children[0].data == \"greater\"):\n return var1 > var2\n if(tree.children[1].children[0].data == \"less\"):\n return var1 < var2\n if(tree.children[1].children[0].data == \"equals\"):\n return var1 == var2\n if(tree.children[1].children[0].data == \"nequal\"):\n return var1 != var2\n\n print(\"ERROR : UNEXPECTED TOKKEN\")\n return False", "def evaluate(self, payload, level=0, verbose=True):\n\n # if children are joined by AND, evaluate every child until all children\n # are evaluated or until a False breaks the loop (Need all True for AND)\n if self.conjunction_ in ['AND', 'NAND']:\n result = True\n i = 0\n while result and (i < len(self.children_)):\n \n if verbose:\n tabs = \"\\t\" * level\n if i > 0: print(\"\\n\" + tabs + f\"{self.conjunction_} \\n\")\n child_print = 'Composite' if isinstance(self.children_[i], Composite) else 'Evaluation'\n print(tabs + f\"Evaluating Child {i + 1}, Level {level + 1} - {child_print}\")\n \n result = self.children_[i].evaluate(payload, level + 1, verbose=verbose)\n i += 1\n if self.conjunction_ == 'NAND':\n result = not result\n\n\n # if children are joined by OR, evaluate every child until all children\n # are evaluated or until a True breaks the loop (only need 1 True for OR)\n elif self.conjunction_ in ['OR', 'NOR']:\n result = False\n i = 0\n while result == False and (i < len(self.children_)):\n \n if verbose:\n tabs = \"\\t\" * level\n if i > 0: print(\"\\n\" + tabs + f\"{self.conjunction_} \\n\")\n child_print = 'Composite' if isinstance(self.children_[i], Composite) else 'Evaluation'\n print(tabs + f\"Evaluating Child {i + 1}, Level {level + 1} - {child_print}\")\n \n result = self.children_[i].evaluate(payload, level + 1, verbose=verbose)\n i += 1\n if self.conjunction_ == 'NOR':\n result = not result\n\n # XOR evaluation - 1 and only 1 can be True. 
Have to iterate over all children unless the number of trues becomes greater than 1\n else:\n i = 0\n true_count = 0\n while true_count < 2 and (i < len(self.children_)):\n if verbose:\n tabs = \"\\t\" * level\n if i > 0: print(\"\\n\" + tabs + f\"{self.conjunction_} \\n\")\n child_print = 'Composite' if isinstance(self.children_[i], Composite) else 'Evaluation'\n print(tabs + f\"Evaluating Child {i + 1}, Level {level + 1} - {child_print}\")\n \n # += a boolean is equivalent to += 1 for T and += 0 for False\n true_count += self.children_[i].evaluate(payload, level + 1, verbose=verbose)\n i += 1\n\n if true_count == 1:\n result = True\n else:\n result = False\n\n if verbose: \n tabs = \"\\t\" * level\n print(\"\\n\" + tabs + f\"Composite Result: {result}\")\n\n return result", "def valid_tree(phi):\n q = deque([phi]) # queue of nodes to check\n visited = list() # already checked\n\n # save the indim of the root node, and make sure all the indims\n # of the children are the same\n indim = phi.indim\n retval = True\n varsfound = 0\n\n while len(q) > 0:\n # node to check\n node = q.popleft()\n\n # check outdim\n if isinstance(node, amnet.Variable):\n retval &= (node.outdim == node.indim)\n varsfound += 1\n elif isinstance(node, amnet.Linear):\n m, n = node.w.shape\n retval &= (node.outdim == m)\n retval &= (node.x.outdim == n)\n retval &= (all([bi == 0 for bi in node.b])) # check value\n elif isinstance(node, amnet.Constant):\n retval &= (node.outdim == len(node.b))\n retval &= (all([wij == 0 for wij in np.nditer(node.w)])) # check value\n elif isinstance(node, amnet.Affine):\n m, n = node.w.shape\n retval &= (node.outdim == m)\n retval &= (node.x.outdim == n)\n retval &= (m == len(node.b))\n elif isinstance(node, amnet.Mu):\n retval &= (node.outdim == node.x.outdim)\n retval &= (node.outdim == node.y.outdim)\n retval &= (node.z.outdim == 1)\n elif isinstance(node, amnet.Stack):\n retval &= (node.outdim == node.x.outdim + node.y.outdim)\n else:\n retval = False # unknown node type\n\n # check indim\n retval &= (node.indim == indim)\n\n # short-circuit if an inconsistency has been found\n if not retval:\n return False\n\n # add children to queue\n if not(any(node is e for e in visited)):\n visited.append(node)\n #q.extend(children(node))\n q.extend([c for c in children(node) if c not in visited])\n\n # finished iterating\n # TODO: also check if graph is cyclic\n return (varsfound == 1)", "def hasChildren():", "def is_leaf(self):\n # TODO: Check if both left child and right child have no value\n return ... 
and ...", "def _apply_tree_policy(self, root, state):\n visit_path = [root]\n working_state = state.clone()\n current_node = root\n while not working_state.is_terminal() and current_node.explore_count > 0:\n if not current_node.children:\n # For a new node, initialize its state, then choose a child as normal.\n legal_actions = working_state.legal_actions()\n # Reduce bias from move generation order.\n self._random_state.shuffle(legal_actions)\n player_sign = -1 if working_state.current_player() != self.player else 1\n current_node.children = [SearchNode(action, player_sign)\n for action in legal_actions]\n\n if working_state.is_chance_node():\n # For chance nodes, rollout according to chance node's probability\n # distribution\n outcomes = working_state.chance_outcomes()\n action_list, prob_list = zip(*outcomes)\n action = self._random_state.choice(action_list, p=prob_list)\n chosen_child = next(c for c in current_node.children\n if c.action == action)\n else:\n # Otherwise choose node with largest UCT value\n chosen_child = max(\n current_node.children,\n key=lambda c: c.uct_value(current_node.explore_count, self.uct_c, # pylint: disable=g-long-lambda\n self.child_default_value))\n\n working_state.apply_action(chosen_child.action)\n current_node = chosen_child\n visit_path.append(current_node)\n\n return visit_path, working_state", "def test_value_in_tree_returns_true(balanced_7_nodes):\n assert balanced_7_nodes.contains(7)", "def done(self):\n return self.left + 1 == self.right", "def execute(self):\n if len(self._tree) > 0:\n return self._tour(self._tree.root(),0,[]) # start the recursion", "def evalOnSubTreeEnd(self, node):\n\n return None", "def state(self):\n return self._node._state", "def explore_expr(expr, value, is_child):\n referenced_value = value.referenced_value()\n Explorer.explore_expr(expr, referenced_value, is_child)\n return False", "def update_tree(root, executed_acts, total_rew):\n root.value = max(total_rew, root.value)\n root.visits += 1\n new_nodes = 0\n\n node = root\n for step, act in enumerate(executed_acts):\n if act not in node.children:\n node.children[act] = Node()\n new_nodes += 1\n node = node.children[act]\n node.value = max(total_rew, node.value)\n node.visits += 1\n\n return new_nodes", "def _evaluate(self, tree: nltk.tree.Tree):\n if tree.label() == \"Root\":\n if len(tree) == 1:\n func = self._evaluate(tree[0])\n func()\n else:\n func = self._evaluate(tree[0])\n result = self._evaluate(tree[1])\n func(result)\n return\n elif tree.label() == \"Result\":\n if tree[0].label() == \"Entity\":\n return self._evaluate(tree[0])\n if tree[0].label() == \"Unary_Command\":\n func = self._evaluate(tree[0])\n result = self._evaluate(tree[1])\n return func(result)\n if tree[1].label() == \"Binary_Command\":\n result_left = self._evaluate(tree[0])\n func = self._evaluate(tree[1])\n result_right = self._evaluate(tree[2])\n return func(result_left, result_right)\n elif tree.label() == \"Unary_Command\":\n func = self.unary_commands.get(tree[0])[1]\n return func\n elif tree.label() == \"Terminal_Command\":\n func = self.terminal_commands.get(tree[0])[1]\n return func\n elif tree.label() == \"Binary_Command\":\n func = self.binary_commands.get(tree[0])[1]\n return func\n elif tree.label() == \"Entity\":\n return [tree[0]]\n\n print(\"Error: CFG label rule not defined in \"\n \"evaluateEngine#self._evaluate\",\n file=sys.stderr)", "def validate_tree(self, remove_unreachable_nodes):\n if self.root_id not in self.nodes:\n print(\"Tree does not have a root\")\n return 
False\n valid, visited_nodes = self._validate_tree(self.nodes[self.root_id])\n print(\"Finished tree traversal\")\n visited_nodes_set = set(visited_nodes)\n if len(visited_nodes) != len(self.nodes):\n if not remove_unreachable_nodes:\n print(\"There exist unreachable nodes. Not removing them. Use \" +\n \"remove_unreachable_nodes to remove.\")\n return False\n else:\n removed_nodes = [\n node_id\n for node_id in self.nodes\n if node_id not in visited_nodes_set\n ]\n self.nodes = {\n node_id: node\n for node_id, node in self.nodes.items()\n if node_id in visited_nodes_set\n }\n print(\"Removed %d unreachable nodes\" % len(removed_nodes))\n return valid, len(visited_nodes)", "def ismcts(rootstate, itermax, verbose=False, belief=[]):\n\tprint(belief)\n\trootnode = Node()\n\n\tfor i in range(itermax):\n\t\tnode = rootnode\n\n\t\t# Determinize\n\t\tstate = rootstate.clone_and_randomize(rootstate.player_to_move)\n\n\t\t# Select\n\t\twhile state.get_moves() != [] and node.get_untried_moves(state.get_moves()) == []:\n\t\t\t# node is fully expanded and non-terminal\n\t\t\tnode = node.ucb_select_child(state.get_moves())\n\t\t\tstate.do_move(node.move)\n\n\t\t# Expand\n\t\tuntried_moves = node.get_untried_moves(state.get_moves())\n\t\tif untried_moves: # if we can expand (i.e. state/node is non-terminal)\n\t\t\tm = random.choice(untried_moves)\n\t\t\tplayer = state.player_to_move\n\t\t\tstate.do_move(m)\n\t\t\tnode = node.add_child(m, player) # add child and descend tree\n\n\t\t# Simulate\n\t\twhile state.get_moves(): # while state is non-terminal\n\t\t\tstate.do_move(random.choice(state.get_moves()))\n\n\t\t# Backpropagate\n\t\twhile node is not None: # backpropagate from the expanded node and work back to the root node\n\t\t\tnode.update(state)\n\t\t\tnode = node.parentNode\n\n\t# Output some information about the tree - can be omitted\n\tif verbose:\n\t\tprint(rootnode.tree_to_string(0))\n\telse:\n\t\tprint(rootnode.children_to_string())\n\n\treturn max(rootnode.child_nodes, key=lambda c: c.visits).move # return the move that was most visited", "def check_all_leaves(trial):\r\n leaf_nodes = trial.get_leaf_nodes()\r\n shuffle(leaf_nodes)\r\n states = []\r\n max_value = trial.get_max_dist_value()\r\n for node in leaf_nodes:\r\n trial_copy = copy.deepcopy(trial)\r\n states.append(trial_copy)\r\n node.observe()\r\n #if node.value >= max_value:\r\n # trial_copy = copy.deepcopy(trial)\r\n # states.append(trial_copy)\r\n # return zip(states, [node.label for node in trial.observed_nodes] + [0])\r\n trial_copy = copy.deepcopy(trial)\r\n states.append(trial_copy)\r\n return zip(states, [node.label for node in trial.observed_nodes] + [0])", "def _validate_tree(self, base_node):\n if base_node.node_id not in self.nodes:\n print(\"Base node id %s not in nodes\" % base_node.node_id)\n return False, []\n if not base_node.is_valid():\n print(\"Base node %s not valid\" % base_node.node_id)\n return False, []\n if base_node.is_leaf():\n return True, [base_node.node_id]\n if (base_node.right_child is not None and\n base_node.right_child.node_id not in self.nodes.keys()):\n print(\n \"Right child id %s not in nodes\" % base_node.right_child.node_id\n + str(base_node)\n )\n return False, []\n if (base_node.left_child is not None and\n base_node.left_child.node_id not in self.nodes.keys()):\n print(\n \"Left child id %s not in nodes\" % base_node.left_child.node_id\n + str(base_node)\n )\n return False, []\n\n right_child = base_node.right_child\n left_child = base_node.left_child\n right_valid = True\n left_valid = 
True\n right_visited_nodes = []\n left_visited_nodes = []\n if right_child is not None:\n right_valid, right_visited_nodes = self._validate_tree(right_child)\n if not right_valid:\n print(\"At node %s right tree is invalid\" % base_node.node_id)\n return False, []\n\n if left_child is not None:\n left_valid, left_visited_nodes = self._validate_tree(left_child)\n if not left_valid:\n print(\"At node %s left tree is invalid\" % base_node.node_id)\n return False, []\n visited_nodes = ([base_node.node_id] + right_visited_nodes +\n left_visited_nodes)\n return True, visited_nodes", "def leafEvaluation(self, state):\n\n \"\"\"\n Use random generated values for now\n \"\"\"\n z = np.random.randint(2)\n v = random.uniform(0, 1) \n return (1-LAMBDA) * v + LAMBDA * z", "def validate(self):\n return self._validate(self.root)", "def solveOneStep(self):\n ### Student code goes here\n if self.currentState not in self.visited:\n self.visited[self.currentState]=True\n return self.currentState.state == self.victoryCondition\n\n if self.currentState.state == self.victoryCondition:\n self.visited[self.currentState]=True\n return True\n\n if not self.currentState.children:\n for move in self.gm.getMovables():\n self.gm.makeMove(move)\n childrenState = GameState(self.gm.getGameState(), self.currentState.depth+1, move)\n if childrenState not in self.visited:\n childrenState.parent = self.currentState\n self.currentState.children.append(childrenState)\n self.gm.reverseMove(move)\n\n if self.currentState.nextChildToVisit<len(self.currentState.children):\n nextState = self.currentState.children[self.currentState.nextChildToVisit]\n self.currentState.nextChildToVisit += 1\n self.gm.makeMove(nextState.requiredMovable)\n self.currentState = nextState\n return self.solveOneStep()\n else:\n self.gm.reverseMove(self.currentState.requiredMovable)\n self.currentState = self.currentState.parent\n return self.solveOneStep()", "def on_visit(self, node: \"CSTNode\") -> bool:\n visit_func = getattr(self, f\"visit_{type(node).__name__}\", None)\n if visit_func is not None:\n retval = visit_func(node)\n else:\n retval = True\n # Don't visit children IFF the visit function returned False.\n return False if retval is False else True", "def on_visit(self, node: \"CSTNode\") -> bool:\n visit_func = getattr(self, f\"visit_{type(node).__name__}\", None)\n if visit_func is not None:\n retval = visit_func(node)\n else:\n retval = True\n # Don't visit children IFF the visit function returned False.\n return False if retval is False else True", "def test_has_correct_value(self):\n self.assertEqual(self.node.value, 7)", "def _playout(self, state):\n node = self._root\n while (1):\n if node.is_leaf():\n break\n # Greedily select next move.\n action, node = node.select(self._c_puct)\n state.do_move(action)\n\n # Evaluate the leaf using a network which outputs a list of\n # (action, probability) tuples p and also a score v in [-1, 1]\n # for the current player.\n action_probs, leaf_value = self._policy(state)\n # Check for end of game.\n end, winner = state.game_end()\n if not end:\n node.expand(action_probs)\n else:\n # for end state,return the \"true\" leaf_value\n if winner == -1: # tie\n leaf_value = 0.0\n else:\n leaf_value = (\n 1.0 if winner == state.get_current_player() else -1.0\n )\n\n # Update value and visit count of nodes in this traversal.\n node.update_recursive(-leaf_value)", "def validate(self, solution: list) -> (bool, float):\n start = time() * 1000\n nodes = self.application.nodes()\n\n if solution is None:\n return False, 
round(time() * 1000 - start, 3)\n elif len([node for node in list(nodes) if node not in solution]) == 0:\n logging.info(f\"All {len(solution)} nodes got visited\")\n return True, round(time() * 1000 - start, 3)\n else:\n logging.error(f\"{len([node for node in list(nodes) if node not in solution])} nodes were NOT visited\")\n return False, round(time() * 1000 - start, 3)", "def _playout(self, state):\n node = self._root\n while(1):\n if node.is_leaf():\n break\n # Greedily select next move.\n action, node = node.select(self._c_puct)\n state.do_move(action)\n\n # Evaluate the leaf using a network which outputs a list of\n # (action, probability) tuples p and also a score v in [-1, 1]\n # for the current player.\n action_probs, leaf_value = self._policy(state)\n # Check for end of game.\n end, winner = state.game_end()\n if not end:\n node.expand(action_probs)\n else:\n # for end state,return the \"true\" leaf_value\n if winner == -1: # tie\n leaf_value = 0.0\n else:\n leaf_value = (\n 1.0 if winner == state.get_current_player() else -1.0\n )\n\n # Update value and visit count of nodes in this traversal.\n node.update_recursive(-leaf_value)", "def any_node_running(self):\n return any(node.running() for node in self.nodes)", "def evaluate(self):\n try:\n test_val = self.expression()\n return test_val != 0\n except ValueError:\n raise ParseError(\"Could not evaluate expression.\")", "def validate(tree):\n return rvalidate(tree.root, None, None, None, None, 0, set())", "def check_subtree(node):\r\n if node:\r\n node_loc = node.location\r\n self.assertFalse(\r\n modulestore().has_item(node_loc.version_agnostic())\r\n )\r\n self.assertTrue(modulestore().has_item(node_loc.course_agnostic()))\r\n if node.has_children:\r\n for sub in node.get_children():\r\n check_subtree(sub)", "def leaves(node, res):\n leaf = True\n if node.lesser:\n leaf = False\n leaves(node.lesser, res)\n if node.greater:\n leaf = False\n leaves(node.greater, res)\n if leaf:\n res.append(node.indices)", "def solveOneStep(self):\n ### Student code goes here\n state = self.currentState\n #print (type(state))\n self.visited[state] = True\n #print (type(self.gm.getGameState()))\n moves = self.gm.getMovables()\n print (\"CURRENTSTATE\" + str(self.currentState.state))\n print (\"MOVABLES:\")\n if moves:\n for m in moves:\n print (str(m))\n print (\"CHILDINDEX:\")\n print (state.nextChildToVisit)\n print (\"*********\")\n if state.state == self.victoryCondition:\n return True\n #if no child to expand then go back\n if not moves or state.nextChildToVisit >= len(moves):\n self.currentState = state.parent\n if state.requiredMovable is not None:\n self.gm.reverseMove(state.requiredMovable)\n # expand\n else:\n\n next_move = moves[state.nextChildToVisit]\n self.gm.makeMove(next_move)\n state.nextChildToVisit += 1\n\n #if to parent or if visited then skip\n while (((state.parent is not None) and (self.gm.getGameState() == state.parent.state))) or GameState(self.gm.getGameState(), 0, None) in self.visited:\n print (\"PARENT FOUND!\")\n self.gm.reverseMove(next_move)\n if state.nextChildToVisit >= len(moves):\n self.currentState = state.parent\n return False\n else:\n next_move = moves[state.nextChildToVisit]\n self.gm.makeMove(next_move)\n state.nextChildToVisit += 1\n\n next_state = GameState(self.gm.getGameState(), state.depth + 1, next_move)\n next_state.parent = state\n #next_state.requiredMovable = next_move\n state.children.append(next_state)\n self.currentState = next_state\n print (state.nextChildToVisit)\n return False", "def 
is_complete(self) -> bool:\n if self.root is None or self.root.left is None and self.root.right is None:\n return True # Empty tree or has one node is considered complete\n\n else: # Creating base index to begin traversal\n index = 0\n count = self.size()\n return self.complete_helper(self.root, index, count\n\n )", "def is_done(self):\n\n if not all(node.is_done for node in self.subnodes):\n return False\n elif fileutils.missing_files(self.output_files):\n return False\n\n return True", "def test_get_node_state_smartfail(self):\n pass", "def tag_node_from_pruning(self, tree, node, X, y):\n # If is a leaf, return False\n if node.nodes is None or len(node.nodes) == 0:\n return False\n\n # Score predictions from whole tree\n predictions = tree.predict(X)\n whole_tree_score = self.eval_func(y, predictions)\n\n # Get the children from the node\n children = BaseTree.collect_children(node)\n # Save original nodes\n original_nodes = node.nodes\n # Update node to be a leaf\n node.update(\n nodes={},\n children=children,\n feature_col=node.feature_col,\n feature_value=node.feature_value,\n node_type=node.node_type,\n )\n\n # Score predictions from leaf\n predictions = tree.predict(X)\n pruned_tree_score = self.eval_func(y, predictions)\n\n # If leaf is better, don't swap it back and return True for change\n if whole_tree_score < pruned_tree_score:\n return True\n\n # Otherwise, change the node back to the original node.\n node.update(\n children=[],\n nodes=original_nodes,\n feature_col=node.feature_col,\n feature_value=node.feature_value,\n node_type=node.node_type,\n )\n # Return False (for no change)\n return False", "def UpdateStatusFromChildren(self):\n if not self.subtests:\n return\n\n # If there are any active tests, consider it active; if any failed,\n # consider it failed, etc. The order is important!\n status = TestState.OverallStatus(\n [x.GetState().status for x in self.subtests])\n if status != self.GetState().status:\n self.UpdateState(status=status)", "def test_get_node_status(self):\n pass", "def any(self):\n return self.__node_a", "def __call__(self, node, operations, last_operation):\n if last_operation == NO_OPERATION:\n return 0\n return 1", "def fetchNodes(tree):\n if tree.results is None: #Check if the node is a branch\n condItems = {} #Initialize a container for the node conditions from lower branches\n v = [\"true\", \"false\"] #\"Veracity values\"\n for branch, veracity in [(tree.tb, v[0]), (tree.fb, v[1])]: #iterate over this node's true and false child nodes\n lower_results = fetchNodes(branch)\n if len(lower_results) == 1: #Check if child node is actually a leaf. 
If so,\n lower_results.insert(0, (tree.col, tree.value, veracity))\n condItems[veracity] = [lower_results] #Initialize the condition needed to reach that leaf\n else:\n condItems[veracity] = [] #If the child is not a leaf, initialize an empty list to contain its updated conditions\n for item in lower_results: #Iterate over each set of node conditions that stem from this branch\n new_descriptor = deepcopy(item) #make a deep copy of the list of node conditions from the lower level nodes\n #insert this node's condition at the beginning of each of the node conditions from the lower levels\n new_descriptor.insert(0, (tree.col, tree.value, veracity)) \n condItems[veracity].append(new_descriptor) #append the updated set of node conditions to the branches items\n node_conditions = deepcopy(condItems[v[0]]) #Initialize the complete list of node conditions that stem from this node\n node_conditions.extend(deepcopy(condItems[v[1]])) #Add the node conditions from the second branch of this node\n return node_conditions #Send the full set of node conditions from this node up to the higher nodes.\n else: #If the node is a leaf, return the dictionary of results\n return [tree.results]", "def forward(self, tree, embs, training = False):\n\n # add singleton dimension for future call to node_forward\n # embs = F.torch.unsqueeze(self.emb(inputs),1)\n\n loss = Var(torch.zeros(1)) # init zero loss\n if self.cudaFlag:\n loss = loss.cuda()\n\n for idx in range(tree.num_children):\n _, child_loss = self.forward(tree.children[idx], embs, training)\n loss = loss + child_loss\n child_c, child_h = self.get_child_states(tree)\n tree.state = self.node_forward(embs[tree.idx-1], child_c, child_h)\n\n if self.output_module != None:\n output = self.output_module.forward(tree.state[1], training)\n tree.output = output\n if training and tree.gold_label != None:\n target = Var(utils.map_label_to_target_sentiment(tree.gold_label))\n if self.cudaFlag:\n target = target.cuda()\n loss = loss + self.criterion(output, target)\n return tree.state, loss", "def resolve_to_true(self):\n print(colored(f\"Checking {self}\\n\", attrs=['bold', 'underline']))\n for elem in self.operands:\n if elem.resolve_to_true():\n print(colored(f\"Since {elem} is True then {self} is True\\n\", attrs=[\n 'bold', 'underline']))\n return True\n print(colored(f\"Since no element was True then {self} is False\\n\", attrs=[\n 'bold', 'underline']))\n return False", "def fn(node):\n if not node: return True, 0\n tf0, h0 = fn(node.left)\n tf1, h1 = fn(node.right)\n return tf0 and tf1 and abs(h0-h1) <= 1, 1 + max(h0, h1)", "def _numconditions(self, node_p):\n node_p = self.getnodenamed(node_p) # Verify pointer.\n parents = self.getnodeparents(node_p) # nl_p\n nparents = self.lengthnodelist(parents) # integer\n # Get the number of states in each parent node, multiply them\n npstates = 1\n for idx in range(nparents):\n node_i = self.nthnode(parents, idx)\n nstates_i = self.getnodenumberstates(node_i)\n npstates *= nstates_i\n\n return npstates", "def execute(self):\n if len(self._tree) > 0:\n return self.tour(self._tree.root(), 0, [])", "def check(self):\n self.isNodes = True\n self.isFixable = True\n errorNodes = list()\n for each in pm.ls(type='unknown'):\n errorNodes.append(each)\n self.status = 'OK'\n if len(errorNodes):\n self.setErrorNodes(errorNodes)\n self.setStatus('ERROR')", "def treePolicy(node):\n while not node.getState().checkTerminal():\n if node.checkFullyExpanded():\n node = findBestChild(node, True)\n else:\n return expandNode(node)\n return 
node", "def test_render_tree(self) -> None:\n\n def get_children(node):\n return node.children\n\n node, expect, withtags = self.tree_case_1()\n actual = render_tree(node, get_children)\n assert expect == actual, (expect, actual)\n\n node, expect, withtags = self.tree_case_2()\n actual = render_tree(node, get_children, 1)\n assert expect == actual, (expect, actual)\n\n # Ensure that we can call render_tree on the same Node\n # again. This wasn't possible in version 2.4.1 and earlier\n # due to a bug in render_tree (visited was set to {} as default\n # parameter)\n actual = render_tree(node, get_children, 1)\n assert expect == actual, (expect, actual)", "def bad_tree():\n t = graph.oval_graph.OvalNode(\n 1, \"value\", \"true\", False, [\n graph.oval_graph.OvalNode(\n 2, \"operator\", \"and\", False, [\n graph.oval_graph.OvalNode(\n 3, \"value\", \"true\", False)])])\n return", "def check_status(self):\n if not self.completed and not self.failed:\n if not self.is_job:\n self._remove_children_dependency()\n self.status = 'COMPLETED'\n self.completed = True\n else:\n completed = True\n failed = False\n for job_instance in self.instances:\n if job_instance.failed:\n failed = True\n break\n elif not job_instance.completed:\n completed = False\n\n if failed:\n self.status = 'FAILED'\n self.failed = True\n self.completed = False\n return False\n\n if completed:\n # The job node just finished, remove this dependency\n self.status = 'COMPLETED'\n self._remove_children_dependency()\n self.completed = True\n\n return not self.failed", "def _check_for_value(self):\n self.node.get_value()", "def leaf(self):\n if not self._leftchild and not self._rightchild:\n return True\n return False", "def node_leaf(self):\r\n return self.zero_son is None and self.one_son is None", "def is_final_node_reached(self):\n if self.actual_node == self.final_node:\n self.final_node_reached = True", "def IsLeaf(self):\n return not self.subtests", "def test_tree_with_one_node_has_correct_value(one_t):\n assert one_t.root.value == 10", "def internal(self):\n if self._leftchild or self._rightchild:\n return True\n return False", "def count_nodes(self):\n if self.is_empty():\n return 0\n elif self.is_leaf():\n return 1\n else:\n if self.get_left():\n if self.get_right():\n return 1 + self.get_left().count_nodes() + self.get_right().count_nodes()\n else:\n return 1 + self.get_left().count_nodes()\n else:\n return 1 + self.get_right().count_nodes()", "def execute(self):\n if len(self._tree) > 0:\n return self._tour(self._tree.root(), 0, [])", "def execute(self):\n if len(self._tree) > 0:\n return self._tour(self._tree.root(), 0, [])", "def test_get_node_state_readonly(self):\n pass", "def check(self):\n self.isNodes = False\n self.isFixable = False\n nodeType = self.get_parser.get('SETTINGS', 'settingsinfonode')\n self.setStatus('OK')\n if not len(pm.ls(type=nodeType)):\n self.setStatus('WARNING')\n self.setErrorMessage('No %s node found in the scene.' % nodeType)\n return False, ''\n elif len(pm.ls(type=nodeType)) > 1:\n self.setStatus('ERROR')\n self.setErrorMessage('More than 1 %s node found in the scene.' 
% nodeType)\n return False, ''\n return True, pm.ls(type=nodeType)[0]", "def __deep_count_errors(node, testSet, res):\n if node.results is not None: #Check if this node is a leaf node\n return __count_errors(node, testSet, res) #If so, return the test set classification errors made by this node.\n else:\n tbSet = testSet[testSet[node.col] >= node.value] #find which test observations belong to this tree's true branch\n fbSet = testSet[testSet[node.col] < node.value] #find which test observations belong to this tree's false branch\n \n if node.tb.results is None: #Check if the true branch is a branch node\n #If so, get the count of all misclassifications made by this branch's descendent leaf nodes on the test observations\n term1 = __deep_count_errors(node.tb, tbSet, res)\n else: #If the true branch is a leaf node, return the count of all test set classification errors made by the leaf.\n term1 = __count_errors(node.tb, tbSet,res)\n if node.fb.results is None: #Check if the false branch is a branch node\n #If so, get the count of all misclassifications made by this branch's descendent leaf nodes on the test observations\n term2 = __deep_count_errors(node.fb, fbSet, res)\n else: #If the false branch is a leaf node, return the count of all test set classification errors made by the leaf.\n term2 = __count_errors(node.fb, fbSet, res) \n return term1 + term2 #Sum the classification errors made by this nodes descendant leaves.", "def goal_test(self, state):\n self.numbernodes += 1\n\n i = 0\n for box in state.boxes :\n for coord in self.board.positionGoal :\n if coord[0] == box.y and coord[1] == box.x : \n i+=1\n if i == 0 : return False\n i = 0\n return True", "def evaluate(self, state):\n\n fitness = 0\n\n for i in range(1, len(state)):\n if state[i] != state[i - 1]:\n fitness += 1\n\n return fitness", "def test_contains_returns_true_on_tree_with_value_left(bst_all_to_left):\n assert bst_all_to_left.contains(3) is True\n assert bst_all_to_left.contains(1) is True\n assert bst_all_to_left.contains(2) is True", "def fn(node):\n if not node: return 0 \n ans = node.val + fn(node.left) + fn(node.right)\n vals.append(ans)\n return ans", "def backprop(node, result):\n while node:\n node.addOutcome(result)\n node = node.getParent()", "def evaluate(self, stochastic=False):\n ob = self._convert_state(self._env.reset())\n done = False\n actions = []\n sum_rew = 0\n while not done:\n ac, _ = self._act(ob, stochastic=stochastic)\n actions.append(ac)\n ob, rew, done, _ = self._env.step(ac)\n ob = self._convert_state(ob)\n sum_rew += rew\n self.ob = self._convert_state(self._env.reset())\n self.new = True\n return sum_rew, actions", "def is_leaf(self):\r\n return self.num_children() == 0", "def evaluate(\n self, nodes, derivatives=np.array([0, 0, 0]), modes=None, unique=False\n ):", "def _check_for_children(self):\n if len(self.node.get_children()) > 0:\n raise ValueError('This Node is not a leaf node. Children of this node '\n 'are {}'.format(self.client.get_children()))", "def evaluate(self, state) -> float:\n evaluater = Evaluater(state)\n result = evaluater.transform(self.expression)\n\n try:\n value = sympy.sympify(\"\".join(tree_to_string(result)), locals=evaluater.locals)\n if value == sympy.nan:\n return None\n return value\n except TypeError:\n return None" ]
[ "0.71742636", "0.6537723", "0.6279385", "0.6106426", "0.6106426", "0.60549605", "0.60402316", "0.60212874", "0.5946995", "0.58815885", "0.58076704", "0.5800487", "0.57829833", "0.5762511", "0.56892544", "0.56854844", "0.56838524", "0.5680494", "0.56597567", "0.56062233", "0.5581533", "0.55706203", "0.55386907", "0.55359536", "0.5529258", "0.5495778", "0.54765767", "0.54727125", "0.54070693", "0.5397771", "0.53957146", "0.53873974", "0.53655344", "0.5365239", "0.5352941", "0.5340576", "0.5320727", "0.5315682", "0.53135157", "0.5307729", "0.5301102", "0.53001565", "0.5299251", "0.52894336", "0.52550274", "0.5253843", "0.52531105", "0.52531105", "0.5220624", "0.52132344", "0.5206658", "0.5204129", "0.5203145", "0.52029896", "0.5185802", "0.5180725", "0.5179254", "0.5177484", "0.5175891", "0.5169358", "0.51628107", "0.5151051", "0.51490676", "0.51426035", "0.51314783", "0.513105", "0.5129985", "0.51276326", "0.51149714", "0.51134187", "0.51087356", "0.51067334", "0.51058006", "0.5105749", "0.5100384", "0.5095947", "0.5091047", "0.5088406", "0.5087842", "0.5086998", "0.50802004", "0.50617695", "0.50594413", "0.5050744", "0.5049044", "0.5047383", "0.5047383", "0.5034324", "0.5033244", "0.50284237", "0.5026149", "0.50210404", "0.501797", "0.50158966", "0.5015393", "0.5009259", "0.50087905", "0.50074875", "0.50048417", "0.5004558" ]
0.6946291
1
imports 'catalog' and creates a pandas.DataFrame containing the columns specified in 'params'. 'catalog' is expected to be in .csv format.
def import_data(catalog='xmatch_TGAS_Simbad.csv', params=None, nrows=None, delimiter=','):
    print "Loading %s and creating DataFrame.." % catalog
    df_imported = pd.read_csv(catalog, delimiter=delimiter, header=0,
                              usecols=params, nrows=nrows)
    print "..Done\n----------"
    return df_imported
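For reference, a minimal Python 3 port of this loader (a sketch: the record's code is Python 2, its import lines are not shown, and the column names in the usage line are hypothetical):

import pandas as pd

def import_data_py3(catalog='xmatch_TGAS_Simbad.csv', params=None,
                    nrows=None, delimiter=','):
    # Load the .csv catalog into a DataFrame; `params` restricts the
    # result to the named columns (None keeps every column), and
    # `nrows` caps how many data rows are read.
    print("Loading %s and creating DataFrame.." % catalog)
    df_imported = pd.read_csv(catalog, delimiter=delimiter, header=0,
                              usecols=params, nrows=nrows)
    print("..Done\n----------")
    return df_imported

# Usage sketch -- 'ra' and 'dec' are hypothetical column names:
# df = import_data_py3(params=['ra', 'dec'], nrows=1000)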
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_catalog(self):\n self.catalog = pd.read_csv(self.catalog_path, \n index_col=0, parse_dates=True)\n self.unique_years = self.catalog.index.year.unique()\n return", "def loadData(catalog):\n return controller.loadData(catalog)", "def loadData(catalog):\n return controller.loadData(catalog)", "def loadData(catalog):\n controller.loadData(catalog)", "def loadData(catalog):\n controller.loadData(catalog)", "def loadData(catalog):\n controller.loadData(catalog)", "def loadData(catalog):\n controller.loadData(catalog)", "def loadData(catalog):\n controller.loadData(catalog)", "def loadData(catalog):\r\n controller.loadData(catalog)", "def load_data(catalog):\n controller.load_data(catalog)", "def pd_load_acquisition_csv(acquisition_path, **kwargs):\n\n columns = [\n 'loan_id', 'orig_channel', 'seller_name', 'orig_interest_rate', 'orig_upb', 'orig_loan_term',\n 'orig_date', 'first_pay_date', 'orig_ltv', 'orig_cltv', 'num_borrowers', 'dti', 'borrower_credit_score',\n 'first_home_buyer', 'loan_purpose', 'property_type', 'num_units', 'occupancy_status', 'property_state',\n 'zip', 'mortgage_insurance_percent', 'product_type', 'coborrow_credit_score', 'mortgage_insurance_type',\n 'relocation_mortgage_indicator', 'year_quarter'\n ]\n dtypes = {\n 'loan_id': np.int64,\n 'orig_channel': CategoricalDtype(['B', 'C', 'R']),\n 'seller_name': str,\n 'orig_interest_rate': np.float64,\n 'orig_upb': np.int64,\n 'orig_loan_term': np.int64,\n 'orig_date': str,\n 'first_pay_date': str,\n 'orig_ltv': np.float64,\n 'orig_cltv': np.float64,\n 'num_borrowers': np.float64,\n 'dti': np.float64,\n 'borrower_credit_score': np.float64,\n 'first_home_buyer': CategoricalDtype(['N', 'U', 'Y']),\n 'loan_purpose': CategoricalDtype(['C', 'P', 'R', 'U']),\n 'property_type': CategoricalDtype(['CO', 'CP', 'MH', 'PU', 'SF']),\n 'num_units': np.int64,\n 'occupancy_status': CategoricalDtype(['I', 'P', 'S']),\n 'property_state': CategoricalDtype(\n ['AK', 'AL', 'AR', 'AZ', 'CA', 'CO', 'CT', 'DC', 'DE', 'FL', 'GA', 'HI',\n 'IA', 'ID', 'IL', 'IN', 'KS', 'KY', 'LA', 'MA', 'MD', 'ME', 'MI', 'MN',\n 'MO', 'MS', 'MT', 'NC', 'ND', 'NE', 'NH', 'NJ', 'NM', 'NV', 'NY', 'OH',\n 'OK', 'OR', 'PA', 'PR', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VA', 'VI',\n 'VT', 'WA', 'WI', 'WV', 'WY']),\n 'zip': np.int64,\n 'mortgage_insurance_percent': np.float64,\n 'product_type': CategoricalDtype(['FRM']),\n 'coborrow_credit_score': np.float64,\n 'mortgage_insurance_type': np.float64,\n 'relocation_mortgage_indicator': CategoricalDtype(['N', 'Y']),\n 'year_quarter': np.int64\n }\n\n a = pd.read_csv(acquisition_path, names=columns, delimiter='|', dtype=dtypes, parse_dates=[6,7], error_bad_lines=True, warn_bad_lines=True, na_filter=True)\n return a", "def pd_load_acquisition_csv(acquisition_path, **kwargs):\n\n cols = [\n 'loan_id', 'orig_channel', 'seller_name', 'orig_interest_rate', 'orig_upb', 'orig_loan_term',\n 'orig_date', 'first_pay_date', 'orig_ltv', 'orig_cltv', 'num_borrowers', 'dti', 'borrower_credit_score',\n 'first_home_buyer', 'loan_purpose', 'property_type', 'num_units', 'occupancy_status', 'property_state',\n 'zip', 'mortgage_insurance_percent', 'product_type', 'coborrow_credit_score', 'mortgage_insurance_type',\n 'relocation_mortgage_indicator'\n ]\n\n dtypes = {\n \"loan_id\": np.int64,\n \"monthly_reporting_period\": str,\n \"servicer\": str,\n \"interest_rate\": np.float64,\n \"current_actual_upb\": np.float64,\n \"loan_age\": np.float64,\n \"remaining_months_to_legal_maturity\": np.float64,\n \"adj_remaining_months_to_maturity\": 
np.float64,\n \"maturity_date\": str,\n \"msa\": np.float64,\n \"current_loan_delinquency_status\": np.int32,\n \"mod_flag\": CategoricalDtype(['N', 'Y']),\n \"zero_balance_code\": CategoricalDtype(['01', '02', '06', '09', '03', '15', '16']),\n \"zero_balance_effective_date\": str,\n \"last_paid_installment_date\": str,\n \"foreclosed_after\": str,\n \"disposition_date\": str,\n \"foreclosure_costs\": np.float64,\n \"prop_preservation_and_repair_costs\": np.float64,\n \"asset_recovery_costs\": np.float64,\n \"misc_holding_expenses\": np.float64,\n \"holding_taxes\": np.float64,\n \"net_sale_proceeds\": np.float64,\n \"credit_enhancement_proceeds\": np.float64,\n \"repurchase_make_whole_proceeds\": np.float64,\n \"other_foreclosure_proceeds\": np.float64,\n \"non_interest_bearing_upb\": np.float64,\n \"principal_forgiveness_upb\": np.float64,\n \"repurchase_make_whole_proceeds_flag\": CategoricalDtype(['N', 'Y']),\n \"foreclosure_principal_write_off_amount\": np.float64,\n \"servicing_activity_indicator\": CategoricalDtype(['N', 'Y']),\n }\n print(acquisition_path)\n\n #return pd.read_csv(acquisition_path, names=cols, delimiter='|', dtype=dtypes, parse_dates=[6,7])\n return pd.read_csv('acq.csv', names=cols, delimiter='|', dtype=dtypes, parse_dates=[6,7])", "def load_catalog(self, sql_context):\n self.df_catalog = (sql_context.read\n .format('jdbc')\n .options(url=self.catalog_connection,\n dbtable=\"(\" + self.catalog_query + \") as foo\",\n driver=\"org.postgresql.Driver\")\n .load())", "def create_dataframe():\r\n\r\n df = pd.read_csv('data/data.csv', header=0)\r\n return df", "def loadData(catalog):\n loadArtworks(catalog)\n loadArtists(catalog)\n loadAdquires(catalog)\n loadNacionalities(catalog)\n load2DArtworks(catalog)\n loadArtistMediumsTags(catalog)\n loadDptments(catalog)\n catalog['artists'] = sortArtists(catalog, 3)\n fillArtistMediums(catalog)\n fillMostUsedMediums(catalog)\n catalog['artists_tags'] = sortArtistTags(catalog, 3)\n sort_dptments(catalog)", "def load() -> DataFrame:\n return load_file(__file__, \"default.csv.gz\")", "def get_flat_file_data(kind: str, server: str='PROD', ID: str='42') -> DataFrame:\r\n k = {\r\n 'c': 'customer_data_{0}_{1}_.csv',\r\n 'b': 'vendor_data_{0}_{1}_.csv'\r\n }\r\n f = k[kind].format(server, ID)\r\n df = pd.read_csv(f'{BASE_DIR}/{f}', encoding='UTF-8')\r\n df = prepare_input_df(df)\r\n return df", "def _csv_engine(filename, node):\n sep = node.get(\"sep\", \",\")\n header = node.get(\"header\", 0)\n logger.debug(\n \"Parsing CSV '{}'. sep={}, header={}.\".format(filename, sep, header)\n )\n index = node.get(\"index\")\n encoding = node.get(\"encoding\")\n if not index:\n raise InvalidConfig(\"An 'index' column is required. 
It should \"\n \"be the sample id column.\")\n\n df = pd.read_csv(filename, sep=sep, header=header, encoding=encoding)\n df.set_index(index, verify_integrity=True, inplace=True, drop=True)\n df.index = df.index.astype(str)\n\n return df", "def create_catalog_dataframe(save_dataframe):\n if not save_dataframe:\n return pd.read_pickle(f'{path_dictionary[\"catalog_dataframe_grouped_path\"]}')\n\n catalog_items = DbHelper.get_all_catalog_items()\n\n # Create dataframe to put all information together\n columns = ['user_id', 'item_id', 'session_id', 'window_size_x', 'window_size_y', 'page_size_x', 'page_size_y', 'catalog_item_list', 'user_log_list']\n catalog_items_df = pd.DataFrame(catalog_items, columns=columns)\n\n # Clean 'Catalog Items' that user see during a session\n catalog_items_df['catalog_item_list'] = catalog_items_df.apply(clean_objects_listed, axis=1)\n\n # Clean Log Files\n catalog_items_df['user_log_list'] = catalog_items_df.apply(clean_logs, axis=1)\n\n # Get Catalog Items that user hover or has a click action, her/his mouse\n catalog_items_df = get_interacted_catalog_items(catalog_items_df)\n\n # Label the catalog items as 0\n catalog_items_df['catalog_item_list'] = catalog_items_df.apply(label_page_type, axis=1)\n\n catalog_items_df_grouped = catalog_items_df.groupby(['user_id', 'session_id'], as_index=False).agg(lambda x: list(x))\n catalog_items_df_grouped.drop(['item_id', 'window_size_x', 'window_size_y', 'page_size_x', 'page_size_y'], axis=1, inplace=True)\n\n if save_dataframe:\n catalog_items_df.to_pickle(f'{path_dictionary[\"path_raw_catalog_dataframe\"]}')\n catalog_items_df_grouped.to_pickle(f'{path_dictionary[\"path_catalog_dataframe\"]}')\n catalog_items_df_grouped.to_csv(f'{path_dictionary[\"path_catalog_csv\"]}', index=False, sep='|')\n return catalog_items_df_grouped", "def _read(**kwargs) -> DataFrame:\n Engine.subscribe(_update_engine)\n\n try:\n pd_obj = FactoryDispatcher.read_csv_glob(**kwargs)\n except AttributeError:\n raise AttributeError(\"read_csv_glob() is only implemented for pandas on Ray.\")\n\n # This happens when `read_csv` returns a TextFileReader object for iterating through\n if isinstance(pd_obj, pandas.io.parsers.TextFileReader):\n reader = pd_obj.read\n pd_obj.read = lambda *args, **kwargs: DataFrame(\n query_compiler=reader(*args, **kwargs)\n )\n return pd_obj\n\n return DataFrame(query_compiler=pd_obj)", "def prepare_data(filename='data/DOT_timeSeries.csv'):\n\n # read data file into pandas dataframe\n df = pd.read_csv(filename)\n\n # extract unwanted 'countries' from dataframe\n countries = ['Europe', 'Emerging and Developing Europe', 'Emerging and Developing Asia',\n 'Middle East, North Africa, and Pakistan', 'Export earnings: nonfuel',\n 'Sub-Saharan Africa', 'Export earnings: fuel', 'Western Hemisphere',\n 'World', 'Special Categories', 'Advanced Economies', 'CIS',\n 'Emerging and Developing Economies']\n for country in countries:\n df = extract_relevant_rows(df, column_name='Country Name', column_value=country, not_equal=True)\n df = extract_relevant_rows(df, column_name='Counterpart Country Name', column_value=country, not_equal=True)\n\n # extract exports only from data\n exports = extract_relevant_rows(df, column_name='Indicator Code', column_value='TXG_FOB_USD')\n # extract value attributes only from exports\n export_values = extract_relevant_rows(exports, column_name='Attribute', column_value='Value')\n\n return export_values", "def _load(self, config: Dict):\n return pd.read_csv(config['path'])", "def loadData(catalog):\n 
loadVideos(catalog)\n loadCategories(catalog)", "def loader():\n bucket = data_load_variables[\"bucket\"]\n\n if data_load_variables[\"use_lite_dataset\"]:\n dataset_name = data_load_variables[\"lite_dataset_name\"]\n else:\n dataset_name = data_load_variables[\"dataset_name\"]\n\n s3 = boto3.client('s3')\n\n obj = s3.get_object(Bucket=bucket, Key=dataset_name)\n # get object and file (key) from bucket\n\n df = pd.read_csv(obj['Body'])\n return df", "def createDataFrame(path):\n df = pd.read_csv(path)\n df = df[['planet_name', 'planet_mass', 'orbital_radius', 'host_name', \n 'spectral_type', 'stellar_age', 'stellar_radius', \n 'stellar_mass', 'stellar_temperature', 'stellar_luminosity', \n 'optical_magnitude', 'near_ir_magnitude', \n 'stellar_surface_gravity', 'stellar_metallicity']]\n \n df = df.dropna(subset=['spectral_type'])\n df.spectral_type = df.spectral_type.str[0:1]\n df.spectral_type = df.spectral_type.str.strip()\n classification = np.array(['O','B','A','F','G','K','M'])\n df = df[df.spectral_type.isin(classification)]\n df.insert(4, \"amount_of_planets\", 0)\n df.amount_of_planets = df.groupby('host_name')['host_name'].transform('count')\n \n df.planet_mass = np.log10(df.planet_mass)\n df.orbital_radius = np.log10(df.orbital_radius)\n \n df = df.sort_values(by=['host_name'])\n df = df.reset_index(drop=True) \n \n return df", "def import_csv_data(cr, registry):\n files = ['data/sc.info.csv']\n for file in files:\n tools.convert_file(cr, 'prospects_app', file, None,\n mode='init', noupdate=True, kind='init')", "def loadData(catalog):\n\n loadArtwork(catalog)\n loadArtists(catalog)", "def get_data(params):\n username = params['username']\n provider = params[\"provider\"]\n quality = params[\"quality\"]\n speed = params[\"speed\"]\n vpn = params[\"vpn\"]\n platform = params[\"platform\"]\n clean = params[\"clean\"]\n date = params[\"date\"]\n interface = params[\"interface\"]\n csvmode = params[\"csvmode\"]\n path = params['path']\n output_path = './data/collected/'\n\n output_file = '{}_{}_{}_{}_{}_{}_{}_{}.csv'.format(username, provider, quality, speed, vpn, platform, clean, date)\n command = 'python3.8 {} -i {} -s {} {}'.format(path, interface, csvmode, output_file)\n os.system(command)\n \n return", "def get_df(\n self,\n response: Response,\n parse_dates: Optional[Union[List[int], bool]] = False,\n sort_values: Optional[List[str]] = None,\n reindex_columns: Optional[List[str]] = None,\n ) -> pd.DataFrame:\n\n with io.BytesIO() as buffer:\n try:\n buffer.write(response.content)\n buffer.seek(0)\n z: zipfile.ZipFile = zipfile.ZipFile(buffer)\n\n except zipfile.BadZipFile as e:\n print(\"Bad zip file\", e)\n\n else: # TODO need to annotate csv\n csv = z.open(z.namelist()[0]) # ignores all but first file in zip\n df: pd.DataFrame = pd.read_csv(csv, parse_dates=parse_dates)\n\n df = df.rename(columns={\"PRC\": \"MW\"})\n\n if sort_values:\n df = df.sort_values(sort_values).reset_index(drop=True)\n\n if reindex_columns:\n df = df.reindex(columns=reindex_columns)\n\n return df", "def loadData(catalog):\n loadArtists(catalog)\n loadArtworks(catalog)", "def loadData(catalog):\n loadArtists(catalog)\n loadArtworks(catalog)", "def run(self) -> DataFrame:\n with self.create_census_api_session():\n logger.info('Retrieving variables...')\n variables: Variables = self.get_variables()\n logger.info('Retrieving ACS tables...')\n tables = self.get_tables()\n\n # Add geometry\n gazetteer_files: List[GazetteerFile] = []\n shapefiles: List[Shapefile] = []\n if self.geometry == 'points':\n 
logger.info('Retrieving Gazetteer files...')\n gazetteer_files.extend(self.get_gazetteer_files())\n elif self.geometry == 'polygons':\n logger.info('Retrieving shapefiles...')\n shapefiles.extend(self.get_shapefiles())\n dataframe = self.assemble_dataframe(variables, tables, gazetteer_files, shapefiles)\n return dataframe", "def loadData(catalog):\n loadArtworks(catalog)\n loadArtists(catalog)", "def _setup_dataframe(self, serie, metadata=None):\n header = self.get_data_header(serie, dataset='cnv')\n df = self.get_data_in_frame(serie, header, dataset='cnv')\n df = self.df_handler.map_column_names_of_dataframe(df)\n\n return df", "def _read_csv(self) -> pd.DataFrame:\n\n return pd.concat(\n [\n pd.read_csv(f, usecols=[1, 2, 3, 4, 5])\n for f in self.temp_path.iterdir()\n if f.name.endswith(\".csv\")\n ]\n )", "def readindata(chosen_columns, chosen_url):\n return pd.read_csv(chosen_url, usecols=chosen_columns)", "def load_data(filepath):\n\tlogging.info(f\"Load data from {filepath}\")\n\tdf = pd.read_csv(filepath)\n\tdf = set_dtypes(df)\n\tdf = df.sort_values(by='query_date')\n\n\treturn df", "def create_df(link=config.api_link, key=config.api_key, master_file = 'pvd_crime_master.csv'):\n #only want reports we don't already have, so what is the most recent date in the master\n master = pd.read_csv(master_file, nrows=1)\n most_recent = pd.to_datetime(master['reported_date'])[0]\n most_recent_format = most_recent.strftime('%Y-%m-%dT%H:%M:%S.000')\n\n headers = {'Authentication': key} #api_key\n \n query = \"SELECT * WHERE reported_date > '\"+most_recent_format+\"' ORDER BY reported_date LIMIT 13000\"\n\n params = {'$query': query}\n\n response = requests.get(link, headers=headers, params=params) #json data\n response_json = response.json() #json data as list of dictionaries\n \n #create and return pandas DataFrame of json response\n\n return pd.DataFrame(response_json)", "def get_data(filename):\r\n return pd.read_csv(filename)", "def data_frame(records: list) -> pandas.DataFrame:\n return pandas.DataFrame(records, columns=lciafmt_cols)", "def open_csv(filename=\"NOTEEVENTS.csv\", index=['SUBJECT_ID', 'HADM_ID']):\n df = pd.read_csv(DATA_DIR / filename,\n index_col=index,\n # nrows=1000,\n infer_datetime_format=True)\n logger.info(f\"opening {filename}\")\n logger.info(f\"Dataframe columns: {df.columns}\")\n # logger.info(f\"Clinical note types: {df['CATEGORY'].unique()}\")\n return df", "def read_catalog(catalog):\n with open(catalog, \"r\") as f:\n header = f.readline()\n if header.startswith('#EventID | Time | Latitude | Longitude | Depth/km'):\n catalog = _read_iris(f)\n elif header.startswith('time, latitude, longitude, depth, depthUnits, magnitude'):\n catalog = _read_sod(f)\n else:\n sys.exit(\"Unknown catalog format\")\n return catalog", "def get_df(config_summary_url):\n return pd.read_csv(urlretrieve(config_summary_url)[0])", "def load_data(path):\n\n columns = ['Item Year', 'Original Value', 'Standard Value', 'Original Currency',\n 'Standard Currency', 'Orignal Measure', 'Standard Measure', 'Location',\n 'Commodity']\n col_type = [int, float, float, object, object, object, object, object]\n\n col_type_dict = dict(zip(columns, col_type))\n\n au_df = pd.read_csv(path, usecols=columns)\n au_df = au_df.astype(col_type_dict)\n au_df.name = 'AU_data'\n \n return au_df, columns", "def __loaddata(filename, datatype='flightcsv', minprob=0.001, maxprob=0.20):\n if datatype is 'flightcsv':\n return extract_flight_csv(filename, minprob=minprob, maxprob=maxprob)\n else:\n raise 
Exception('unknown datatype %s' % datatype)", "def prepare_data():\n df = pd.read_csv('Wholesale customers data.csv')\n df_numeric = df[['Fresh', 'Milk', 'Grocery', 'Frozen', 'Detergents_Paper', 'Delicassen']]\n return df, df_numeric", "def create_df_from_remote_csv(url):\n if url is None:\n return None\n response = requests.get(url)\n if response.status_code == 200:\n if response.headers['content-type'] == \"text/csv\":\n response.encoding = 'utf-8'\n data = pd.read_csv(io.StringIO(response.text))\n return data\n else:\n print('Error. '\n 'The file is encoded using unsupported content-type {}'\n .format(response.headers['content-type']))\n else:\n print('Error. '\n 'The file could not be downloaded. Returned HTTP status code: {}'\n .format(response.status_code))\n\n return None", "def _get_data(*, from_web: bool) -> pd.DataFrame:\n\n df = read_in_data.SaveFormats.CSV.read(from_web=from_web)\n return df", "def prepare_CSV(self):\n self.drop_columns()\n self.rename_columns()\n self.spilt_columns()\n self.add_vehicle_id_column()\n self.add_source_column()\n self.add_timestamp_columns()\n self.get_colour_columns()\n self.clean_column_formats()\n\n # print(self.data.info())\n # print(self.data.sample(10))\n\n return self.data", "def loadData(catalog):\n loadVideos(catalog)", "def read_partslist_csv(csv: str)->pd.DataFrame:\n try:\n p_df = pd.read_csv(csv, sep='\\t', header=0, engine='python', na_values='', skipfooter=3,\n dtype={'BLItemNo': str, 'BLColorId': int, 'LDrawColorId': int, 'Qty': int})\n p_df = p_df.fillna({'BLColorId': '', 'Qty': 0})\n p_df = p_df.rename(mapper={'BLItemNo': 'ItemId', 'BLColorId': 'Color'}, axis=1)\n p_df = p_df.drop(columns=['ElementId', 'LdrawId', 'LDrawColorId'])\n return p_df\n except FileNotFoundError as e:\n print(e)\n return pd.DataFrame()", "def load_handler(\n endpoint: str,\n path: str,\n columns: list,\n types: Union[dict, None],\n parse_dates: list,\n coerce_dates: bool = False,\n) -> pd.DataFrame:\n\n try:\n\n # Read CSV file from Meteostat endpoint\n df = pd.read_csv(\n endpoint + path,\n compression=\"gzip\",\n names=columns,\n dtype=types,\n parse_dates=parse_dates,\n )\n\n # Force datetime conversion\n if coerce_dates:\n df.iloc[:, parse_dates] = df.iloc[:, parse_dates].apply(\n pd.to_datetime, errors=\"coerce\"\n )\n\n except (FileNotFoundError, HTTPError):\n\n # Create empty DataFrane\n df = pd.DataFrame(columns=[*types])\n\n # Display warning\n warn(f\"Cannot load {path} from {endpoint}\")\n\n # Return DataFrame\n return df", "def load_customers(dir):\n customSchema = StructType([ \\\n StructField(\"customerId2\", IntegerType(), True), \\\n StructField(\"churnlabel\", IntegerType(), True), \\\n StructField(\"gender\", StringType(), True), \\\n StructField(\"shippingCountry\", StringType(), True), \\\n StructField(\"dateCreated\", StringType(), True), \\\n StructField(\"yearOfBirth\", IntegerType(), True), \\\n StructField(\"premier\", IntegerType(), True)])\n\n df = sqlContext.read.format('com.databricks.spark.csv') \\\n .options(header='false', delimiter='\\t', nullValue='\\\\N') \\\n .load(get_dir_customers(dir) + '/*', schema=customSchema)\n\n return df", "def load(*args):\r\n\r\n #args[0].to_csv(str(PATH.joinpath('./data/{}.csv'.format(args[1]))),index=False)\r\n\r\n try: # it will fail if duplicates\r\n args[0].to_sql('cmf', con=engine, if_exists='append', index=False)\r\n except:\r\n pass", "def initialize_from_file(filename):\r\n df = pd.read_csv(filename)\r\n return df", "def sa_pandas_init(self):\n\n lca = self.lca\n\n ind_activity = 0\n 
ind_product = 1\n ind_biosphere = 2\n\n cols = []\n rows = []\n inputs = []\n\n #All exchanges in inputs\n for input_ in self.inputs:\n\n if input_ == 'biosphere':\n continue\n\n for i in self.inputs_dict[input_]['tech_params']:\n act = lca.reverse_dict() [ind_activity] [i['col']]\n prod = lca.reverse_dict() [ind_product] [i['row']]\n cols += [ bw.get_activity(act) ['name'] ]\n rows += [ bw.get_activity(prod)['name'] ]\n inputs += [input_]\n for j in self.inputs_dict[input_]['bio_params']:\n act = lca.reverse_dict() [ind_activity] [j['col']]\n bio = lca.reverse_dict() [ind_biosphere] [j['row']]\n cols += [ bw.get_activity(act) ['name'] ]\n rows += [ bw.get_activity(prod)['name'] ]\n inputs += [input_]\n\n if self.parameters != None:\n # All parameters\n parameters_names_list = [name for name in self.parameters_array['name']]\n cols += parameters_names_list\n rows += parameters_names_list\n inputs += ['Parameters']*len(parameters_names_list)\n\n df = pd.DataFrame([inputs, rows, cols], index = ['Inputs', 'Products or flows', 'Activities'])\n df = df.transpose()\n\n self.sensitivity_indices_df = df", "def run(self) -> pd.DataFrame:\n with open(self.file_path, 'r') as in_file:\n headers = in_file.readline()\n headers = headers.replace(\"\\n\", \"\")\n\n if ',' in headers:\n headers = headers.split(',')\n else:\n headers = headers.split()\n\n if headers == self.NORMAL_HEADERS:\n return self.normal_csv()\n else:\n return self.read_data_columns_to_rows()", "def data_from_csv(self, filepath):\n self.dataframe = pd.load_csv(filepath, separator='')", "def fetch_csv(filename):\n variable = pd.read_csv(filename+'.csv', index_col=0)\n return variable", "def read_csv():", "def get_rows(header, station):\n df = pd.DataFrame(header, columns=list(header.keys()) + all_cols)\n for row in station[3:]:\n #cranky parsing issues\n var, vals = parse_row(row)\n # data type\n \n if var in categorical:\n #try:\n df[var] = np.array(vals, dtype='object')\n\n elif var in integer: # cast to int\n if (var == 'N/X'):\n df['X/N'] = np.array(vals, dtype='float64')\n else:\n #try:\n df[var] = np.array(vals, dtype='object')\n\n else:\n raise KeyError(f\"{var} parsing not supported\")\n \n return df", "def init_from_file(filename, verbose=False):\n\n SalesShare._data.clear()\n SalesShare._calibration_data.clear()\n\n if verbose:\n omega_log.logwrite('\\nInitializing database from %s...' 
% filename)\n\n input_template_name = __name__\n input_template_version = 0.13\n input_template_columns = {'market_class_id', 'start_year', 'annual_vmt',\n 'price_amortization_period', 'share_weight', 'discount_rate',\n 'o_m_costs', 'average_occupancy', 'logit_exponent_mu'\n }\n\n template_errors = validate_template_version_info(filename, input_template_name, input_template_version,\n verbose=verbose)\n\n if not template_errors:\n # read in the data portion of the input file\n df = pd.read_csv(filename, skiprows=1)\n\n template_errors = validate_template_column_names(filename, input_template_columns, df.columns,\n verbose=verbose)\n\n if not template_errors:\n validation_dict = {'market_class_id': omega_globals.options.MarketClass.market_classes}\n\n template_errors += validate_dataframe_columns(df, validation_dict, filename)\n\n if not template_errors:\n SalesShare._data = df.set_index(['market_class_id', 'start_year']).sort_index().to_dict(orient='index')\n\n for mc in df['market_class_id'].unique():\n SalesShare._data[mc] = {'start_year': np.array(df['start_year'].loc[df['market_class_id'] == mc])}\n\n return template_errors", "def load_extract(cryptocurrency):\n df = pd.read_csv(f'input_12mo/{cryptocurrency}.csv')\n df = df['Close'].copy()\n df = df[-183:].copy()\n return df", "def create_df(datadir: str, ext: str='txt') -> pd.DataFrame:\n\n datalist = []\n for name in os.listdir(datadir):\n filename = '/'.join([datadir, name])\n if os.path.isfile(filename) and ext in name[-len(ext):]:\n row_data = []\n content = read_file.read_file(filename)\n row_data.append(read_file.extract_name(content))\n row_data.append(read_file.extract_year(content))\n row_data.append(read_file.extract_form_factor(content))\n row_data.append(read_file.extract_max_power(content))\n row_data.append(read_file.extract_min_power(content))\n row_data.append(read_file.extract_cpu_speed(content))\n row_data.append(read_file.extract_core_num(content))\n for ind in range(10, 100, 10):\n row_data.append(read_file.extract_int_power(content, ind))\n datalist.append(row_data)\n\n return pd.DataFrame(data=datalist, columns=[\n 'Name', 'Year', 'FormFac', 'MaxPower', 'IdlePower', 'CPU speed',\n 'NumCores'\n ]+[''.join([str(ind), '%Power']) for ind in range(10, 100, 10)])", "def get_data_from_csv(filepath, filename, datatypes, date_column_list):\n\n concatenated_file = os.path.join(filepath, filename)\n\n dataframe = get_data_from_csv_full_path(concatenated_file, datatypes, date_column_list)\n\n return dataframe", "def importData(filename):\r\n data = pd.read_csv(filename)\r\n return data", "def test_get_df_from_csv():\n df = get_df_from_csv('politics_30_months_comments_cleaned_standardized_vader_flair.csv')\n print(df.head())", "def _load_csv(root_path, table_meta):\n relative_path = os.path.join(root_path, table_meta['path'])\n dtypes = _read_csv_dtypes(table_meta)\n\n data = pd.read_csv(relative_path, dtype=dtypes)\n data = _parse_dtypes(data, table_meta)\n\n return data", "def load_csv_model(filename) -> tuple:\n dat_sci = pd.read_csv(resources_folder(filename), index_col=0)\n commenter('data from ' + filename, lambda: print(dat_sci))\n\n ind = dat_sci.index\n # commenter('index', lambda: print(ind))\n col = dat_sci.columns\n # commenter('columns', lambda: print(col))\n # self.data = np.asmatrix(dat_sci.values)\n # commenter('data', lambda: print(self.data))\n # print(type(dat_sci))\n\n return dat_sci, ind, col", "def read_csv(filename, schema=[], usecols=None, **kwargs):\n \n data_folder = 
filename[:filename.rfind(\"/\")+1]\n with open(data_folder + 'policy.txt', 'r') as f:\n policy = Policy(f.read().rstrip())\n print(f'Policy of input data {filename}:\\n' + str(policy))\n with open(data_folder + 'meta.txt', 'r') as f:\n complete_schema = f.readline().strip().replace('\"', '').split(',')\n rows = int(f.readline())\n # print('Data Schema: ' + str(schema))\n\n if not schema and usecols == None:\n return DataFrame(complete_schema, policy, shape=[len(schema), rows])\n elif schema:\n return DataFrame(schema, policy, shape=[len(schema), rows])\n elif usecols is not None:\n return DataFrame(usecols, file_policy, shape=[len(usecols), rows])", "def __init__(self, args):\n self.verbose = args.verbose\n self.force = args.force\n self.extra = args.extra\n self.master_csv = args.master\n self.new_files = args.new_files\n self.df_mas_lab_data = None # Master Lab data\n self.df_new_lab_data = None # Aggregated new Lab data\n self.columns = [\n \"CLIA\",\n \"FACILITY_TYPE\",\n \"CERTIFICATE_TYPE\",\n \"LAB_NAME\",\n \"STREET\",\n \"CITY\",\n \"STATE\",\n \"ZIP\",\n \"PHONE\",\n ]", "def load_data(fpath: str, station: Dict[str, Any]) -> pd.DataFrame:\n df = pd.read_csv(\n fpath,\n skiprows=station['header_line_num']-1,\n usecols=['date', 'rain'],\n )\n\n # format the date from a string to a proper datetime object\n df['date'] = pd.to_datetime(df['date'])\n\n # extract year, month, week, and day to separate columns\n df['year'] = df['date'].dt.year\n df['month'] = df['date'].dt.month\n df['day'] = df['date'].dt.dayofyear\n df['week'] = df['date'].dt.weekofyear\n df['year_month'] = df['date'].dt.to_period('M')\n\n return df", "def get_pars_df(plan_type, stop=False):\n df = pd.concat([get_plan_pars(patient, plan_type, stop)\n for patient in patients])\n return df", "def fetch(url: str, cache: str) -> pd.DataFrame:\n r = requests.get(url)\n r.raise_for_status()\n datestamp = date.today().strftime('%Y%m%d')\n name = url.split('/')[-1].replace('.csv','')\n os.makedirs(cache, exist_ok=True)\n filename = os.path.join(cache, f\"{datestamp}_{name}.csv\")\n with open(filename, \"w\") as f:\n f.write(r.text)\n return pd.read_csv(filename)", "def import_data():\n data = pd.read_csv('partA/bikes_October18.csv', ',')\n return data", "def read_product(filename=None):\n if not filename:\n filename = settings.PRODUCT_FILENAME\n return pd.read_csv(filename, sep='|')", "def getCatalog(self, version=None, level=None, cubeInfo=True):\n print('WaPOR API: Loading catalog WaPOR.v{v}_l{lv}...'.format(\n v=version, lv=level))\n self.isAPITokenSet()\n\n isFound = False\n\n # if isinstance(version, int) and isinstance(level, int):\n # print('| int')\n # if 0 < version < 3 and 0 < level < 4:\n # print('| range')\n if version == self.version and level == self.level:\n # print('| equal')\n if self.catalog is not None:\n # print('| not None')\n isFound = True\n\n if isFound:\n df = self.catalog\n\n print('WaPOR API: Loading catalog WaPOR.v{v}_l{lv} found.'.format(\n v=version, lv=level))\n else:\n df = self._query_catalog(version, level)\n\n print('WaPOR API: Loading catalog WaPOR.v{v}_l{lv} loaded.'.format(\n v=version, lv=level))\n\n if cubeInfo:\n cubes_measure = []\n cubes_dimension = []\n for cube_code in df['code'].values:\n cubes_measure.append(self._query_cubeMeasures(cube_code))\n cubes_dimension.append(self._query_cubeDimensions(cube_code))\n df['measure'] = cubes_measure\n df['dimension'] = cubes_dimension\n\n self.catalog = df\n return self.catalog", "def _parse_csv(csv_file: str) -> pd.DataFrame:\n return 
pd.read_csv(csv_file, header=0)", "def process_catalog(\n type1, cat, config, RA, DEC, z=\"Redshift\", RAf=\"RA\", DECf=\"DEC\", origin=\"Origin\"\n):\n # Skip unwanted catalogues\n if unwanted_catalogue(cat.meta[\"name\"], set_unwanted_list(type1, config)):\n return None\n\n # Make a list of potential column that contain redshift information\n col_selection = column_selection(type1, cat)\n final_z_col = select_best_redshift(cat, col_selection)\n\n # If no relevant redshift column is present, skip the catalog\n if final_z_col == None:\n return None\n\n # Skip weird column/tables with weird units/types for\n if cat[final_z_col].dtype not in [np.float32, np.float64]:\n return None\n\n # If all values are masked, skip the catalog\n if all(cat[final_z_col].mask):\n return None\n\n # Homogenize column names\n cat.rename_column(final_z_col, z)\n\n # Select only relevant columns: RA, DEC and redshift\n final_cat = cat[RA, DEC, z][~cat[z].mask]\n\n # Rename the coord columns with chosen names\n final_cat.rename_column(RA, RAf)\n final_cat.rename_column(DEC, DECf)\n\n # Add Vizier catalog name to the table for future reference\n final_cat.add_column(Column([cat.meta[\"name\"]] * len(final_cat)), name=origin)\n\n # Add to master list of tables\n return final_cat", "def import_data():\n\tif os.path.exists(\"log.csv\"):\n\t\t#print (\"--training data imported to data frame\\n\")\n\t\tdf = pd.read_csv(\"log.csv\", index_col=0)\n\telse:\n\t\tprint(\"training CSV not found\")\n\t\texit()\n\t\n\treturn df", "def generate_Struct(csv_file, pathToDir):\n\n df = extract_structure_from_csv(csv_file)\n\n df = df[ESSENTIAL_CSV_COLUMNS]\n\n for session_kwargs in df.to_dict('index').values():\n session = AnDOData(**session_kwargs)\n session.basedir = pathToDir\n session.generate_structure()", "def loadManySymbols(symbols, dates, column_name, base_dir):\n\tdf = pd.DataFrame(index=dates)\t# empty data frame that has indexs as dates\n\tif 'SET' not in symbols: # add SET for reference, if absent\n\t\tsymbols = np.append(['SET'],symbols)\n \n\tbase_dir = join(DIR_CURRENT,base_dir)\n\tfor symbol in symbols:\n\t\t# read CSV file path given symbol.\n\t\tcsv_file = os.path.join(base_dir, symbol + '.csv') \n\t\t#df_temp = pd.read_csv(csv_file, index_col='Date',\t\t\n\t\t\t#parse_dates=True, usecols=['Date', column_name], na_values=['nan'])\n\t\tdf_temp = pd.read_csv(csv_file, index_col='DATE',\n\t\t\tparse_dates=True, usecols=['DATE', column_name], \n\t\t\tna_values=['nan'])\n\t\t\n\t\tdf_temp = df_temp.rename(columns={column_name: symbol})\n\t\tdf = df.join(df_temp) # left join by default\n\t\t\n\t\tif symbol == 'SET': # drop dates SET did not trade (nan values)\n\t\t\tdf = df.dropna(subset=[\"SET\"])\n\t \n\treturn df", "def load_data(fn):\n return pandas.read_csv(fn, dtype={'Name': str, 'Reason': str, 'Amount': float, 'Day': int})", "def read_csv(fp, header_file=None, parse_vars=False, assertions=None, *args, **kwargs):\n\n kwargs = dict(kwargs)\n\n squeeze = kwargs.get(\"squeeze\", False)\n\n # set defaults\n engine = kwargs.pop(\"engine\", \"python\")\n kwargs[\"engine\"] = engine\n\n header = OrderedDict()\n\n if isinstance(header_file, string_types):\n with open(header_file, \"r\") as hf:\n header = ordered_load(hf.read())\n\n elif header_file is not None:\n header = ordered_load(hf.read())\n\n if isinstance(fp, string_types):\n with open(fp, \"r\") as fp:\n _header = _parse_headered_data(fp)\n data = pd.read_csv(fp, *args, **kwargs)\n\n else:\n _header = _parse_headered_data(fp)\n data = pd.read_csv(fp, *args, 
**kwargs)\n\n header.update(_header)\n\n kwargs.update({\"attrs\": header})\n args, kwargs, special = Container.strip_special_attributes(args, kwargs)\n\n if parse_vars:\n if \"variables\" in special:\n for key, var in special[\"variables\"].items():\n special[\"variables\"][key] = Variables.parse_string_var(var)\n\n if squeeze:\n if len(data.shape) == 1:\n s = Series(data, **special)\n _verify_assertions(\n assertions, attrs=s.attrs, variables=s.variables, coords=s.coords\n )\n return s\n\n df = DataFrame(data, **special)\n\n if squeeze and df.shape[1] == 1:\n s = Series(df[df.columns[0]], **special)\n _verify_assertions(\n assertions, attrs=s.attrs, variables=s.variables, coords=s.coords\n )\n return s\n else:\n _verify_assertions(\n assertions, attrs=df.attrs, variables=df.variables, coords=df.coords\n )\n return df", "def load(self) -> pd.DataFrame:\n if os.path.exists(self.file_name):\n df = pd.read_csv(self.file_name, index_col=0)\n df = self._clean(df)\n else:\n _LOG.debug(\"No file '%s'\", self.file_name)\n df = pd.DataFrame()\n return df", "def build_catalog_info(self, catalog_info):\n cat = SourceFactory.build_catalog(**catalog_info)\n catalog_info['catalog'] = cat\n # catalog_info['catalog_table'] =\n # Table.read(catalog_info['catalog_file'])\n catalog_info['catalog_table'] = cat.table\n catalog_info['roi_model'] =\\\n SourceFactory.make_fermipy_roi_model_from_catalogs([cat])\n catalog_info['srcmdl_name'] =\\\n self._name_factory.srcmdl_xml(sourcekey=catalog_info['catalog_name'])\n return CatalogInfo(**catalog_info)", "def __init__(self):\n\n # connect to quilt\n self.pkg = quilt3.Package.browse(\n \"aics/segmenter_model_zoo\", registry=\"s3://allencell\"\n )\n self.meta = self.pkg[\"metadata.csv\"]()", "def load_file(self):\n\n self.df = self.sqlContext.read.csv(self.source, sep=self.sep, header=True, inferSchema=True)", "def fetch_dataset(url, pandas_impl=pandas):\n\n print(f'fetching dataset at {url}')\n return pandas_impl.read_csv(url)", "def get_data_from_apis(\n api_keys: dict,\n api_params: dict,\n data_start_date: dt.date,\n data_end_date: dt.date = None,\n providers: list = None,\n save_dirpath: str = None,\n) -> pd.DataFrame:\n if providers is None:\n getters = _GETTERS\n else:\n getters = {k: v for k, v in _GETTERS.items() if k in providers}\n\n for provider in getters.keys():\n if provider not in api_keys.keys():\n raise InvalidAPIKey(f\"No API Key was provided for {provider}.\")\n if provider not in api_params.keys():\n msg = f\"No API requests parameters were provided for {provider}.\"\n raise InvalidAPIRequestsParams(msg)\n\n metadata_list = []\n obs_df_list = []\n # pylint: disable=invalid-name\n for provider, Getter in getters.items():\n getter = Getter(api_key=api_keys[provider])\n data, metadata = getter.get_data(\n series_params=api_params[provider][\"series_params\"],\n start_date=data_start_date,\n end_date=data_end_date,\n )\n metadata_list.append(metadata)\n obs_df_list.append(data)\n\n merged_metadata = reduce(lambda left, right: left + right, metadata_list)\n merged_data = merge_df_list_on(obs_df_list, on=\"date\")\n\n if save_dirpath is not None:\n date = dt.date.today().strftime(\"%Y%m%d\")\n dirpath = Path(ROOT_PATH) / save_dirpath / date\n create_dir_if_missing(dirpath)\n data_path = dirpath / \"raw_data.csv\"\n data.to_csv(data_path, sep=\";\", index=False, encoding=\"utf-8\")\n metadata_path = dirpath / \"metadata.yaml\"\n save_yaml(metadata, metadata_path)\n logger.success(f\"All data retrieved, cleaned and saved to {dirpath}.\")\n\n return 
merged_data, merged_metadata", "def create_df(files_list=my_files):\n\n all_records = list()\n\n for file in files_list:\n all_records += zr_parser(path.join(my_dir, file))\n\n return pd.DataFrame(all_records)", "def cursor_to_dataframe(cur):\n description = cur.description\n column_names = [item.name for item in description]\n data = cur.fetchall()\n df = pandas.DataFrame(data, columns=column_names)\n cur.close()\n return df", "def cur_quotes_create_dataframe(self):\n self.cur_quotes_df = pandas.read_csv(self.cur_quotes_csvfile,header =None)\n self.cur_quotes_df.rename(columns={org: change.upper() for org, change\\\n in zip(self.cur_quotes_df.columns,self.cur_quotes_parm_headers)},\\\n inplace=True)", "def get_dataset(filepath):\n return pandas.read_csv(filepath, header='infer')", "def extract_data_from_DB(query, dao_object, *query_params):\n\n local_query = None\n\n if(len(query_params) == 0):\n local_query = query\n else:\n local_query = query % query_params\n\n #print(local_query)\n\n # Extract data\n #output_df = 0\n output_df = pd.DataFrame(dao_object.get(local_query))\n column_names = dao_object.get_column_name()\n output_df.columns = column_names\n\n return output_df", "def csv_df(filename):\n\n # Get dataframe from CSV file\n csv_df = pd.read_csv(filename, low_memory=False)\n\n csv_df = _manage_cols(csv_df)\n \n # Add columns that are in an API dataframe but not a CSV dataframe\n csv_df['centroidtype'] = np.nan\n csv_df['date'] = np.nan\n csv_df['genus'] = np.nan\n csv_df['huc10name'] = np.nan\n csv_df['huc10'] = np.nan\n csv_df['huc12name'] = np.nan\n csv_df['huc12'] = np.nan\n csv_df['huc8name'] = np.nan\n csv_df['species'] = np.nan\n\n # Rename columns so both csv and api dataframes have identical headers\n renamed_columns = _get_col_rename(csv_df, 'csv')\n csv_df = csv_df.rename(columns=renamed_columns)\n \n # Reorder columns\n cols = list(csv_df.columns)\n cols = cols[:4] + cols[69:70] + cols[75:76] + cols[4:69] + cols[70:75] # species and genus\n cols = cols[:17] + cols[69:70] + cols[17:69] + cols[70:] # centroidtype\n cols = cols[:18] + cols[75:] + cols[18:75] # huc8name\n cols = cols[:20] + cols[72:] + cols[20:72] # huc10name, huc10, huc12name, huc12\n cols = cols[:24] + cols[75:] + cols[24:75] # date\n csv_df = csv_df[cols]\n\n # Change reference columns to single reference column\n csv_df = _convert_refs(csv_df)\n \n return csv_df", "def sql_return_df(query, params, date_cols):\n conn = sqlite3.connect(db_filepath)\n df = pd.read_sql(query, conn, params=params, parse_dates=date_cols)\n conn.close()\n return df", "def load():\n return load_pandas()", "def gp_dataframe_import(filename):\n path = os.path.join('..', 'data', filename)\n frame = pd.read_csv(path)\n return frame", "def process(self, inputs):\n df = cudf.read_csv(self.conf['path'])\n # extract the year, month, day\n ymd = df['DTE'].astype('str').str.extract(r'(\\d\\d\\d\\d)(\\d\\d)(\\d\\d)')\n # construct the standard datetime str\n df['DTE'] = ymd[0].str.cat(ymd[1],\n '-').str.cat(ymd[2],\n '-').astype('datetime64[ms]')\n df = df[['DTE', 'OPEN', 'CLOSE', 'HIGH', 'LOW', 'SM_ID', 'VOLUME']]\n df['VOLUME'] /= 1000\n # change the names\n df.columns = ['datetime', 'open', 'close',\n 'high', 'low', \"asset\", 'volume']\n return df" ]
[ "0.6171639", "0.5896814", "0.5896814", "0.58639306", "0.58639306", "0.58639306", "0.58639306", "0.58639306", "0.5844626", "0.57802", "0.5761707", "0.5757528", "0.5681903", "0.56408125", "0.5618386", "0.5584293", "0.5555248", "0.5497467", "0.5468997", "0.54676163", "0.54645765", "0.54604167", "0.542605", "0.54117143", "0.54058087", "0.53844804", "0.5375254", "0.53704023", "0.5369245", "0.53685987", "0.53685987", "0.5345788", "0.5344207", "0.53436315", "0.5342438", "0.53341514", "0.53243864", "0.52890587", "0.5287817", "0.5267034", "0.52607983", "0.5254842", "0.5254073", "0.52517486", "0.52468425", "0.52370864", "0.52368164", "0.5222275", "0.5219795", "0.52095246", "0.51974136", "0.51869303", "0.5183931", "0.5182166", "0.51804924", "0.5177519", "0.51737136", "0.5164946", "0.5160507", "0.51587355", "0.5156649", "0.515268", "0.51494104", "0.5136659", "0.5121213", "0.5103174", "0.5101507", "0.50977874", "0.50950825", "0.50910234", "0.5090355", "0.5084929", "0.5084434", "0.5079767", "0.5079726", "0.5076158", "0.50565946", "0.505405", "0.50486094", "0.50422305", "0.504208", "0.50412303", "0.5040692", "0.50318754", "0.50286347", "0.50277925", "0.5024865", "0.5020186", "0.50152797", "0.5012618", "0.5010458", "0.50056547", "0.5003375", "0.49946818", "0.49936688", "0.49747404", "0.49693698", "0.49634725", "0.4954379", "0.49495038" ]
0.75542295
0
Open a FITS table file and convert it to a pandas DataFrame.
def import_fits(fitsfile='tgasptyc.fits'):
    if isfile(fitsfile):
        print "Opening %s.." % fitsfile
        table = Table.read(fitsfile)
        pandas_df = table.to_pandas()
    else:
        print "%s not found. Exiting." % fitsfile
        sys.exit()
    print "Converting table to pandas_df.."
    print "..Done"
    return pandas_df
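A self-contained Python 3 sketch of the same FITS-to-DataFrame pattern (the `isfile` and `Table` imports are assumptions inferred from the calls above, since the record's import lines are not shown):

import sys
from os.path import isfile
from astropy.table import Table

def import_fits_py3(fitsfile='tgasptyc.fits'):
    # Read the FITS table into an astropy Table, then convert it to a
    # pandas DataFrame.
    if not isfile(fitsfile):
        print("%s not found. Exiting." % fitsfile)
        sys.exit()
    print("Opening %s.." % fitsfile)
    table = Table.read(fitsfile)
    # Table.to_pandas() raises ValueError on multidimensional columns,
    # which is why several snippets in the negatives drop such columns
    # before converting.
    pandas_df = table.to_pandas()
    print("..Done")
    return pandas_df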
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def df_from_fits(filename, i=1):\n return pd.DataFrame.from_records(fitsio.FITS(filename)[i].read().byteswap().newbyteorder())", "def load_fits_table(fname):\n\treturn fits.open(fname)[1].data", "def load_fits(path: str, ncols: int, nonames: bool) -> DataFrame:\n assert not nonames\n\n from astropy.table import Table\n df = Table.read(path).to_pandas()\n if ncols:\n df = df[df.columns[:ncols]]\n # Need to mask \"special\" -99 values\n df.replace(-99, np.nan, inplace=True)\n df.replace(-99.9, np.nan, inplace=True)\n return df", "def table_to_dataframe(file):\n columns = ['instrument', 'dataset', 'flowcell', 'well', \n 'well_tile', 'cell', 'blob', 'position_i', 'position_j',\n 'read', 'quality']\n\n columns_drop = ['instrument', 'flowcell', 'dataset', 'well_tile']\n\n df = pd.read_csv(file, sep='\\s+', header=None, quoting=3)\n df.columns = columns\n df['tile'] = df['well_tile'] % 1000\n df = df.drop(columns_drop, axis=1)\n return df", "def load(file):\n return pq.read_table(file).to_pandas()", "def collect_data(data_file):\n dat = Table.read(data_file, format='fits')\n df_bytes = dat.to_pandas() # Convert to pandas dataframe\n df = pd.DataFrame() # Init empty dataframe for converted types\n\n # Convert byte columns to strings\n for column in df_bytes:\n if df_bytes[column].dtype == np.dtype('object'):\n df[column + \"_str\"] = df_bytes[column].str.decode(\"utf-8\")\n df[column] = df[column + \"_str\"].copy(deep=True)\n df.drop(column + \"_str\", axis=1, inplace=True)\n else:\n df[column] = df_bytes[column]\n # Drop infinity values.\n df = df[~df.isin([np.inf, -np.inf]).any(1)]\n return df", "def load_file_to_dataframe(self, file_path: str) -> pd.DataFrame:\n return pd.read_csv(file_path, sep=\"\\t\")", "def _parse_fits(filepath):\n hdulist = fits.open(filepath)\n header = OrderedDict(hdulist[0].header)\n # For these NoRH files, the time series data is recorded in the primary\n # HDU\n data = hdulist[0].data\n\n # No explicit time array in FITS file, so construct the time array from\n # the FITS header\n obs_start_time=parse_time(header['DATE-OBS'] + 'T' + header['CRVAL1'])\n length = len(data)\n cadence = np.float(header['CDELT1'])\n sec_array = np.linspace(0, length-1, (length/cadence))\n\n norh_time = []\n for s in sec_array:\n norh_time.append(obs_start_time + datetime.timedelta(0,s))\n\n return header, pandas.DataFrame(data, index=norh_time)", "def read_table(file_name: Union[str, Path], **kwargs):\n\tfile_name = Path(file_name)\n\textension = file_name.suffix\n\tdefault_args = {\n\t\t'.csv': {'delimiter': ','},\n\t\t'.tsv': {'delimiter': '\\t'}\n\t}\n\n\t# arguments = self._cleanArguments(extension, arguments)\n\tfile_name = str(file_name.absolute())\n\tif extension in {'.xls', '.xlsx', '.xlsm'}: # .xlsm is not a typo.\n\n\t\tdf = pandas.read_excel(file_name, **kwargs)\n\telif extension in {'.csv', '.tsv', '.fsv', '.txt'}:\n\t\targuments = {**default_args.get(extension), **kwargs}\n\t\tif 'sheetname' in arguments: arguments.pop('sheetname')\n\t\tdf = pandas.read_table(file_name, **arguments)\n\telif extension == '.pkl':\n\t\tdf = pandas.read_pickle(file_name)\n\telse:\n\t\traise NameError(\"{} does not have a valid extension!\".format(file_name))\n\treturn df", "def read_pipe_table_to_pandas(filename):\n\n astropy_data = astropy.io.ascii.read(filename)\n data_stream = StringIO()\n astropy_data[2:].write(data_stream, format='ascii.basic', delimiter='|')\n data_stream.seek(0)\n return pandas.read_csv(data_stream,\n comment='#',\n sep='|',\n skipinitialspace=True)", "def 
_pdread2astrotable(csvgzdir):\n df = pd.read_csv(csvgzdir)\n tb = Table.from_pandas(df)\n return tb", "def parse(self):\n if self.filename.endswith('.gz'):\n compression = 'gzip'\n elif self.filename.endswith('.bz2'):\n compression = 'bz2'\n else:\n compression = None\n df = pd.read_table(self.filename, compression=compression)\n\n # drop empty column from extra tab\n df.dropna(axis=1, how='all', inplace=True)\n return df", "def read_file(fname: str) -> pd.DataFrame:\n raw_data = (\n pd.read_hdf(fname).to_frame().reset_index(level=[0, 1]).loc[ANALYSIS_DATE]\n )\n raw_data[\"date\"] = raw_data.index\n return raw_data", "def data_from_fits(fits_file):\n hdul = fits.open(fits_file)\n data = hdul[0].data\n return data", "def _read_tab(pth):\n if not os.path.exists(pth):\n raise SampleTableFileException(\n \"File does not exist: {}\".format(pth))\n read_csv_kwargs = {\"engine\": \"python\", \"dtype\": str,\n \"index_col\": False, \"keep_default_na\": False,\n \"na_values\": [\"\"]}\n return pd.read_csv(pth, sep=infer_delimiter(pth), **read_csv_kwargs)", "def collect_data(file = 'osc+otc-Assembled.fits'):\n dat = Table.read(file, format='fits')\n df_bytes = dat.to_pandas() # Convert to pandas dataframe\n df = pd.DataFrame() # Init empty dataframe for converted types\n\n # Convert byte columns to strings\n for column in df_bytes:\n if df_bytes[column].dtype == np.dtype('object'):\n df[column + \"_str\"] = df_bytes[column].str.decode(\"utf-8\")\n df[column] = df[column + \"_str\"].copy()\n df.drop(column + \"_str\", axis = 1, inplace = True)\n else:\n df[column] = df_bytes[column]\n\n # Prints sum of NULL values by column\n # df.isnull().sum().to_csv(output_dir + \"Missing_Values.csv\")\n return df", "def OSW2df(osw_file, table_name):\n conn = connOSW(osw_file)\n df = pd.read_sql_query(\"SELECT * FROM \" + table_name, conn)\n conn.close()\n return df", "def run(self) -> pd.DataFrame:\n with open(self.file_path, 'r') as in_file:\n headers = in_file.readline()\n headers = headers.replace(\"\\n\", \"\")\n\n if ',' in headers:\n headers = headers.split(',')\n else:\n headers = headers.split()\n\n if headers == self.NORMAL_HEADERS:\n return self.normal_csv()\n else:\n return self.read_data_columns_to_rows()", "def read_table(cls, filepath_or_buffer, *args, **vargs):\n if filepath_or_buffer.endswith('.csv') and 'sep' not in vargs:\n vargs['sep'] = ','\n df = pandas.read_table(filepath_or_buffer, *args, **vargs)\n labels = df.columns\n return Table([df[label].values for label in labels], labels)", "def load_data(file_name):\n return Orange.data.Table(file_name)", "def table_to_data_frame(table):\n nr = table.rowCount()\n nc = table.columnCount()\n\n if nr == 0:\n return None\n\n idx_labels = []\n for i in range(nr):\n item = table.verticalHeaderItem(i)\n if item is not None:\n idx_labels.append(item.text().replace(' ', ''))\n else:\n idx_labels.append(i)\n\n col_labels = []\n for i in range(nc):\n item = table.horizontalHeaderItem(i)\n if item is not None:\n col_labels.append(item.text().replace(' ', ''))\n else:\n col_labels.append(i)\n\n tdata = []\n for i in range(nr):\n ldata = []\n for j in range(nc):\n value = table.item(i, j).text()\n if str_is_float(value):\n value = float(value)\n ldata.append(value)\n tdata.append(ldata)\n df = _pd.DataFrame(_np.array(tdata), index=idx_labels, columns=col_labels)\n\n return df", "def load_raw_table(conf, table):\n confrd = load_config_raw_data(conf)\n path_table = Path(confrd[table][\"path\"])\n sep = confrd[table][\"sep\"]\n encoding = 
confrd[table][\"encoding\"]\n df = pd.read_csv(path_table, sep=sep, encoding=encoding)\n return df", "def import_experiments_table(path):\n return pd.read_csv(path, sep=\"\\t\", skiprows=1, header=0)", "def import_files_table(path):\n return pd.read_csv(path, sep=\"\\t\", skiprows=1, header=0)", "def load_tsv(path: str, ncols: int, nonames: bool) -> DataFrame:\n cols = range(ncols) if ncols else None\n return pandas.read_csv(path, usecols=cols, sep='\\t', skipinitialspace=True, header='infer' if not nonames else None)", "def read_bed_file(path, labelnum=0):\n\n bed_df = pd.read_table(path, sep=\"\\t\", header=None)\n colnames = generate_colnames(bed_df, labelnum)\n bed_df.columns = colnames\n print(bed_df.head())\n return bed_df", "def import_tables(file, pages):\n tables = camelot.read_pdf(\n file, pages=pages,\n flavor='stream',\n )\n return tables", "def read_df_from_binary(file_name_mask):\n data = read_matrix_from_binary(file_name_mask + '-value.bin')\n with open(file_name_mask + '-name.txt', 'r') as f:\n index = f.readline().strip().split('\\t')\n columns = f.readline().strip().split('\\t')\n return pandas.DataFrame(data=data, index=index, columns=columns)", "def read(filename, replace_columns=True):\n f = open(filename)\n lines = f.readlines()\n f.close()\n\n # Extract column names from the odt file.\n for i, line in enumerate(lines):\n if line.startswith('# Columns:'):\n columns = []\n odt_section = i # Should be removed after runs are split.\n for part in re.split('Oxs_|Anv_|Southampton_', line)[1:]:\n for char in [\"{\", \"}\", \" \", \"\\n\"]:\n part = part.replace(char, '')\n if replace_columns:\n if part in columns_dic.keys():\n columns.append(columns_dic[part])\n else:\n msg = \"Entry {} not in lookup table.\".format(part)\n raise ValueError(msg)\n else:\n columns.append(part)\n\n # Extract units from the odt file.\n for i, line in enumerate(lines):\n if line.startswith('# Units:'):\n units = line.split()[2:]\n\n # Extract the data from the odt file.\n data = []\n for i, line in enumerate(lines[odt_section:]):\n if not line.startswith(\"#\"):\n data.append([float(number) for number in line.split()])\n\n df = pd.DataFrame(data, columns=columns)\n # next line is required to allow adding list-like attribute to pandas DataFrame\n # see https://github.com/pandas-dev/pandas/blob/2f9d4fbc7f289a48ed8b29f573675cd2e21b2c89/pandas/core/generic.py#L3631\n df._metadata.append('units')\n df.units = dict(zip(columns, units))\n return df", "def read_fit_column(file):\n\n # Data was pulled out of an exposure by modifying residual_fringe.py to write out a column of data\n # The function we are testing is fit_1d_background_complex.\n\n file_dir = Path(__file__).parent.resolve()\n file_path = str(file_dir / file)\n\n with fits.open(file_path) as hdu:\n col_data = hdu[1].data\n col_weight = hdu[2].data\n col_wnum = hdu[3].data\n bg_fit = hdu[4].data\n store_freq = hdu[0].header['FFREQ']\n\n return col_data, col_weight, col_wnum, bg_fit, store_freq", "def full_dataset():\n return TabularDataset.from_path(train_path='tests/data/dummy_tabular/train.csv',\n val_path='tests/data/dummy_tabular/val.csv', sep=',')", "def fits_summary(self):\n t = pyfits.open(self.fits_file)[1].data\n # remove columns that have multiple dimensions\n for j in range(3):#??? 
why\n for i,col in enumerate(t.columns):\n if len(col.array.shape)>1:\n t.columns.del_col(i)\n\n tt=pyfits.BinTableHDU.from_columns(t.columns)\n df = pd.DataFrame(tt.data)\n df['flux13*'] = df['Flux_Density']*1e13\n df['unc_flux13*'] = df['Unc_Flux_Density']*1e13\n summary = html_table(df.describe().T, float_format=FloatFormat(3),\n heading='', href=False, maxlines=50)\n self.fits_summary_table = summary.replace('%', '%%')\n # creates error??\n #print ('Check: %s' % df)", "def h5ToDf(filename):\n log.info(f\"Import data from: {filename}\")\n with h5py.File(filename, \"r\") as hf :\n d = {}\n for name in list(hf.keys()):\n d[name] = np.array(hf[name][:])\n df = pd.DataFrame(data=d)\n return(df)", "def h5ToDf(filename):\n log.info(f\"Import data from: {filename}\")\n with h5py.File(filename, \"r\") as hf :\n d = {}\n for name in list(hf.keys()):\n d[name] = np.array(hf[name][:])\n df = pd.DataFrame(data=d)\n return(df)", "def get_table_from_ldac(filename, frame=1):\n from astropy.table import Table\n if frame>0:\n frame = frame*2\n tbl = Table.read(filename, hdu=frame)\n return tbl", "def get_table(self, table, format=\"FITS\", verbose=False):\n # make sure the table exists\n try:\n results = self.quick(\"select top 0 * from {}\".format(table),context=\"MYDB\")\n except Exception as e:\n # raise ValueError(\"table MyDB.{} not found\".format(table)) from None\n raise_from(ValueError(\"table MyDB.{} not found\".format(table)), None)\n # first try to get it as a quick request, which is much faster if it works\n try:\n return self.quick(\"select * from {}\".format(table),context=\"MYDB\",astropy=True)\n except Exception as e:\n pass\n \n # sigh, have to go through output queue\n t0 = time.time()\n format = format.upper()\n if format not in [\"FITS\",\"CSV\"]:\n # just force a good value\n format = \"FITS\"\n if verbose:\n print(\"Making output request for {}-format data\".format(format))\n job_id = self.request_output(table,format)\n status = self.monitor(job_id)\n if status[0] != 5:\n raise Exception(\"Output request failed.\")\n job_info = self.job_info(jobid=job_id)[0]\n url = job_info[\"OutputLoc\"]\n if format == \"FITS\":\n fh = fits.open(url)\n # TDIM keywords in the Casjobs FITS header are simply wrong\n # Have to delete them to avoid bad problems in astropy.io.fits\n del fh[1].header['TDIM*']\n tab = Table(fh[1].data)\n fh.close()\n else:\n r = requests.get(url)\n r.raise_for_status()\n tab = ascii.read(MastCasJobs.replacenull(r.text),format='csv')\n if verbose:\n print(\"{:.1f} s: Retrieved {} row {} table\".format(time.time()-t0,len(tab),format))\n return tab", "def fits_read(filename, header=False, columns=None):\n fx = fitsio.FITS(filename, upper=True)\n fxcolnames = fx[1].get_colnames()\n if columns is None:\n tscolumns = ['RA', 'DEC', 'COMP', 'TYCHOVETO']\n readcolumns = numpy.intersect1d(list(tscolumns), fxcolnames).tolist()\n assert readcolumns != [], \"Columns are different than default\"\n else:\n assert numpy.setdiff1d(columns, fxcolnames).tolist() == [], \"Missing column\"\n readcolumns = list(columns)\n #\n #\n data = fx[1].read(columns=readcolumns)\n if header:\n hdr = fx[1].read_header()\n fx.close()\n return data, hdr, fxcolnames\n else:\n fx.close()\n return data", "def load_fits_primary(filename, transpose=True):\n output = pyfits.open(filename)\n output = np.array(output[0].data)\n if transpose:\n output = output.transpose()\n\n return output", "def txt_to_dataframe(folder,name_parcellation):\n column_weight = ['patients','degree', 'density', 'global_efficiency', 
'transitivity', 'assortavity', 'clustering_coef',\n 'fiedler_value', 'small_worldness','Null']\n\n file_name=folder+name_parcellation+'.txt'\n data=pd.read_csv(file_name,header=None,delimiter=';')\n data.columns=column_weight\n data=data.drop(['Null'],axis=1)\n file_len=folder+name_parcellation+'_len.txt'\n data_len=only_connected_patients(file_len)\n data_len=data_len.values\n data['length']=data_len\n data=data[data['length']>-1.0]\n data=data.reset_index(drop=True)\n return data", "def read_table(file, **kwargs):\n extn = Reader.get_extension(file).lower()\n if extn.startswith('.xls'):\n return read_table_excel(file, **kwargs)\n elif extn == '.gsheet':\n if hasattr(file, 'as_posix'): # a pathlib.Path object\n file = str(file)\n elif hasattr(file, 'name'): # a TextIOWrapper object\n file = file.name\n return read_table_gsheets(file[:-7], **kwargs) # ignore the extension\n else:\n return read_table_text(file, **kwargs)", "def read_mumax3_table(filename):\n \n table = pd.read_csv(filename, sep='\\t')\n table.columns = ' '.join(table.columns).split()[1::2]\n \n return table", "def tabular_parser(path: str, header: bool = True) -> TabularData:\n with open(path, \"r\") as read_obj:\n csv_reader = reader(read_obj)\n list_of_rows = list(csv_reader)\n rows = np.array(list_of_rows)\n\n if header:\n return TabularData(column_names=rows[0, :], data=rows[1:, :])\n else:\n return TabularData(column_names=None, data=rows[1:, :])", "def read_exprs_as_df(fn):\n df = pd.read_table(fn, index_col=0).T\n return df", "def fromfits(self, filename=None):\n robot_array = fitsio.read(filename, ext=1)\n target_array = fitsio.read(filename, ext=2)\n self.clearTargetList()\n self.target_fromarray(target_array)\n self.robot_fromarray(robot_array)\n return", "def table_to_dataframe(table):\n ths = table.find_all('th')\n trs = table.find_all('tr')\n if ths:\n columns = [th.text.strip() for th in ths]\n else:\n columns = [td.text.strip() for td in trs[0]]\n trs = trs[1:]\n _logger.debug(f'Creating DataFrame with columns {columns}')\n\n data = [[maybe_convert(td.text)\n for idx, td in enumerate(tr.find_all('td'))\n if idx < len(columns)]\n for tr in trs]\n return pd.DataFrame(data, columns=columns).dropna(how='all')", "def load_df_from_txt(fname, direc=\"data/result/\", sep=\"\\t\"):\n path = create_file_path(fname, direc)\n try:\n return multi_index_tsv_to_dataframe(path, sep, header_rows=None)\n except IOError:\n raise IOError(\"Failed to open '{}\".format(path))", "def open_data(table):\n engine = create_engine(myDB, encoding='latin1') \n conn = engine.connect()\n select = conn.execute('select * from ' + table)\n\n df = pd.DataFrame(select.fetchall()) \n df.columns = select.keys()\n\n conn.close()\n return df", "def get_file_df(filepath):\n dd = [json.loads(f) for f in open(filepath).readlines()]\n return pd.DataFrame(dd)", "def _read_table(hdulist, extname, **kwargs):\n t = _read_ext(Table, hdulist, extname, **kwargs)\n h = hdulist[extname].header\n for i in range(h['TFIELDS']):\n try:\n t.columns[i].unit = h['TUNIT%d' % (i + 1)]\n except Exception:\n pass\n return t", "def read(cls, filename):\n hdu_list = fits.open(filename)\n return cls.from_fits(hdu_list)", "def _csv2df(data_file):\n df = pd.read_csv(data_file, encoding=\"ISO-8859-1\", low_memory=False)\n return df", "def pipe(cls, filename: str) -> pd.DataFrame:\n\n # initialise parser\n extractor = cls(filename)\n \n # get records\n raw_data = extractor.load_data()\n \n # extract relevant data\n extracted_data = extractor.extract_data(raw_data)\n\n return 
extracted_data", "def load(self) -> pd.DataFrame:\n if os.path.exists(self.file_name):\n df = pd.read_csv(self.file_name, index_col=0)\n df = self._clean(df)\n else:\n _LOG.debug(\"No file '%s'\", self.file_name)\n df = pd.DataFrame()\n return df", "def load_to_dataframe(self) -> DataFrame:\n return read_csv(self._csv_path, converters={\n # Check if embedding size is the empty string,\n # as it would be for Count models\n \"Embedding size\": lambda v: int(float(v)) if len(v) > 0 else nan\n })", "def from_hdf(path_or_buf, key=None, **kwargs):\r\n return SpatialDataFrame(pd.read_hdf(path_or_buf=path_or_buf,\r\n key=key, **kwargs))", "def load_dataset(file_name):\n file_path = join(DATA_DIR, file_name)\n text_field = Field(pad_token=None, tokenize=_tokenize_str)\n\n dataset = TabularDataset(\n path=file_path,\n format='csv',\n fields=[('text', text_field)],\n skip_header=False)\n\n text_field.build_vocab(dataset)\n return dataset", "def read_table(tablefile):\n #read in a workbook from file\n survey = xlrd.open_workbook(tablefile)\n\n #for sheet in survey.sheets():\n\n # print sheet\n # print sheet.name\n # print \"Number of rows: \"+str(sheet.nrows)\n # print \"Number of columns: \"+str(sheet.ncols)\n return survey", "def extract_y(tracer_file, tracer_network):\n y_table = pd.DataFrame(tracer_file['Y'])\n y_table.columns = list(tracer_network['isotope'])\n return y_table", "def _read_gff3_using_pandas( file ):\n import pandas\n result = pandas.read_table(\n file,\n comment = '#',\n names = [ 'seqid', 'source', 'type', 'start', 'end', 'score', 'strand', 'phase', 'attributes' ],\n na_values = \".\",\n dtype = {\n 'seqid': str,\n 'source': str,\n 'type': str,\n 'start': int,\n 'end': int,\n 'score': float,\n 'strand': str,\n 'phase': str,\n 'attributes': str\n }\n )\n return result", "def create_table(f, geoinfo):\n bounds_cols = xb_points + yb_points\n df = pd.read_csv(f, delimiter=\";\", index_col=\"INDEX_RC\")\n df[duration_name] = parse_duration_level(f)\n df = df.join(geoinfo[[\"X_CENT_GEO\", \"Y_CENT_GEO\", \"Col\", \"Row\"]])\n df = df.rename(columns={\"Col\": x, \"Row\": y, \"X_CENT_GEO\": lon, \"Y_CENT_GEO\": lat})\n return df", "def read_data(fname, cols):\n df = (pd.read_csv(fname, header=None, sep=r\"\\s+\", comment=\"#\",\n names=cols, dtype=np.float64)\n .iloc[1:]) # First line is the total number of trees\n # Could reset_index, but we don't shuffle the DataFrame\n return df", "def _parse_textfile(self):\n\n field_names = list(self.FIELD_NAME_TO_INDEX.keys())\n field_indices = list(self.FIELD_NAME_TO_INDEX.values())\n frame = pd.read_csv(\n self.filepath,\n header=None, # MAGIC file has no header line\n delimiter=self.DELIMITER,\n usecols=field_indices,\n names=field_names,\n converters=self.FIELD_CONVERTERS,\n )\n return frame", "def load_data(file):\n if file == \"test\":\n file_path = '../data/day-4-test.txt'\n elif file == \"full\":\n file_path = '../data/day-4.txt'\n else:\n raise Exception('load_data() must take the input argument \"test\" or \"full\"')\n\n # read file\n with open(file_path) as f:\n lines = f.read().split(\"\\n\\n\")\n\n # turn into a dictionary, then a data frame\n f = lambda x: pd.DataFrame(list_to_dict(x.split()), index = [0])\n x = [f(x) for x in lines]\n return pd.concat(x, ignore_index=True, sort=True)", "def load_data(path, file, verbose=False, index=0):\n \n df = pd.read_csv(path+file, index_col=index)\n \n if verbose:\n shape = f'{df.shape}'\n dtypes = f'{df.dtypes[:30]}'\n head = f'{df.head()[:10]}'\n name = file.split('.')[0]\n \n print(f'{name} 
shape'.center(80, '-'))\n print(shape.center(80))\n print(f\"{name}'s column types\".center(80, '-'))\n print(dtypes)\n print(f\"{name} first five rows\".center(80, '-'))\n print(head)\n \n return df", "def convert_to_dataframe(input_file, file_type):\r\n if file_type == \"pcap\" or file_type == \"pcapng\":\r\n return convert_pcap_to_dataframe(input_file)\r\n elif file_type == \"nfdump\":\r\n return convert_nfdump_to_dataframe(input_file)\r\n else:\r\n raise UnsupportedFileTypeError(\"The file type \" + file_type + \" is not supported.\")", "def save_fits(df, fname):\n df = df.reset_index()\n outtable = Table.from_pandas(df)\n Path(fname).parent.mkdir(parents=True, exist_ok=True)\n outtable.write(fname, format='fits', overwrite=True)", "def read_table03(rawfile):\n df = pd.read_csv(rawfile, skiprows = range(4)+range(55,62), thousands=\",\", na_values=['-'])\n print df\n\n df.dropna(axis=0, how=\"all\", inplace=True)\n #df.dropna(axis=0, subset=df.columns[1:], how=\"all\", inplace=True)\n df.dropna(axis=1, how=\"all\", inplace=True)\n\n df = df.rename(columns = {\"Unnamed: 2\": \"All races Percent\", \"Unnamed: 4\": \"Males Percent\", \"Unnamed: 6\": \"Females Percent\", \"Unnamed: 8\":\"25-34 Percent\", \"25 to 34 years old\":\"25-34\", \"Unnamed: 10\": \"35-54 Percent\", \"35 to 54 years old\":\"35-54\", \"Unnamed: 12\":\">=55 Percent\", \"55 years and older\":\">=55\", \"Unnamed: 14\": \"White Percent\", \"Unnamed: 16\": \"Non-Hispanic White Percent\", \"Unnamed: 18\": \"Black Percent\", \"Unnamed: 20\": \"Asian Percent\", \"Unnamed: 22\": \"Hispanic Percent\", \"Hispanic \\n(of any race)\":\"Hispanic\"})\n\n df.drop(0, inplace=True)\n df[\"Detailed Years of School\"][1] = \"Total\"\n\n df = df.ix[[45,46,47,48], :]\n df.set_index(\"Detailed Years of School\", inplace=True)\n print df\n #for i in range(4):\n #df.iloc[i][:] = df.iloc[i][:].str.replace(r'[$,]', '').astype('float')\n print df.dtypes\n\n return df", "def read_table(table_name, hf, df_fmt, ilo, ihi):\n dfs = []\n for dt, block in df_fmt.groupby(\"dtype\"):\n \n # check if this dtype contains waveform data\n if 'waveform' in block['name'].values:\n wf_group = f\"/{table_name}/waveform\"\n wf_block = read_waveforms(wf_group, hf, df_fmt, ilo, ihi)\n wf_rows, wf_cols = wf_block.shape\n nrows = wf_rows\n \n # get number of additional columns\n new_cols = [c for c in list(block[\"name\"].values) if c != 'waveform']\n newcols = len(new_cols)\n \n # allocate the full numpy array for this dtype\n np_block = np.empty((nrows, newcols + wf_cols), dtype=dt)\n np_block[:, newcols:] = wf_block\n \n cols = []\n for i, col in enumerate(new_cols):\n ds = hf[f\"{table_name}/{col}\"] \n \n if ihi is None:\n ihi = ds.shape[0]\n nwfs = ihi - ilo + 1 # inclusive\n \n np_block[:, i] = ds[ilo:ihi]\n cols.append(col)\n cols.extend(np.arange(wf_cols)) \n\n dfs.append(pd.DataFrame(np_block, columns=cols))\n \n # read normal 'array<1>{real}' columns\n else:\n ncols = len(block)\n nrows = block[\"size\"].unique()\n if len(nrows) > 1:\n print('Error, columns are different lengths')\n exit()\n nrows = nrows[0]\n np_block = np.empty((nrows, ncols), dtype=dt)\n \n for i, col in enumerate(block[\"name\"]):\n ds = hf[f\"{table_name}/{col}\"]\n np_block[:,i] = ds[...]\n \n dfs.append(pd.DataFrame(np_block, columns=block[\"name\"])) \n \n # concat final DF after grouping dtypes and avoiding copies\n return pd.concat(dfs, axis=1, copy=False)", "def extract(filepath: str) -> pd.DataFrame:\n try:\n return pd.read_csv(filepath)\n except Exception as e:\n raise 
InvalidSourceFile(f\"Can not read file {filepath}: {str(e)}\")", "def load_data(self) -> pd.DataFrame:\n\n if not os.path.exists(self.file_name):\n raise FileNotFoundError(f\"File does not exist: {self.file_name}\")\n\n _, ext = os.path.splitext(self.file_name)\n assert ext == \".xls\", \"Invalid filetype attempted to load\"\n\n return pd.read_excel(self.file_name)", "def read_matrix(self, matrix_id, dataset_id, table_name):\n tbl = self.Table(matrix_id, dataset_id, table_name)\n return tbl.to_dataframe()", "def convert_tables_to_dataframe(self, tables: List[Table]):\n geography_types = get_geo_mappings('geo_codes').keys()\n\n # Melt each subset to adopt common schema\n subsets = []\n for header, *rows in tables:\n subset = DataFrame(rows, columns=header)\n # Consolidate geography type in a single column\n geography_columns = set(geography_types) & set(subset.columns)\n id_vars = ['NAME', 'GEO_ID', 'geo_type', *geography_columns, 'year']\n melted = subset.melt(id_vars=id_vars).drop(columns=geography_columns)\n subsets.append(melted)\n\n # Ensure correct sort order and value dtype\n dataframe = (\n pd.concat(subsets)\n .sort_values(by=['geo_type', 'variable', 'NAME', 'year'])\n .reset_index(drop=True)\n )\n dataframe['value'] = dataframe['value'].astype(float)\n\n return dataframe", "def file_to_df(self, chunksize=None):\n\t\tdf = pd.read_csv(\n\t\t\tfilepath_or_buffer=os.path.join(ROOT_DIR, \"raw\", \"{}.csv.bz2\".format(self.year)),\n\t\t\tsep=\",\", compression=\"bz2\", encoding=\"utf-8\", usecols=[\"Origin\", \"Dest\", \"Distance\"],\n\t\t\tchunksize=chunksize\n\t\t)\n\n\t\treturn df", "def read(cls, filename):\n with fits.open(str(make_path(filename)), memmap=False) as hdulist:\n return cls.from_hdulist(hdulist)", "def load() -> DataFrame:\n return load_file(__file__, \"default.csv.gz\")", "def as_DataFrame (self):\n return DataFrame(self.table)", "def test_from_file_xls(self):\n with TemporaryDirectory() as tmp:\n fp, df_test = save_simple_dataframe(tmp, 'test.xls')\n df_read = BaseDataClass.from_file(fp).df\n self.assertEqual(\n pd.testing.assert_frame_equal(df_test, df_read),\n None,\n )", "def read(cls, filename, hdu=\"PSF_2D_TABLE\"):\n filename = str(make_path(filename))\n table = Table.read(filename, hdu=hdu)\n return cls.from_table(table)", "def load_raw_data(path: str) -> pd.DataFrame:\n data = []\n with open(path) as file:\n for line in file:\n data.append(line)\n data_df = pd.DataFrame(data, columns = {'tweet'})\n return data_df", "def _read_fits(filename):\n if any(fn in os.path.basename(filename) for fn in COMPOSITE_MATCHES):\n with fits.open(filename) as hdu:\n data, header = hdu[1].data, hdu[1].header\n dqf = None\n elif any(fn in os.path.basename(filename) for fn in L1B_MATCHES):\n with fits.open(filename) as hdu:\n data, header, dqf = hdu[0].data, _fix_l1b_header(filename), hdu[1].data\n else:\n raise ValueError(\n f\"File {filename} does not look like a SUVI L1b FITS file or L2 HDR composite.\"\n )\n return header, data, dqf", "def fetch_data(self) -> pd.DataFrame:\r\n os.chdir(r'\\\\192.168.8.90\\投研部\\Jessica\\test_data')\r\n if self.tic in ['RB.CCRI', 'HC.CCRI', 'I.CCRI', 'J.CCRI', 'JM.CCRI', 'ZC.CCRI']:\r\n f = pd.read_hdf('data.h5', 'snc')\r\n if self.tic in ['CU.CCRI', 'ZN.CCRI', 'AL.CCRI', 'NI.CCRI']:\r\n f = pd.read_hdf('data.h5', 'met')\r\n data = f.loc[f.loc[:, 'sec_code'] == self.tic, :]\r\n # extract I.CCRI data\r\n table = pd.pivot_table(data, index=['date'], columns=['factor_code'], values='factor_value')\r\n table = table.sort_values(by='date')\r\n \r\n 
return table", "def read_rdata(rdata_fullpath, table_name):\n from rpy2.robjects import pandas2ri, r\n pandas2ri.activate()\n\n # we want forward slashes for R\n rdata_fullpath_forR = rdata_fullpath.replace(\"\\\\\", \"/\")\n print \"Loading %s\" % rdata_fullpath_forR\n \n # read in the data from the R session with python\n r['load'](rdata_fullpath_forR)\n # check that it's there\n table_df = pandas2ri.ri2py(r['model_summary'])\n\n # fillna\n for col in table_df.columns:\n nullcount = sum(pandas.isnull(table_df[col]))\n if nullcount > 0: print \" Found %5d NA values in column %s\" % (nullcount, col)\n table_df = table_df.fillna(0)\n for col in table_df.columns:\n nullcount = sum(pandas.isnull(table_df[col]))\n if nullcount > 0: print \" -> Found %5d NA values in column %s\" % (nullcount, col)\n \n print \"Read %d lines from %s\" % (len(table_df), rdata_fullpath)\n return table_df", "def read_tsv(path):\n return pd.read_csv(path, sep=\"\\t\", index_col=0)", "def test_read_csv_to_dataframe(fname):\n df = read_csv_to_dataframe(fname)\n print(df.head())", "def from_table(table, table_id=None):\n return tree.VOTableFile.from_table(table, table_id=table_id)", "def load_table_as_pd(conn, tablename: str):\n # get table as a pandas dataframe\n statement = f\"\"\"\n SELECT *\n FROM '{tablename}';\n \"\"\"\n df = pd.read_sql_query(statement, conn)\n return df", "def get_df_all_results(self, file):\n # read csv into dataframe\n df = pd.read_csv(file)\n # rename columns\n names = [\"index\", \"samp1\", \"samp2\", \"es\", \"sd1\", \"sd2\", \"k\", \"perm\",\n \"t_test\"]\n df.columns = names\n return df", "def sourceToDataframe(self):\n df = pd.read_excel(self.filename)\n df.columns = df.iloc[10]\n df = df.drop(df.index[:11])\n self.df = df #makes this df accessible to the whole class now\n self.insertODN()\n display(df.head())", "def load_data(txt_path: str = RAW_TXT) -> pd.DataFrame:\n df = pd.read_csv(txt_path)[INDICES]\n return df", "def create_table_from_file():\n\n full_path = os.getcwd()\n file_name = full_path + \"/inventory/inventory.csv\"\n\n if os.path.exists(file_name):\n table = data_manager.get_table_from_file(file_name)\n\n else:\n ui.print_error_message(\"There is no file to read!\")\n table = []\n\n return table", "def _load_data(self, comp=None):\n\t\tif comp is None:\n\t\t\tcomp = self._compensation\n\t\tmatrix = self._fcsfile.read_data(fmt='matrix',\n\t\t\tcomp=comp)\n\t\treturn pd.DataFrame(matrix, columns=self._channels)", "def make_summary_tables( res ):\n\n # transform second table to csv and read this as a dataFrame\n result_fit_df = pd.read_csv(StringIO( res.tables[1].as_csv() ), sep=\",\",index_col=0)\n result_fit_df.columns = [i.strip() for i in result_fit_df.columns]\n result_fit_df.index = [i.strip() for i in result_fit_df.index]\n\n # first table is trickier because the data is spread on to columns, and there is title line\n L = res.tables[0].as_html().split('\\n')\n L.pop(1) # get rid of the title\n tmp = pd.read_html('\\n'.join(L) , header=None)[0] # read as a dataframe, but with 4 columns \n\n names = list(tmp[0]) + list(tmp[2])[:-2] # columns 0 and 2 are metric names\n values = list(tmp[1]) + list(tmp[3])[:-2] # columns 1 and 3 are the corresponding values\n # NB : I exclude the last 2 elements which are empty \n \n result_general_df = pd.DataFrame( {'Name': names , 'Value' : values}, index = names , columns=['Value'] )\n \n return result_general_df , result_fit_df", "def read_table(lines):\n # header line 1: (100*Z+A), mass in [m_neutron]\n # [MAT, 3, MT/ ZA, AWR, 0, 
0, 0, 0] HEAD\n\n # header line 2: Q-value and some counts\n # [MAT, 3, MT/ QM, QI, 0, LR, NR, NP/ EINT/ S(E)] TAB1\n f = read_line(lines[1])\n nS = int(f[4]) # number of interpolation sections\n nP = int(f[5]) # number of data points\n\n # header line 3: interpolation information\n # [MAT, 3, 0/ 0.0, 0.0, 0, 0, 0, 0] SEND\n # 1 y is constant in x (constant, histogram)\n # 2 y is linear in x (linear-linear)\n # 3 y is linear in ln(x) (linear-log)\n # 4 ln(y) is linear in x (log-linear)\n # 5 ln(y) is linear in ln(x) (log-log)\n # 6 y obeys a Gamow charged-particle penetrability law\n\n # data lines\n x = []\n y = []\n for l in lines[3:]:\n f = read_line(l)\n x.append(f[0])\n y.append(f[1])\n x.append(f[2])\n y.append(f[3])\n x.append(f[4])\n y.append(f[5])\n return np.array(x[:nP]), np.array(y[:nP])", "def test_tabledump(self):\n datastr = (\n '\" 1\" \"abc\" \" 3.70000007152557\" \" 0\"\\n'\n '\" 2\" \"xy \" \" 6.69999971389771\" \" 1\"\\n'\n )\n cdstr = (\n 'c1 1J I11 \"\" \"\"'\n ' -2147483647 \"\" \"\" \\n'\n 'c2 3A A3 \"\" \"\"'\n ' \"\" \"\" \"\" \\n'\n 'c3 1E G15.7 \"\" \"\"'\n ' \"\" 3 0.4 \\n'\n 'c4 1L L6 \"\" \"\"'\n ' \"\" \"\" \"\" \\n'\n )\n # copy fits file to the temp directory\n self.copy_file(\"tb.fits\")\n\n # test without datafile\n fits.tabledump(self.temp(\"tb.fits\"))\n assert os.path.isfile(self.temp(\"tb_1.txt\"))\n\n # test with datafile\n fits.tabledump(self.temp(\"tb.fits\"), datafile=self.temp(\"test_tb.txt\"))\n assert os.path.isfile(self.temp(\"test_tb.txt\"))\n\n # test with datafile and cdfile\n datafile = self.temp(\"data.txt\")\n cdfile = self.temp(\"coldefs.txt\")\n fits.tabledump(self.temp(\"tb.fits\"), datafile, cdfile)\n assert os.path.isfile(datafile)\n with open(datafile) as data:\n assert data.read() == datastr\n with open(cdfile) as coldefs:\n assert coldefs.read() == cdstr", "def load_data(path_to_file: str) -> pd.DataFrame:\n print(\"Started loading the excel data from {0} into a datafram - this may take a while. 
You may want to grab a coffee.\".format(path_to_file))\n df = pd.read_excel(path_to_file, engine='openpyxl', header=HEADER_COLUMN)\n print(\"Finished loading the excel data from {0} into a dataframe.\".format(path_to_file))\n return df", "def tire_data(tire_path):\n\n df = pd.read_csv(tire_path, index_col=0, sep=\";\", low_memory=False)\n\n return df", "def load_ctffind_4_1_0(file_name: str) -> pd.DataFrame:\n header_names: typing.List[str]\n ctffind_data: pd.DataFrame\n ctffind_meta: pd.DataFrame\n\n header_names = get_ctffind_4_1_0_header_names()\n ctffind_data = util.load_file(\n file_name,\n names=header_names,\n skiprows=5,\n usecols=(1, 2, 3, 4, 5, 6)\n )\n ctffind_data['PhaseShift'] = np.degrees(ctffind_data['PhaseShift'])\n\n ctffind_meta = get_ctffind_4_1_0_meta(file_name=file_name)\n return pd.concat([ctffind_data, ctffind_meta], axis=1)", "def load_data(dataset_path: str):\n data = arff.loadarff(dataset_path)\n data_frame = pd.DataFrame(data[0])\n return data_frame", "def path_to_frame(coefs, colnames, rownames):\n return pd.DataFrame(coefs, index=rownames, columns=colnames)", "def table_to_df(table, index_col=None, columns=None) -> pd.DataFrame:\n header, *rows = table.find_all('tr')\n cols = columns or [\n re.sub(r'[^a-z%]', ' ', th.text.strip().lower()).strip().replace(' ', '_')\n for th in header.find_all(['td', 'th'])\n ]\n rows = [list(td.text.strip() for td in tr.find_all(['td', 'th']))[:len(cols)]\n for tr in rows]\n df = pd.DataFrame(rows, columns=cols)\n if index_col:\n df.set_index(index_col, inplace=True)\n return df" ]
[ "0.72136354", "0.70939803", "0.6888955", "0.6693443", "0.65260655", "0.64546525", "0.64119506", "0.6292741", "0.60741687", "0.6048607", "0.59836", "0.59633166", "0.5962561", "0.5943778", "0.59254146", "0.5899131", "0.5885488", "0.586403", "0.57661194", "0.5747174", "0.57433605", "0.57226944", "0.5703075", "0.5671623", "0.5671411", "0.5609038", "0.5608039", "0.5604904", "0.5603439", "0.55851346", "0.5579887", "0.5575242", "0.55720633", "0.55720633", "0.55694044", "0.5540916", "0.55372244", "0.5506218", "0.55001795", "0.54980505", "0.54819345", "0.5480398", "0.54526836", "0.54465586", "0.54435426", "0.5439257", "0.5437694", "0.5415366", "0.5397251", "0.5397115", "0.53906715", "0.53796303", "0.53574294", "0.5350511", "0.5349285", "0.53376436", "0.5335658", "0.5334992", "0.5330669", "0.53255713", "0.53152275", "0.53125393", "0.5311399", "0.5306009", "0.5290048", "0.5289043", "0.5287394", "0.52802", "0.527559", "0.52748257", "0.5270847", "0.5261935", "0.52577007", "0.52545506", "0.52479017", "0.5247717", "0.5245352", "0.5244765", "0.52402806", "0.5235409", "0.52338547", "0.5230887", "0.5229026", "0.52206206", "0.52202624", "0.5220169", "0.52198696", "0.52148294", "0.5211447", "0.5206742", "0.52051914", "0.5203834", "0.5203696", "0.519958", "0.51991224", "0.51944983", "0.5191156", "0.5189308", "0.5185698", "0.5183509" ]
0.7552445
0
Creates a new column 'tycho2_id' in the Tycho-2 catalog. This is for comparison with the TGAS catalog.
def create_tycho_id(tycho2df):
    tycho2df['tycho2_id'] = tycho2df.TYC1.astype(str).str.cat(tycho2df.TYC2.astype(str), sep='-')\
        .str.cat(tycho2df.TYC3.astype(str), sep='-')
    tycho2df = tycho2df.rename(columns={'HIP': 'hip'})
    return tycho2df
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_idx2id(self, id2idx = None):\n if id2idx is None:\n return {v:k for k, v in self.id2idx.items()}\n return {v:k for k, v in id2idx.items()}", "def getMcc2Id(self):\n return self._base.getMcc2Id()", "def get_col2id( self, ratios_standardized, db ):\n\t\tcol_info_collection = db.col_info\n\t\tcol_name = []\n\t\tcol_id = []\n\t\tfor i in col_info_collection.find():\n\t\t\tcol_name.append(i[\"egrin2_col_name\"])\n\t\t\tcol_id.append(i[\"col_id\"])\n\t\tfor i in ratios_standardized.columns.values:\n\t \t\tif i not in col_name:\n\t \t\t\tcol_name.append( i )\n\t \t\t\tif len(col_id) > 0:\n\t\t\t\t\tcol_id.append(max(col_id)+1)\n\t\t\t\telse:\n\t\t\t\t\tcol_id.append(0)\n\t \tcol_info = pd.DataFrame( zip( col_id, col_name ), index = col_name, columns = [ \"col_id\", \"egrin2_col_name\"] )\n\t \treturn col_info", "def add_column_into_target_sf(self, tap_type, table, new_column):\n self.run_query_target_snowflake(\n f'ALTER TABLE ppw_e2e_tap_{tap_type}{self.sf_schema_postfix}.{table} ADD {new_column[\"name\"]} int'\n )\n self.run_query_target_snowflake(\n f'UPDATE ppw_e2e_tap_{tap_type}{self.sf_schema_postfix}.{table}'\n f' SET {new_column[\"name\"]}={new_column[\"value\"]} WHERE 1=1'\n )", "def getMcc2Id(self):\n return NotImplementedError", "def trilha2(self, trilha2):\n self._trilha2 = trilha2", "def tag_id2(self, tag_id2):\n if self._configuration.client_side_validation and tag_id2 is None:\n raise ValueError(\"Invalid value for `tag_id2`, must not be `None`\") # noqa: E501\n\n self._tag_id2 = tag_id2", "def getMcc2Id(self):\n return self.mcc2id", "def getMcc2Id(self):\n return self.mcc2id", "def getMcc2Id(self):\n return self.mcc2id", "def getMcc2Id(self):\n return self.mcc2id", "def _getNewCatId(self):\n\n newCatId = COCO_PLUS.CAT_ID\n COCO_PLUS.CAT_ID += 1\n\n return newCatId", "def make_category_table_level2(category_level2_table, category_table):\n # Create a dict mapping 'category_level1_names' to 'category_level1_index'\n category_name2label_level2 = {}\n for item_level2 in category_level2_table.itertuples():\n category_name = item_level2[1]\n category_idx = item_level2[2]\n category_name2label_level2[category_name] = category_idx\n # Create a dict mapping 'category_id' to 'category_level1_index'\n category_id2label_level2 = {}\n for item in category_table.itertuples():\n category_id = item[0]\n category_idx = category_name2label_level2[item[2]]\n category_id2label_level2[category_id] = category_idx\n return category_id2label_level2", "def add_column_into_source(self, tap_type, table, new_column):\n run_query_method = getattr(self, f'run_query_tap_{tap_type}')\n run_query_method(\n f'ALTER TABLE {table} ADD {new_column[\"name\"]} int'\n )\n run_query_method(\n f'UPDATE {table} set {new_column[\"name\"]}={new_column[\"value\"]} where 1=1'\n )", "def secondary_id(self, value):\n self._write(MX_SECONDARY_ID, value)", "def oeid_to_existing_extid(self, cr, uid, referential_id, openerp_id, context=None):\n return self.get_extid(cr, uid, openerp_id, referential_id, context=context)", "def catalog_id(self):\n return self._catalog_id", "def create_table(self):\n self.db.query(f\"\"\"\n CREATE TABLE IF NOT EXISTS {self.table} (\n substitut_id bigint unsigned references product(id),\n original_id bigint unsigned references product(id),\n PRIMARY KEY (substitut_id, original_id)\n )\n \"\"\")", "def newid(self, target_table):\n self.new_id[target_table] += 1\n return self.new_id[target_table]", "def _output_imei_column(self):\n if self._generate_check_digit:\n imei_col_name = 
sql.Identifier('imei_norm_with_check_digit')\n else:\n imei_col_name = sql.Identifier('imei_norm')\n return imei_col_name", "def _get_country_id(self, code2):\n if not hasattr(self, '_country_codes'):\n self._country_codes = {}\n\n if code2 not in self._country_codes.keys():\n self._country_codes[code2] = Country.objects.get(code2=code2).pk\n return self._country_codes[code2]", "def get_n2(cls):\n return cls.objects.get(pk='N2')", "def get_catalog_id(self):\n return self._catalog_id", "def insert_column(self, tb_name, column_name, data_type):\n sentences = f\"\"\"\n ALTER TABLE {tb_name} ADD COLUMN {column_name} {data_type};\n \"\"\"\n print(sentences)\n self.commit(sentences)", "def table_catalog_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"table_catalog_id\")", "def table_catalog_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"table_catalog_id\")", "def get_actual_id(translated):", "def secondary_id(self):\n return self._read(MX_SECONDARY_ID)", "def catalog_id(self, catalog_id):\n self._catalog_id = catalog_id", "def tdim2(dim2):\n transformers = [Enumerate(dim2.categories), OneHotEncode(len(dim2.categories))]\n tdim2 = TransformedDimension(Compose(transformers, dim2.type), dim2)\n return tdim2", "def UpdateIdTable(self):\n query = ('INSERT INTO %s '\n ' SELECT DISTINCT id, \\'\\', \\'\\', \\'\\' '\n ' FROM %s '\n ' WHERE id NOT IN (SELECT box_office_mojo FROM %s)' %\n (self.ID_TABLE, self.BOX_OFFICE_TABLE, self.ID_TABLE))\n self.cursor.execute(query)", "def custom_info2(self, custom_info2):\n\n self._custom_info2 = custom_info2", "def custom_info2(self, custom_info2):\n\n self._custom_info2 = custom_info2", "def makeFootnoteId(self, id):\n if self.getConfig(\"UNIQUE_IDS\"):\n return 'fn%s%d-%s' % (self.sep, self.unique_prefix, id)\n else:\n return 'fn%s%s' % (self.sep, id)", "def getTypeID(self) -> int:\n ...", "def stock_2_query(self):\n return f\"\"\"\n SELECT '{self.stock_2}'\n FROM closing_prices;\"\"\"", "def second_category_axis(self, second_category_axis):\n\n self.container['second_category_axis'] = second_category_axis", "def extid_to_existing_oeid(self, cr, uid, referential_id, external_id, context=None):\n res = self.get_oeid(cr, uid, external_id, referential_id, context=context)\n return res", "def trilha2(self):\n return self._trilha2", "def secondary_id(self, secondary_id):\n\n self._secondary_id = secondary_id", "def test_co2_sensor_entity_id(self):\n with patch.dict(TYPES, {'CarbonDioxideSensor': self.mock_type}):\n state = State('sensor.airmeter_co2', '500', {})\n get_accessory(None, state, 2, {})", "def __assign_name_id(self):\n if not self.name_id:\n self.name_id = str(BaseTicketing.objects.create())", "def _blacklist_new_tblname(self):\n return 'listgen_temp_{0}_new_blacklist'.format(self._run_id)", "def _pairings_imei_imsi_new_tblname(self):\n return 'listgen_temp_{0}_new_pairings_imei_imsis'.format(self._run_id)", "def new_constraint_name(self, column, type):\n name = self.name.lstrip('migrate_')[:30]\n if type == 'UNIQUE':\n return '{}_{}_{}_uniq'.format(name, column[:15], self._random_string(8))\n elif type == 'PRIMARY KEY':\n return '{}_{}_pkey'.format(name, self._random_string(4))\n else:\n raise NotImplementedError('Name not implemented for type {}'.format(type))", "def before_update(mapper, conn, target):\n if isinstance(target, Column):\n raise TypeError('Got a column instead of a table')\n\n if target.id_ is None:\n dataset_id = ObjectNumber.parse(target.d_id)\n target.id_ = str(TableNumber(dataset_id, target.sequence_id))", "def 
make_temp_tbl(self, type: str = \"user_details\"):\n uid = uuid.uuid4()\n temp_tbl_name = \"temp_\" + str(uid).replace('-', '_')\n\n if self.config.dbtype.lower() == \"mysql\":\n create_temp_tbl_sql = f\"CREATE TABLE {temp_tbl_name} LIKE {type};\"\n elif self.config.dbtype.lower() == \"sqlite\":\n create_temp_tbl_sql = f\"CREATE TABLE {temp_tbl_name} AS SELECT * FROM {type} WHERE 0\"\n self.engine.execute(create_temp_tbl_sql)\n return temp_tbl_name", "def add_column_primary(self, name, type):\n raise NotImplementedError(\n \"Please implement the 'add_column_primary' method in a derived class.\")", "def migrate_from_18_8_0(self, globals_dict):\n\n bv2kw = globals_dict['bv2kw']\n products_Product = rt.models.products.Product\n \n @override(globals_dict)\n def create_products_product(id, name, description, category_id, delivery_unit, vat_class, number_of_events, min_asset, sales_account_id, sales_price):\n # if delivery_unit: delivery_unit = settings.SITE.models.products.DeliveryUnits.get_by_value(delivery_unit)\n # if vat_class: vat_class = settings.SITE.models.vat.VatClasses.get_by_value(vat_class)\n if sales_price is not None: sales_price = Decimal(sales_price)\n kw = dict()\n kw.update(id=id)\n if name is not None: kw.update(bv2kw('name',name))\n if description is not None: kw.update(bv2kw('description',description))\n kw.update(category_id=category_id)\n kw.update(delivery_unit=delivery_unit)\n kw.update(vat_class=vat_class)\n #kw.update(number_of_events=number_of_events)\n #kw.update(min_asset=min_asset)\n kw.update(sales_account_id=sales_account_id)\n kw.update(sales_price=sales_price)\n return products_Product(**kw)\n\n return '18.11.0'", "def id_col_of_parent(self):\n if self.parent_table() == 'SupplyEfficiency':\n return 'id'\n\n parent_col = self.parent_col\n if parent_col.endswith('tech_id') or parent_col.endswith('node_id') or\\\n parent_col.endswith('technology_id') or parent_col == 'subsector_id':\n return parent_col\n\n return 'id'", "def GachaCraftNodeExcelAddLv2Property(builder, Lv2Property):\n return AddLv2Property(builder, Lv2Property)", "def to_t2(self, t2):\n assert t2 >= 0, 't2 must be positive'\n\n delta = np.sqrt(t2 / self.t2) * self.delta\n\n eff = EffectConstant(delta, pool_cov=self.pool_cov, mask=self.mask,\n scale=self.scale)\n\n assert np.isclose(t2, eff.t2), 't2 scale error'\n\n return eff", "def H2(self) -> int:\n return self.raw_measure()[0]", "def test_convert_column_name_value_to_id(self):\n ohe = Mock()\n ohe.transform.return_value = np.array([\n [0, 1] # one hot encoding, second dimension\n ])\n transformer = DataTransformer()\n transformer._column_transform_info_list = [\n ColumnTransformInfo(\n column_name='x', column_type='continuous', transform=None,\n transform_aux=None,\n output_info=[SpanInfo(1, 'tanh'), SpanInfo(3, 'softmax')],\n output_dimensions=1 + 3\n ),\n ColumnTransformInfo(\n column_name='y', column_type='discrete', transform=ohe,\n transform_aux=None,\n output_info=[SpanInfo(2, 'softmax')],\n output_dimensions=2\n )\n ]\n\n result = transformer.convert_column_name_value_to_id('y', 'yes')\n assert result['column_id'] == 1 # this is the 2nd column\n assert result['discrete_column_id'] == 0 # this is the 1st discrete column\n assert result['value_id'] == 1 # this is the 2nd dimension in the one hot encoding", "def get_catalog_identifier(source: Union[Source, int]):\n return source.catalogid if isinstance(source, Source) else int(source)", "def svn_fs_path_change2_create(*args):\r\n return _fs.svn_fs_path_change2_create(*args)", "def 
to_ot2_equivalent(self) -> DeckSlotName:\n return _ot3_to_ot2.get(self, self)", "def create_dilution_pair2(self, pair, conc1, vol1, conc2, vol2):\n concentration_unit = DilutionSettings.concentration_unit_to_string(self.concentration_unit)\n conc_source_udf = \"Conc. Current ({})\".format(concentration_unit)\n conc_target_udf = \"Target conc. ({})\".format(concentration_unit)\n pair.input_artifact.udf_map = UdfMapping({conc_source_udf: conc1,\n \"Current sample volume (ul)\": vol1})\n pair.output_artifact.udf_map = UdfMapping({conc_source_udf: conc1,\n \"Current sample volume (ul)\": vol1,\n \"Target vol. (ul)\": vol2,\n conc_target_udf: conc2,\n \"Dil. calc target vol\": None,\n \"Dil. calc target conc.\": None,\n \"Dil. calc source vol\": None})\n return pair", "def _add_column(df_main, serie, name):\n df = serie.to_frame(name=name)\n df_main_new = df_main.merge(df, left_on='customerId2', right_index='customerId2', how='left')\n return df_main_new", "def to_id(self):\n return \"%s%s%s%s%s\" % (NoOpTraceId.VERSION, NoOpTraceId.DELIMITER,\n self.start_time,\n NoOpTraceId.DELIMITER, self.__number)", "def secondary_port_number(self):\n return self._props[\"persistent_identifiers\"].get(self._secondary_port_prop)", "def addCommonExtraColumn(self, req, study_id, found_extra_table, column_name, data_type, description):\n debug = False\n common_extra_table_name = None\n min_column_count = None\n quoted_column_name = '\"{0}\"'.format(column_name.upper())\n \n if 'SAMPLE' in found_extra_table:\n common_extra_table_name = 'COMMON_EXTRA_SAMPLE'\n min_column_count = 2\n elif 'PREP' in found_extra_table:\n common_extra_table_name = 'COMMON_EXTRA_PREP'\n min_column_count = 3\n \n if common_extra_table_name == None:\n raise Exception('Error: Could not determine the common extra table name. 
The found extra table is: %s' % found_extra_table)\n \n # Set the database data type:\n database_data_type = ''\n if data_type == 'text' or database_data_type == 'range':\n database_data_type = 'varchar2(4000)'\n elif data_type == 'numeric':\n database_data_type = 'int'\n elif data_type == 'date':\n database_data_type = 'date'\n \n if database_data_type == '':\n raise Exception('Could not determine common extra column data type.')\n\n # Create the column if it doesn't already exist\n statement = \"\"\"\n select count(*) \n from all_tab_columns \n where column_name = '{0}' \n and table_name = '{1}'\n \"\"\".format(column_name.upper(), common_extra_table_name)\n if debug:\n req.write('<pre>' + statement + '</pre><br/>')\n con = self.getMetadataDatabaseConnection()\n results = con.cursor().execute(statement).fetchone()\n if results[0] == 0:\n statement = 'alter table %s add %s %s' % (common_extra_table_name, quoted_column_name, database_data_type)\n if debug:\n req.write('<pre>' + statement + '</pre><br/>')\n con.cursor().execute(statement)\n \n # Copy the data found in the found extra_table\n if common_extra_table_name == 'COMMON_EXTRA_SAMPLE':\n statement = \"\"\"\n MERGE INTO common_extra_sample e\n USING (\n SELECT sample_id, {0}\n FROM {1}\n ) x\n ON (e.sample_id = x.sample_id)\n WHEN MATCHED THEN \n UPDATE SET e.{0} = x.{0}\n WHEN NOT MATCHED THEN \n INSERT (e.sample_id, e.{0})\n VALUES (x.sample_id, x.{0})\n \"\"\".format(quoted_column_name, found_extra_table)\n else:\n statement = \"\"\"\n MERGE INTO common_extra_prep e\n USING (\n SELECT sample_id, row_number, {0}\n FROM {1}\n ) x\n ON (e.sample_id = x.sample_id and e.row_number = x.row_number)\n WHEN MATCHED THEN \n UPDATE SET e.{0} = x.{0}\n WHEN NOT MATCHED THEN \n INSERT (e.sample_id, e.row_number, e.{0})\n VALUES (x.sample_id, x.row_number, x.{0})\n \"\"\".format(quoted_column_name, found_extra_table)\n \n if debug:\n req.write('<pre>' + statement + '</pre><br/>')\n con.cursor().execute(statement)\n statement = 'commit'\n if debug:\n req.write('<pre>' + statement + '</pre><br/>')\n con.cursor().execute(statement)\n \n # Remove the column from the found extra table. 
If it's the last custom column in the table, remove the table\n statement = \"select count(*) from all_tab_columns where table_name = '%s'\" % (found_extra_table)\n if debug:\n req.write('<pre>' + statement + '</pre><br/>')\n results = con.cursor().execute(statement).fetchone()\n if results[0] <= min_column_count:\n statement = 'drop table %s' % (found_extra_table)\n if debug:\n req.write('<pre>' + statement + '</pre><br/>')\n con.cursor().execute(statement)\n else:\n statement = 'alter table %s drop column %s' % (found_extra_table, quoted_column_name)\n if debug:\n req.write('<pre>' + statement + '</pre><br/>')\n con.cursor().execute(statement)\n \n # Clean up references in study_actual_columns\n extra_table_study_id = found_extra_table.split('_')[2]\n\n statement = \"\"\"\n update study_actual_columns \n set table_name = '\"{0}\"' \n where study_id = {1} \n and table_name = '\"{2}\"'\n \"\"\".format(common_extra_table_name, extra_table_study_id, found_extra_table)\n if debug:\n req.write('<pre>' + statement + '</pre><br/>')\n con.cursor().execute(statement)\n statement = 'commit'\n if debug:\n req.write('<pre>' + statement + '</pre><br/>')\n con.cursor().execute(statement)", "def track2_equivalent(self, track2_equivalent):\n\n self._track2_equivalent = track2_equivalent", "def get_or_create_term(self, term):\n term_id = self.get_term_id(term)\n if term_id is not None:\n return term_id\n\n term_id = self.db.execute('INSERT INTO terms(term) VALUES (?) ', (term,)).lastrowid\n postings_table = 'term_%d' % term_id\n\n info(\"creating table for term \" + term)\n\n self.db.execute('''CREATE TABLE %s (document_id INT NOT NULL,\n hits INT NOT NULL,\n FOREIGN KEY (document_id) REFERENCES documents(rowid));''' % postings_table)\n\n return term_id", "def _notifications_imei_new_tblname(self):\n return 'listgen_temp_{0}_new_notifications_imeis'.format(self._run_id)", "def create_tag_id():\n return uuid.uuid1().int", "def makeFootnoteRefId(self, id):\n if self.getConfig(\"UNIQUE_IDS\"):\n return 'fnref%s%d-%s' % (self.sep, self.unique_prefix, id)\n else:\n return 'fnref%s%s' % (self.sep, id)", "def get_lvl_index2id(self, ion):\n\n q_ion_lvls = self.session.query(Level.level_id.label(\"id\"),\n Level.level_index.label(\"index\")). 
\\\n filter(and_(Level.ion == ion,\n Level.data_source == self.data_source))\n\n lvl_index2id = list()\n for id, index in q_ion_lvls:\n lvl_index2id.append((index, id))\n\n lvl_index2id_dtype = [(\"index\", np.int), (\"id\", np.int)]\n lvl_index2id = np.array(lvl_index2id, dtype=lvl_index2id_dtype)\n lvl_index2id = pd.DataFrame.from_records(lvl_index2id, index=\"index\")\n\n return lvl_index2id", "def get_unique_id(self) -> str:\n if not self._monitored_value:\n return super().get_unique_id()\n else:\n return f\"extalife-{str(self.channel_data.get('serial'))}-{self.channel_id}-{self._monitored_value}\"", "def generate_identifier(sender, instance, **kwargs):\n identifier = Concept.create_identifier(instance.query)\n qs = Concept.objects.filter(identifier=identifier, lang=instance.lang)\n if instance.pk:\n qs = qs.exclude(pk=instance.pk)\n if qs.count() > 0:\n raise ValueError(\"Concept identifier conflict\")\n instance.identifier = identifier", "def unique_id(self):\n return f\"octopus_energy_gas_{self._serial_number}_{self._mprn}_previous_accumulative_cost_override_tariff\"", "def onchange_product_id(self):\n if not self.product_id:\n self.bom_id = False\n elif not self.bom_id or self.bom_id.product_tmpl_id != self.product_tmpl_id or (self.bom_id.product_id and self.bom_id.product_id != self.product_id):\n bom = self.env['mrp.bom']._bom_find(product=self.product_id, picking_type=self.picking_type_id, company_id=self.company_id.id, bom_type='normal')\n if bom:\n self.bom_id = bom.id\n self.product_qty = self.bom_id.product_qty\n self.product_uom_id = self.bom_id.product_uom_id.id\n else:\n self.bom_id = False\n self.product_uom_id = self.product_id.uom_id.id", "def before_insert(mapper, conn, target):\n\n #from identity import ObjectNumber\n #assert not target.fk_vid or not ObjectNumber.parse(target.fk_vid).revision\n\n if target.sequence_id is None:\n # In case this happens in multi-process mode\n conn.execute(\"BEGIN IMMEDIATE\")\n sql = text(\n '''SELECT max(c_sequence_id)+1 FROM columns WHERE c_t_id = :tid''')\n\n max_id, = conn.execute(sql, tid=target.t_id).fetchone()\n\n if not max_id:\n max_id = 1\n\n target.sequence_id = max_id\n\n Column.before_update(mapper, conn, target)", "def gsi1_pk(self):\n return \"data_type#book\"", "def _internal2document_id(value):\n return 2*value + 1", "def get_product_2(conn, product_id: int) -> str:\n with conn.cursor() as cursor:\n cursor.execute(\"\"\"select id, name, price, image, category_id from products\n where id = {0}\"\"\".format(product_id))\n try:\n return cursor.fetchone()\n except TypeError:\n raise errors.StoreError", "def _notifications_triplets_new_tblname(self):\n return 'listgen_temp_{0}_new_notifications_triplets'.format(self._run_id)", "def unique_id(self):\n return f\"octopus_energy_intelligent_charge_limit\"", "def _add_W2(self, w2, row):\n if self.pretty_p:\n w2 = pretty_p(w2)\n row['W2'] = w2\n return True", "def create_tag(self, fixer_names):\n key = tuple(sorted(fixer_names))\n return 'rt2to3-' + hashlib.md5(str(key).encode('utf-8')).hexdigest()[:6]", "def data_type_id(self, value: str):\n self._data_type_id = value", "def setchi2(self,name,chi2):\n if (name not in KFNode.names):\n warning(' state not in node ',name)\n self.chi2[name]=chi2\n return", "def unique_id(self):\r\n return f\"{DOMAIN}_{self.charge_point_id}_{self.connector_id}\"", "def get_offense_team_id(self):\n pass", "def custom_data_2(self, custom_data_2):\n # type: (string_types) -> None\n\n if custom_data_2 is not None:\n if not isinstance(custom_data_2, 
string_types):\n raise TypeError(\"Invalid type for `custom_data_2`, type has to be `string_types`\")\n\n self._custom_data_2 = custom_data_2", "def get_alternative_id(self):\n raise NotImplementedError()", "def get_type_id(self):\n\n raise Exception(\"Not implemented!\"+self.__class__)", "def __create_categorical_col(self, df, columns):\n\n # Temporarily remove tuple such that columns can be checked\n for n, item in enumerate(columns):\n if isinstance(item, tuple):\n name, _ = item\n temporary_columns = columns.copy()\n temporary_columns[n] = name\n\n # Use appropriate var in validation\n if 'temporary_columns' in locals():\n column_set = temporary_columns\n else:\n column_set = columns\n\n\n for n, column in enumerate(columns):\n if type(column) == tuple:\n cat_col, new_col = column\n df[new_col] = df[cat_col]\n column = cat_col\n df[column], uniques = pd.factorize(df[column])\n return df", "def acumula_t2(self, t2, rodada):\n self.t2[rodada].append(t2)", "def setup_acc2gene_id(self, gene2acc_file, acc_tax_id2tf_acc):\n\t\tsys.stderr.write(\"Setting up acc2gene_id...\")\n\t\tp_gb_acc_version = re.compile(r'(^\\w+)\\.\\d+')\n\t\ttf_acc2gene_id_bridge_acc_ls = {}\n\t\treader = csv.reader(open(gene2acc_file, 'r'), delimiter ='\\t')\n\t\tfor row in reader:\n\t\t\ttax_id, gene_id, status, rna_nuc_acc_ver, rna_nuc_gi, prot_acc_ver,\\\n\t\t\tprot_gi, genomic_nuc_acc_ver, genomic_nuc_gi, start, end, orientation = row\n\t\t\ttax_id = int(tax_id)\t#integer\n\t\t\tgene_id = int(gene_id)\t#integer\n\t\t\tacc_ver_to_check = [rna_nuc_acc_ver, rna_nuc_gi, prot_acc_ver,\\\n\t\t\t\tprot_gi, genomic_nuc_acc_ver, genomic_nuc_gi]\n\t\t\tfor acc_ver in acc_ver_to_check:\n\t\t\t\tif acc_ver!='-':\n\t\t\t\t\tkey = (acc_ver.upper(), tax_id)\n\t\t\t\t\tif key in acc_tax_id2tf_acc:\n\t\t\t\t\t\ttf_acc= acc_tax_id2tf_acc[key]\n\t\t\t\t\t\tif tf_acc not in tf_acc2gene_id_bridge_acc_ls:\n\t\t\t\t\t\t\ttf_acc2gene_id_bridge_acc_ls[tf_acc] = []\n\t\t\t\t\t\ttf_acc2gene_id_bridge_acc_ls[tf_acc].append((gene_id, key[0]))\n\t\t\t\t\tp_gb_acc_version_result = p_gb_acc_version.search(acc_ver)\n\t\t\t\t\tif p_gb_acc_version_result:\n\t\t\t\t\t\taccession = p_gb_acc_version_result.groups()[0]\n\t\t\t\t\t\tkey = (accession.upper(), tax_id)\n\t\t\t\t\t\tif key in acc_tax_id2tf_acc:\n\t\t\t\t\t\t\ttf_acc= acc_tax_id2tf_acc[key]\n\t\t\t\t\t\t\tif tf_acc not in tf_acc2gene_id_bridge_acc_ls:\n\t\t\t\t\t\t\t\ttf_acc2gene_id_bridge_acc_ls[tf_acc] = []\n\t\t\t\t\t\t\ttf_acc2gene_id_bridge_acc_ls[tf_acc].append((gene_id, key[0]))\n\t\tdel reader\n\t\tsys.stderr.write(\"Done\\n\")\n\t\treturn tf_acc2gene_id_bridge_acc_ls", "def tc_advice_id(self, dpid, tc_type, tc_subtype, src_mac, detail1):\n switch = self.switches[dpid]\n #*** TBD, deal with context:\n context = self.context_default\n #*** Look up source mac to get a port number:\n port_number = switch.mactable.mac2port(src_mac, context)\n\n #*** TBD, handle return value for port not found...\n\n if tc_subtype == 'lldp':\n #*** Check to see if we already know this identity:\n db_data = {'id_type': tc_subtype,\n 'src_mac': src_mac, 'node_name': detail1}\n db_result = self.dbidnode.find_one(db_data)\n if not db_result:\n #*** LLDP identity not in database so add it:\n db_data = {'last_seen': time.time(), 'id_type': tc_subtype,\n 'src_mac': src_mac, 'node_name': detail1}\n db_result = self.dbidnode.insert_one(db_data)\n self.logger.info(\"Created new ID Node record id_type=%s \"\n \"node_name=%s\", tc_subtype, detail1)\n #*** Check to see if we need to add a flow to switch:\n 
switch.flowtables.add_fe_tc_id(tc_subtype, detail1, src_mac,\n self.main_policy.optimised_rules.get_rules())\n else:\n #*** Just update the last_seen field:\n db_result = self.dbdpae.update_one(\n {'id_type': tc_subtype,\n 'src_mac': src_mac, 'node_name': detail1},\n {\n '$set': {\n 'last_seen': time.time()\n },\n }\n )\n self.logger.debug(\"Last seen updated for %s of %s ID Node \"\n \"record(s) id_type=%s node_name=%s\",\n db_result.modified_count,\n db_result.matched_count,\n tc_subtype, detail1)\n else:\n self.logger.info(\"Didn't action tc_subtype=%s\", tc_subtype)", "def custom_data_2(self):\n # type: () -> string_types\n return self._custom_data_2", "def generate_n2(self):\r\n obj = Random\r\n self.n_2 = int(obj.time_split[0])+int(obj.time_split[1])+int(obj.time_split[2])\r\n self.n_2decimal = int(abs(pi)*math.pow(10, self.n_2)) % 10", "def create_num_id(df):\n df['id'] = df['patient_id'].apply(lambda x:int(x.split('_')[1]))\n return df", "def _generate_flavorid(self):\n nonexistent_flavor = 2700\n flavor_ids = [value[\"id\"] for key, value in\n instance_types.get_all_types().iteritems()]\n while nonexistent_flavor in flavor_ids:\n nonexistent_flavor += 1\n else:\n return nonexistent_flavor", "def test_create_data_type_id(self):\n pt = PrepTemplate.create(self.metadata, self.new_raw_data,\n self.test_study, self.data_type_id)\n # The returned object has the correct id\n self.assertEqual(pt.id, 2)\n\n # The row in the prep template table have been created\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.prep_template WHERE prep_template_id=2\")\n # prep_template_id, data_type_id, raw_data_id, preprocessing_status,\n # investigation_type\n self.assertEqual(obs, [[2, 2, 5, 'not_preprocessed', None]])\n\n # The relevant rows to common_prep_info have been added.\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.common_prep_info WHERE prep_template_id=2\")\n # prep_template_id, sample_id, center_name,\n # center_project_name, emp_status_id\n exp = [[2, '1.SKB8.640193', 'ANL', 'Test Project', 1],\n [2, '1.SKD8.640184', 'ANL', 'Test Project', 1],\n [2, '1.SKB7.640196', 'ANL', 'Test Project', 1]]\n self.assertEqual(sorted(obs), sorted(exp))\n\n # The relevant rows have been added to the prep_columns table\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.prep_columns WHERE prep_template_id=2\")\n # prep_template_id, column_name, column_type\n exp = [[2, 'str_column', 'varchar'],\n [2, 'ebi_submission_accession', 'varchar'],\n [2, 'run_prefix', 'varchar'],\n [2, 'barcodesequence', 'varchar'],\n [2, 'linkerprimersequence', 'varchar'],\n [2, 'platform', 'varchar'],\n [2, 'experiment_design_description', 'varchar'],\n [2, 'library_construction_protocol', 'varchar']]\n self.assertEqual(sorted(obs), sorted(exp))\n\n # The new table exists\n self.assertTrue(exists_table(\"prep_2\", self.conn_handler))\n\n # The new table hosts the correct values\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.prep_2\")\n # sample_id, str_column, ebi_submission_accession,\n # run_prefix, barcodesequence, linkerprimersequence\n exp = [['1.SKB7.640196', 'Value for sample 3', 'ILLUMINA',\n 's_G1_L002_sequences', 'CCTCTGAGAGCT', None,\n 'GTGCCAGCMGCCGCGGTAA', 'BBBB', 'AAAA'],\n ['1.SKB8.640193', 'Value for sample 1', 'ILLUMINA',\n 's_G1_L001_sequences', 'GTCCGCAAGTTA', None,\n 'GTGCCAGCMGCCGCGGTAA', 'BBBB', 'AAAA'],\n ['1.SKD8.640184', 'Value for sample 2', 'ILLUMINA',\n 's_G1_L001_sequences', 'CGTAGAGCTCTC', None,\n 'GTGCCAGCMGCCGCGGTAA', 
'BBBB', 'AAAA']]\n self.assertEqual(sorted(obs), sorted(exp))", "def find_ID(table):\n if field_exists(table, \"orig_ID\"):\n return \"orig_ID\"\n elif field_exists(table, \"ORIG_FID\"):\n return \"ORIG_FID\"\n else:\n return arcpy.Describe(table).OIDFieldName", "def getId(self): #$NON-NLS-1$\r", "def get_extid(self, cr, uid, openerp_id, referential_id, context=None):\n if isinstance(openerp_id, list):\n openerp_id = openerp_id[0]\n model_data_ids = self.pool.get('ir.model.data').search(cr, uid, [('model', '=', self._name), ('res_id', '=', openerp_id), ('referential_id', '=', referential_id)])\n if model_data_ids and len(model_data_ids) > 0:\n prefixed_id = self.pool.get('ir.model.data').read(cr, uid, model_data_ids[0], ['name'])['name']\n ext_id = self.id_from_prefixed_id(prefixed_id)\n return ext_id\n return False", "def _CreateRecordId(self):\n self._record_count += 1\n return '%s_%s' % (self._unique_id, self._record_count)" ]
[ "0.50282025", "0.50220966", "0.5005243", "0.49620757", "0.49583018", "0.4915321", "0.4880197", "0.4807147", "0.4807147", "0.4807147", "0.4807147", "0.47635424", "0.47528297", "0.46979147", "0.4659034", "0.4647828", "0.45996457", "0.45982006", "0.4572538", "0.456453", "0.4558992", "0.45358315", "0.45353693", "0.45138946", "0.45115072", "0.4509936", "0.4496806", "0.44886637", "0.44786626", "0.44775993", "0.44448185", "0.44380718", "0.44380718", "0.43921542", "0.43857777", "0.43836507", "0.43823576", "0.4378395", "0.4365863", "0.43516034", "0.43476528", "0.43453452", "0.4342531", "0.43367985", "0.43348253", "0.43305406", "0.43278134", "0.43206066", "0.43166178", "0.4315775", "0.430756", "0.43070456", "0.42774674", "0.42715064", "0.425164", "0.42500982", "0.42395437", "0.4238831", "0.42333135", "0.42322797", "0.42146385", "0.42138755", "0.4211259", "0.42099372", "0.4206865", "0.41983488", "0.41922873", "0.41905743", "0.4185256", "0.41815162", "0.41751212", "0.4173172", "0.41690636", "0.41667804", "0.41635373", "0.41599578", "0.41556668", "0.41526115", "0.41468912", "0.41468278", "0.41461107", "0.41455796", "0.4140818", "0.4138845", "0.41362622", "0.41249195", "0.41205508", "0.41111225", "0.41105804", "0.41038188", "0.41034502", "0.4101991", "0.40967694", "0.40965623", "0.40942338", "0.4090624", "0.40903643", "0.40896615", "0.40894514", "0.40891665" ]
0.7335801
0
select data with relative parallax error less than 'cutoff' and add absolute magnitude columns for plotting. If catalog is not None, the cutoff on B-V will not be applied (this ensures the initial variable-star DataFrame is not constrained in magnitudes)
def data_process(df_toprocess=None, cutoff=0.2, bv_cutoff=0.15, catalog=None): print "Selecting objects.." df_toprocess['sigma_pi/pi'] = df_toprocess.loc[:, 'parallax_error'].astype(float) / df_toprocess.loc[:, 'parallax']\ .astype(float) print "..Done\nCutoff at relative parallax error of %s\n----------" % cutoff # only take objects with relative parallax error < cutoff df_toprocess = df_toprocess.loc[df_toprocess.loc[:, 'parallax'] / df_toprocess.loc[:, 'parallax_error'] > 1. / cutoff] print catalog if catalog is None: print "Replacing whitespace with nan" df_toprocess = df_toprocess.replace(' ', np.nan) # some cells are ' ' instead of nan print "Converting BTmag and VTmag to floats.." df_toprocess.BTmag = df_toprocess.BTmag.astype(float) df_toprocess.VTmag = df_toprocess.VTmag.astype(float) # Some values are NaN: print "Removing objects with missing BT or VT measurements.." df_toprocess = df_toprocess[df_toprocess.BTmag.notnull()] df_toprocess = df_toprocess[df_toprocess.VTmag.notnull()] print "Computing B-V and M_V.." df_toprocess['B_V'] = df_toprocess.BTmag - df_toprocess.VTmag df_toprocess['M_V'] = df_toprocess.VTmag - 5. * (np.log10(1000. / df_toprocess.parallax) - 1.) print "Converting sigma BT and sigma VT to float.." df_toprocess.e_BTmag = df_toprocess.e_BTmag.astype(float) df_toprocess.e_VTmag = df_toprocess.e_VTmag.astype(float) print "Computing sigma B-V.." df_toprocess['e_B_V'] = np.sqrt(df_toprocess.e_BTmag.pow(2)+df_toprocess.e_VTmag.pow(2)) print "Applying selection on sigma BT-VT < %s.." % bv_cutoff df_toprocess = df_toprocess[df_toprocess.e_B_V < bv_cutoff] if catalog == 'xmatch_TGAS_Simbad.csv': df_toprocess = df_toprocess.loc[(df_toprocess['J'] < 11.) & (df_toprocess['K'] < 11.)] print "min in J: %s" % np.max(df_toprocess['J']) print "max in J: %s" % np.min(df_toprocess['J']) df_toprocess.insert(10, 'B_V', df_toprocess.loc[:, 'B'] - df_toprocess.loc[:, 'V']) df_toprocess.insert(10, 'J_K', df_toprocess.loc[:, 'J'] - df_toprocess.loc[:, 'K']) df_toprocess.insert(10, 'M_G', df_toprocess.loc[:, 'phot_g_mean_mag'] - 5. * (np.log10(1000. / df_toprocess.loc[:, 'parallax']) - 1.)) df_toprocess.insert(10, 'M_J', df_toprocess.loc[:, 'J'] - 5. * (np.log10(1000. / df_toprocess.loc[:, 'parallax']) - 1.)) df_toprocess.insert(10, 'M_K', df_toprocess.loc[:, 'K'] - 5. * (np.log10(1000. / df_toprocess.loc[:, 'parallax']) - 1.)) if catalog == 'xmatch_TGAS_VSX.csv': df_toprocess = df_toprocess[df_toprocess.V == 0] print "%s objects selected" % len(df_toprocess) print "..Done\n----------" return df_toprocess
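A minimal usage sketch for the data_process function above (editorial, not part of the record): it assumes a pandas DataFrame built from a Tycho/TGAS cross-match that carries parallax, parallax_error, BTmag, VTmag, e_BTmag and e_VTmag columns; the file name below is illustrative only.

import pandas as pd

# Illustrative cross-match table with the columns assumed above.
df = pd.read_csv("xmatch_TGAS_Tycho2.csv")

# Keep stars with relative parallax error < 20% and sigma(BT-VT) < 0.15 mag;
# this adds the B_V, M_V and e_B_V columns used for plotting.
df_hr = data_process(df_toprocess=df, cutoff=0.2, bv_cutoff=0.15, catalog=None)
print(df_hr[["B_V", "M_V", "e_B_V"]].head())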
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def localize_red_clump(star_catalog,close_cat_idx,log):\n\n def select_within_range(mags, colours, mag_min, mag_max, col_min, col_max):\n \"\"\"Function to identify the set of array indices with values\n between the range indicated\"\"\"\n\n idx1 = np.where(colours >= col_min)[0]\n idx2 = np.where(colours <= col_max)[0]\n idx3 = np.where(mags >= mag_min)[0]\n idx4 = np.where(mags <= mag_max)[0]\n idx = set(idx1).intersection(set(idx2))\n idx = idx.intersection(set(idx3))\n idx = list(idx.intersection(set(idx4)))\n\n return idx\n\n RC = photometry_classes.Star()\n\n inst_i = star_catalog['cal_ref_mag_ip'][close_cat_idx]\n inst_r = star_catalog['cal_ref_mag_rp'][close_cat_idx]\n inst_g = star_catalog['cal_ref_mag_gp'][close_cat_idx]\n cal_i = star_catalog['imag'][close_cat_idx]\n cal_r = star_catalog['rmag'][close_cat_idx]\n cal_g = star_catalog['gmag'][close_cat_idx]\n inst_ri = inst_r - inst_i # Catalogue column order is red -> blue\n inst_gi = inst_g - inst_i\n inst_gr = inst_g - inst_r\n cal_ri = cal_r - cal_i\n cal_gi = cal_g - cal_i\n cal_gr = cal_g - cal_r\n\n log.info('\\n')\n log.info('Localizing the Red Clump')\n log.info('Median (r-i), i: '+str(np.median(inst_ri))+', '+str(np.median(inst_i)))\n log.info('Median (g-i), i: '+str(np.median(inst_gi))+', '+str(np.median(inst_i)))\n log.info('Median (g-r), g: '+str(np.median(inst_gr))+', '+str(np.median(inst_g)))\n\n ri_min = 0.8\n ri_max = 1.2\n i_min = 15.5\n i_max = 16.5\n\n r_min = 16.2\n r_max = 17.5\n\n gi_min = 2.5\n gi_max = 3.5\n\n gr_min = 1.5\n gr_max = 2.2\n g_min = 17.8\n g_max = 19.5\n\n log.info('Selected Red Clump giants between:')\n log.info('i = '+str(i_min)+' to '+str(i_max))\n log.info('r = '+str(r_min)+' to '+str(r_max))\n log.info('(r-i) = '+str(ri_min)+' to '+str(ri_max))\n log.info('g = '+str(g_min)+' to '+str(g_max))\n log.info('(g-r) = '+str(gr_min)+' to '+str(gr_max))\n log.info('(g-i) = '+str(gi_min)+' to '+str(gi_max))\n\n idx = select_within_range(inst_i, inst_ri, i_min, i_max, ri_min, ri_max)\n\n (RC.ri, RC.sig_ri, RC.i, RC.sig_i) = calc_distribution_centroid_and_spread_2d(inst_ri[idx], inst_i[idx], use_iqr=True)\n\n idx = select_within_range(inst_r, inst_ri, r_min, r_max, ri_min, ri_max)\n\n (RC.r, RC.sig_r) = calc_distribution_centre_and_spread(inst_r[idx], use_iqr=True)\n\n idx = select_within_range(inst_g, inst_gr, g_min, g_max, gr_min, gr_max)\n\n (RC.gr, RC.sig_gr, RC.g, RC.sig_g) = calc_distribution_centroid_and_spread_2d(inst_gr[idx], inst_g[idx], use_iqr=True)\n\n idx = select_within_range(inst_g, inst_gi, g_min, g_max, gi_min, gi_max)\n\n (RC.gi, RC.sig_gi, RC.g, RC.sig_g) = calc_distribution_centroid_and_spread_2d(inst_gi[idx], inst_g[idx], use_iqr=True)\n\n log.info('\\n')\n log.info('Centroid of Red Clump Stars at:')\n log.info(RC.summary(show_mags=True))\n log.info(RC.summary(show_mags=False,show_colours=True))\n\n RC.transform_to_JohnsonCousins()\n\n log.info(RC.summary(show_mags=False,johnsons=True))\n\n return RC", "def do_lowzcut_check(cat, subdir):\n lowzcut = cat.lowzcut\n cat.lowzcut = True\n cat.plot_omega_dla(zmax=5,label=\"Cutting\")\n cat.lowzcut = False\n cat.plot_omega_dla(zmax=5,label=\"Not cutting\")\n plt.legend(loc=0)\n save_figure(path.join(subdir,\"omega_gp_lowz\"))\n plt.clf()\n\n cat.lowzcut = True\n cat.plot_line_density(zmax=5,label=\"Cutting\")\n cat.lowzcut = False\n cat.plot_line_density(zmax=5,label=\"Not cutting\")\n plt.ylim(0,0.12)\n plt.legend(loc=0)\n save_figure(path.join(subdir,\"dndx_gp_lowz\"))\n plt.clf()\n cat.lowzcut = lowzcut", "def 
FE_find_and_cap_outliers(df, features, drop=False, verbose=False):\r\n df = df.copy(deep=True)\r\n outlier_indices = []\r\n idcol = 'idcol'\r\n df[idcol] = range(len(df))\r\n if isinstance(features, str):\r\n features = [features]\r\n # iterate over features(columns)\r\n for col in features:\r\n # Determine a list of indices of outliers for feature col\r\n thresh = outlier_determine_threshold(df, col)\r\n mask_outliers = is_outlier(df[col], thresh=thresh).astype(int)\r\n dfout_index = df.iloc[np.where(mask_outliers>0)].index\r\n\r\n df['anomaly1'] = 0\r\n df.loc[dfout_index ,'anomaly1'] = 1\r\n\r\n ### this is how the column looks now before capping outliers\r\n if verbose:\r\n fig, (ax1,ax2) = plt.subplots(1,2,figsize=(12,5))\r\n colors = {0:'blue', 1:'red'}\r\n ax1.scatter(df[idcol], df[col], c=df[\"anomaly1\"].apply(lambda x: colors[x]))\r\n ax1.set_xlabel('Row ID')\r\n ax1.set_ylabel('Target values')\r\n ax1.set_title('%s before capping outliers' %col)\r\n\r\n capped_value = df.loc[dfout_index, col].min() ## this is the value we cap it against\r\n df.loc[dfout_index, col] = capped_value ## maximum values are now capped\r\n ### you are now good to go - you can show how they are capped using before and after pics\r\n if verbose:\r\n colors = {0:'blue', 1:'red'}\r\n ax2.scatter(df[idcol], df[col], c=df[\"anomaly1\"].apply(lambda x: colors[x]))\r\n ax2.set_xlabel('Row ID')\r\n ax2.set_ylabel('Target values')\r\n ax2.set_title('%s after capping outliers' %col)\r\n\r\n # Let's save the list of outliers and see if there are some with outliers in multiple columns\r\n outlier_indices.extend(dfout_index)\r\n\r\n # select certain observations containing more than one outlier in 2 columns or more. We can drop them!\r\n outlier_indices = Counter(outlier_indices)\r\n multiple_outliers = list( k for k, v in outlier_indices.items() if v > 3 )\r\n ### now drop these rows altogether ####\r\n df.drop([idcol,'anomaly1'], axis=1, inplace=True)\r\n if drop:\r\n print('Shape of dataframe before outliers being dropped: %s' %(df.shape,))\r\n number_of_rows = df.shape[0]\r\n df.drop(multiple_outliers, axis=0, inplace=True)\r\n print('Shape of dataframe after outliers being dropped: %s' %(df.shape,))\r\n print('\\nNumber_of_rows with multiple outliers in more than 3 columns which were dropped = %d' %(number_of_rows-df.shape[0]))\r\n return df", "def analyse_colour_mag_diagrams(params,star_catalog,catalog_header,\n target,source,blend,RC,\n det_idx,cat_idx,close_cat_idx,log):\n\n tol = 2.0\n\n filters = { 'ip': 'SDSS-i', 'rp': 'SDSS-r', 'gp': 'SDSS-g' }\n\n inst_i = star_catalog['cal_ref_mag_ip'][det_idx]\n inst_r = star_catalog['cal_ref_mag_rp'][det_idx]\n inst_g = star_catalog['cal_ref_mag_gp'][det_idx]\n cal_i = star_catalog['imag'][cat_idx]\n cal_r = star_catalog['rmag'][cat_idx]\n cal_g = star_catalog['gmag'][cat_idx]\n inst_ri = inst_r - inst_i # Catalogue column order is red -> blue\n inst_gr = inst_g - inst_r\n inst_gi = inst_g - inst_i\n cal_ri = cal_r - cal_i\n cal_gr = cal_g - cal_r\n cal_gi = cal_g - cal_i\n\n linst_i = star_catalog['cal_ref_mag_ip'][close_cat_idx]\n linst_r = star_catalog['cal_ref_mag_rp'][close_cat_idx]\n linst_g = star_catalog['cal_ref_mag_gp'][close_cat_idx]\n lcal_i = star_catalog['imag'][close_cat_idx]\n lcal_r = star_catalog['rmag'][close_cat_idx]\n lcal_g = star_catalog['gmag'][close_cat_idx]\n linst_ri = linst_r - linst_i # Catalogue column order is red -> blue\n linst_gr = linst_g - linst_r\n linst_gi = linst_g - linst_i\n lcal_ri = lcal_r - lcal_i\n lcal_gr = lcal_g - 
lcal_r\n lcal_gi = lcal_g - lcal_i\n\n plot_colour_mag_diagram(params,inst_i, inst_ri, linst_i, linst_ri, target,\n source, blend, RC, 'r', 'i', 'i', tol, log)\n\n plot_colour_mag_diagram(params,inst_r, inst_ri, linst_r, linst_ri, target,\n source, blend, RC, 'r', 'i', 'r', tol, log)\n\n plot_colour_mag_diagram(params,inst_g, inst_gr, linst_g, linst_gr, target,\n source, blend, RC, 'g', 'r', 'g', tol, log)\n\n plot_colour_mag_diagram(params,inst_g, inst_gi, linst_g, linst_gi, target,\n source, blend, RC, 'g', 'i', 'g', tol, log)", "def get_ptf10iuv(colorplt = False):\n z = 0.0251485\n ebv = 0.0371 # SFD\n D = cosmo.luminosity_distance([z])[0].value * 1e+6 # in pc\n dis_mod = 5*np.log10(D / 10)\n print (\"adopt g band t_max estimated by myself\")\n t_max = 55357.387 \n tb = pd.read_csv('../data/otherSN/Kasliwal2012/PTF10iuv', sep='\\t')\n tb = tb.drop(columns=[\"Unnamed: 4\"])\n tb = tb.rename(columns={'Filter' : 'filter',\n 'MJD': 'mjd'})\n tb = tb[~np.array([x[0]=='>' for x in tb['Mag'].values])]\n tb['mag'] = np.array([float(x.split(\" +or-\")[0]) for x in tb['Mag'].values])\n tb['emag'] = np.array([float(x.split(\" +or-\")[1]) for x in tb['Mag'].values])\n tb = tb.drop(columns=[\"Mag\"])\n \n ixg = tb['filter'].values == \"g\"\n ixr = tb['filter'].values == \"r\"\n ixi = tb['filter'].values == \"i\"\n ixz = tb['filter'].values == \"z\"\n ixB = tb['filter'].values == \"B\"\n tb['wave'] = np.zeros(len(tb))\n tb['wave'].values[ixB] = 4359\n tb['wave'].values[ixg] = 4814\n tb['wave'].values[ixr] = 6422\n tb['wave'].values[ixi] = 7883\n tb['wave'].values[ixz] = 9670\n \n tb['mag0'] = tb['mag'] - extinction.ccm89(tb['wave'].values, 3.1*ebv, 3.1)\n tb['mag0_abs'] = tb['mag0'] - dis_mod\n tb['tmax_rf'] = (tb['mjd'] - t_max) / (1+z)\n tb = tb.sort_values(by = \"mjd\")\n if colorplt==False:\n return tb\n \n else:\n tb = add_datecol(tb)\n ix = np.in1d(tb[\"filter\"].values, np.array(['g', 'r', 'i']))\n tb = tb[ix]\n tb = tb[tb.mjd > 55352.5]\n tb = tb[tb.mjd < 55593.5]\n \n dates = get_date_span(tb)\n datesave = []\n for i in range(len(dates)):\n x = dates[i]\n ix = tb[\"date\"].values == x\n tbsub = tb[ix]\n if len(tbsub)!=0:\n flts = tbsub['filter'].values\n if \"r\" in flts and np.sum(np.unique(flts))!=1:\n datesave.append(x)\n datesave = np.array(datesave)\n \n mcolor = []\n mcolor_unc = []\n mjds = []\n colorname = []\n for i in range(len(datesave)):\n x = datesave[i]\n ix = tb[\"date\"].values == x\n tbsub = tb[ix]\n gtb = tbsub[tbsub[\"filter\"].values==\"g\"]\n rtb = tbsub[tbsub[\"filter\"].values==\"r\"]\n itb = tbsub[tbsub[\"filter\"].values==\"i\"]\n if len(gtb)!=0:\n gmjds = gtb[\"mjd\"].values\n gmags = gtb[\"mag0\"].values\n gemags = gtb[\"emag\"].values\n gwtgs = 1/gemags**2\n gmag = np.sum(gmags * gwtgs) / np.sum(gwtgs)\n gmjd = np.sum(gmjds * gwtgs) / np.sum(gwtgs)\n gemag = 1/ np.sqrt(np.sum(gwtgs))\n if len(rtb)!=0:\n rmjds = rtb[\"mjd\"].values\n rmags = rtb[\"mag0\"].values\n remags = rtb[\"emag\"].values\n rwtgs = 1/remags**2\n rmag = np.sum(rmags * rwtgs) / np.sum(rwtgs)\n rmjd = np.sum(rmjds * rwtgs) / np.sum(rwtgs)\n remag = 1/ np.sqrt(np.sum(rwtgs))\n if len(itb)!=0:\n imjds = itb[\"mjd\"].values\n imags = itb[\"mag0\"].values\n iemags = itb[\"emag\"].values\n iwtgs = 1/iemags**2\n imag = np.sum(imags * iwtgs) / np.sum(iwtgs)\n imjd = np.sum(imjds * iwtgs) / np.sum(iwtgs)\n iemag = 1/ np.sqrt(np.sum(iwtgs))\n if len(gtb)!=0 and len(rtb)!=0:\n mcolor.append(gmag - rmag)\n mjds.append( 0.5 * (gmjd + rmjd) )\n mcolor_unc.append( np.sqrt(gemag**2 + remag**2) )\n 
colorname.append(\"gmr\")\n if len(rtb)!=0 and len(itb)!=0:\n mcolor.append(rmag - imag)\n mjds.append( 0.5 * (rmjd + imjd) )\n mcolor_unc.append( np.sqrt(remag**2 + iemag**2) )\n colorname.append(\"rmi\")\n \n ctb = Table(data = [mjds, mcolor, mcolor_unc, colorname],\n names = [\"mjd\", \"c\", \"ec\", \"cname\"])\n \n ctb['tmax_rf'] = (ctb['mjd'] - t_max) / (1+z)\n ctb = ctb.to_pandas()\n return ctb", "def vscat(a,fig=None,ls=None,marker='o',nmin=2,mhmin=-3,density=False,out=None) :\n if fig == None : fig,ax=plots.multi(4,6,hspace=0.001,wspace=0.4,figsize=(12,8))\n else : fig,ax=fig\n tbins=[3000,3500,4000,4500,5500,8000,30000] \n hbins=[8,11,12,13,15]\n try: snr = a['SNREV']\n except: snr=a['SNR']\n j=np.where(snr > 300) [0]\n snr[j] = 300\n for i in range(len(tbins)-1) :\n ax[i,0].text(0.9,0.9,'{:d}<=RV_TEFF<{:d}'.format(tbins[i],tbins[i+1]),ha='right',transform=ax[i,0].transAxes,fontsize=8)\n for j in range(len(hbins)-1) :\n ax[0,j].set_title('{:d}<=H<{:d}'.format(hbins[j],hbins[j+1]))\n gd = np.where((a['RV_TEFF']>=tbins[i]) & (a['RV_TEFF']<tbins[i+1]) &\n (a['H']>=hbins[j]) & (a['H']<hbins[j+1]) &\n (a['NVISITS']>nmin) & (a['RV_FEH']>mhmin) & (a['VSCATTER'] > 0)) [0]\n print(tbins[i],tbins[i+1],hbins[j],hbins[j+1],nmin,len(gd))\n try :\n #plots.plotc(ax[i,2],snr[gd],a['VSCATTER'][gd],a['RV_FEH'][gd],marker=marker,xr=[0,310],yr=[0,1],xt='S/N',yt='VSCATTER')\n ax[i,j].hist(a['VSCATTER'][gd],bins=np.arange(0,1,0.01),ls=ls,histtype='step',color=colors[j],normed=density)\n ax[i,j].set_xlabel('VSCATTER (km/s)')\n ax[i,j].plot([0.1,0.1],ax[i,j].get_ylim())\n #ax[i,1].hist(a['VSCATTER'][gd],bins=np.arange(0,1,0.01),histtype='step',cumulative=True,normed=True,ls=ls,color=colors[j])\n #ax[i,1].set_xlabel('VSCATTER')\n except : pass\n\n if out is not None : \n fig.savefig(out+'.png')\n plt.close()\n\n fig.suptitle('NVISITS>{:d} [M/H]>{:6.2f}'.format(nmin,mhmin))\n return fig,ax", "def completeness(input_sources_cat,detected_sources_cat,output_fname,cat_falsedet,Mag_lim,pix_radius):\n\n #Load catalogues in table\n input_cat=ascii.read('%s.txt' % input_sources_cat)\n detected_cat=ascii.read('%s.cat' % detected_sources_cat) \n #print (input_cat)\n #print (detected_cat)\n print ('Number of sources in stuff catalog below the mag lim of %.2f: %d' % (Mag_lim,len(input_cat[input_cat['MAG']<Mag_lim])))\n print ('Number of sources detected: %d \\n' % len(detected_cat))\n\n #Pixel radius\n pixradius=pix_radius\n\n nb=0\n i=0\n det=np.zeros(len(input_cat))\n x_det_list=np.zeros(len(input_cat))\n y_det_list=np.zeros(len(input_cat))\n mag_sex=np.zeros(len(input_cat))\n\n col_det=Column(name='detected',data=det)\n x_det_coord=Column(name='x_coord_det',data=x_det_list)\n y_det_coord=Column(name='y_coord_det',data=y_det_list)\n mag_det=Column(name='mag_det',data=mag_sex)\n input_cat.add_columns([col_det,x_det_coord,y_det_coord,mag_det])\n\n col_det_sex=Column(name='detected',data=np.zeros(len(detected_cat)))\n detected_cat.add_columns([col_det_sex])\n\n\n for x1, y1 in zip (detected_cat['XPEAK_IMAGE'], detected_cat['YPEAK_IMAGE']):\n #print ('object n. 
{0:d} at position: {1:.2f}-{2:.2f} \\n'.format(nb,x1,y1))\n min_dist=1e40\n j=0\n x_det=-1;y_det=-1;\n for x2,y2,mag in zip(input_cat['COORD_XPIXEL'],input_cat['COORD_YPIXEL'],input_cat['MAG']):\n if detected_cat['detected'][i]==0 and x1 >= int(x2)-pixradius and x1 <= int(x2)+pixradius and y1 >= int(y2)-pixradius and y1 <= int(y2)+pixradius:\n #Test the minimum distance\n dist=(x2-x1)**2+(y2-y1)**2\n if dist < min_dist:# and detected_cat['MAG_AUTO'][i] > 0.9*mag and detected_cat['MAG_AUTO'][i] < 1.1*mag:\n min_dist=dist\n x_det=x1\n y_det=y1\n mag_det=detected_cat['MAG_AUTO'][i]\n index=j\n j+=1\n if min_dist<1e40:\n nb+=1\n detected_cat['detected'][i]=1\n #print ('Matched sources n. {0:d} at position: {1:.2f}-{2:.2f} \\n'.format(i,x_det,y_det))\n input_cat['detected'][index]=1\n input_cat['x_coord_det'][index]=x_det\n input_cat['y_coord_det'][index]=y_det\n input_cat['mag_det'][index]=mag_det\n else:\n detected_cat['detected'][i]=-1\n #print ('Matched sources n. {0:d} at position: {1:.2f}-{2:.2f} \\n'.format(i,x_det,y_det))\n\n i+=1\n\n\n \"\"\"\n for x1,y1 in zip(input_cat['COORD_YPIXEL'],input_cat['COORD_XPIXEL']):\n nb+=1\n #print ('object n. {0:d} at position: {1:.2f}-{2:.2f} \\n'.format(nb,x1,y1))\n min_dist=1e40\n x_det=-1;y_det=-1;\n j=0\n for x2, y2 in zip (detected_cat['XPEAK_IMAGE'], detected_cat['YPEAK_IMAGE']):\n if detected_cat['detected'][j]==0 and x2 >= int(x1)-pixradius and x2 <= int(x1)+pixradius and y2 >= int(y1)-pixradius and y2 <= int(y1)+pixradius:\n #Test the minimum distance\n dist=(x2-x1)**2+(y2-y1)**2\n if dist < min_dist:\n min_dist=dist\n x_det=x2\n y_det=y2\n mag_det=detected_cat['MAG_AUTO'][j]\n index=j\n j+=1\n \n if min_dist<1e40:\n i+=1\n detected_cat['detected'][index]=1\n #print ('Matched sources n. {0:d} at position: {1:.2f}-{2:.2f} \\n'.format(i,x_det,y_det))\n input_cat['detected'][nb-1]=1\n input_cat['x_coord_det'][nb-1]=x_det\n input_cat['y_coord_det'][nb-1]=y_det\n input_cat['mag_det'][nb-1]=mag_det\n \"\"\"\n #Cross match catalog\n print ('Number of sources matched in both catalogs: %d' % nb)\n\n #Write output file\n ascii.write(input_cat,'%s.txt' % output_fname)\n\n\n\n x_false_list=detected_cat['XPEAK_IMAGE'][detected_cat['detected']==-1]\n y_false_list=detected_cat['YPEAK_IMAGE'][detected_cat['detected']==-1]\n mag_sex=detected_cat['MAG_AUTO'][detected_cat['detected']==-1]\n\n #x_det_coord=Column(name='x_coord',data=x_det_list)\n #y_det_coord=Column(name='y_coord',data=y_det_list)\n #mag_det=Column(name='mag_det',data=mag_sex)\n false_det_cat=Table([x_false_list,y_false_list,mag_sex],names=('x_coord','y_coord','mag_det'))\n\n\n #Write false detections in a separated file\n ascii.write(false_det_cat,'%s.txt' % cat_falsedet)", "def identify_and_handel_outliers(self):\n col_list = [] # This will hold the column names created for the administration of the modified z-score test\n values_dropped = []\n cont_cols = self.df.select_dtypes(exclude=[\"category\"]).columns # Gets continous columns \n for col in cont_cols:\n#TODO: Add lines to check column len(), if len() == 0, drop drop column, create cont_cols and cat_cols, and drop from there as well. 
\n df_len = len(self.df)\n top_value = self.df[col].value_counts(normalize=True, ascending=False, dropna=True)\n top_value = top_value.head(1).reset_index().to_numpy()[0] #Gets the top occuring value along with its percentage of occurances\n if top_value[1] > 0.5:#Test if the top occuring value makes up more than 50% of the data\n remaining_col = self.df[col][~self.df[col].isin([top_value[0]])] #Gets all values not within the 50% of single value data\n self.df[f\"{col}_mod_z\"] = phase_one_data_prep.modified_zscore(remaining_col) #Gets modified z-score for remaining items\n self.df[f\"{col}_mod_z\"] = self.df[f\"{col}_mod_z\"].fillna(0) #Fills all missing z-scores\\\n #with zero(because that 50% of data removed would be zero anyways)\n self.df = self.df[self.df[f\"{col}_mod_z\"] < 3] #Removed all values outside 3\n col_list.append(f\"{col}_mod_z\")#Appends name of column to list\n values_dropped.append((col, df_len - len(self.df)))\n else:\n self.df[f\"{col}_mod_z\"] = phase_one_data_prep.modified_zscore(self.df[col]) #Gets modified z-score \n self.df[f\"{col}_mod_z\"] = self.df[f\"{col}_mod_z\"].fillna(0)\n self.df = self.df[self.df[f\"{col}_mod_z\"] < 3] #Removed all values outside 3\n col_list.append(f\"{col}_mod_z\")#Appends name of column to list\n values_dropped.append((col, df_len - len(self.df)))\n self.df.drop(columns = col_list, inplace=True)#Removed columns created to test modified z-score\n self.outliers_dropped = values_dropped", "def filter_data(df, coords, vel=False, vel_threshold=0.15, radiant_threshold=5):\n\tdf = df[(df.range > 110) & (df.range < 130) \n\t\t& (df.th < 70) # Unphysical, given the range cut.\n\t\t& (df.fl == 0)] # Bad interferometry if fl=1\n\n\t# Velocity cut\n\tif vel:\n\t\tdf = df[(df.new_ptn > vel*(1-vel_threshold)) \n\t\t& (df.new_ptn < vel*(1+vel_threshold))]\n\n\t# Identify the shower radiant\n\tif isinstance(coords, str):\n\t\twavelet = read_wavelet(coords)\n\t\tdf = df.apply(get_wavelet_radiant, axis=1, args=(wavelet,))\n\telse:\n\t\tdf['radiant_ll0'] = coords[0]\n\t\tdf['radiant_beta'] = coords[1]\n\n\tdf['separation'] = df.apply(check_radiant, axis=1)\n\tdf.drop(['radiant_ll0', 'radiant_beta'], axis=1, inplace=True)\n\tdf_shower = df[df['separation'] <= radiant_threshold]\n\treturn df_shower", "def plot_HDres_histos_vs_z(\n df,\n nameout,\n threshold_var=\"class0\",\n threshold_list=[0.5, 0.7, 0.9],\n threshold_sign=\">\",\n):\n\n P = df[df[\"class0\"] > 0.5]\n Ias = df[df[\"target\"] == 0]\n\n TP = P[P[\"target\"] == 0]\n FP = P[P[\"target\"] != 0]\n\n sel_TP_dic = {}\n sel_FP_dic = {}\n for t in threshold_list:\n if threshold_sign == \">\":\n sel_TP_dic[t] = TP[TP[threshold_var] > t]\n sel_FP_dic[t] = FP[FP[threshold_var] > t]\n else:\n sel_TP_dic[t] = TP[TP[threshold_var] < t]\n sel_FP_dic[t] = FP[FP[threshold_var] < t]\n\n plt.clf()\n cm = CMAP\n fig = plt.figure(figsize=(14, 14))\n # gs = gridspec.GridSpec(4, 2, width_ratios=[3, 1], height_ratios=[2, 2, 1, 1])\n # gs.update(wspace=0.1, hspace=0.3)\n\n # # gridspec init\n # ax00 = plt.subplot(gs[0, 0]) # Hres Ia\n # ax10 = plt.subplot(gs[1, 0], sharex=ax00) # Hres CC\n # ax20 = plt.subplot(gs[2:, 0], sharex=ax00) # efficiency\n # ax01 = plt.subplot(gs[0, 1], sharey=ax00) # histo Ia\n # ax11 = plt.subplot(gs[1, 1], sharey=ax10) # histo CC\n # ax21 = plt.subplot(gs[2, 1]) # histo x1\n # ax31 = plt.subplot(gs[3, 1]) # histo c\n gs = gridspec.GridSpec(3, 3, height_ratios=[2, 2, 1])\n # gs.update(wspace=0.2, hspace=0.1)\n\n # gridspec init\n ax00 = plt.subplot(gs[0, 0:2]) # Hres Ia\n ax10 = 
plt.subplot(gs[1, 0:2], sharex=ax00) # Hres CC\n ax20 = plt.subplot(gs[2, 0]) # redshift dist\n ax01 = plt.subplot(gs[0, 2], sharey=ax00) # histo Ia\n ax11 = plt.subplot(gs[1, 2], sharey=ax10) # histo CC\n ax21 = plt.subplot(gs[2, 1]) # histo x1\n ax31 = plt.subplot(gs[2, 2]) # histo c\n\n # lines\n ax00.plot([0, 1.2], np.zeros(len([0, 1.2])), \"k:\")\n ax10.plot([0, 1.2], np.zeros(len([0, 1.2])), \"k:\")\n\n mubins = np.arange(-2, 2 + 0.1, 0.1)\n\n # Hres w. histogram\n def HRwhisto(\n df, sel_dic, ax_left, ax_right, threshold_sign, ylabel=\"TP\", visible=False\n ):\n if ylabel == \"TP\":\n sntyp = \"Ia\"\n else:\n sntyp = \"CC\"\n ax_left.scatter(\n df[\"SIM_REDSHIFT_CMB\"],\n df[\"delmu\"],\n c=df[\"class0\"],\n cmap=CMAP,\n vmin=0.5,\n vmax=1,\n s=8,\n )\n ax_left.errorbar(\n df[\"SIM_REDSHIFT_CMB\"],\n df[\"delmu\"],\n yerr=df[\"delmu_err\"],\n color=\"gray\",\n zorder=0,\n fmt=\"none\",\n marker=\"none\",\n )\n\n ax_left.set_ylim(-2, 2)\n ax_left.set_xlim(0, 1.2)\n ax_left.set_ylabel(f\"{ylabel} residual\", fontsize=18)\n ax_left.tick_params(labelsize=14)\n plt.setp(ax_left.get_xticklabels(), visible=visible)\n if visible is True:\n ax_left.set_xlabel(\"simulated redshift\", fontsize=18)\n for t in threshold_list:\n sel = sel_dic[t]\n n_SNe = len(sel)\n ax_right.hist(\n sel[\"delmu\"],\n orientation=\"horizontal\",\n histtype=\"step\",\n color=cm(t),\n bins=mubins,\n density=True,\n label=f\"{n_SNe} {sntyp} {threshold_sign} {t}\",\n lw=2,\n )\n ax_right.legend(loc=\"lower center\", prop={\"size\": 13})\n plt.setp(ax_right.get_yticklabels(), visible=False)\n plt.setp(ax_right.get_xticklabels(), visible=False)\n ax_right.plot(\n [ax_right.get_xlim()[0], ax_right.get_xlim()[1]],\n np.zeros(len([ax_right.get_xlim()[0], ax_right.get_xlim()[1]])),\n \"k:\",\n )\n\n HRwhisto(TP, sel_TP_dic, ax00, ax01, threshold_sign, ylabel=\"TP\", visible=False)\n HRwhisto(FP, sel_FP_dic, ax10, ax11, threshold_sign, ylabel=\"FP\", visible=True)\n\n # z histos\n n, bins_to_use, tmp = ax20.hist(\n Ias[\"SIM_REDSHIFT_CMB\"], histtype=\"step\", color=\"black\", bins=15, lw=3\n )\n\n for t in threshold_list:\n sel_TP = sel_TP_dic[t]\n sel_FP = sel_FP_dic[t]\n ax20.hist(\n sel_TP[\"SIM_REDSHIFT_CMB\"], histtype=\"step\", color=cm(t), bins=bins_to_use\n )\n ax20.hist(\n sel_FP[\"SIM_REDSHIFT_CMB\"],\n histtype=\"step\",\n color=cm(t),\n linestyle=\"--\",\n bins=bins_to_use,\n )\n ax20.set_xlim(0, 1.2)\n ax20.tick_params(labelsize=14)\n ax20.set_xlabel(\"simulated redshift\", fontsize=18)\n\n # hist stretch\n n, bins_to_use, tmp = ax21.hist(Ias[\"x1\"], color=\"black\", histtype=\"step\", lw=3)\n for t in threshold_list:\n sel_TP = sel_TP_dic[t]\n ax21.hist(\n sel_TP[\"x1\"],\n orientation=\"vertical\",\n histtype=\"step\",\n color=cm(t),\n bins=bins_to_use,\n lw=2,\n )\n ax21.set_xlabel(\"x1\", fontsize=18)\n ax21.yaxis.set_label_position(\"right\")\n ax21.set_xlim(-3, 3)\n ax21.tick_params(labelsize=14)\n # color histo\n n, bins_to_use, tmp = ax31.hist(Ias[\"c\"], color=\"black\", histtype=\"step\", lw=3)\n for t in threshold_list:\n sel_TP = sel_TP_dic[t]\n ax31.hist(\n sel_TP[\"c\"],\n orientation=\"vertical\",\n histtype=\"step\",\n color=cm(t),\n bins=bins_to_use,\n lw=2,\n )\n ax31.set_xlabel(\"c\", fontsize=18)\n ax31.set_xlim(-1, 1)\n ax31.tick_params(labelsize=14)\n ax31.yaxis.set_label_position(\"right\")\n\n gs.tight_layout(fig)\n plt.savefig(nameout)\n plt.close()\n del fig", "def mut_filter(df, rate, binary_cutoff=12):\n get_min_count = lambda s: s.value_counts().min() if len(s.unique()) > 1 else 
-1\n df = df[df.apply(get_min_count, axis=1) > binary_cutoff]\n cc = H.screen_feature(rate, rev_kruskal, df)\n\n fc_apply = lambda s: fc(s, rate)\n direction = df.apply(fc_apply, axis=1)\n direction.name = 'direction'\n\n cc = cc.join(direction)\n #cc = cc[cc.direction == False]\n #return cc\n\n df = df.ix[H.true_index((cc.p > .01) | (cc.direction == True))]\n df = df.dropna(axis=1)\n return df", "def apply_cuts(objects):\n #- Check if objects is a filename instead of the actual data\n if isinstance(objects, (str, unicode)):\n objects = io.read_tractor(objects)\n \n #- undo Milky Way extinction\n flux = unextinct_fluxes(objects)\n gflux = flux['GFLUX']\n rflux = flux['RFLUX']\n zflux = flux['ZFLUX']\n w1flux = flux['W1FLUX']\n wflux = flux['WFLUX']\n \n #- DR1 has targets off the edge of the brick; trim to just this brick\n if 'BRICK_PRIMARY' in objects.dtype.names:\n primary = objects['BRICK_PRIMARY']\n else:\n primary = np.ones(len(objects), dtype=bool)\n \n #----- LRG\n lrg = primary.copy()\n lrg &= rflux > 10**((22.5-23.0)/2.5)\n lrg &= zflux > 10**((22.5-20.56)/2.5)\n lrg &= w1flux > 10**((22.5-19.35)/2.5)\n lrg &= zflux > rflux * 10**(1.6/2.5)\n #- clip to avoid warnings from negative numbers raised to fractional powers\n lrg &= w1flux * rflux.clip(0)**(1.33-1) > zflux.clip(0)**1.33 * 10**(-0.33/2.5)\n\n #----- ELG\n elg = primary.copy()\n elg &= rflux > 10**((22.5-23.4)/2.5)\n elg &= zflux > rflux * 10**(0.3/2.5)\n elg &= zflux < rflux * 10**(1.5/2.5)\n elg &= rflux**2 < gflux * zflux * 10**(-0.2/2.5)\n elg &= zflux < gflux * 10**(1.2/2.5)\n\n #----- Quasars\n psflike = ((objects['TYPE'] == 'PSF') | (objects['TYPE'] == 'PSF ')) \n qso = primary.copy()\n qso &= psflike\n qso &= rflux > 10**((22.5-23.0)/2.5)\n qso &= rflux < gflux * 10**(1.0/2.5)\n qso &= zflux > rflux * 10**(-0.3/2.5)\n qso &= zflux < rflux * 10**(1.1/2.5)\n #- clip to avoid warnings from negative numbers raised to fractional powers\n qso &= wflux * gflux.clip(0)**1.2 > rflux.clip(0)**(1+1.2) * 10**(-0.4/2.5)\n ### qso &= wflux * gflux**1.2 > rflux**(1+1.2) * 10**(2/2.5)\n\n #------ Bright Galaxy Survey\n #- 'PSF' for astropy.io.fits; 'PSF ' for fitsio (sigh)\n bgs = primary.copy()\n bgs &= ~psflike\n bgs &= rflux > 10**((22.5-19.35)/2.5)\n\n #----- Standard stars\n fstd = primary.copy()\n fstd &= psflike\n fracflux = objects['DECAM_FRACFLUX'].T \n signal2noise = objects['DECAM_FLUX'] * np.sqrt(objects['DECAM_FLUX_IVAR'])\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n for j in (1,2,4): #- g, r, z\n fstd &= fracflux[j] < 0.04\n fstd &= signal2noise[:, j] > 10\n\n #- observed flux; no Milky Way extinction\n obs_rflux = objects['DECAM_FLUX'][:, 2]\n fstd &= obs_rflux < 10**((22.5-16.0)/2.5)\n fstd &= obs_rflux > 10**((22.5-19.0)/2.5)\n #- colors near BD+17; ignore warnings about flux<=0\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n grcolor = 2.5 * np.log10(rflux / gflux)\n rzcolor = 2.5 * np.log10(zflux / rflux)\n fstd &= (grcolor - 0.32)**2 + (rzcolor - 0.13)**2 < 0.06**2\n\n #-----\n #- construct the targetflag bits\n #- Currently our only cuts are DECam based (i.e. 
South)\n desi_target = lrg * desi_mask.LRG_SOUTH\n desi_target |= elg * desi_mask.ELG_SOUTH\n desi_target |= qso * desi_mask.QSO_SOUTH\n\n desi_target |= lrg * desi_mask.LRG\n desi_target |= elg * desi_mask.ELG\n desi_target |= qso * desi_mask.QSO\n\n desi_target |= fstd * desi_mask.STD_FSTAR\n \n bgs_target = bgs * bgs_mask.BGS_BRIGHT\n bgs_target |= bgs * bgs_mask.BGS_BRIGHT_SOUTH\n\n #- nothing for MWS yet; will be GAIA-based\n mws_target = np.zeros_like(bgs_target)\n\n #- Are any BGS or MWS bit set? Tell desi_target too.\n desi_target |= (bgs_target != 0) * desi_mask.BGS_ANY\n desi_target |= (mws_target != 0) * desi_mask.MWS_ANY\n\n return desi_target, bgs_target, mws_target", "def filter_catalogs(conn, catalogs, res): \n # Determine which resolution range the image belongs in\n for config, res_range in res_dict.items():\n if res_range[0] < res <= res_range[1]:\n use_range = res_range\n # Combine highest resolutions to allow for more catalogs\n if config == 'A' or config == 'B':\n use_range = (res_dict['A'][0], res_dict['B'][1])\n\n # Find all catalogs that fall into the adequate resolution range\n cur = conn.cursor()\n filtered_catalogs = []\n for catalog in catalogs:\n try:\n catalog_res = catalogio.catalog_dict[catalog]['resolution']\n except KeyError:\n cur.execute('''SELECT resolution FROM radcat.catalogs\n WHERE name = %s''', (catalog, ))\n catalog_res = cur.fetchone()[0]\n if use_range[0] < catalog_res <= use_range[1]:\n filtered_catalogs.append(catalog)\n\n cur.close()\n\n return filtered_catalogs", "def add_climatology_cols(df):\n return df", "def view_marginals_raw(data, label=''):\n variables = ['sao2', 'heartrate', 'respiration', 'systemicmean']\n\n num_gradations = 25\n # for cutoff in the gradations, what fraction of samples (at a given time point) fall into that cutoff bracket?\n grid = np.zeros(shape=(16, num_gradations, 4))\n grid = np.zeros(shape=(16, num_gradations, 4))\n assert data.shape[-1] == 4\n ranges = []\n for var in range(4):\n # allow for a different range per variable (if zoom)\n low = np.min(data[:, :, var])\n high = np.max(data[:, :, var])\n ranges.append([low, high])\n gradations = np.linspace(low, high, num_gradations)\n for (i, cutoff) in enumerate(gradations):\n # take the mean over samples\n frac = ((data[:, :, var] > low) & (data[:, :, var] <= cutoff)).mean(axis=0)\n low = cutoff\n grid[:, i, var] = frac\n\n fig, axarr = plt.subplots(nrows=4, ncols=1, sharex=True)\n axarr[0].imshow(grid[:, :, 0].T, origin='lower', aspect=0.5, cmap='magma_r')\n axarr[1].imshow(grid[:, :, 1].T, origin='lower', aspect=0.5, cmap='magma_r')\n axarr[2].imshow(grid[:, :, 2].T, origin='lower', aspect=0.5, cmap='magma_r')\n axarr[3].imshow(grid[:, :, 3].T, origin='lower', aspect=0.5, cmap='magma_r')\n\n for (var, ax) in enumerate(axarr):\n labels = np.round(np.linspace(ranges[var][0], ranges[var][1], num_gradations)[1::4], 0)\n ax.set_yticks(np.arange(num_gradations)[1::4])\n ax.set_yticklabels(labels)\n ax.set_ylabel(variables[var])\n ax.yaxis.set_ticks_position('none')\n ax.xaxis.set_ticks_position('none')\n ax.set_adjustable('box-forced')\n ax.spines['top'].set_visible(False)\n ax.spines['bottom'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.spines['left'].set_visible(False)\n ax.grid(b=True, color='black', alpha=0.2, linestyle='--')\n\n axarr[-1].set_xticks(np.arange(16)[::2])\n\n plt.tight_layout(pad=0.0, w_pad=-5.0, h_pad=0.1)\n plt.savefig(\"./experiments/eval/eICU_marginals_\" + label + \".png\")\n\n return True", "def 
get_sn2005ek(colorplt=False):\n z = 0.016551\n ebv = 0.210\n D = cosmo.luminosity_distance([z])[0].value * 1e+6 # in pc\n dis_mod = 5*np.log10(D / 10)\n t_max = 53639.9\n print (\"adopt r band t_max from Drout+13\")\n \n # tb = pd.read_csv('/Users/yuhanyao/Desktop/ZTF18abfcmjw/data/Drout2013/table1', sep='\\t')\n # tb = tb.drop(columns=[\"Unnamed: 6\"])\n \n mjds = np.array([53639.3, 53640.3, 53641.3, 53642.2, 53643.2, 53645.3,\n 53646.5, 53648.0, 53649.2, 53650.4, 53651.3, 53652.5,\n 53654.2, 53655.2, 53656.2, 53657.2])\n \n Bmags = np.array([18.25, 18.38, 18.65, np.nan, 19.10, 19.71,\n 20.07, np.nan, 20.67, 20.90, 21.05, np.nan,\n 21.74, np.nan, np.nan, np.nan])\n \n Bmag_uncs = np.array([0.02, 0.03, 0.02, np.nan, 0.05, 0.07, \n 0.07, np.nan, 0.04, 0.04, 0.04, np.nan,\n 0.12, np.nan, np.nan, np.nan])\n \n Vmags = np.array([17.83, 18.03, 17.92, np.nan, 18.24, 18.66,\n 18.93, 19.48, 19.63, 19.86, 19.98, 20.35,\n 20.60, 20.74, 20.88, 21.22])\n \n Vmag_uncs = np.array([0.02, 0.03, 0.01, np.nan, 0.02, 0.02,\n 0.02, 0.06, 0.03, 0.03, 0.04, 0.05, \n 0.08, 0.10, 0.08, 0.13])\n \n Rmags = np.array([17.46, 17.41, 17.60, 17.69, 17.86, 18.18, \n np.nan, 18.83, 19.03, 19.26, 19.48, 19.75,\n 20.08, np.nan, 20.47, np.nan])\n \n Rmag_uncs = np.array([0.01, 0.02, 0.01, 0.02, 0.01, 0.01,\n np.nan, 0.03, 0.02, 0.02, 0.02, 0.04,\n 0.05, np.nan, 0.08, np.nan])\n\n Imags = np.array([17.20, 17.13, 17.18, np.nan, 17.47, 17.71, \n np.nan, 18.13, 18.26, 18.51, 18.61, 18.74, \n 19.01, np.nan, 19.47, np.nan])\n \n Imag_uncs = np.array([0.02, 0.04, 0.02, np.nan, 0.03, 0.02,\n np.nan, 0.06, 0.02, 0.02, 0.02, 0.03,\n 0.05, np.nan, 0.06, np.nan])\n \n mymjds = np.hstack([mjds, mjds, mjds, mjds])\n mymags = np.hstack([Bmags, Vmags, Rmags, Imags])\n myemags = np.hstack([Bmag_uncs, Vmag_uncs, Rmag_uncs, Imag_uncs])\n myfilts = np.hstack([ np.repeat(\"B\", len(Bmags)),\n np.repeat(\"V\", len(Bmags)),\n np.repeat(\"R\", len(Rmags)),\n np.repeat(\"I\", len(Imags)) ])\n ix = ~np.isnan(mymags)\n tb = pd.DataFrame({'mjd': mymjds[ix],\n 'mag': mymags[ix],\n 'emag': myemags[ix],\n \"filter\": myfilts[ix]})\n \n ixB = tb['filter'].values==\"B\"\n ixV = tb['filter'].values==\"V\"\n ixR = tb['filter'].values==\"R\"\n ixI = tb['filter'].values==\"I\"\n \n tb['wave'] = np.zeros(len(tb))\n tb['wave'].values[ixB] = 4359\n tb['wave'].values[ixV] = 5430\n tb['wave'].values[ixR] = 6349\n tb['wave'].values[ixI] = 8797\n \n tb['mag0'] = tb['mag'] - extinction.ccm89(tb['wave'].values, 3.1*ebv, 3.1)\n tb['mag0_abs'] = tb['mag0'] - dis_mod\n tb['tmax_rf'] = (tb['mjd'] - t_max) / (1+z)\n if colorplt==False:\n return tb\n else:\n tb = add_datecol(tb)\n ix = np.in1d(tb[\"filter\"].values, np.array(['B', 'R', 'I']))\n tb = tb[ix]\n\n dates = get_date_span(tb)\n datesave = []\n for i in range(len(dates)):\n x = dates[i]\n ix = tb[\"date\"].values == x\n tbsub = tb[ix]\n if len(tbsub)!=0:\n flts = tbsub['filter'].values\n if \"R\" in flts and np.sum(np.unique(flts))!=1:\n datesave.append(x)\n datesave = np.array(datesave)\n \n mcolor = []\n mcolor_unc = []\n mjds = []\n colorname = []\n for i in range(len(datesave)):\n x = datesave[i]\n ix = tb[\"date\"].values == x\n tbsub = tb[ix]\n gtb = tbsub[tbsub[\"filter\"].values==\"B\"]\n rtb = tbsub[tbsub[\"filter\"].values==\"R\"]\n itb = tbsub[tbsub[\"filter\"].values==\"I\"]\n if len(gtb)!=0:\n gmjds = gtb[\"mjd\"].values\n gmags = gtb[\"mag0\"].values\n gemags = gtb[\"emag\"].values\n gwtgs = 1/gemags**2\n gmag = np.sum(gmags * gwtgs) / np.sum(gwtgs)\n gmjd = np.sum(gmjds * gwtgs) / np.sum(gwtgs)\n 
gemag = 1/ np.sqrt(np.sum(gwtgs))\n if len(rtb)!=0:\n rmjds = rtb[\"mjd\"].values\n rmags = rtb[\"mag0\"].values\n remags = rtb[\"emag\"].values\n rwtgs = 1/remags**2\n rmag = np.sum(rmags * rwtgs) / np.sum(rwtgs)\n rmjd = np.sum(rmjds * rwtgs) / np.sum(rwtgs)\n remag = 1/ np.sqrt(np.sum(rwtgs))\n if len(itb)!=0:\n imjds = itb[\"mjd\"].values\n imags = itb[\"mag0\"].values\n iemags = itb[\"emag\"].values\n iwtgs = 1/iemags**2\n imag = np.sum(imags * iwtgs) / np.sum(iwtgs)\n imjd = np.sum(imjds * iwtgs) / np.sum(iwtgs)\n iemag = 1/ np.sqrt(np.sum(iwtgs))\n if len(gtb)!=0 and len(rtb)!=0:\n mcolor.append(gmag - rmag)\n mjds.append( 0.5 * (gmjd + rmjd) )\n mcolor_unc.append( np.sqrt(gemag**2 + remag**2) )\n colorname.append(\"BmR\")\n if len(rtb)!=0 and len(itb)!=0:\n mcolor.append(rmag - imag)\n mjds.append( 0.5 * (rmjd + imjd) )\n mcolor_unc.append( np.sqrt(remag**2 + iemag**2) )\n colorname.append(\"RmI\")\n \n ctb = Table(data = [mjds, mcolor, mcolor_unc, colorname],\n names = [\"mjd\", \"c\", \"ec\", \"cname\"])\n \n ctb['tmax_rf'] = (ctb['mjd'] - t_max) / (1+z)\n ctb = ctb.to_pandas()\n return ctb", "def plot_completeness(cat_name,output_name,name_plot,mag_lims,binning_mag,plot,second_cat='no'):\n\n cat=ascii.read('%s.txt' % cat_name)\n mag_bins=np.arange(mag_lims[0],mag_lims[1],binning_mag)\n\n mask=cat['detected']==1\n mag_binned_tot=np.digitize(cat['MAG'],mag_bins,right=True)\n mag_binned_det=np.digitize(cat[mask]['MAG'],mag_bins,right=True)\n\n nb_mag=np.array([ len(np.where(mag_binned_tot==i)[0]) for i in range(1,len(mag_bins)) ])\n nb_mag_det = np.array([ len(np.where(mag_binned_det==i)[0]) for i in range(1,len(mag_bins)) ])\n #mag_tot= np.array([stuff_cat['MAG'][mag_binned_tot == i].mean() for i in range(1, len(mag_bins))])\n #mag_det= np.array([stuff_cat[mask]['MAG'][mag_binned_det == i].mean() for i in range(1, len(mag_bins))])\n print (nb_mag)\n print (nb_mag_det)\n\n #Write completeness result in text file\n np.savetxt('%s.txt' % output_name, list(zip(mag_bins,nb_mag,nb_mag_det)),fmt='%.2f %d %d')\n\n\n mag_bin_plot=(mag_bins[:-1]+mag_bins[1:])/2\n\n import matplotlib.pyplot as plt\n\n # the histogram of the input sources\n n, bins, patches = plt.hist(cat['MAG'], mag_bins, normed=0, facecolor='green', alpha=0.75)\n plt.xlabel('Magnitude')\n plt.ylabel('Nb of sources')\n plt.xlim([mag_bins[0],mag_bins[-1]])\n plt.savefig('results/plots/hist_sources.png')\n #plt.show()\n\n plt.clf()\n plt.plot(mag_bin_plot,nb_mag_det/nb_mag)\n plt.xlabel('Magnitude AB')\n plt.ylabel('Efficiency')\n plt.grid(True)\n plt.savefig('%s.png' % output_name)\n if plot: plt.show()\n\n\n if second_cat != 'no':\n cat2=ascii.read('%s.txt' % second_cat)\n mag_bins2=np.arange(mag_lims[0],mag_lims[1],binning_mag)\n\n mask2=cat2['detected']==1\n mag_binned_tot2=np.digitize(cat2['MAG'],mag_bins2,right=True)\n mag_binned_det2=np.digitize(cat2[mask2]['MAG'],mag_bins2,right=True)\n\n nb_mag2=np.array([ len(np.where(mag_binned_tot2==i)[0]) for i in range(1,len(mag_bins2)) ])\n nb_mag_det2 = np.array([ len(np.where(mag_binned_det2==i)[0]) for i in range(1,len(mag_bins2)) ])\n\n mag_bin_plot2=(mag_bins2[:-1]+mag_bins2[1:])/2\n #print (mag_bin_plot)\n #plt.plot(mag_bin_plot,nb_mag_det/nb_mag,label='seeing=0.7\"',color='red')\n #plt.plot(mag_bin_plot2,nb_mag_det2/nb_mag2,label='seeing=0.1\"',color='green')\n plt.plot(mag_bin_plot,nb_mag_det/nb_mag,label='5.9',color='red')\n plt.plot(mag_bin_plot2,nb_mag_det2/nb_mag2,label='5',color='green')\n plt.xlabel('Magnitude AB')\n plt.ylabel('Efficiency')\n #plt.yscale('log')\n 
#plt.xscale('log')\n plt.grid(True)\n plt.legend()\n plt.savefig('results/plots/completeness_comp.png')\n if plot: plt.show()", "def make_photom_catalog_uvis(data, filt, origin=''):\n\tfnarr = [data[key]['filename'] for key in data.keys()]\n\t# Make sure filename does not include path.\n\tif '/' in fnarr[0]:\n\t if 'temp_lacos' in fnarr[0]:\n\t for i in range(len(fnarr)):\n\t file_name = fnarr[i].split('temp_lacos/')[1]\n\t fnarr[i] = file_name\n\t else:\n\t for i in range(len(fnarr)):\n\t file_name = (fnarr[i].split('/'))[len(fnarr[i].split('/'))-1]\n\t fnarr[i] = file_name\n\t\n\tamparr = [data[key]['amp'] for key in data.keys()]\n\tshutarr = [data[key]['shutter'] for key in data.keys()]\n\tmjdarr = [data[key]['mjd_avg'] for key in data.keys()]\n\tmjddeltarr = [data[key]['mjd_deltat'] for key in data.keys()]\n\tchiparr = [data[key]['chip'] for key in data.keys()]\n\taxis1arr = [data[key]['axis1'] for key in data.keys()]\n\taxis2arr = [data[key]['axis2'] for key in data.keys()]\n\txcarr = [data[key]['xc'] for key in data.keys()]\n\tycarr = [data[key]['yc'] for key in data.keys()]\n\txcparr = [data[key]['xcp'] for key in data.keys()]\n\tycparr = [data[key]['ycp'] for key in data.keys()]\n\tbackarr = [data[key]['background'] for key in data.keys()]\n\tbackrmsarr = [data[key]['background_rms'] for key in data.keys()]\n\texptimearr = [data[key]['exptime'] for key in data.keys()]\n\tf1 = [data[key]['flux'][0] for key in data.keys()]\n\tf2 = [data[key]['flux'][1] for key in data.keys()]\n\tf3 = [data[key]['flux'][2] for key in data.keys()]\n\tf4 = [data[key]['flux'][3] for key in data.keys()]\n\tf5 = [data[key]['flux'][4] for key in data.keys()]\n\tf6 = [data[key]['flux'][5] for key in data.keys()]\n\tf7 = [data[key]['flux'][6] for key in data.keys()]\n\tf8 = [data[key]['flux'][7] for key in data.keys()]\n\tf9 = [data[key]['flux'][8] for key in data.keys()]\n\tf10 = [data[key]['flux'][9] for key in data.keys()]\n\tf12 = [data[key]['flux'][10] for key in data.keys()]\n\tf14 = [data[key]['flux'][11] for key in data.keys()]\n\tf16 = [data[key]['flux'][12] for key in data.keys()]\n\tf18 = [data[key]['flux'][13] for key in data.keys()]\n\tf20 = [data[key]['flux'][14] for key in data.keys()]\n\tf24 = [data[key]['flux'][15] for key in data.keys()]\n\tf28 = [data[key]['flux'][16] for key in data.keys()]\n\tf32 = [data[key]['flux'][17] for key in data.keys()]\n\tf36 = [data[key]['flux'][18] for key in data.keys()]\n\tf40 = [data[key]['flux'][19] for key in data.keys()]\n\tf45 = [data[key]['flux'][20] for key in data.keys()]\n\tf50 = [data[key]['flux'][21] for key in data.keys()]\n\tf55 = [data[key]['flux'][22] for key in data.keys()]\n\tf60 = [data[key]['flux'][23] for key in data.keys()]\n\tf65 = [data[key]['flux'][24] for key in data.keys()]\n\tf70 = [data[key]['flux'][25] for key in data.keys()]\n \n\tm1 = [data[key]['mag'][0] for key in data.keys()]\n\tm2 = [data[key]['mag'][1] for key in data.keys()]\n\tm3 = [data[key]['mag'][2] for key in data.keys()]\n\tm4 = [data[key]['mag'][3] for key in data.keys()]\n\tm5 = [data[key]['mag'][4] for key in data.keys()]\n\tm6 = [data[key]['mag'][5] for key in data.keys()]\n\tm7 = [data[key]['mag'][6] for key in data.keys()]\n\tm8 = [data[key]['mag'][7] for key in data.keys()]\n\tm9 = [data[key]['mag'][8] for key in data.keys()]\n\tm10 = [data[key]['mag'][9] for key in data.keys()]\n\tm12 = [data[key]['mag'][10] for key in data.keys()]\n\tm14 = [data[key]['mag'][11] for key in data.keys()]\n\tm16 = [data[key]['mag'][12] for key in data.keys()]\n\tm18 = 
[data[key]['mag'][13] for key in data.keys()]\n\tm20 = [data[key]['mag'][14] for key in data.keys()]\n\tm24 = [data[key]['mag'][15] for key in data.keys()]\n\tm28 = [data[key]['mag'][16] for key in data.keys()]\n\tm32 = [data[key]['mag'][17] for key in data.keys()]\n\tm36 = [data[key]['mag'][18] for key in data.keys()]\n\tm40 = [data[key]['mag'][19] for key in data.keys()]\n\tm45 = [data[key]['mag'][20] for key in data.keys()]\n\tm50 = [data[key]['mag'][21] for key in data.keys()]\n\tm55 = [data[key]['mag'][22] for key in data.keys()]\n\tm60 = [data[key]['mag'][23] for key in data.keys()]\n\tm65 = [data[key]['mag'][24] for key in data.keys()]\n\tm70 = [data[key]['mag'][25] for key in data.keys()]\n\t\n\tm1_err = [data[key]['merr'][0] for key in data.keys()]\n\tm2_err = [data[key]['merr'][1] for key in data.keys()]\n\tm3_err = [data[key]['merr'][2] for key in data.keys()]\n\tm4_err = [data[key]['merr'][3] for key in data.keys()]\n\tm5_err = [data[key]['merr'][4] for key in data.keys()]\n\tm6_err = [data[key]['merr'][5] for key in data.keys()]\n\tm7_err = [data[key]['merr'][6] for key in data.keys()]\n\tm8_err = [data[key]['merr'][7] for key in data.keys()]\n\tm9_err = [data[key]['merr'][8] for key in data.keys()]\n\tm10_err = [data[key]['merr'][9] for key in data.keys()]\n\tm12_err = [data[key]['merr'][10] for key in data.keys()]\n\tm14_err = [data[key]['merr'][11] for key in data.keys()]\n\tm16_err = [data[key]['merr'][12] for key in data.keys()]\n\tm18_err = [data[key]['merr'][13] for key in data.keys()]\n\tm20_err = [data[key]['merr'][14] for key in data.keys()]\n\tm24_err = [data[key]['merr'][15] for key in data.keys()]\n\tm28_err = [data[key]['merr'][16] for key in data.keys()]\n\tm32_err = [data[key]['merr'][17] for key in data.keys()]\n\tm36_err = [data[key]['merr'][18] for key in data.keys()]\n\tm40_err = [data[key]['merr'][19] for key in data.keys()]\n\tm45_err = [data[key]['merr'][20] for key in data.keys()]\n\tm50_err = [data[key]['merr'][21] for key in data.keys()]\n\tm55_err = [data[key]['merr'][22] for key in data.keys()]\n\tm60_err = [data[key]['merr'][23] for key in data.keys()]\n\tm65_err = [data[key]['merr'][24] for key in data.keys()]\n\tm70_err = [data[key]['merr'][25] for key in data.keys()]\n \n\ttt = {'#filename':fnarr, 'amp':amparr, 'shutter':shutarr, \\\n\t 'mjd_avg':mjdarr, 'mjd_deltat':mjddeltarr, 'chip':chiparr, \\\n\t 'axis1':axis1arr, 'axis2':axis2arr,'xc':xcarr, 'yc':ycarr, \\\n\t 'xcp':xcparr, 'ycp':ycparr, 'background':backarr, \\\n\t 'background_rms':backrmsarr, 'exptime':exptimearr, \\\n 'f1':f1, 'f2':f2, 'f3':f3,'f4':f4,'f5':f5,'f6':f6,'f7':f7,'f8':f8,\\\n 'f9':f9,'f10':f10,'f12':f12,'f14':f14,'f16':f16,'f18':f18,'f20':f20,\\\n 'f24':f24,'f28':f28,'f32':f32,'f36':f36,'f40':f40,'f45':f45,\\\n 'f50':f50,'f55':f55,'f60':f60,'f65':f65,'f70':f70,'m1':m1, 'm2':m2, \\\n 'm3':m3,'m4':m4,'m5':m5,'m6':m6,'m7':m7,'m8':m8,'m9':m9,'m10':m10,\\\n 'm12':m12,'m14':m14,'m16':m16,'m18':m18,'m20':m20,'m24':m24,\\\n 'm28':m28,'m32':m32,'m36':m36,'m40':m40,'m45':m45,'m50':m50,\\\n 'm55':m55,'m60':m60,'m65':m65,'m70':m70,'m1err':m1_err, \\\n 'm2err':m2_err, 'm3err':m3_err,'m4err':m4_err,'m5err':m5_err,\\\n 'm6err':m6_err,'m7err':m7_err,'m8err':m8_err,'m9err':m9_err,\\\n 'm10err':m10_err,'m12err':m12_err,'m14err':m14_err,'m16err':m16_err,\\\n 'm18err':m18_err,'m20err':m20_err,'m24err':m24_err,'m28err':m28_err,\\\n 'm32err':m32_err,'m36err':m36_err,'m40err':m40_err,'m45err':m45_err,\\\n 'm50err':m50_err,'m55err':m55_err,'m60err':m60_err,'m65err':m65_err,\\\n 
'm70err':m70_err}\n\n\tascii.write(tt, origin+filt+'_photcat.dat', \\\n\t names=['#filename','amp','shutter','mjd_avg','mjd_deltat',\\\n\t 'chip','axis1','axis2','xc','yc','xcp','ycp',\\\n\t 'background','background_rms','exptime', \\\n 'f1','f2','f3','f4','f5','f6','f7','f8','f9','f10',\\\n 'f12','f14','f16','f18','f20','f24','f28','f32','f36',\\\n 'f40','f45','f50','f55','f60','f65','f70',\\\n 'm1','m2','m3','m4','m5','m6','m7','m8','m9','m10',\\\n 'm12','m14','m16','m18','m20','m24','m28','m32','m36',\\\n 'm40','m45','m50','m55','m60','m65','m70','m1err',\\\n 'm2err','m3err','m4err','m5err','m6err','m7err',\\\n 'm8err','m9err','m10err','m12err','m14err','m16err',\\\n 'm18err','m20err','m24err','m28err','m32err','m36err',\\\n 'm40err','m45err','m50err','m55err','m60err','m65err',\\\n 'm70err'], \\\n formats={'#filename':'%s','amp':'%s','shutter':'%s',\\\n 'mjd_avg':'%9.4f','mjd_deltat':'%6.4f','chip':'%i',\\\n 'axis1':'%i','axis2':'%i','xc':'%8.3f','yc':'%8.3f',\\\n 'xcp':'%8.3f','ycp':'%8.3f', 'background':'%0.5f',\\\n 'background_rms':'%0.5f', 'exptime':'%0.2f', \\\n 'f1':'%0.3f', 'f2':'%0.3f','f3':'%0.3f','f4':'%0.3f',\\\n 'f5':'%0.3f','f6':'%0.3f','f7':'%0.3f','f8':'%0.3f',\\\n 'f9':'%0.3f','f10':'%0.3f','f12':'%0.3f',\\\n 'f14':'%0.3f','f16':'%0.3f','f18':'%0.3f',\\\n 'f20':'%0.3f','f24':'%0.3f','f28':'%0.3f',\\\n 'f32':'%0.3f','f36':'%0.3f','f40':'%0.3f',\\\n 'f45':'%0.3f','f50':'%0.3f','f55':'%0.3f',\\\n 'f60':'%0.3f','f65':'%0.3f','f70':'%0.3f',\\\n 'm1':'%0.3f','m2':'%0.3f','m3':'%0.3f','m4':'%0.3f',\\\n 'm5':'%0.3f','m6':'%0.3f','m7':'%0.3f','m8':'%0.3f',\\\n 'm9':'%0.3f','m10':'%0.3f','m12':'%0.3f',\\\n 'm14':'%0.3f','m16':'%0.3f','m18':'%0.3f',\\\n 'm20':'%0.3f','m24':'%0.3f','m28':'%0.3f',\\\n 'm32':'%0.3f','m36':'%0.3f','m40':'%0.3f',\\\n 'm45':'%0.3f','m50':'%0.3f','m55':'%0.3f',\\\n 'm60':'%0.3f','m65':'%0.3f','m70':'%0.3f', \\\n 'm1err':'%0.3f', 'm2err':'%0.3f','m3err':'%0.3f',\\\n 'm4err':'%0.3f','m5err':'%0.3f','m6err':'%0.3f',\\\n 'm7err':'%0.3f','m8err':'%0.3f','m9err':'%0.3f',\\\n 'm10err':'%0.3f','m12err':'%0.3f','m14err':'%0.3f',\\\n 'm16err':'%0.3f','m18err':'%0.3f','m20err':'%0.3f',\\\n 'm24err':'%0.3f','m28err':'%0.3f','m32err':'%0.3f',\\\n 'm36err':'%0.3f','m40err':'%0.3f','m45err':'%0.3f',\\\n 'm50err':'%0.3f','m55err':'%0.3f','m60err':'%0.3f',\\\n 'm65err':'%0.3f','m70err':'%0.3f'})", "def apolco(a,minfeh=-3,out=None) :\n apo=np.where((a['TELESCOPE'] == 'apo25m') & (a['RV_FEH']>minfeh) )[0]\n fig=vscat(a[apo],marker='o',density=True)\n lco=np.where((a['TELESCOPE'] == 'lco25m') & (a['RV_FEH']>minfeh) )[0]\n vscat(a[lco],fig=fig,ls=':',marker='+',density=True)\n if out is not None : \n fig[0].savefig(out+'_1.png')\n plt.close()\n i1,i2=match.match(a['APOGEE_ID'][apo],a['APOGEE_ID'][lco])\n print('matched {:d} stars'.format(len(i1)))\n fig,ax=plots.multi(1,2)\n #plots.plotp(ax[0,0],a['SNR'][apo[i1]],a['VHELIO_AVG'][apo[i1]]-a['VHELIO_AVG'][lco[i2]],yr=[-3,3],yt=r'$\\Delta$ VHELIO_AVG',xt='S/N')\n #plots.plotp(ax[0,1],a['SNR'][apo[i1]],a['VHELIO_AVG'][apo[i1]]-a['VHELIO_AVG'][lco[i2]],yr=[-50,50],yt=r'$\\Delta$ VHELIO_AVG',xt='S/N')\n #plots.plotp(ax[1,0],a['SNR'][apo[i1]],a['VSCATTER'][apo[i1]]-a['VSCATTER'][lco[i2]],yr=[-0.5,0.5],yt=r'$\\Delta$ VSCATTER',xt='S/N')\n #plots.plotp(ax[1,1],a['SNR'][apo[i1]],a['VSCATTER'][apo[i1]]-a['VSCATTER'][lco[i2]],yr=[-5,5],yt=r'$\\Delta$ VSCATTER',xt='S/N')\n ax[0].hist(a['VHELIO_AVG'][apo[i1]]-a['VHELIO_AVG'][lco[i2]],bins=np.arange(-0.5,0.5,0.02),histtype='step')\n ax[0].set_xlabel(r'$\\Delta$ VHELIO_AVG')\n 
ax[1].hist(a['VSCATTER'][apo[i1]]-a['VSCATTER'][lco[i2]],bins=np.arange(-0.25,0.25,0.01),histtype='step')\n ax[1].set_xlabel(r'$\\Delta$ VSCATTER')\n if out is not None : \n fig.savefig(out+'_2.png')\n plt.close()", "def show_trap_results():\n df_grid = pd.read_hdf('./temp_results.h5', '/optimize_grid')\n print(df_grid)\n \n print('Minimum fwhm:')\n print(df_grid[df_grid.fwhm_ovr_mean==df_grid.fwhm_ovr_mean.min()])\n \n plt.plot(df_grid.e_fit, df_grid.fwhm_ovr_mean, '.b')\n plt.show()", "def plot_selected(df, title='title', columns=[], shouldNormalize = True, symbol='any stock'):\n #df = df[columns][start_index:end_index]\n #df = df.loc[start_index:end_index, columns]\n df = df.loc[:, columns]\n ylabel=\"Price\"\n normal = \"un normalized\"\n if shouldNormalize:\n df = normalize(df.loc[:,['Close', 'sma200']])\n ylabel = \"%\"\n normal = \"normalized\"\n #print('df.shape in plot=',df.shape)\n plot_data(df, title=title, ylabel=ylabel)", "def __init__(self, x=0, y=0, flux=None, time=None, wcs=None, quality=None, mask=None, exposure=1800, sector=0,\n size=150,\n camera=1, ccd=1, cadence=None):\n super(Source, self).__init__()\n if cadence is None:\n cadence = []\n if quality is None:\n quality = []\n if wcs is None:\n wcs = []\n if time is None:\n time = []\n if flux is None:\n flux = []\n\n self.size = size\n self.sector = sector\n self.camera = camera\n self.ccd = ccd\n self.cadence = cadence\n self.quality = quality\n self.exposure = exposure\n self.wcs = wcs\n co1 = 38.5\n co2 = 116.5\n catalog_1 = self.search_gaia(x, y, co1, co1)\n catalog_2 = self.search_gaia(x, y, co1, co2)\n catalog_3 = self.search_gaia(x, y, co2, co1)\n catalog_4 = self.search_gaia(x, y, co2, co2)\n catalogdata = vstack([catalog_1, catalog_2, catalog_3, catalog_4], join_type='exact')\n catalogdata = unique(catalogdata, keys='DESIGNATION')\n coord = wcs.pixel_to_world([x + (size - 1) / 2 + 44], [y + (size - 1) / 2])[0].to_string()\n ra = float(coord.split()[0])\n dec = float(coord.split()[1])\n catalogdata_tic = tic_advanced_search_position_rows(ra=ra, dec=dec, radius=(self.size + 2) * 21 * 0.707 / 3600)\n # print(f'no_of_stars={len(catalogdata_tic)}, camera={camera}, ccd={ccd}: ra={ra}, dec={dec}, radius={(self.size + 2) * 21 * 0.707 / 3600}')\n self.tic = convert_gaia_id(catalogdata_tic)\n self.flux = flux[:, y:y + size, x:x + size]\n self.mask = mask[y:y + size, x:x + size]\n self.time = np.array(time)\n median_time = np.median(self.time)\n interval = (median_time - 388.5) / 365.25\n\n num_gaia = len(catalogdata)\n tic_id = np.zeros(num_gaia)\n x_gaia = np.zeros(num_gaia)\n y_gaia = np.zeros(num_gaia)\n tess_mag = np.zeros(num_gaia)\n in_frame = [True] * num_gaia\n for i, designation in enumerate(catalogdata['DESIGNATION']):\n ra = catalogdata['ra'][i]\n dec = catalogdata['dec'][i]\n if not np.isnan(catalogdata['pmra'].mask[i]): # masked?\n ra += catalogdata['pmra'][i] * np.cos(np.deg2rad(dec)) * interval / 1000 / 3600\n if not np.isnan(catalogdata['pmdec'].mask[i]):\n dec += catalogdata['pmdec'][i] * interval / 1000 / 3600\n pixel = self.wcs.all_world2pix(\n np.array([catalogdata['ra'][i], catalogdata['dec'][i]]).reshape((1, 2)), 0, quiet=True)\n x_gaia[i] = pixel[0][0] - x - 44\n y_gaia[i] = pixel[0][1] - y\n try:\n tic_id[i] = catalogdata_tic['ID'][np.where(catalogdata_tic['GAIA'] == designation.split()[2])[0][0]]\n except:\n tic_id[i] = np.nan\n if np.isnan(catalogdata['phot_g_mean_mag'][i]):\n in_frame[i] = False\n elif catalogdata['phot_g_mean_mag'][i] >= 25:\n in_frame[i] = False\n elif -4 < x_gaia[i] < 
self.size + 3 and -4 < y_gaia[i] < self.size + 3:\n dif = catalogdata['phot_bp_mean_mag'][i] - catalogdata['phot_rp_mean_mag'][i]\n tess_mag[i] = catalogdata['phot_g_mean_mag'][\n i] - 0.00522555 * dif ** 3 + 0.0891337 * dif ** 2 - 0.633923 * dif + 0.0324473\n if np.isnan(tess_mag[i]):\n tess_mag[i] = catalogdata['phot_g_mean_mag'][i] - 0.430\n if np.isnan(tess_mag[i]):\n in_frame[i] = False\n else:\n in_frame[i] = False\n\n tess_flux = 10 ** (- tess_mag / 2.5)\n t = Table()\n t[f'tess_mag'] = tess_mag[in_frame]\n t[f'tess_flux'] = tess_flux[in_frame]\n t[f'tess_flux_ratio'] = tess_flux[in_frame] / np.nanmax(tess_flux[in_frame])\n t[f'sector_{self.sector}_x'] = x_gaia[in_frame]\n t[f'sector_{self.sector}_y'] = y_gaia[in_frame]\n catalogdata = hstack([catalogdata[in_frame], t]) # TODO: sorting not sorting all columns\n catalogdata.sort('tess_mag')\n self.gaia = catalogdata", "def identify_flux(xyz: list) -> list:\n flagged_lines = [tup for tup in xyz if abs(tup[3]) > THRESHOLDS[0] and abs(tup[4]) > THRESHOLDS[1]]\n\n return flagged_lines", "def drop_attributes(df, cutoff=25, extra_add=[]):\n\n df_copy = df.copy()\n\n attributs_drop = []\n for var in sorted(df.columns):\n series = df[var]\n perc_missing = 100 - series.count() / len(series) * 100\n\n if perc_missing > cutoff:\n attributs_drop.append(var)\n else:\n continue\n\n if len(extra_add) == 0:\n df_copy.drop(attributs_drop, axis=1, inplace=True)\n\n else:\n attributs_drop = attributs_drop + extra_add\n df_copy.drop(attributs_drop, axis=1, inplace=True)\n\n return df_copy", "def manipulate_data(ds, var, predef_clim, predef_trnd, trn_yrs, all_yrs, \n apply_latw=True, apply_detrending=True, dropna=True):\n\n \n if((var=='SD')|(var=='sd')|(var=='snowc')): \n ds[var] = ds[var].where(ds[var]>=0, other=0.0)\n ds[var] = ds[var].where(ds[var]==0, other=1.0)\n #ds[var].values = Gauss_filter(ds[var].values, (0,3,3))\n \n \"\"\"\n if((var=='hgt')|(var=='z')|(var=='GPT')):\n months = ds.time.to_index().month; ssn_ends = (months==2)|(months==5)|(months==8)|(months==11)\n ds = ds.sel(time=ssn_ends)\n else: \n ds = ds.resample(time='3M').mean()\n \"\"\"\n \n ds = ds.resample(time='3M').mean()\n\n ds = ds.sel(time=slice(str(all_yrs[0])+'-01-01', str(all_yrs[-1])+'-12-31')) \n \n try: \n clim = predef_clim\n ds = ds.groupby('time.season') - clim\n print('Predefined climatology used')\n except:\n clim = ds.sel(time=slice(str(trn_yrs[0])+'-01-01', str(trn_yrs[-1])+'-12-31')).groupby('time.season').mean('time')\n ds = ds.groupby('time.season') - clim\n print('Climatology calculated from data')\n \n if(apply_latw): ds[var].values = lat_weighting(ds[var].values, \n ds.lat, ds.lon)\n if(dropna):\n ds = ds.stack(gridcell=('lat', 'lon')).dropna(dim='gridcell',how='any')\n else: \n ds = ds.stack(gridcell=('lat', 'lon')).fillna(0)\n \n \n trend_models = { }\n if(apply_detrending): \n ds = ds.load()\n for ssn in ('DJF', 'MAM', 'JJA', 'SON'):\n #ssn_idx = ds['time.season'] == ssn\n \n trn_idx = bool_index_to_int_index(np.isin(ds['time.season'], ssn) & np.isin(ds['time.year'], trn_yrs))\n all_idx = bool_index_to_int_index(np.isin(ds['time.season'], ssn) & np.isin(ds['time.year'], all_yrs))\n \n trn_x = np.array(ds.time[trn_idx].values.tolist()).reshape(-1,1)\n all_x = np.array(ds.time[all_idx].values.tolist()).reshape(-1,1)\n try:\n trend = predef_trnd[ssn].predict(all_x)\n trend_models[ssn] = predef_trnd[ssn]\n print('Predefined trend model used')\n except:\n #_, trend_model = define_trends(ds[var][trn_idx], trn_x)\n _, trend_model = define_trends(ds[var][all_idx], 
all_x)\n trend = trend_model.predict(all_x)\n trend_models[ssn] = trend_model\n print('Trends calculated from data')\n \n ds[var][all_idx] = ds[var][all_idx] - trend\n \n\n \n return ds, clim, trend_models", "def cn_filter(df, binary_cutoff=12):\n del_df = (df.ix['Deletion'].dropna(1) < 0).astype(int)\n del_df = del_df[del_df.sum(1) >= binary_cutoff]\n del_df.index = del_df.index.droplevel(1)\n del_df = del_df.T\n amp_df = (df.ix['Amplification'].dropna(1) > 0).astype(int)\n amp_df = amp_df[amp_df.sum(1) >= binary_cutoff]\n amp_df.index = amp_df.index.droplevel(1)\n amp_df = amp_df.T\n return amp_df, del_df", "def plot_selected(df, title='title', columns=[], shouldNormalize=True, symbol='any stock'):\n # df = df[columns][start_index:end_index]\n # df = df.loc[start_index:end_index, columns]\n df = df.loc[:, columns]\n ylabel = \"Price\"\n normal = \"un normalized\"\n if shouldNormalize:\n df = normalize(df.loc[:, ['Close', 'sma200']])\n ylabel = \"%\"\n normal = \"normalized\"\n # print('df.shape in plot=',df.shape)\n plot_data(df, title=title, ylabel=ylabel)", "def plot_lc_per_aperture(\n self,\n sector=None,\n kwargs={\"aper_radius\": 1, \"percentile\": 84, \"threshold_sigma\": 3},\n apertures=[\"pipeline\", \"round\", \"square\", \"percentile\", \"threshold\"],\n return_lcs=False,\n ):\n sector = self.sector if sector is None else sector\n nrows = len(apertures)\n fig, axs = pl.subplots(\n nrows=nrows,\n ncols=2,\n figsize=(10, nrows * 2),\n constrained_layout=True,\n gridspec_kw={\"width_ratios\": [3, 1], \"hspace\": 0, \"wspace\": 0},\n )\n custom_lcs = {}\n for n, sap_mask in enumerate(apertures):\n ax1 = axs[n, 0]\n lc = self.make_custom_lc(\n sector=sector, sap_mask=sap_mask, **kwargs\n )\n lc.scatter(ax=ax1, label=sap_mask)\n print(f\"mask={sap_mask}; contratio={self.contratio:.2f}\")\n custom_lcs[sap_mask] = lc\n if n != len(apertures) - 1:\n ax1.set_xlabel(\"\")\n ax1.set_xticklabels(\"\")\n if n == 0:\n ax1.set_title(f\"{self.target_name} (sector {sector})\")\n if self.tpf is None:\n tpf = self.get_tpf()\n else:\n tpf = self.tpf\n img = np.nanmedian(self.tpf.flux, axis=0)\n\n ax2 = axs[n, 1]\n ax = plot_aperture_outline(\n img, mask=self.aper_mask, imgwcs=tpf.wcs, ax=ax2\n )\n ax.axis(\"off\")\n if return_lcs:\n return fig, custom_lcs\n else:\n return fig", "def control_variation(df, outDir, features_to_analyse, \n variables_to_analyse=[\"date_yyyymmdd\"], \n remove_outliers=True, \n p_value_threshold=0.05, \n PCs_to_keep=10):\n \n # Record non-data columns before dropping feature columns \n other_colnames = [col for col in df.columns if col not in features_to_analyse]\n \n # Drop columns that contain only zeros\n colnames_before = list(df.columns)\n AllZeroFeats = df[features_to_analyse].columns[(df[features_to_analyse] == 0).all()]\n df = df.drop(columns=AllZeroFeats)\n colnames_after = list(df.columns)\n zero_cols = [col for col in colnames_before if col not in colnames_after]\n if len(zero_cols) > 0:\n print(\"Dropped %d features with all-zero summaries:\\n%s\" % (len(zero_cols), zero_cols))\n \n # Record feature column names after dropping zero data\n features_to_analyse = [feat for feat in df.columns if feat not in other_colnames]\n \n # Remove outliers from the dataset \n if remove_outliers:\n df, indsOutliers = removeOutliersMahalanobis(df, features_to_analyse)\n remove_outliers = False \n # NB: Ensure Mahalanobis operation to remove outliers is performed only once!\n\n # Check for normality in features to analyse in order decide which \n # statistical test to use: 
one-way ANOVA (parametric) or Kruskal-Wallis \n # (non-parametric) test\n TEST = check_normality(df, features_to_analyse, p_value_threshold)\n\n # Record name of statistical test used (kruskal/f_oneway)\n test_name = str(TEST).split(' ')[1].split('.')[-1].split('(')[0].split('\\'')[0]\n\n # CONTROL VARIATION: STATS (ANOVAs)\n # - Does N2 worm behaviour on control vary across experiment days? \n # (worms are larger? Shorter L1 diapause? Camera focus/FOV adjusted? Skewed by non-worm tracked objects?\n # Did not record time when worms were refed! Could be this. If so, worms will be bigger across all foods on that day) \n # - Perform ANOVA to see if features vary across imaging days for control\n # - Perform Tukey HSD post-hoc analyses for pairwise differences between imaging days\n # - Highlight outlier imaging days and investigate reasons why\n # - Save list of top significant features for outlier days - are they size-related features?\n for grouping_variable in variables_to_analyse:\n print(\"\\nTESTING: %s\\n\" % grouping_variable)\n \n if not len(df[grouping_variable].unique()) > 1:\n print(\"Need at least two groups for stats to investigate %s\" % grouping_variable)\n else:\n print(\"Performing %s tests for '%s'\" % (test_name, grouping_variable)) \n \n test_results_df, sigfeats_out = \\\n topfeats_ANOVA_by_group(df, \n grouping_variable, \n features_to_analyse,\n TEST,\n p_value_threshold)\n \n # Ensure directory exists to save results\n Path(outDir).mkdir(exist_ok=True, parents=True)\n \n # Define outpaths\n froot = 'control_variation_in_' + grouping_variable + '_' + test_name\n stats_outpath = outDir / (froot + \"_results.csv\")\n sigfeats_outpath = outDir / (froot + \"_significant_features.csv\")\n \n # Save test statistics + significant features list to file\n test_results_df.to_csv(stats_outpath)\n sigfeats_out.to_csv(sigfeats_outpath, header=False)\n\n # Box plots\n plotDir = outDir / \"Plots\"\n topfeats_boxplots_by_group(df, \n test_results_df, \n grouping_variable,\n plot_save_dir=plotDir, #save to plotDir\n p_value_threshold=p_value_threshold)\n \n # PCA (coloured by grouping variable, e.g. 
experiment date)\n df = doPCA(df, \n grouping_variable, \n features_to_analyse,\n plot_save_dir = plotDir,\n PCs_to_keep = PCs_to_keep)", "def volcano_plotter():\n print(\"this is volcano plotter\")\n from math import log\n with open(\"../bob/processed/24h_bobdata_ed2_volcano.csv\", \"w\") as outF:\n outF.write(\"Gene log2FoldChange pvalue\\n\")\n with open(\"../bob/processed/24h_bobdata_ed2.csv\", \"r\") as inpF:\n skipFlag = True\n missCount = 1\n for inpLine in inpF:\n if skipFlag:\n skipFlag = False\n continue\n inpLine = inpLine.split(\"\\\" \\\"\")\n curLine = []\n for inpI in inpLine:\n try:\n curLine.append(float(inpI.strip(\"\\\"\\n \")))\n except ValueError:\n curLine.append(inpI.strip(\"\\\"\\n \")) # by this point, each line in the entry file is processed into a neat list\n if curLine[2] == \"\": # if no gene name is given, just add a placeholder\n curLine[2] = \"Noname\" + str(missCount)\n missCount += 1\n # calculate log2foldChange here:\n try:\n FAvg = (curLine[4] + curLine[5] + curLine[6])/3.0 # KO\n SAvg = (curLine[7] + curLine[8] + curLine[9])/3.0 # WT\n except TypeError:\n print(curLine)\n raise\n logFoldChange = log(SAvg/FAvg,2) # so positive numbers are more abundant in the wt cells, negatives number in the KO, at least for the 24H bobdata file\n outF.write(curLine[2] + \" \" + str(logFoldChange) + \" \" + str(curLine[10]) + \"\\n\") # write out results to file", "def plot_lc_per_aperture(\n self,\n sector=None,\n kwargs={\"aper_radius\": 1, \"percentile\": 84, \"threshold_sigma\": 3},\n apertures=[\"round\", \"square\", \"percentile\", \"threshold\"],\n return_lcs=False,\n ):\n sector = self.sector if sector is None else sector\n nrows = len(apertures)\n fig, axs = pl.subplots(\n nrows=nrows,\n ncols=2,\n figsize=(10, nrows * 2),\n constrained_layout=True,\n gridspec_kw={\"width_ratios\": [3, 1], \"hspace\": 0, \"wspace\": 0},\n )\n custom_lcs = {}\n for n, sap_mask in enumerate(apertures):\n ax1 = axs[n, 0]\n lc = self.make_custom_lc(\n sector=sector, sap_mask=sap_mask, **kwargs\n )\n lc.scatter(ax=ax1, label=sap_mask)\n print(f\"mask={sap_mask}; contratio={self.contratio:.2f}\")\n custom_lcs[sap_mask] = lc\n if n != len(apertures) - 1:\n ax1.set_xlabel(\"\")\n ax1.set_xticklabels(\"\")\n if n == 0:\n ax1.set_title(f\"{self.target_name} (sector {sector})\")\n if self.tpf_tesscut is None:\n tpf = self.get_tpf_tesscut()\n else:\n tpf = self.tpf_tesscut\n img = np.nanmedian(self.tpf_tesscut.flux, axis=0)\n\n ax2 = axs[n, 1]\n ax = plot_aperture_outline(\n img, mask=self.aper_mask, imgwcs=tpf.wcs, ax=ax2\n )\n ax.axis(\"off\")\n if return_lcs:\n return fig, custom_lcs\n else:\n return fig", "def feature_filter(df,feature, high = True):\r\n assert feature in [\"speechiness\",\r\n \"acousticness\",\r\n \"instrumentalness\",\r\n \"liveness\"], \"feature must be one of the following: speechiness,acousticness,instrumentalness,liveness\"\r\n #more features may be added\r\n x = 0.9 if high == True else 0.1\r\n df = df[df[feature] > x] if high == True else df[df[feature] < x]\r\n return df", "def select_using_simsurvey_meta(meta, lcs):\n mask = (lcs.stats['mag_max']['p48g'] < 18.5 ) \\\n |(lcs.stats['mag_max']['p48r'] < 18.5 )\n\n # Now select\n meta = meta[mask]\n\n return meta", "def _metadata_changed(self, old, new):\n\n #self.cross_plot.value_range.low = self.minz\n #self.cross_plot.value_range.high = self.maxz\n #self.cross_plot2.value_range.low = self.minz\n #self.cross_plot2.value_range.high = self.maxz\n if self._imag_index.metadata.has_key(\"selections\"):\n x_ndx, 
y_ndx = self._imag_index.metadata[\"selections\"]\n if y_ndx and x_ndx:\n# xdata, ydata = self._image_index.get_data()\n# xdata, ydata = xdata.get_data(), ydata.get_data()\n self.pd_horiz.set_data(\"horiz\", self._image_value.data[y_ndx,:])\n self.pd_vert.set_data(\"vert\", self._image_value.data[:,x_ndx])", "def lect_ca(dir_cat,SourceRA,SourceDec,SourceROI,distmin,name,outputfile,namesource):\n\tprint(\"SourceROI=\"+str(SourceROI))\n\tfcal = open(outputfile,\"w\")\n\t\n\t#fcat=pyfits.open(dir_cat)\n\n\tdonnees=pyfits.getdata(dir_cat,1)\n\tnames=donnees.field(name_col)\n\tprint(names)\n\tra=donnees.field(ra_col)\n\tdec=donnees.field(dec_col)\n\tind=donnees.field(ind_col)\n\tcour=donnees.field(cour_col)\n\tEpiv=donnees.field(Epivot_col)\t\n\tInteg=donnees.field(Inte_col)\n\tPrefact=donnees.field(pref_col)\n\tErr_prefact=donnees.field(err_pref_col)\n\tvariabilite=donnees.field(varia_cut)\n\n\t#Loop on the LAT Catalog\n\tfor p in range(0,len(names)-1):\n\t\t#Compute the angular distance between source p and the source under study\n\t\tdist = (180./3.14159)*acos(cos(3.14159/2. - float(SourceDec)*3.14159/180.)*cos(3.14159/2.- float(dec[p])*3.14159/180.)+sin(3.14159/2. - float(SourceDec)*3.14159/180.)*sin(3.14159/2.- float(dec[p])*3.14159/180.)*cos((float(SourceRA) - float(ra[p]))*3.14159/180.))\n\n\t\t#Thesis of Marie-Helene Grondin p.\n\t\t#nom=names[p].split(\" \")\n\t\t#names[p]=nom[0]+\"_\"+nom[1]\n\n\n#\t\tif (dist< float(SourceROI)):\n#\t\t\tprint(str(dist)+str(names[p]))\n\t\tif (dist < float(SourceROI) and dist > float(distmin) ): #If the source is inside the region of interest but is not confused with the source itself. The 0.2 cut is subjective, a compromise that works in most but not all cases. Beware: sometimes this 0.2 must be replaced by 0.3\n\t\t\tif cour[p]!=\"NULL\" and float(Integ[p]) > 1e-8 and dist < 5: #cut-off\n\t\t\t\tprint(\"curvature = \", float(cour[p]))\n\t\t\t\ttxt = str(names[p])+\" \"+str(ra[p])+\" \"+str(dec[p])+\" \"+str(Integ[p])+\" \"+str(ind[p])+\" \"+str(Prefact[p])+\" \"+str(Epiv[p])+\" 1\"\n#\t\t\t\tif variabilite[p]>varia_seuil:\n\t\t\t\ttxt+=\" 1 \"+str(dist)\n#\t\t\t\telse :\n#\t\t\t\t\ttxt+=\" 0 \"+str(dist)\n\t\t\telse: #no cut-off, with flux level high enough\n\t\t\t\ttxt = str(names[p])+\" \"+str(ra[p])+\" \"+str(dec[p])+\" \"+str(Integ[p])+\" \"+str(ind[p])+\" \"+str(Prefact[p])+\" \"+str(Epiv[p])+\" 0\"\n#\t\t\t\tif variabilite[p]>varia_seuil:\n\t\t\t\ttxt+=\" 1 \"+str(dist)\n#\t\t\t\telse :\n#\t\t\t\t\ttxt+=\" 0 \"+str(dist)\n\t\t\tfcal.write(txt)\n\t\t\tfcal.write(\"\\n\")\n\t\telif (dist < float(raymax) and names[p]!=namesource) :\n\n\t\t\tif float(cour[p]) > 11.34 and float(Integ[p]) > 1e-8 and dist < 5: #cut-off\n\t\t\t\tprint(\"curvature = \", float(cour[p]))\n\t\t\t\ttxt = str(names[p])+\" \"+str(ra[p])+\" \"+str(dec[p])+\" \"+str(Integ[p])+\" \"+str(ind[p])+\" \"+str(Prefact[p])+\" \"+str(Epiv[p])+\" 1 2 \"+str(dist)\n\n\t\t\telse: #no cut-off, with flux level high enough\n\t\t\t\ttxt = str(names[p])+\" \"+str(ra[p])+\" \"+str(dec[p])+\" \"+str(Integ[p])+\" \"+str(ind[p])+\" \"+str(Prefact[p])+\" \"+str(Epiv[p])+\" 0 2 \"+str(dist)\n\t\t\tfcal.write(txt)\n\t\t\tfcal.write(\"\\n\")\n\t#Add the object under study itself\n\tpwn = name\n\ttxt = str(pwn)+\" \"+str(SourceRA)+\" \"+str(SourceDec)+\" 1e-10 2 0\"\n\tfcal.write(txt)\n\tfcal.write(\"\\n\")\n\n\tfcal.close()", "def make_tuning_plot_rmse(df, error_col_name=\"rmse\",\n error_title = \"Top 10% RMSE\",\n cutoff = 0.10):\n\n df = 
df.copy()\n\n # Get the regularizer and reset coeff\n coeff = [float(i.split(\"evidence_new_reg_\")[1]) if \"evidence\" in i else i for i in df['method_name']]\n df[\"method_name\"] = coeff\n df[\"Data\"] = convert_dataset_names(df[\"dataset\"])\n df[\"Method\"] = df[\"method_name\"]\n\n # Get appropriate datasets\n trials = 'trial_number'\n methods = 'Method'\n\n # Make area plot\n uniq_methods = set(df[\"Method\"].values)\n method_order = sorted(uniq_methods,\n key=lambda x : x if isinstance(x, float) else -1)\n method_df = []\n datasets = set()\n for data, sub_df in df.groupby(\"Data\"):\n # Add datasets\n datasets.add(data)\n rmse_sub = sub_df[error_col_name]\n methods_sub = sub_df[\"Method\"]\n trials_sub= sub_df['trial_number']\n for method_idx, method in enumerate(method_order):\n # Now summarize these lines\n bool_select = (methods_sub == method)\n\n rmse_method = rmse_sub[bool_select]\n trials_temp = trials_sub[bool_select]\n areas = []\n # create area!\n for trial, rmse_trial in zip(trials_sub, rmse_method):\n num_tested = len(rmse_trial)\n cutoff_index = int(cutoff * num_tested) - 1\n rmse_val = rmse_trial[-cutoff_index]\n to_append = {error_title: rmse_val,\n \"Regularizer Coeff, $\\lambda$\": method,\n \"method_name\": method,\n \"Data\": data,\n \"Trial\" : trial}\n method_df.append(to_append)\n method_df = pd.DataFrame(method_df)\n\n # Filter out dropout\n method_df = method_df[[i != \"dropout\" for i in\n method_df['method_name']]].reset_index()\n\n # Normalize by dataset\n for dataset in datasets:\n # Make a divison vector of ones and change it to a different value only\n # for the correct dataset of interest to set max rmse to 1\n division_factor = np.ones(len(method_df))\n indices = (method_df[\"Data\"] == dataset)\n\n # Normalize with respect to the ensemble so that this is 1\n max_val = method_df[indices].query(\"method_name == 'ensemble'\").mean()[error_title]\n\n # Take the maximum of the AVERAGE so it's normalized to 1\n division_factor[indices] = max_val\n method_df[error_title] = method_df[error_title] / division_factor\n\n method_df_evidence = method_df[[isinstance(i, float) for i in\n method_df['method_name']]].reset_index()\n method_df_ensemble = method_df[[\"ensemble\" in str(i) for i in\n method_df['method_name']]].reset_index()\n\n data_colors = {\n dataset : sns.color_palette()[index]\n for index, dataset in enumerate(datasets)\n }\n\n min_x = np.min(method_df_evidence[\"Regularizer Coeff, $\\lambda$\"])\n max_x= np.max(method_df_evidence[\"Regularizer Coeff, $\\lambda$\"])\n\n sns.lineplot(x=\"Regularizer Coeff, $\\lambda$\", y=error_title,\n hue=\"Data\", alpha=0.8, data=method_df_evidence,\n palette = data_colors)\n\n for data, subdf in method_df_ensemble.groupby(\"Data\"):\n\n color = data_colors[data]\n area = subdf[error_title].mean()\n std = subdf[error_title].std()\n plt.hlines(area, min_x, max_x, linestyle=\"--\", color=color, alpha=0.8)\n\n # Add ensemble baseline\n ensemble_line = plt.plot([], [], color='black', linestyle=\"--\",\n label=\"Ensemble\")\n # Now make ensemble plots\n plt.legend(bbox_to_anchor=(1.1, 1.05))", "def og_features(scan,filt=None,base_noise=None,thresh=-1.4781e-10,diff=1,verbose=False,scale=10):\n #get gradients of data\n der = np.array(np.gradient(scan,diff))\n \n #calculate gardient magnitudes and directions\n der_mag = np.linalg.norm(der,axis=0) \n der_uvecs = der/der_mag\n \n z_cur = np.copy(scan).ravel()\n\n #estimate noise level and set derivative filter threshold\n if filt is None:\n filt = 
np.mean(signaltonoise(der_mag)[-1])\n \n \n if base_noise is not None:\n filt = np.maximum(filt,base_noise)\n \n\n\n #filter directions and magnitudes\n x, y, z = der_uvecs[0].ravel(), der_uvecs[1].ravel(), der_mag.ravel()\n \n #filter using threshold and filt\n x_filt, y_filt, z_filt = x[z_cur>thresh], y[z_cur>thresh], z[z_cur>thresh]\n #x_filt, y_filt, z_filt = x, y, z\n\n \n #print(len(z_filt))\n x_filt, y_filt, z_filt = x_filt[z_filt>filt], y_filt[z_filt>filt], z_filt[z_filt>filt]\n\n \n #calculate angles\n angles_filt = np.sign(y_filt)*np.arccos(x_filt/1)\n\n \n #print(len(angles_filt))\n \n if len(angles_filt) < 2:\n return 0,0,0\n \n #fit single line\n sol1 = least_squares(ress_1line,[-np.pi/2],args=(angles_filt,),bounds=[-np.pi,0],method='dogbox',jac='2-point',max_nfev=2000)\n\n #fit two lines by grid search\n #sol_grid = grid_search(ress_2line,angles_filt,[[-np.pi,0],[-np.pi,0]])\n \n \n singleline = sol1.x[0]\n \n mx = np.minimum(np.abs(singleline-(-np.pi)),np.abs(singleline))\n \n sol_grid = grid_search(ress_2line_pm,angles_filt,[[0,mx]],umid = singleline)\n spread_lines = sol_grid[1]\n sol_grid[1] = [singleline+spread_lines,singleline-spread_lines]\n \n \n #compute average of squared residuals for both cases\n resid1 = ress_1line(sol1.x,angles_filt)\n\n grid_c11 = np.average(np.power(resid1,2))\n \n grid_c11 = np.average(np.abs(resid1))\n \n grid_c21 = sol_grid[-1]\n \n \n multip = cotunnel_score2(scan,scan>thresh,diff,scale)\n \n final_grid2 = multip*(grid_c11-grid_c21)\n \n \n \"\"\"\n plt.scatter(angles_filt,z_filt,marker='x',c='k',s=15,linewidth=0.4)\n plt.axvline(sol1.x,color='b')\n plt.axvline(sol1.x+(np.pi),color='b')\n plt.axvline(sol_grid[1][0],0,color='r', linestyle='--')\n plt.axvline(sol_grid[1][1],0,color='r', linestyle='--')\n \n plt.axvline(sol_grid[1][0]+(np.pi),0,color='r', linestyle='--')\n plt.axvline(sol_grid[1][1]+(np.pi),0,color='r', linestyle='--')\n \n plt.xlabel(\"$\\\\theta_g$ / rad\")\n \n plt.xlim([-np.pi,np.pi])\n plt.ylim([0,z.max()])\n \n \n plt.ylabel(\"$|g|$\")\n \n plt.xticks([-np.pi,0,np.pi])\n \n plt.locator_params(axis='y', nbins=2)\n \n plt.savefig(\"og_fig.svg\")\n \n plt.show()\n \"\"\"\n return final_grid2,multip,(grid_c11-grid_c21)", "def filter_data(self):\n self.df = self.df[HeatStrokeDataFiller.important_features]", "def process_catalog(\n type1, cat, config, RA, DEC, z=\"Redshift\", RAf=\"RA\", DECf=\"DEC\", origin=\"Origin\"\n):\n # Skip unwanted catalogues\n if unwanted_catalogue(cat.meta[\"name\"], set_unwanted_list(type1, config)):\n return None\n\n # Make a list of potential column that contain redshift information\n col_selection = column_selection(type1, cat)\n final_z_col = select_best_redshift(cat, col_selection)\n\n # If no relevant redshift column is present, skip the catalog\n if final_z_col == None:\n return None\n\n # Skip weird column/tables with weird units/types for\n if cat[final_z_col].dtype not in [np.float32, np.float64]:\n return None\n\n # If all values are masked, skip the catalog\n if all(cat[final_z_col].mask):\n return None\n\n # Homogenize column names\n cat.rename_column(final_z_col, z)\n\n # Select only relevant columns: RA, DEC and redshift\n final_cat = cat[RA, DEC, z][~cat[z].mask]\n\n # Rename the coord columns with chosen names\n final_cat.rename_column(RA, RAf)\n final_cat.rename_column(DEC, DECf)\n\n # Add Vizier catalog name to the table for future reference\n final_cat.add_column(Column([cat.meta[\"name\"]] * len(final_cat)), name=origin)\n\n # Add to master list of tables\n return final_cat", 
"def select_var_feature(adata, min_score=0.5, nb_features=None, show=True, copy=False):\n if copy:\n inplace=False\n else:\n inplace=True\n\n adata = adata.copy() if not inplace else adata\n \n # calculate variability score\n cal_var(adata, show=show) # adds variability score for each feature \n # adata.var['variablility_score'] = abs(adata.var['prop_shared_cells']-0.5)\n var_annot = adata.var.sort_values(ascending=True, by ='variability_score')\n\n # calculate the min score to get a specific number of feature \n if nb_features != None and nb_features < len(adata.var_names): \n min_score = var_annot['variability_score'][nb_features] \n \n \n adata_tmp = adata[:,adata.var['variability_score']<=min_score].copy()\n \n ## return the filtered AnnData objet.\n if not inplace:\n adata_tmp = adata[:,adata.var['variability_score']<=min_score]\n return(adata_tmp)\n else:\n adata._inplace_subset_var(adata.var['variability_score']<=min_score)", "def main():\n df = pd.read_csv(\"traj_samples_v3.csv\")\n\n # remove run and motorcycle\n df = df[df.transportation_mode != 'run']\n df = df[df.transportation_mode != 'motorcycle']\n\n # the 'mean_speed' will change depending on which point feature I wanted to plot. It is only 1 of 20 that I tried.\n df[['mean_speed', 'transportation_mode']].boxplot(by='transportation_mode')\n axes = plt.gca()\n axes.set_ylim([-1, 360]) # I set this value depending on which point feature I'm plotting.\n plt.show()", "def initVolumeCutPlaneNode(node, item):\n if isinstance(item, items.ComplexMixIn):\n node.addRow(ComplexModeRow(item))\n\n node.addRow(PlaneRow(item))\n\n node.addRow(ColormapRow(item))\n\n node.addRow(ItemProxyRow(\n item=item,\n name='Show <=Min',\n fget=item.getDisplayValuesBelowMin,\n fset=item.setDisplayValuesBelowMin,\n events=items.ItemChangedType.ALPHA))\n\n node.addRow(InterpolationRow(item))", "def identify_flux(xyz: list) -> list:\n flagged_lines = []\n\n for line in xyz:\n *orig,dollar_amount,pct_amount = line\n if abs(dollar_amount) > THRESHOLDS[0] and abs(pct_amount) > THRESHOLDS[1]:\n flagged_lines.append(line)\n\n\n\n\n return flagged_lines", "def refine_dataset(original_data,settings):\n data = original_data[original_data.sweep_primary_load_temperature >= settings['valid_load_temp_range'][0]]\n data = data[data.sweep_primary_load_temperature <= settings['valid_load_temp_range'][1]]\n data = data[data.f_0_err/data.f_0 < settings['fractional_f_0_err_limit']]\n data = data[data.Q_err/data.Q < settings['fractional_Q_err_limit']]\n data = data[data.Q >= settings['valid_Q_range'][0]]\n data = data[data.Q <= settings['valid_Q_range'][1]]\n if settings['max_package_temp_deviation'] is not None:\n median_temp = np.median(data.sweep_primary_package_temperature)\n temp_deviations = np.abs(data.sweep_primary_package_temperature - median_temp)\n data = data[temp_deviations < settings['max_package_temp_deviation']]\n #data = data.sort([\"f_0\"])\n data['f_0_max'] = np.zeros((data.shape[0],))#data.groupby(\"resonator_index\")[\"f_0\"].transform(lambda x: x.max())\n data['Q_i_max'] = np.zeros((data.shape[0],))\n data['responsivity_Hz_per_K'] = np.zeros((data.shape[0],))\n data['responsivity_err'] = np.zeros((data.shape[0],))\n data['responsivity_offset'] = np.zeros((data.shape[0],))\n for index in np.unique(data.resonator_index):\n group = data[data.resonator_index == index]\n max = group[group.sweep_primary_load_temperature < settings['f_0_max_temp_limit']].f_0.max()\n data.f_0_max[data.resonator_index == index] = max\n max = 
group[group.sweep_primary_load_temperature < settings['f_0_max_temp_limit']].Q_i.max()\n data.Q_i_max[data.resonator_index == index] = max\n \n data['delta_f_0_Hz'] = (data.f_0-data.f_0_max)*1e6\n data['fractional_delta_f_0'] = data.delta_f_0_Hz/(1e6*data.f_0_max)#(1e6*data.noise_measurement_freq_MHz)\n data['fractional_delta_Q_i'] = data.Q_i/data.Q_i_max - 1\n\n for index in np.unique(data.resonator_index):\n group = data[data.resonator_index == index]\n try:\n (slope,offset),cov = np.polyfit(group.sweep_primary_load_temperature,group.delta_f_0_Hz,1,cov=True)\n print(slope)\n data.responsivity_Hz_per_K[data.resonator_index == index] = slope\n data.responsivity_offset[data.resonator_index == index] = offset\n data.responsivity_err[data.resonator_index == index] = np.sqrt(cov[1,1])\n except ValueError:\n continue\n except np.linalg.LinAlgError:\n continue\n eigvals_Hz = []\n nets = []\n for eigvals,freq,responsivity in zip(data.pca_eigvals,data.noise_measurement_freq_MHz,data.responsivity_Hz_per_K):\n # Convert eigvals spectra from 1/Hz units to Hz/sqrt(Hz)\n spectrum_Hz = np.sqrt(eigvals)*freq*1e6\n eigvals_Hz.append(spectrum_Hz)\n # Calculate net in muK sqrt(s). In the following, 1e6 is K -> uK factor, and sqrt(2) is 1/sqrt(Hz) -> sqrt(s) factor\n net = (1e6*spectrum_Hz/abs(responsivity))/np.sqrt(2)\n nets.append(net)\n data['pca_eigvals_Hz_per_rootHz'] = eigvals_Hz\n data['net_uK_rootsec'] = nets\n return data", "def __init__(self):\n self.filter_p_number = 3 # First one with enough data for statistics\n self.prfs_d = extract_settings_elvis()\n\n ccds = True\n filtered = False\n scamp = False\n\n input_df = read_csv('cats/cat_clean_ssos.csv', index_col=0)\n filt_cat = self.gets_filtered_catalog() # Gets data from filtered\n\n if ccds:\n cats_d = self.extract_cats()\n self.extract_stats_ccds(cats_d, input_df, filt_cat)\n elif filtered:\n self.extract_stats_filt(filt_cat, input_df)\n elif scamp:\n pass\n # self.extract_stats_scamp(input_df)\n else:\n pass", "def xx_plot(epoch, model, features, filters, figname, fgal=0.5):\n # fetch Stripe 82 data\n X, Xcov = fetch_prepped_s82data(epoch, fgal, features, filters)\n Xcoadd, Xcoaddcov = fetch_prepped_s82data(epoch, fgal, features,\n filters, use_single=False)\n N = 20000\n X = X[:N]\n Xcov = Xcov[:N]\n Xcoadd = Xcoadd[:N]\n Xcoaddcov = Xcoaddcov[:N]\n\n # unpickle the XD model\n if type(model) == str: \n f = open(model, 'rb')\n model = cPickle.load(f)\n f.close()\n\n # Calculate the posteriors, draw samples\n a, m, v = model.posterior(X, Xcov)\n posts = np.zeros_like(X)\n for i in range(X.shape[0]):\n posts[i] = model.sample(a[i], m[i], v[i], size=1)\n\n lo = [0.01, 0.02, 0.06]\n hi = [0.99, 0.96, 0.98]\n idx = [0, 1, 4]\n bins = [100, 100, 300]\n label = ['psfmag $r$', 'modelmag $u-g$', 'modelmag $i-z$']\n N = len(idx)\n fs = 5\n lsize = 20\n f = pl.figure(figsize=(N * fs, 2 * fs))\n pl.subplots_adjust(wspace=0.3)\n for i in range(N):\n x = X[:, idx[i]]\n y = Xcoadd[:, idx[i]]\n p = posts[:, idx[i]]\n ind = (y > -999) & (Xcoaddcov[:, idx[i]][:, idx[i]] < 10.)\n x = x[ind]\n y = y[ind]\n p = p[ind]\n ax = pl.subplot(2, N, i + 1)\n v = np.sort(x)\n mn, mx = v[int(lo[i] * x.shape[0])], v[int(hi[i] * x.shape[0])]\n hist2d(x, y, ax=ax, bins=bins[i], plot_contours=True,\n plot_datapoints=True)\n pl.plot([mn, mx], [mn, mx], 'r', lw=2)\n pl.ylabel('Coadd ' + label[i], fontsize=lsize)\n pl.xlabel('Single Epoch ' + label[i], fontsize=lsize)\n pl.xlim(mn, mx)\n pl.ylim(mn, mx)\n ax = pl.subplot(2, N, i + 
4)\n hist2d(p, y, ax=ax, bins=bins[i], plot_contours=True,\n plot_datapoints=True)\n pl.plot([mn, mx], [mn, mx], 'r', lw=2)\n pl.xlim(mn, mx)\n pl.ylim(mn, mx)\n pl.ylabel('Coadd ' + label[i], fontsize=lsize)\n pl.xlabel('XD Posterior ' + label[i], fontsize=lsize)\n f.savefig(figname, bbox_inches='tight')", "def VarianceFilter(X, data_headers, varCut=0.1):\n Xidx = np.var(X[data_headers].values, axis=1) > varCut\n return X.iloc[Xidx, :]", "def get_iPTF16hgs(colorplt = False):\n z = 0.017\n ebv = 0\n D = cosmo.luminosity_distance([z])[0].value * 1e+6 # in pc\n dis_mod = 5*np.log10(D / 10)\n \n tb = pd.read_csv('../data/otherSN/iPTF16hgs/table1.txt', sep=\"\\t\")\n tb = tb.drop(columns=[\"Unnamed: 5\"])\n tb = tb.rename(columns={'Filter' : 'filter',\n 'MJD': 'mjd'})\n tb = tb[~np.array([x[0]=='>' for x in tb['Magnitude'].values])]\n tb['mag'] = np.array([float(x.split(\" +or-\")[0]) for x in tb['Magnitude'].values])\n tb['emag'] = np.array([float(x.split(\" +or-\")[1]) for x in tb['Magnitude'].values])\n tb = tb.drop(columns=[\"Magnitude\"])\n \n ixg = tb['filter'].values == \"g\"\n ixr = tb['filter'].values == \"r\"\n ixi = tb['filter'].values == \"i\"\n tb['wave'] = np.zeros(len(tb))\n tb['wave'].values[ixg] = 4814\n tb['wave'].values[ixr] = 6422\n tb['wave'].values[ixi] = 7883\n tb['mag0'] = tb['mag'] - extinction.ccm89(tb['wave'].values, 3.1*ebv, 3.1)\n tb['mag0_abs'] = tb['mag0'] - dis_mod\n t_max = 57691.59 # from the paper\n tb['tmax_of'] = tb['mjd'] - t_max\n tb['tmax_rf'] = (tb['mjd'] - t_max) / (1+z)\n \"\"\"\n plt.errorbar(tb[\"tmax_rf\"].values[ixg], tb[\"mag\"].values[ixg], tb[\"emag\"].values[ixg], fmt=\".g\")\n plt.errorbar(tb[\"tmax_rf\"].values[ixr], tb[\"mag\"].values[ixr], tb[\"emag\"].values[ixr], fmt=\".r\")\n plt.errorbar(tb[\"tmax_rf\"].values[ixi], tb[\"mag\"].values[ixi], tb[\"emag\"].values[ixi], fmt=\".y\")\n \"\"\"\n tb = add_datecol(tb)\n tb = add_physcol(tb)\n #tb = tb.drop(columns=[\"datetime64\"])\n if colorplt==False:\n return tb\n else:\n #tb = tb[tb.mjd > 55352.5]\n #tb = tb[tb.mjd < 55593.5]\n \n dates = get_date_span(tb)\n datesave = []\n for i in range(len(dates)):\n x = dates[i]\n ix = tb[\"date\"].values == x\n tbsub = tb[ix]\n if len(tbsub)!=0:\n flts = tbsub['filter'].values\n if \"r\" in flts and np.sum(np.unique(flts))!=1:\n datesave.append(x)\n datesave = np.array(datesave)\n \n mcolor = []\n mcolor_unc = []\n mjds = []\n colorname = []\n for i in range(len(datesave)):\n x = datesave[i]\n ix = tb[\"date\"].values == x\n tbsub = tb[ix]\n gtb = tbsub[tbsub[\"filter\"].values==\"g\"]\n rtb = tbsub[tbsub[\"filter\"].values==\"r\"]\n itb = tbsub[tbsub[\"filter\"].values==\"i\"]\n if len(gtb)!=0:\n gmjds = gtb[\"mjd\"].values\n gmags = gtb[\"mag0\"].values\n gemags = gtb[\"emag\"].values\n gwtgs = 1/gemags**2\n gmag = np.sum(gmags * gwtgs) / np.sum(gwtgs)\n gmjd = np.sum(gmjds * gwtgs) / np.sum(gwtgs)\n gemag = 1/ np.sqrt(np.sum(gwtgs))\n if len(rtb)!=0:\n rmjds = rtb[\"mjd\"].values\n rmags = rtb[\"mag0\"].values\n remags = rtb[\"emag\"].values\n rwtgs = 1/remags**2\n rmag = np.sum(rmags * rwtgs) / np.sum(rwtgs)\n rmjd = np.sum(rmjds * rwtgs) / np.sum(rwtgs)\n remag = 1/ np.sqrt(np.sum(rwtgs))\n if len(itb)!=0:\n imjds = itb[\"mjd\"].values\n imags = itb[\"mag0\"].values\n iemags = itb[\"emag\"].values\n iwtgs = 1/iemags**2\n imag = np.sum(imags * iwtgs) / np.sum(iwtgs)\n imjd = np.sum(imjds * iwtgs) / np.sum(iwtgs)\n iemag = 1/ np.sqrt(np.sum(iwtgs))\n if len(gtb)!=0 and len(rtb)!=0:\n mcolor.append(gmag - rmag)\n mjds.append( 0.5 * (gmjd + rmjd) )\n 
mcolor_unc.append( np.sqrt(gemag**2 + remag**2) )\n colorname.append(\"gmr\")\n if len(rtb)!=0 and len(itb)!=0:\n mcolor.append(rmag - imag)\n mjds.append( 0.5 * (rmjd + imjd) )\n mcolor_unc.append( np.sqrt(remag**2 + iemag**2) )\n colorname.append(\"rmi\")\n \n ctb = Table(data = [mjds, mcolor, mcolor_unc, colorname],\n names = [\"mjd\", \"c\", \"ec\", \"cname\"])\n \n ctb['tmax_rf'] = (ctb['mjd'] - t_max) / (1+z)\n ctb = ctb.to_pandas()\n return ctb", "def add_catalogs(self):\n n_exposures = len(self.info['Module'])\n self.info['point_source'] = [None] * n_exposures\n self.info['galaxyListFile'] = [None] * n_exposures\n self.info['extended'] = [None] * n_exposures\n self.info['convolveExtended'] = [False] * n_exposures\n self.info['movingTarg'] = [None] * n_exposures\n self.info['movingTargSersic'] = [None] * n_exposures\n self.info['movingTargExtended'] = [None] * n_exposures\n self.info['movingTargToTrack'] = [None] * n_exposures\n\n for i in range(n_exposures):\n if int(self.info['detector'][i][-1]) < 5:\n filtkey = 'ShortFilter'\n pupilkey = 'ShortPupil'\n else:\n filtkey = 'LongFilter'\n pupilkey = 'LongPupil'\n filt = self.info[filtkey][i]\n pup = self.info[pupilkey][i]\n\n if self.point_source[i] is not None:\n # In here, we assume the user provided a catalog to go with each filter\n # so now we need to find the filter for each entry and generate a list that makes sense\n self.info['point_source'][i] = os.path.abspath(os.path.expandvars(\n self.catalog_match(filt, pup, self.point_source, 'point source')))\n else:\n self.info['point_source'][i] = None\n if self.galaxyListFile[i] is not None:\n self.info['galaxyListFile'][i] = os.path.abspath(os.path.expandvars(\n self.catalog_match(filt, pup, self.galaxyListFile, 'galaxy')))\n else:\n self.info['galaxyListFile'][i] = None\n if self.extended[i] is not None:\n self.info['extended'][i] = os.path.abspath(os.path.expandvars(\n self.catalog_match(filt, pup, self.extended, 'extended')))\n else:\n self.info['extended'][i] = None\n if self.movingTarg[i] is not None:\n self.info['movingTarg'][i] = os.path.abspath(os.path.expandvars(\n self.catalog_match(filt, pup, self.movingTarg, 'moving point source target')))\n else:\n self.info['movingTarg'][i] = None\n if self.movingTargSersic[i] is not None:\n self.info['movingTargSersic'][i] = os.path.abspath(os.path.expandvars(\n self.catalog_match(filt, pup, self.movingTargSersic, 'moving sersic target')))\n else:\n self.info['movingTargSersic'][i] = None\n if self.movingTargExtended[i] is not None:\n self.info['movingTargExtended'][i] = os.path.abspath(os.path.expandvars(\n self.catalog_match(filt, pup, self.movingTargExtended, 'moving extended target')))\n else:\n self.info['movingTargExtended'][i] = None\n if self.movingTargToTrack[i] is not None:\n self.info['movingTargToTrack'][i] = os.path.abspath(os.path.expandvars(\n self.catalog_match(filt, pup, self.movingTargToTrack, 'non-sidereal moving target')))\n else:\n self.info['movingTargToTrack'][i] = None\n if self.convolveExtended is True:\n self.info['convolveExtended'] = [True] * n_exposures", "def _subclass_init(self, **kwargs):\n self._flux_scaling = 1.0\n if kwargs.get('apply_metacal_test3_fix'):\n # In Run 1.2i metacal_test3, the fluxes returned by metacal are not\n # properly scaled by the pixel size, it's necessary to apply a\n # 0.2**2 correction factor\n self._flux_scaling /= 0.2**2\n\n super(DC2MetacalCatalog, self)._subclass_init(**kwargs)", "def test_scalar_aperture():\n\n data = np.ones((20, 20), dtype=float)\n\n ap = 
CircularAperture((10, 10), r=3.0)\n colnames1 = aperture_photometry(data, ap, error=data).colnames\n assert (colnames1 == ['id', 'xcenter', 'ycenter', 'aperture_sum',\n 'aperture_sum_err'])\n\n colnames2 = aperture_photometry(data, [ap], error=data).colnames\n assert (colnames2 == ['id', 'xcenter', 'ycenter', 'aperture_sum_0',\n 'aperture_sum_err_0'])\n\n colnames3 = aperture_photometry(data, [ap, ap], error=data).colnames\n assert (colnames3 == ['id', 'xcenter', 'ycenter', 'aperture_sum_0',\n 'aperture_sum_err_0', 'aperture_sum_1',\n 'aperture_sum_err_1'])", "def find_components_old(image,deltaPix,lens_rad_arcsec = 5.0,lens_rad_ratio = None, gal_rad_ratio = 0.1,min_size_arcsec=0.3,thresh=0.4, show_locations=False):\n\n # convert minimum component size in pixel units\n min_size = int(min_size_arcsec / deltaPix)\n \n #Convert lens radius and central galaxy radius to pixels\n if lens_rad_ratio == None:\n lens_rad = int(lens_rad_arcsec / deltaPix)\n else: lens_rad = int(len(image) * lens_rad_ratio)\n gal_rad = int(len(image) * gal_rad_ratio)\n \n # downscale source image to data resolution (for speed + easier for converting to data units)\n #down = image_util.re_size(image, factor=supersampling_factor_source)\n \n # apply laplacian of gaussian (LoG) filter to enhance maxima\n filtered = - gaussian_laplace(deepcopy(image), sigma = min_size, mode='constant', cval=0.)\n \n# print(filtered.min(),filtered.max(),filtered.min() + thresh * np.abs(filtered.min()))\n \n \n # assume all value below max*threshold can not be maxima, so put all to zero\n# filtered[filtered < thresh*filtered.max()] = 0.\n \n# assume all value below min*threshold can not be maxima, so put all to zero\n filtered[filtered < filtered.min() + thresh * np.abs(filtered.min())] = 0.\n \n if show_locations:\n plt.figure(figsize = (8,8))\n plt.subplot(1,2,1)\n plt.imshow(image, origin='lower', norm=SymLogNorm(5))\n plt.title('Image')\n\n plt.subplot(1,2,2)\n plt.imshow(filtered, origin='lower', norm=SymLogNorm(5))\n plt.title('Filtered Image')\n plt.show()\n \n # find coordinates of local maxima\n #print(int(0.5 * min_size))\n max_idx_2d_small = peak_local_max(filtered, min_distance=0)\n max_idx_2d_large = peak_local_max(filtered, min_distance=1)\n \n x_list_small, y_list_small = max_idx_2d_small[:, 1], max_idx_2d_small[:, 0]\n x_list_large, y_list_large = max_idx_2d_large[:, 1], max_idx_2d_large[:, 0]\n \n im_center_x, im_center_y = len(image) / 2., len(image) / 2.\n \n R = np.sqrt((x_list_large - im_center_x)**2 + (y_list_large - im_center_y)**2)\n new_center_x, new_center_y = x_list_large[R < gal_rad], y_list_large[R < gal_rad]\n \n if (len(new_center_x) > 1) and (len(x_list_large[R == R.min()]) ==1 ): \n new_center_x, new_center_y = x_list_large[R == R.min()], y_list_large[R == R.min()]\n elif (len(new_center_x) > 1) and (len(x_list_large[R == R.min()]) > 1 ): \n new_center_x, new_center_y = im_center_x, im_center_y\n elif len(new_center_x) == 0: \n new_center_x, new_center_y = im_center_x, im_center_y\n \n \n R_small = np.sqrt((x_list_small - new_center_x)**2 + (y_list_small - new_center_y)**2)\n R_large = np.sqrt((x_list_large - new_center_x)**2 + (y_list_large - new_center_y)**2)\n \n x_sats, y_sats = x_list_small[R_small > lens_rad], y_list_small[R_small > lens_rad]\n \n # show maxima on image for debug\n if show_locations:\n fig = plt.figure(figsize=(4, 4))\n #plt.imshow(image, origin='lower', cmap=cmap_flux, norm=LogNorm(1e-2))\n plt.imshow(image, origin='lower', norm=SymLogNorm(5))\n \n for i in range(len(x_sats)):\n 
plt.scatter([x_sats[i]], [y_sats[i]], c='red', s=60, marker='+')\n# plt.annotate(i+1, (x_list[i], y_list[i]), color='black')\n \n# for i in range(len(x_mask)):\n# plt.scatter([x_mask[i]], [y_mask[i]], c='red', s=100, marker='*')\n# plt.annotate(i+1, (x_mask[i], y_mask[i]), color='red')\n plt.scatter(new_center_x, new_center_y,c='red', s=100, marker='*')\n \n draw_lens_circle = Circle((new_center_x, new_center_y),lens_rad ,fill=False)\n draw_gal_circle = Circle((new_center_x, new_center_y),gal_rad, fill = False)\n plt.gcf().gca().add_artist(draw_lens_circle)\n plt.gcf().gca().add_artist(draw_gal_circle)\n plt.title('Detected Components')\n plt.text(1, 1, \"detected components\", color='red')\n fig.axes[0].get_xaxis().set_visible(True); fig.axes[0].get_yaxis().set_visible(True)\n plt.show()\n return (x_sats, y_sats), (new_center_x, new_center_y)", "def __init__(self,df, init_pars, var='dep_var', var_name='Volume of Nile'):\n self.df = df\n self.var = var\n self.var_name = var_name\n self.y = np.array(df[var].values.flatten())\n self.times = df.index\n self.pardict = init_pars\n self.options = {'eps':1e-09,\n 'maxiter':2000}", "def get_catalog(self, query=None, query_fields=None, print_query=False,exclude_gaia=False,**kwargs):\n # Query\n main_cat = super(DECaL_Survey, self).get_catalog(query_fields=query_fields, print_query=print_query,**kwargs)\n main_cat = Table(main_cat,masked=True)\n #\n for col in main_cat.colnames:\n main_cat[col].mask = np.isnan(main_cat[col])\n #Convert SNR to mag error values.\n snr_cols = [colname for colname in main_cat.colnames if \"snr\" in colname]\n for col in snr_cols:\n main_cat[col].mask = main_cat[col]<0\n main_cat[col] = 2.5*np.log10(1+1/main_cat[col])\n \n main_cat = main_cat.filled(-99.0)\n #Remove gaia objects if necessary\n if exclude_gaia:\n self.catalog = main_cat[main_cat['gaia_pointsource']==0]\n else:\n self.catalog = main_cat\n # Clean\n main_cat = catalog_utils.clean_cat(main_cat, photom['DECaL'])\n self.validate_catalog()\n # Return\n return self.catalog", "def quantify_cf(lags, cf, plot=False):\n\n # identify the peak magnitude\n abs_cf = np.abs(cf)\n peak_magnitude = abs_cf.max()\n\n # identify the peak delay\n imax = abs_cf.argmax()\n peak_delay = lags[imax]\n\n # compute the area under the curve\n dt = np.diff(lags).max()\n cf_width = abs_cf.sum()*dt\n\n # compute the skewdness\n p = abs_cf / abs_cf.sum()\n mean = np.sum(lags*p)\n std = np.sqrt(np.sum(p*(abs_cf - mean)**2))\n skew = np.sum(p*(abs_cf - mean)**3) / std**3\n\n # compute the left and right areas under the absolute curve\n max_width = abs_cf[lags != 0].sum()*dt\n right_width = abs_cf[lags > 0].sum()*dt\n left_width = abs_cf[lags < 0].sum()*dt\n\n # create a measure of anisotropy from the AUCs\n anisotropy = (right_width - left_width) / max_width\n \n li = lags < 0\n ri = lags > 0\n\n # determine the mean lag time, i.e. the lag \"center of mass\". 
do this for each half\n cfl = np.abs(cf[li]) / np.abs(cf[li]).sum()\n left_lag = np.sum(cfl*lags[li])\n cfr = np.abs(cf[ri]) / np.abs(cf[ri]).sum()\n right_lag = np.sum(cfr*lags[ri])\n\n # integrate the right and left sides independently\n dl = np.diff(lags).max()\n left_sum = cf[li].sum()*dl\n right_sum = cf[ri].sum()*dl\n\n # take the correlation coefficient at zero lag\n cc = cf[lags == 0][0]\n\n if plot:\n plt.figure()\n plt.axhline(0, c='k')\n plt.plot(lags, cf, 'r-', linewidth=3)\n plt.axvline(peak_delay, c='g', alpha=0.75)\n plt.ylim(-1, 1)\n plt.axis('tight')\n t = 'width=%0.1f, mean=%0.1f, std=%0.1f, skew=%0.1f, anisotropy=%0.2f' % (cf_width, mean, std, skew, anisotropy)\n plt.title(t)\n plt.show()\n\n return {'magnitude':peak_magnitude, 'delay':peak_delay, 'width':cf_width,\n 'mean':mean, 'std':std, 'skew':skew, 'anisotropy':anisotropy,\n 'left_lag':left_lag, 'right_lag':right_lag, 'left_sum':left_sum, 'right_sum':right_sum, 'cc':cc}", "def my_featurize(apartment):\n col =np.array([1, 2, 0, 0, 0, 0, 0, 0 ])\n a= pd.DataFrame(apartment[col])\n if(apartment.get('condition')== 'good'):\n col[1] =1\n else:\n if(apartment.get('condition')== 'zero condition'):\n col[1] = 0\n col[2] =apartment.get('num_rooms')\n col[3] =apartment.get('area')\n col[4] =apartment.get('num_bathrooms')\n col[5] =apartment.get('floor')\n col[6] =apartment.get('ceiling_height')\n col[7] =apartment.get('max_floor')\n\n return col, apartment['price']", "def plot(dsname, wdir = './', width = 1000.0, dt = 5.0*yt.units.Myr, fields = all_fields,\n thickness = 20.0, outdir = './enrichment_plots_kpc'):\n\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n\n gal = Galaxy(dsname, wdir = wdir)\n data = gal.df\n\n @derived_field(name=\"logNO\", units=\"\")\n def _logNO(field, data):\n return np.log10(data['N_Abundance'] / data['O_Abundance'])\n gal.ds.add_field((\"gas\", \"logNO\"), function=_logNO, units=\"\")\n\n make_filtered_field(gal.ds, 'logNO', ['O_Fraction','N_Fraction'])\n make_filtered_field(gal.ds, 'O_over_H', ['O_Fraction'])\n make_filtered_field(gal.ds, 'N_over_O', ['O_Fraction','N_Fraction'])\n# def _logNO_filtered(field,data):\n# x = data[('gas','logNO')]\n#\n# f1 = data[('gas','O_Fraction')]\n# f2 = data[('gas','N_Fraction')]\n#\n# x[ (f1 < tol) + (f2 < tol)] = np.nan\n#\n# return x\n# gal.ds.add_field(('gas','logNO_filtered'), function = _logNO_filtered, units = \"\")\n\n M = data['birth_mass']\n t_o = data['creation_time'].convert_to_units('Myr')\n MS_lifetime = data[('io','particle_model_lifetime')].to('Myr')\n MS_death = t_o + MS_lifetime\n px = (data['particle_position_x'] - gal.ds.domain_center[0]).to('pc')\n py = (data['particle_position_y'] - gal.ds.domain_center[1]).to('pc')\n pz = (data['particle_position_z'] - gal.ds.domain_center[2]).to('pc')\n\n recent_death = (MS_death > gal.ds.current_time - dt) * (MS_death <= gal.ds.current_time + 0.001*yt.units.Myr)\n alive = MS_death > gal.ds.current_time + 0.001*yt.units.Myr\n\n AGB = M < 8.0\n massive_star = (M > 8.0) * (M < 25.0)\n\n boxdim = np.array([width*1.25,width*1.25,thickness])*yt.units.pc\n region = gal.ds.box(gal.ds.domain_center - boxdim*0.5, gal.ds.domain_center + boxdim*0.5)\n\n proj = yt.ProjectionPlot(gal.ds, 'z', fields,\n weight_field = 'number_density', data_source = region, width = (width,'pc'))\n\n if 'number_density' in fields:\n proj.set_unit('number_density','cm**(-3)')\n proj.set_cmap('number_density','viridis')\n proj.set_zlim('number_density',1.0E-4,200.0)\n\n if 'O_over_H_filtered' in fields:\n 
proj.set_cmap('O_over_H_filtered','cubehelix')\n proj.set_log('O_over_H_filtered', False)\n proj.set_zlim('O_over_H_filtered', -5, 1)\n proj.set_colorbar_label('O_over_H_filtered', r'[O/H]')\n\n if 'N_over_O_filtered' in fields:\n proj.set_cmap('N_over_O_filtered','PRGn')\n proj.set_log('N_over_O_filtered',False)\n proj.set_zlim('N_over_O_filtered',-2,2)\n proj.set_colorbar_label('N_over_O_filtered', r'[N/O]')\n\n if 'logNO' in fields:\n proj.set_cmap('logNO','PRGn')\n proj.set_log('logNO',False)\n proj.set_zlim('logNO',-2,0.5)\n proj.set_colorbar_label('logNO', r'log( N / O )')\n\n if 'logNO_filtered' in fields:\n proj.set_cmap('logNO_filtered','PRGn')\n proj.set_log('logNO_filtered',False)\n proj.set_zlim('logNO_filtered',-2,0.5)\n proj.set_colorbar_label('logNO_filtered', r'log( N / O )')\n\n if 'Temperature' in fields:\n proj.set_cmap('Temperature', 'RdYlBu_r')\n proj.set_log('Temperature',True)\n proj.set_zlim('Temperature',10.0, 1.0E7)\n proj.set_colorbar_label('Temperature', r'Temperature (K)')\n\n if 'G_o' in fields:\n proj.set_cmap('G_o', 'cubehelix')\n proj.set_log('G_o', True)\n proj.set_zlim('G_o',0.05, 100.0)\n proj.set_colorbar_label('G_o', r'ISRF (G$_{\\rm o}$)')\n\n if 'Q0_flux':\n proj.set_cmap('Q0_flux', 'magma')\n proj.set_log('Q0_flux',True)\n proj.set_zlim('Q0_flux',1.0E-6, 1.0E-1)\n proj.set_colorbar_label('Q0_flux', r'HI Ionizing Radiation (s$^{-1}$)')\n\n Mstar = np.sum(gal.df['particle_mass'][ gal.df['particle_type'] == 11]).to('Msun')\n time = gal.ds.current_time.to('Myr')\n# proj.annotate_title(r\"Time = %1.1f Myr M$_{*}$ = %2.2E M$_{\\odot}$\"%(time.value,Mstar.value))\n proj.set_font( {'size' : 32} )\n proj.save(outdir + '/') # necessary\n\n\n dt = 5.0 * yt.units.Myr\n # buffer around image. otherwise points plotted near edge of image my run a little outside\n # viewing area, causing weird shifts in plotting. 
Not sure how to control this otherwise\n buffer = 15.0 # in pc\n in_image = (np.abs(pz) <= boxdim[2]*0.5) *\\\n (np.abs(px) <= (width*0.5 - buffer)) *\\\n (np.abs(py) <= (width*0.5 - buffer))\n\n pp = {}\n pp['massive_star_winds'] = in_image * alive * massive_star\n pp['AGB_winds'] = in_image * recent_death * AGB\n pp['SN'] = in_image * recent_death * massive_star\n #pp['other_stars'] = in_image * alive * (np.logical_not(pp['massive_star_winds']))\n\n for k in list(proj.plots.keys()):\n image = proj.plots[k]\n\n #\n # Now select and annotate the points we want\n #\n for s in list(pp.keys()):\n if np.size(px[pp[s]].value) > 0:\n print(np.size(px[pp[s]]), 'Particles in ', s, px[pp[s]], py[pp[s]])\n image.axes.scatter(px[pp[s]].value,py[pp[s]].value, s = ps[s], marker = markers[s], color = colors[s])\n else:\n print('No particles in ', s)\n\n# proj.refresh()\n# proj.hide_axes()\n proj.save(outdir + '/') # necessary\n\n if 'N_over_O' in fields:\n vmin,vmax = -2,2\n x = proj.plots['N_over_O']\n x.image.set_norm( MidpointNormalize(midpoint= 0.5*(vmin+vmax), vmin=vmin,vmax=vmax))\n x.cb.set_norm(MidpointNormalize(midpoint=0.5*(vmin+vmax),vmin=vmin,vmax=vmax))\n x.cb.update_normal(x.image)\n x.save(outdir + '/' + str(gal.ds) + '_Projection_z_N_over_O_number_density.png')\n\n if 'logNO' in fields:\n vmin, vmax = -2, 0.25\n x = proj.plots['logNO']\n x.image.set_norm( MidpointNormalize(midpoint= 0.0, vmin=vmin,vmax=vmax))\n x.cb.set_norm(MidpointNormalize(midpoint=0.0, vmin=vmin,vmax=vmax))\n x.cb.update_normal(x.image)\n x.save(outdir + '/' + str(gal.ds) + '_Projection_z_logNO_number_density.png')\n\n del(proj)\n del(gal)\n\n return", "def __init__(self, predict_lowerbound: float, first_season: int, aug_num_cuts: int, aug_min_cuts_on: int,\n cdf_cutoff: float):\n super().__init__(CutLayer(MultiplyAggregateLayer(InnerAppearanceLayer(first_season, aug_num_cuts,\n aug_min_cuts_on, cdf_cutoff)), mean, 1.0, predict_lowerbound))", "def plot_figure9_lower():\n column_titles = None\n\n height_ceiling = 500.\n height_ceiling_id = list(height_range_ceilings).index(height_ceiling)\n\n fixed_height_ref = 100.\n fixed_height_id = list(fixed_heights).index(fixed_height_ref)\n\n plot_item0 = {\n 'data': nc.variables[\"p_ceiling_perc5\"][height_ceiling_id, :, :]\n / nc.variables[\"p_fixed_perc5\"][fixed_height_id, :, :],\n 'contour_fill_levels': np.linspace(1, 6., 21),\n 'contour_line_levels': np.arange(2., 5., 1.),\n 'contour_line_label_fmt': '%.1f',\n 'colorbar_ticks': np.linspace(1, 6., 21)[::4],\n 'colorbar_tick_fmt': '{:.1f}',\n 'colorbar_label': 'Increase factor [-]',\n 'extend': 'max',\n }\n plot_item1 = {\n 'data': nc.variables[\"p_ceiling_perc32\"][height_ceiling_id, :, :]\n / nc.variables[\"p_fixed_perc32\"][fixed_height_id, :, :],\n 'contour_fill_levels': np.linspace(1, 3.5, 21),\n 'contour_line_levels': np.linspace(1, 3.5, 21)[::4],\n 'contour_line_label_fmt': '%.1f',\n 'colorbar_ticks': np.linspace(1, 3.5, 21)[::4],\n 'colorbar_tick_fmt': '{:.1f}',\n 'colorbar_label': 'Increase factor [-]',\n 'extend': 'max',\n }\n plot_item2 = {\n 'data': nc.variables[\"p_ceiling_perc50\"][height_ceiling_id, :, :]\n / nc.variables[\"p_fixed_perc50\"][fixed_height_id, :, :],\n 'contour_fill_levels': np.linspace(1, 3.5, 21),\n 'contour_line_levels': np.linspace(1, 3.5, 21)[::4],\n 'contour_line_label_fmt': '%.1f',\n 'colorbar_ticks': np.linspace(1, 3.5, 21)[::4],\n 'colorbar_tick_fmt': '{:.1f}',\n 'colorbar_label': 'Increase factor [-]',\n 'extend': 'max',\n }\n\n plot_items = [plot_item0, plot_item1, plot_item2]\n\n 
eval_contour_fill_levels(plot_items)\n plot_panel_1x3_seperate_colorbar(plot_items, column_titles)", "def get_catalog(self, query=None, query_fields=None, print_query=False,**kwargs):\n # Main DES query\n main_cat = super(DELVE_Survey, self).get_catalog(query=query,\n query_fields=query_fields,\n print_query=print_query,**kwargs)\n if len(main_cat) == 0:\n main_cat = catalog_utils.clean_cat(main_cat,photom['DELVE'])\n return main_cat\n main_cat = catalog_utils.clean_cat(main_cat, photom['DELVE'])\n #import pdb; pdb.set_trace()\n for col in main_cat.colnames:\n if main_cat[col].dtype==float:\n mask = np.isnan(main_cat[col])+(main_cat[col]==99.99)\n main_cat[col] = np.where(~mask, main_cat[col], -999.0)\n \n # Finish\n self.catalog = main_cat\n self.validate_catalog()\n return self.catalog", "def execQ11():\n frame = pan.DataFrame(data, columns=['Product', 'Price', 'Period'])\n carrot = frame[(dFrame.Series_title_1 == \"Carrots, 1kg\") & (dFrame.Period == 2012.03)]\n return carrot", "def plot_val(\n db: MongoCosemDB,\n setup: str,\n labelname: str,\n file: Optional[str],\n threshold: int = 127,\n) -> None:\n print(setup, labelname)\n valcrops = db.get_all_validation_crops()\n\n if len(valcrops) != 4:\n raise NotImplementedError(\n \"Number of validation crops has changed so plotting layout has to be updated\"\n )\n\n if detect_8nm(setup):\n raw_datasets = [\"volumes/subsampled/raw/0\", \"volumes/raw/s1\"]\n else:\n raw_datasets = [\"volumes/raw/s0\"]\n\n col = db.access(\"evaluation\", (db.training_version, db.gt_version))\n # query all relevant results\n query = {\n \"setup\": setup,\n \"label\": labelname,\n \"refined\": False,\n \"iteration\": {\"$mod\": [25000, 0]},\n \"threshold\": threshold,\n }\n results = dict()\n max_its = dict()\n for crop in valcrops:\n query[\"crop\"] = crop[\"number\"]\n if col.find_one(query) is None:\n continue\n results[crop[\"number\"]] = dict()\n max_its[crop[\"number\"]] = dict()\n for raw_ds in raw_datasets:\n query[\"raw_dataset\"] = raw_ds\n results[crop[\"number\"]][raw_ds] = dict()\n max_its[crop[\"number\"]][raw_ds] = dict()\n\n max_it_actual = convergence_iteration(query, db)\n max_it_min700 = max_iteration_for_analysis(query, db, conv_it=max_it_actual)\n\n max_its[crop[\"number\"]][raw_ds][\"actual\"] = max_it_actual\n max_its[crop[\"number\"]][raw_ds][\"min700\"] = max_it_min700\n for metric in [\"dice\", \"mean_false_distance\"]:\n query[\"metric\"] = metric\n\n col = db.access(\"evaluation\", (db.training_version, db.gt_version))\n\n scores = list(\n col.aggregate(\n [\n {\"$match\": query},\n {\"$sort\": {\"iteration\": 1}},\n {\"$project\": {\"iteration\": 1, \"_id\": 0, \"value\": 1}},\n ]\n )\n )\n results[crop[\"number\"]][raw_ds][metric] = scores\n\n colors = {\"dice\": \"tab:green\", \"mean_false_distance\": \"tab:blue\"}\n fig, axs = plt.subplots(2, 2, sharex=True, sharey=True, figsize=(30, 15))\n if len(raw_datasets) > 1:\n plt.plot(\n [], [], marker=\".\", ms=1.2, linestyle=\"-\", color=\"k\", label=\"subsampled\"\n )\n plt.plot(\n [], [], marker=\".\", ms=1.2, linestyle=\"--\", color=\"k\", label=\"averaged\"\n )\n plt.plot([], [], linestyle=\"-\", color=\"tab:red\", label=\"max iteration (min 700k)\")\n plt.plot([], [], linestyle=\"-\", color=\"tab:pink\", label=\"max iteration (no min)\")\n fig.legend(loc=\"upper right\", frameon=False, prop={\"size\": 18})\n\n plt.suptitle(\n \"{setup:} - {label:}\".format(setup=setup, label=labelname), fontsize=22\n )\n\n for crop, ax in zip(valcrops, axs.flatten()):\n try:\n crop_res = 
results[crop[\"number\"]]\n except KeyError:\n continue\n\n ax2 = ax.twinx()\n for raw_ds, ls in zip(raw_datasets, [\"-\", \"--\"]):\n x_vs_dice = [r[\"iteration\"] for r in crop_res[raw_ds][\"dice\"]]\n y_vs_dice = [1 - r[\"value\"] for r in crop_res[raw_ds][\"dice\"]]\n x_vs_mfd = [r[\"iteration\"] for r in crop_res[raw_ds][\"mean_false_distance\"]]\n y_vs_mfd = [r[\"value\"] for r in crop_res[raw_ds][\"mean_false_distance\"]]\n\n ax.plot(\n x_vs_mfd,\n y_vs_mfd,\n linestyle=ls,\n color=colors[\"mean_false_distance\"],\n marker=\"o\",\n ms=3,\n )\n ax2.plot(\n x_vs_dice,\n y_vs_dice,\n linestyle=ls,\n color=colors[\"dice\"],\n marker=\"o\",\n ms=3,\n )\n if max_its[crop[\"number\"]][raw_ds][\"min700\"][1]:\n ax.axvline(\n max_its[crop[\"number\"]][raw_ds][\"min700\"][0],\n linestyle=ls,\n color=\"tab:red\",\n )\n if (\n max_its[crop[\"number\"]][raw_ds][\"min700\"][0]\n != max_its[crop[\"number\"]][raw_ds][\"actual\"][0]\n ):\n ax.axvline(\n max_its[crop[\"number\"]][raw_ds][\"actual\"][0],\n linestyle=ls,\n color=\"tab:pink\",\n )\n\n ax.set_xlabel(\"iteration\", fontsize=18)\n ax.set_title(crop[\"number\"], fontsize=18)\n ax.xaxis.set_major_formatter(ticker.EngFormatter())\n\n ax.set_ylabel(\"MFD\", color=colors[\"mean_false_distance\"], fontsize=18)\n ax.tick_params(axis=\"y\", labelcolor=colors[\"mean_false_distance\"])\n ax.set_ylim(bottom=0)\n\n ax2.set_ylabel(\"1 - dice\", color=colors[\"dice\"], fontsize=18)\n ax2.tick_params(axis=\"y\", labelcolor=colors[\"dice\"])\n ax2.set_ylim([0, 1])\n\n ax.tick_params(axis=\"both\", which=\"major\", labelsize=18)\n ax2.tick_params(axis=\"both\", which=\"major\", labelsize=18)\n\n if file is None:\n plt.show()\n else:\n plt.savefig(file)\n plt.close()", "def pre_exclude_rest_instances(seg_raw_df, class_df):\n import smdt.features.features as features\n\n print \"============start pre exclusion==================================\"\n \n\n seg_mag_df = seg_raw_df.copy(deep=True)\n temp = [seg_mag_df[name]**2 for name in s_info.raw_value_names]\n temp = np.sum(temp, axis=0)\n seg_mag_df['mag'] = np.sqrt(temp)\n\n grouped = seg_mag_df.groupby(s_info.segment_col)\n\n c1 = grouped['mag'].std()\n c2 = grouped['mag'].aggregate(features.f_slope).abs()\n c3 = grouped['mag'].aggregate(features.f_pppeakamplitude, paras={\"q\":10})\n\n # Used for visualization testing\n # import matplotlib.pyplot as pyplot\n # c1.hist()\n # pyplot.figure()\n # c2.hist(bins=100)\n # pyplot.figure()\n # c3.hist()\n\n # pyplot.show()\n # sys.exit(1)\n t1 = 0.13\n t2 = 0.0004\n t3 = 0.5\n print \"===================preexclusion criterions====================\"\n print \"std: <= %f, slope: <= %f, peak-peak amplitude: < %f\" % (t1, t2, t3) \n excluded = (c1 <= t1) & (c2 <= t2) & (c3 <= t3)\n class_df[s_info.classname_col][excluded] = 'rest'\n class_df[s_info.classnum_col][excluded] = -1\n\n c_rest = len(class_df[excluded])\n c_keep = len(class_df[~excluded])\n c_total = len(class_df)\n print \"Exclusion result: excluded/keep/total: %.1f, %.1f, %.1f exclusion rate: %.2f\" % (c_rest, c_keep, c_total, c_rest/float(c_total))\n return class_df", "def make_dataset_for_scatter():\n condition1 = max_corr['bin_width'] == select_bin_width.value\n condition2 = select_n_samples.value[0] <= max_corr['n_points']\n condition3 = max_corr['n_points'] <= select_n_samples.value[1]\n by_bin = max_corr[condition1 & condition2 & condition3]\n return ColumnDataSource(by_bin)", "def plot_colour_mag_diagram(params,mags, colours, local_mags, local_colours,\n target, source, blend, RC, blue_filter, 
red_filter,\n yaxis_filter, tol, log):\n\n def calc_colour_lightcurve(blue_lc, red_lc, y_lc):\n\n idx1 = np.where( red_lc['mag_err'] > 0.0 )[0]\n idx2 = np.where( blue_lc['mag_err'] > 0.0 )[0]\n idx3 = np.where( y_lc['mag_err'] > 0.0 )[0]\n idx = set(idx1).intersection(set(idx2))\n idx = list(idx.intersection(set(idx3)))\n\n mags = y_lc['mag'][idx]\n magerr = y_lc['mag_err'][idx]\n cols = blue_lc['mag'][idx] - red_lc['mag'][idx]\n colerr = np.sqrt(blue_lc['mag_err'][idx]**2 + red_lc['mag_err'][idx]**2)\n\n return mags, magerr, cols, colerr\n\n\n add_source_trail = False\n add_target_trail = True\n add_crosshairs = True\n add_source = True\n add_blend = True\n add_rc_centroid = True\n add_extinction_vector = True\n\n fig = plt.figure(1,(10,10))\n\n ax = plt.subplot(111)\n\n plt.rcParams.update({'font.size': 18})\n\n plt.scatter(colours,mags,\n c='#E1AE13', marker='.', s=1,\n label='Stars within ROME field')\n\n plt.scatter(local_colours,local_mags,\n c='#8c6931', marker='*', s=4,\n label='Stars < '+str(round(tol,1))+'arcmin of target')\n\n col_key = blue_filter+red_filter\n\n if getattr(source,blue_filter) != None and getattr(source,red_filter) != None\\\n and add_source:\n\n plt.errorbar(getattr(source,col_key), getattr(source,yaxis_filter),\n yerr = getattr(source,'sig_'+yaxis_filter),\n xerr = getattr(source,'sig_'+col_key), color='m',\n marker='d',markersize=10, label='Source crosshairs')\n\n if add_crosshairs:\n plot_crosshairs(fig,getattr(source,col_key),getattr(source,yaxis_filter),'m')\n\n if add_source_trail:\n red_lc = source.lightcurves[red_filter]\n blue_lc = source.lightcurves[blue_filter]\n y_lc = source.lightcurves[yaxis_filter]\n\n (smags, smagerr, scols, scolerr) = calc_colour_lightcurve(blue_lc, red_lc, y_lc)\n\n plt.errorbar(scols, smags, yerr = smagerr, xerr = scolerr,\n color='m', marker='d',markersize=10, label='Source')\n\n if getattr(blend,blue_filter) != None and getattr(blend,red_filter) != None \\\n and add_blend:\n\n plt.errorbar(getattr(blend,col_key), getattr(blend,yaxis_filter),\n yerr = getattr(blend,'sig_'+yaxis_filter),\n xerr = getattr(blend,'sig_'+col_key), color='b',\n marker='v',markersize=10, label='Blend')\n\n if getattr(target,blue_filter) != None and getattr(target,red_filter) != None \\\n and add_target_trail:\n\n plt.errorbar(getattr(target,col_key), getattr(target,yaxis_filter),\n yerr = getattr(target,'sig_'+yaxis_filter),\n xerr = getattr(target,'sig_'+col_key), color='k',\n marker='x',markersize=10)\n\n red_lc = target.lightcurves[red_filter]\n blue_lc = target.lightcurves[blue_filter]\n y_lc = target.lightcurves[yaxis_filter]\n\n (tmags, tmagerr, tcols, tcolerr) = calc_colour_lightcurve(blue_lc, red_lc, y_lc)\n\n plt.errorbar(tcols, tmags, yerr = tmagerr,xerr = tcolerr,\n color='k', marker='+',markersize=10, alpha=0.4,\n label='Blended target')\n\n if add_rc_centroid:\n plt.errorbar(getattr(RC,col_key), getattr(RC,yaxis_filter),\n yerr=getattr(RC,'sig_'+yaxis_filter),\n xerr=getattr(RC,'sig_'+col_key),\n color='g', marker='s',markersize=10, label='Red Clump centroid')\n\n plt.xlabel('SDSS ('+blue_filter+'-'+red_filter+') [mag]')\n\n plt.ylabel('SDSS-'+yaxis_filter+' [mag]')\n\n [xmin,xmax,ymin,ymax] = plt.axis()\n\n plt.axis([xmin,xmax,ymax,ymin])\n\n xticks = np.arange(xmin,xmax,0.1)\n yticks = np.arange(ymin,ymax,0.2)\n\n ax.set_xticks(xticks,minor=True)\n ax.set_yticks(yticks,minor=True)\n\n plot_file = path.join(params['red_dir'],'colour_magnitude_diagram_'+\\\n yaxis_filter+'_vs_'+blue_filter+red_filter\\\n +'.pdf')\n\n plt.grid()\n\n if 
red_filter == 'i' and blue_filter == 'r' and yaxis_filter == 'i':\n plt.axis([0.5,2.0,20.2,13.5])\n\n if red_filter == 'i' and blue_filter == 'r' and yaxis_filter == 'r':\n plt.axis([0.0,1.5,21.0,13.5])\n\n if red_filter == 'r' and blue_filter == 'g':\n plt.axis([0.5,3.0,22.0,14.0])\n\n if red_filter == 'i' and blue_filter == 'g':\n plt.axis([0.5,4.4,22.0,14.0])\n\n if add_extinction_vector:\n plot_extinction_vector(fig,params,yaxis_filter)\n\n box = ax.get_position()\n ax.set_position([box.x0, box.y0 + box.height * -0.025,\n box.width, box.height * 0.95])\n\n l = ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.2), ncol=2)\n\n l.legendHandles[0]._sizes = [50]\n l.legendHandles[1]._sizes = [50]\n\n plt.rcParams.update({'legend.fontsize':18})\n plt.rcParams.update({'font.size':18})\n plt.rc('xtick', labelsize=18)\n plt.rc('ytick', labelsize=18)\n\n plt.savefig(plot_file,bbox_inches='tight')\n\n plt.close(1)\n\n log.info('Colour-magnitude diagram output to '+plot_file)", "def addCalcSolarVars(df, latitude):\n\tdf['sunset_hour_angle'] = np.rad2deg(np.arccos(-np.tan(np.deg2rad(latitude)) * \\\n\t\t\t\t\t\t\t\t\t\t\t\t np.tan(np.deg2rad(df['declination']))))\n\tdf['ET_insol'] = (24 / np.pi) * \\\n\t\t\t\t\t (df['Isc_prime'] / 1000) * \\\n\t\t\t\t\t ((np.cos(np.deg2rad(latitude)) * np.cos(np.deg2rad(df['declination'])) * np.sin(np.deg2rad(df['sunset_hour_angle']))) + \\\n\t \t\t\t\t (np.deg2rad(df['sunset_hour_angle']) * np.sin(np.deg2rad(latitude)) * np.sin(np.deg2rad(df['declination']))))\n\tdf['clearness'] = df['insolation_horizontal'] / df['ET_insol']\n\t# Calculate diffuse fraction\n\tdf['diffuse_fraction'] = (df['insolation_horizontal'] * (1.39 - (4.027 * df['clearness']) + (5.531 * (df['clearness'] ** 2)) - (3.108 * (df['clearness'] ** 3)))) / df['insolation_horizontal']\n\treturn df", "def __init__(self, df_flow, x1='x', x2='y', x3_value=None,resolution=100,x1_center=0.0,x2_center=0.0, D=None, invert_x1=False,\n crop_x1 = None, crop_x2=None):\n\n # Assign the axis names\n self.x1_name = x1\n self.x2_name = x2\n self.x3_name = [x3 for x3 in ['x','y','z'] if x3 not in [x1,x2]][0]\n\n # Find the nearest value in 3rd dimension\n search_values = np.array(sorted(df_flow[self.x3_name].unique()))\n nearest_idx = (np.abs(search_values-x3_value)).argmin()\n nearest_value = search_values[nearest_idx]\n print('Nearest value to in %s of %.2f is %.2f' % (self.x3_name, x3_value,nearest_value))\n \n # Get a sub-frame of only this 3rd dimension value\n df_sub = df_flow[df_flow[self.x3_name]==nearest_value]\n\n # Make sure cropping is valid\n if crop_x1:\n if crop_x1[0] < min(df_sub[x1]):\n raise Exception(\"Invalid x_1 minimum on cropping\")\n if crop_x1[1] > max(df_sub[x1]):\n raise Exception(\"Invalid x_1 maximum on cropping\")\n\n if crop_x2:\n if crop_x2[0] < min(df_sub[x2]):\n raise Exception(\"Invalid x_2 minimum on cropping\")\n if crop_x2[1] > max(df_sub[x2]):\n raise Exception(\"Invalid x_2 maximum on cropping\")\n\n # If cropping x1 do it now\n # if crop_x1:\n # df_sub = df_sub[(df_sub[x1] >= crop_x1[0]) & (df_sub[x1] <= crop_x1[1])]\n # if crop_x2:\n # df_sub = df_sub[(df_sub[x2] >= crop_x2[0]) & (df_sub[x2] <= crop_x2[1])]\n\n # Store the relevent values\n self.x1_in = df_sub[x1]\n self.x2_in = df_sub[x2]\n self.u_in = df_sub['u']\n self.v_in = df_sub['v']\n self.w_in = df_sub['w']\n\n # Save the desired resolution\n self.res = resolution\n\n # Grid the data, if cropping available use that\n if crop_x1:\n # self.x1_lin = np.linspace(min(self.x1_in), max(self.x1_in), resolution)\n 
self.x1_lin = np.linspace(crop_x1[0], crop_x1[1], resolution)\n else:\n self.x1_lin = np.linspace(min(self.x1_in), max(self.x1_in), resolution)\n if crop_x2:\n # self.x2_lin = np.linspace(min(self.x2_in), max(self.x2_in), resolution)\n self.x2_lin = np.linspace(crop_x2[0], crop_x2[1], resolution)\n else:\n self.x2_lin = np.linspace(min(self.x2_in), max(self.x2_in), resolution)\n \n # Mesh and interpolate u, v and w\n # print(self.x1_lin)\n # print(sorted(self.x1_in))\n self.x1_mesh, self.x2_mesh = np.meshgrid(self.x1_lin, self.x2_lin)\n self.u_mesh = griddata(np.column_stack([self.x1_in, self.x2_in]), self.u_in,(self.x1_mesh.flatten(), self.x2_mesh.flatten()), method='cubic')\n self.v_mesh = griddata(np.column_stack([self.x1_in, self.x2_in]), self.v_in,(self.x1_mesh.flatten(), self.x2_mesh.flatten()), method='cubic')\n self.w_mesh = griddata(np.column_stack([self.x1_in, self.x2_in]), self.w_in,(self.x1_mesh.flatten(), self.x2_mesh.flatten()), method='cubic')\n \n # Save flat vectors\n self.x1_flat = self.x1_mesh.flatten()\n self.x2_flat = self.x2_mesh.flatten()\n\n # Save u-cubed\n self.u_cubed = self.u_mesh ** 3\n\n\n # Save re-centing points for visualization\n self.x1_center = x1_center\n self.x2_center = x2_center\n\n\n # If inverting, invert x1, and x1_center\n if invert_x1:\n self.x1_mesh = self.x1_mesh * -1\n self.x1_lin = self.x1_lin * -1\n self.x1_flat = self.x1_flat * -1 \n self.x1_center = self.x1_center * -1 \n self.v_mesh =self.v_mesh * -1\n\n\n # Set the diamater which will be used in visualization\n # Annalysis in D or meters?\n if D == None:\n self.plot_in_D = False\n self.D = 1.\n else:\n self.plot_in_D = True\n self.D = D", "def query_vizier(catalog, target=None, sky_coords=None, cols=None, wildcards=['e_*'], names=None, search_radius=20*q.arcsec, idx=0, places=3, cat_name=None, verbose=True, **kwargs):\n # Get the catalog\n if catalog in PHOT_CATALOGS:\n meta = PHOT_CATALOGS[catalog]\n catalog = meta['catalog']\n cols = cols or meta['cols']\n names = names or meta['names']\n\n # Name for the catalog\n if cat_name is None:\n cat_name = catalog\n\n # If search_radius is explicitly set, use that\n if search_radius is not None and isinstance(sky_coords, SkyCoord):\n viz_cat = Vizier.query_region(sky_coords, radius=search_radius, catalog=[catalog])\n\n # ...or get photometry using designation...\n elif isinstance(target, str):\n viz_cat = Vizier.query_object(target, catalog=[catalog])\n\n # ...or abort\n else:\n viz_cat = None\n\n # Check there are columns to fetch\n if cols is None:\n raise ValueError(\"No column names to fetch!\")\n\n # Check for wildcards\n if wildcards is None:\n wildcards = []\n\n # Check for target names or just use native column names\n if names is None:\n names = cols\n\n # Print info\n if verbose:\n n_rec = len(viz_cat)\n print(\"{} record{} found in {}.\".format(n_rec, '' if n_rec == 1 else 's', cat_name))\n\n results = []\n\n # Parse the record\n if viz_cat is not None and len(viz_cat) > 0:\n if len(viz_cat) > 1:\n print('{} {} records found.'.format(len(viz_cat), name))\n\n # Grab the record\n rec = viz_cat[0][idx]\n ref = viz_cat[0].meta['name']\n\n # Pull out the photometry\n for name, viz in zip(names, cols):\n fetch = [viz]+[wc.replace('*', viz) for wc in wildcards]\n if all([i in rec.columns for i in fetch]):\n data = [round(val, places) if u.isnumber(val) else val for val in rec[fetch]]\n results.append([name]+data+[ref])\n else:\n print(\"{}: Could not find all those columns\".format(fetch))\n\n return results", "def 
index_valid_star_entries(star_catalog,target,tol,log,valid_cat=False):\n\n idx1 = np.where(star_catalog['cal_ref_mag_ip'] > 0.0)[0]\n idx2 = np.where(star_catalog['cal_ref_mag_ip'] <= 22.0)[0]\n idx3 = np.where(star_catalog['cal_ref_mag_rp'] > 0.0)[0]\n idx4 = np.where(star_catalog['cal_ref_mag_rp'] <= 22.0)[0]\n idx5 = np.where(star_catalog['cal_ref_mag_gp'] > 0.0)[0]\n idx6 = np.where(star_catalog['cal_ref_mag_gp'] <= 22.0)[0]\n\n det_idx = set(idx1).intersection(set(idx2))\n det_idx = det_idx.intersection(set(idx3))\n det_idx = det_idx.intersection(set(idx4))\n det_idx = det_idx.intersection(set(idx5))\n det_idx = det_idx.intersection(set(idx6))\n\n log.info('Identified '+str(len(det_idx))+\\\n ' detected stars with valid measurements in gri')\n\n if valid_cat == False:\n return list(det_idx), None, None\n\n idx4 = np.where(star_catalog['imag'] > 0.0)[0]\n idx5 = np.where(star_catalog['rmag'] > 0.0)[0]\n idx6 = np.where(star_catalog['gmag'] > 0.0)[0]\n\n cat_idx = det_idx.intersection(set(idx4))\n cat_idx = cat_idx.intersection(set(idx5))\n cat_idx = list(cat_idx.intersection(set(idx6)))\n det_idx = list(det_idx)\n\n log.info('Identified '+str(len(cat_idx))+\\\n ' detected stars with valid catalogue entries in gri')\n\n close_idx = find_stars_close_to_target(star_catalog, target, tol, log)\n\n close_cat_idx = list(set(cat_idx).intersection(set(close_idx)))\n\n log.info('Identified '+str(len(close_cat_idx))+\\\n ' stars close to the target with valid catalogue entries in gri')\n\n return det_idx, cat_idx, close_cat_idx", "def add_mag(self, ra, dec, mag, mag_err, filt, mjd):\n \n pt = Table(names=self.__mag_colnames, \n data=[[ra],[dec],[mag],[mag_err],[filt],[mjd]])\n \n LightCurve.add_tables(self, pt)", "def compare_clouds(mr1='waroona_run2', mr2='waroona_run2uc', \n hour=datetime(2016,1,5,15), \n cloud_threshold=constants.cloud_threshold,\n extent=None,\n subsubdir=None):\n # x axis for qc-max density\n colormax=0.5\n xs = np.linspace(0,colormax,200)\n ## Colourmap setup for contourf plots\n # linear between 0 and 0.01\n # log between 0.01 and 0.3\n logmax=np.log(colormax)/np.log(10)-0.01\n cmap=plotting._cmaps_['qc']\n norm=colors.SymLogNorm(0.01, vmin=0,vmax=colormax,base=2.)\n clevs=np.union1d(np.union1d(np.logspace(-2,logmax,30),0),colormax) \n \n extentname = mr1.split('_')[0]\n if extent is None:\n extent = constants.extents[extentname]\n \n ## Read a model run\n cubes1 = fio.read_model_run(mr1, hour, extent=extent)\n cubes2 = fio.read_model_run(mr2, hour, extent=extent)\n \n # pull out clouds\n qc, = cubes1.extract(['qc'])\n cqc, = cubes2.extract(['qc'])\n dates = utils.dates_from_iris(qc)\n height = utils.height_from_iris(qc)\n lats = qc.coord('latitude').points\n lons = qc.coord('longitude').points\n clats = cqc.coord('latitude').points\n clons = cqc.coord('longitude').points\n ff1, = fio.read_fire(mr1,dtimes=dates,extent=extent,firefront=True)\n ff2, = fio.read_fire(mr2,dtimes=dates,extent=extent,firefront=True)\n \n # density plot bandwidth\n bandwidth=1\n \n # make 4 vertical bins\n row1 = (2000<=height) * (height<3000)\n row2 = (3000<=height) * (height<5000)\n row3 = (5000<=height) * (height<8000)\n row4 = (8000<=height) * (height<15000)\n titles = ['2km-3km','3km-5km','5km-8km','8km-15km']\n \n # loop over datetimes:\n for di, date in enumerate(dates):\n \n qc1, qc2, qc3, qc4 = [np.sum(qc[di,row,:,:].data, axis=0) for row in [row1,row2,row3,row4]]\n cqc1, cqc2, cqc3, cqc4 = [np.sum(cqc[di,row,:,:].data, axis=0) for row in [row1,row2,row3,row4]]\n \n # Plotting\n 
plt.close()\n fig, axes = plt.subplots(3,4,figsize=[12,11])\n for i, (qci, cqci) in enumerate(zip([qc1, qc2, qc3, qc4],[cqc1, cqc2, cqc3, cqc4])):\n ## Show contourf of cloud\n plt.sca(axes[0,i])\n \n plotting.map_contourf(extent, qci, lats, lons, cmap=cmap, \n clabel=\"\", norm=norm, cbar=False, levels=clevs,\n cbarform=None, extend='max')\n \n # overlaid with cloud thresh line\n if np.max(qci)>cloud_threshold:\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n plt.contour(lons,lats, qci, np.array([cloud_threshold]),\n colors='teal', linewidths=2)\n \n # Add locations and fire\n plotting.map_add_locations_extent(extentname, hide_text=True)\n if ff1 is not None:\n plotting.map_fire(ff1[di].data,lats,lons)\n # Add ylabel on left most plot\n if i==0: plt.ylabel(mr1)\n # add title along top row\n plt.title(titles[i])\n \n ## for comparison model also\n plt.sca(axes[1,i])\n img,_ = plotting.map_contourf(extent, cqci, clats, clons, cmap=cmap, \n norm=norm, clabel=\"\", levels=clevs, \n cbar=False, cbarform=None, extend='max')\n \n # overlaid with cloud thresh line\n if np.max(cqci)>cloud_threshold:\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n plt.contour(clons, clats, cqci, np.array([cloud_threshold]),\n colors='teal', linewidths=2)\n # add fire\n if ff2 is not None:\n plotting.map_fire(ff2[di].data,clats,clons)\n # add label on leftmost\n if i==0: plt.ylabel(mr2)\n \n ## add density plot\n plt.sca(axes[2,i])\n \n # density function:\n # only if non zero cloud content\n flag=2\n if not np.isclose(np.max(qci),0):\n qcdens = gaussian_kde(qci.flatten(), bw_method=bandwidth)\n plt.plot(xs,qcdens(xs),linewidth=2,color='k')\n flag-=1\n if not np.isclose(np.max(cqci),0):\n cqcdens = gaussian_kde(cqci.flatten(), bw_method=bandwidth)\n plt.plot(xs,cqcdens(xs),color='r', linestyle='--')\n flag-=1\n plt.yticks([],[])\n if i==0: plt.ylabel('cloud density')\n if i==3: \n # manually add legend\n lines = [Line2D([0], [0], color='k', linewidth=2, linestyle='-'),\n Line2D([0], [0], color='r', linewidth=1, linestyle='--')]\n labels = [mr1,mr2]\n plt.legend(lines, labels)\n\n \n ## Add colourbar\n cbar_ax = fig.add_axes([0.35, 0.367, 0.31, 0.01])# X Y Width Height\n cbar=fig.colorbar(img, cax=cbar_ax, format=ticker.ScalarFormatter(), \n pad=0, orientation='horizontal')\n cbar.set_ticks([0,0.01,0.05,0.1,0.2,0.4])\n cbar.set_ticklabels([0,0.01,0.05,0.1,0.2,0.4])\n plt.suptitle(date.strftime(\"Max cloud content: %Y%m%d %H:%M(UTC)\"),\n fontsize=20)\n subdir='clouds'\n if subsubdir is not None:\n subdir += '/'+subsubdir\n fio.save_fig(mr2, _sn_, date, plt, subdir=subdir)", "def target_cov_plot(context):", "def _read_catalog(self, catname):\n print('loading catalog:',catname)\n with fitsio.FITS(catname,lower=True) as fits:\n #cat = fits[1][100000:110000]\n if 'object_data' in fits:\n print('reading from MEDS object data')\n ext='object_data'\n else:\n ext=1\n cat = fits[ext][:]\n\n # one cut here based on if we matched to the galsim cat\n w, = np.where(\n #(cat['mu_class'] < 3)\n #&\n #(cat['mask']==0)\n #&\n (cat['gscosmos_index'] >= 0)\n )\n print('initial cuts %d/%d %g%%' % (w.size,cat.size,w.size/cat.size*100))\n\n cat = cat[w]\n return cat", "def purgeHighSparsedFeatures(df,threshold,barplot=False,title=''):\n \n thr = math.floor(df.shape[1] * threshold)\n rowsToDrop = np.array([])\n logger.debug(Sc+'Patient Threshold is %d' % thr) \n logger.debug(Sc+'Matrix dimensions : Rows %d , Columns %d'% (df.shape[0],df.shape[1]))\n #axis_x = np.arange(0,df.shape[0]) \n axis_y = 
np.array([]) \n numRows = df.shape[0] \n for i in range(1,numRows):\n arr = pd.isnull(df.iloc[i])\n nnan = np.sum(arr) \n axis_y = np.append(axis_y,nnan)\n if (nnan > thr):\n rowsToDrop = np.append(rowsToDrop,i)\n logger.debug ('%d features to drop ' % len(rowsToDrop))\n np.savetxt('debug/sparseFeaturesaxis_y.txt',axis_y)\n #if(barplot):\n # ax.title.set_text(title)\n # ax.bar(axis_x,axis_y) \n #logger.debug('After purge there are %d columns '% df.shape[1])\n return rowsToDrop", "def sazelmag(source):\n if 1:\n # bizarre failures in s.azel() showing up on Mar 11, 2006....\n try:\n Azel = subarrayControl.s.azel(source, 0.0);\n except Exception, e:\n print \"Problem with s.azel(%s): %s\" %(source,e)\n return (source, 0, -10, 0)\n try:\n mag = subarrayControl.s.queryMag(source)\n except Exception, e:\n print \"Problem with queryMag(%s): %s\" %(source,e) \n return (source, 0, -10, 0)\n return ( source, Azel[0], Azel[1], mag)\n else:\n cmd = 'checksource source=%s comments=t sexa=f' % source\n # the 2nd (last) line contains all the info\n r=os.popen(cmd).readlines()[1].strip()\n rs=r.split()\n re=r.split('=')\n if len(re) == 2:\n mag=float(re[1])\n else:\n mag=-9.99\n return ( source, float(rs[3]), float(rs[4]), mag)", "def _cutoff(xdata, ydata, btype, fs, ff):\r\n try:\r\n# print ff\r\n if ff != None:\r\n nPts = int(1./(((xdata.max()-xdata.min())/xdata.shape[0])*(ff/10.)))\r\n else:\r\n nPts = 0\r\n if nPts%2 == 0:\r\n nPts = nPts + 1\r\n if nPts < xdata.shape[0]:\r\n nPts = xdata.shape[0]\r\n# print nPts\r\n window = np.hanning(ydata.shape[0])\r\n freq = FourierFrequency(xdata, nPts)\r\n index = np.argsort(freq)\r\n tdf = FourierTransform(ydata*window, nPts)\r\n tdf = abs(tdf)\r\n pp = _maxima(tdf[index], freq[index], lookahead = 1)\r\n# mm = _minima(tdf[index], freq[index], lookahead=1)\r\n pp, hh = np.array(np.array(pp).T[0]), np.array(np.array(pp).T[1])\r\n# mm = np.array(np.array(mm).T[0])#, np.array(np.array(mm).T[1])\r\n ind = np.where(pp == min(abs(pp)))[0][0]\r\n ind2 = np.where(hh == max(hh[(ind+1):]))[0][0]\r\n for u, i in enumerate(freq):\r\n if i > abs(pp[ind2])*1.5 or i < -abs(pp[ind2])*1.5 or (i < abs(pp[ind2])/2. and i > -abs(pp[ind2])/2.) 
or (tdf[u] > hh[ind2]*1.05): #(abs(i) < abs(mm[indmin])) or \r\n tdf[u] = 0.\r\n def lor2(x, A0, x0, gamma0):\r\n return A0*(1/np.pi)*(gamma0/2)/((x-x0)**2+(gamma0/2)**2)+A0*(1/np.pi)*(gamma0/2)/((x+x0)**2+(gamma0/2)**2)\r\n lmod2 = lmf.Model(lor2)\r\n lmod2.make_params()\r\n lmod2.set_param_hint('A0', value=max(tdf), min=max(tdf)/1000.)\r\n lmod2.set_param_hint('x0', value=abs(pp[ind2]), min=0.)\r\n lmod2.set_param_hint('gamma0', value=1., min=0.)\r\n result2 = lmod2.fit(tdf[index], x=freq[index])\r\n# print result2.values.get('x0'), result2.values.get('gamma0')\r\n if btype=='high':\r\n if result2.values.get('x0')-result2.values.get('gamma0') > 0.:\r\n# print \"frequency: \", result2.values.get('x0')-result2.values.get('gamma0')\r\n if hh[ind2] != max(hh[(ind+1):]):\r\n print \"False\", \" maximum\", \"\\n\", \"\\n\", \"\\n\"\r\n return result2.values.get('x0')-result2.values.get('gamma0')\r\n else:\r\n# print \"failed: 0\"\r\n return 0.\r\n elif btype=='low':\r\n return result2.values.get('x0')+result2.values.get('gamma0')\r\n except Exception:\r\n pass\r\n finally:\r\n pass", "def read_and_select(fles, var, area):\n \n ds = xr.open_mfdataset(fles)\n \n # For 20CRv2c geopotential height \n if(var=='hgt'): \n ds = ds.sel(level=150.0)\n \n try:\n ds = ds.rename({'longitude': 'lon', 'latitude': 'lat'}) \n except: \n pass\n \n \n if(ds.lon.values.max() > 350):\n ds = ds.assign_coords(lon=(((ds.lon + 180) % 360) - 180))\n rolls = np.sum(ds.lon.values < 0); ds = ds.roll(lon=rolls*(-1))\n\n if(ds.lat.values[0] > ds.lat.values[-1]):\n ds['lat'] = np.flipud(ds['lat'])\n ds[var].values = np.flip(ds[var], axis=1)\n\n # For 20CRv2c snow cover\n if(var=='snowc'): \n ds[var] = ds[var]/100.\n ds[var] = ds[var].where(ds[var]>=0.5, other=0.0)\n ds[var] = ds[var].where(ds[var] <0.5, other=1.0)\n \n # For HadISST1\n if((var=='sst')|(var=='sic')): \n mask = ds[var].values == -1000.\n ds[var].values[mask] = np.nan\n \n if( area=='europe'): ds = ds.squeeze().sel(lat=slice( 33,73), lon=slice(-12,40)) \n elif(area=='westeu'): ds = ds.squeeze().sel(lat=slice(42,59), lon=slice(-10,17))\n elif(area=='easeur'): ds = ds.squeeze().sel(lat=slice(38,56), lon=slice(17,43))\n elif(area=='meditr'): ds = ds.squeeze().sel(lat=slice(30,45), lon=slice(0,25))\n elif(area=='scandi'): ds = ds.squeeze().sel(lat=slice( 55,71), lon=slice( 4,34)) \n elif(area=='norhem'): ds = ds.squeeze().sel(lat=slice(-10,87)) \n elif(area=='norpol'): ds = ds.squeeze().sel(lat=slice( 50,87))\n else: ds = ds.squeeze()\n \n return ds", "def filter_rare_genes(data, *extra_data, cutoff=0, min_cells=5):\n gene_sums = measure.gene_capture_count(data, cutoff=cutoff)\n keep_genes_idx = gene_sums >= min_cells\n data = select.select_cols(data, *extra_data, idx=keep_genes_idx)\n return data", "def custom_range_check_climatology(gdf_of_interest,\n beta = 1,\n type_of_check = \"Temperature\", \n add_columns = True): \n \n if(type_of_check == \"Temperature\"):\n # check if the Climate Normals have been added to the gdf of interest\n # if they have not then add them\n if((\"Mean Monthly Min Temp\" not in gdf_of_interest) or \n (\"Mean Monthly Max Temp\" not in gdf_of_interest) or \n (\"Mean Monthly Temp\" not in gdf_of_interest)):\n add_temp_clim_normals(gdf_of_interest)\n \n\n # Pull out the observed values, mean values and the climate normals\n observed_values = np.array(gdf_of_interest[\"Air Temperature\"])\n mean_values = np.array(gdf_of_interest[\"Mean Monthly Temp\"])\n lower_limit = np.array(gdf_of_interest[\"Mean Monthly Min Temp\"])\n upper_limit = 
np.array(gdf_of_interest[\"Mean Monthly Max Temp\"])\n \n # Shift the climate normals by the mean so they are centred on zero\n lower_limit_shifted = lower_limit - mean_values\n upper_limit_shifted = upper_limit - mean_values\n \n # Rescale the allowable range by multiplying the shifted limits by beta\n lower_limit_rescaled = beta * lower_limit_shifted\n upper_limit_rescaled = beta * upper_limit_shifted\n \n # Shift the climate normals back to their original positions centered around the mean\n adjusted_lower_limit = lower_limit_rescaled + mean_values\n adjusted_upper_limit = upper_limit_rescaled + mean_values\n \n # Add the Adjusted limits to the gdf_of_interest\n if(add_columns):\n lower_lim_col_name = \"Adjusted Lower Limit (beta =\" + str(beta) + \")\"\n upper_lim_col_name = \"Adjusted Upper Limit (beta =\" + str(beta) + \")\"\n gdf_of_interest[lower_lim_col_name] = adjusted_lower_limit\n gdf_of_interest[upper_lim_col_name] = adjusted_upper_limit \n \n \n # Check to see if the observed value is within the adjusted range\n flags = (observed_values < adjusted_lower_limit) | (observed_values > adjusted_upper_limit)\n # Convert from True/False to 1/0\n flags = flags.astype(int)\n \n \n \n \n # On advisement from Noel Fitzpatrick the rainfall check has been simplified to simply check if more than\n # a months average rainfall has fallen so far on the day of interest (note that the time of interest should\n # be close to midnight). I have also included the option to scale the months average rainfall by the factor\n # beta in case we want to make the check more/less strict.\n \n if(type_of_check == \"Rainfall\"):\n # check if the Climate Normals have been added to the gdf of interest\n # if they have not then add them\n if(\"Mean Monthly Rainfall\" not in gdf_of_interest):\n add_rain_clim_normals(gdf_of_interest)\n \n \n # Pull out the observed values and the climate normals\n observed_values = np.array(gdf_of_interest[\"Rainfall Accumulation\"])\n mean_monthly_rainfall = np.array(gdf_of_interest[\"Mean Monthly Rainfall\"])\n \n # Scale the mean monthly rainfall by a factor of beta\n adjusted_mean_monthly_rainfall = beta * mean_monthly_rainfall\n \n # Add the Adjusted Monthly Rainfall to the gdf_of_interest\n if(add_columns):\n adjusted_mean_col_name = \"Adjusted Monthly Rainfall (beta =\" + str(beta) + \")\"\n gdf_of_interest[adjusted_mean_col_name] = adjusted_mean_monthly_rainfall\n \n \n # Check to see if the observed value is greater than the scaled monthly rainfall\n flags = (observed_values > adjusted_mean_monthly_rainfall)\n # Convert from True/False to 1/0\n flags = flags.astype(int)\n \n \n return flags", "def spectral_check(self, ):\r\n a, b = self.dfa, self.dfm.copy()\r\n b['ts_a']=a.ts\r\n b['flux_a'] = a.flux\r\n b['dflux'] = (b.flux-b.flux_a)/b.flux_unc\r\n b['eflux100_a'] = a.eflux100\r\n b['deflux'] = (b.eflux100-b.eflux100_a)/b.eflux100_unc\r\n b['pindex_a'] = a.pindex\r\n b['gdelta'] = (b.pindex-b.pindex_a)/b.pindex_unc\r\n self.dfm = b # since copy\r\n\r\n fig,axx = plt.subplots(1,2, figsize=(10,5), sharey=True)\r\n hkw = dict(bins=np.linspace(-5,5,51), histtype='step', lw=2, density=True)\r\n\r\n cut = (b.ts>50) & ~pd.isnull(b.deflux) & ~pd.isnull(b.gdelta) &\\\r\n (b.modelname==\"LogParabola\") & (b.pindex<3) & (b.pindex>0.5) &\\\r\n (b.e0>500) &(b.eflux100_unc>0) &(b.pindex_unc>0)\r\n self.check_total = sum(cut)\r\n for ax, title, val in zip(axx.flatten(), ['Energy Flux', 'Spectral index'], [b.deflux, b.gdelta]): \r\n\r\n df=val[cut]\r\n ax.hist(df.clip(-5,5), 
label='mean {:5.2f}\\nstd {:5.2f}'.format(df.mean(),df.std()), **hkw);\r\n ax.grid(alpha=0.5); \r\n x=np.linspace(-4,4)\r\n ax.plot(x, stats.norm.pdf(x), '--g' );\r\n ax.set(xlabel='normalized fit deviation', title=title, )\r\n ax.legend(loc='upper left',prop=dict(family='monospace'))\r\n fig.suptitle('Normalized devations of fit from model', fontsize=16);\r\n\r\n return fig", "def quality(self): \n\n subsetInt = [int(s) for s in self.subset.split() if s.isdigit()]\n columnNames = [] \n for i in range(len(subsetInt)):\n if subsetInt[i] == 1:\n columnNames.append(self.varNames[i])\n\n #qualityBand number of subset\n q = columnNames.index('Quality') \n\n if subsetInt[self.qualityBand] == 1:\n dataCount = self.subset.count('1')\n QC = np.repeat(self.DC[:,q].reshape((self.DC.shape[0],1)), dataCount-1, axis = 1)\n if self.dataset == 'MOD09A1.005' or self.dataset == 'MOD13Q1.005':\n QC = np.uint16(QC)\n else:\n QC = np.uint8(QC)\n\n QCm = QC & 1 #flips DCm mask\n DCm = np.delete(self.DC, q, 1) #looks good\n \n DCm = np.ma.masked_where(QCm == 1, DCm)\n DCm = np.ma.masked_where(DCm == 9999.0, DCm) \n \n if len(self.tiles) > 1:\n obs = self.observations/len(self.tiles)\n if len(self.tiles) == 1:\n obs = self.observations/2\n \n outArray = np.empty(shape = (self.rows*self.columns*obs, 0))\n for b in range(0, self.DC.shape[1]-1):\n cfull = DCm[:,b].reshape((self.observations, self.rows, self.columns))\n b16 = np.empty(shape = (self.rows*self.columns*obs, 0))\n for band in range(0,cfull.shape[0],2):\n c16 = np.ma.mean(cfull[band:band+1,:,:], axis=0)\n c16f = np.ma.filled(c16, 9999.0).astype(float).reshape((self.rows*self.columns))\n b16 = np.append(b16, c16f)\n outArray = np.append(outArray, b16.reshape((obs*self.rows*self.columns, 1)), axis = 1)\n \n self.finalDC = outArray\n \n np.save(str(self.directory) + '/' + self.dataset + '.npy', self.finalDC)\n del outArray, QC, DCm\n\n outfile = str(self.directory) + '/' + self.dataset + '.txt'\n f = open(outfile, 'w')\n for name in columnNames:\n if name != 'Quality':\n f.write(name + '\\n')\n var = [a for a in columnNames if not a.startswith('Quality')]\n logger.log('SUCCESS', 'The final 16-day interval quality-masked matrix was created successfully. This matrix has dimensions %d rows by %d columns. Datasets included in the matrix are %s' % (self.finalDC.shape[0], self.finalDC.shape[1], var))\n \n \n if subsetInt[self.qualityBand] != 1:\n cleanDC = np.delete(self.DC, q, 1)\n \n \n if len(self.tiles) > 1:\n obs = self.observations/len(self.tiles)\n if len(self.tiles) == 1:\n obs = self.observations/2\n \n outArray = np.empty(shape = (self.rows*self.columns*obs, 0))\n for b in range(cleanDC.shape[1]):\n cfull = cleanDC[:,b].reshape((self.observations, self.rows, self.columns))\n b16 = np.empty(shape=(self.rows*self.columns*obs))\n for band in range(cfull.shape[0]):\n c16 = np.mean(cfull[band:band+1,:,:], axis=0)\n band16 = np.append(b16, c16, axis=0)\n outArray = np.append(outArray, b16.reshape((obs*self.rows*self.columns, 1)), axis = 1)\n\n np.save(self.directory + '/' + self.dataset + '.npy', self.finalDC)\n del cleanDC, outArray\n \n outfile = self.directory + '/' + self.dataset + '.txt'\n f = open(outfile, 'w')\n for name in columnNames:\n if name != 'Quality':\n f.write(str(name) + ' \\n')\n var = [a for a in columnNames if not a.startswith('Quality')]\n logger.log('SUCCESS', 'The final 16-day interval matrix was created successfully. A quality mask was not applied, though remaining no data values are set at 9999. 
This matrix has dimensions %d rows by %d columns. Datasets included in the matrix are %s' % (self.finalDC.shape[0], self.finalDC.shape[1], var))", "def add_column_filter(source, args, index):\n include_tags = hxl.TagPattern.parse_list(args.get('cut-include-tags%02d' % index, []))\n exclude_tags = hxl.TagPattern.parse_list(args.get('cut-exclude-tags%02d' % index, []))\n skip_untagged = args.get('cut-skip-untagged%02d' % index, False)\n if include_tags:\n source = source.with_columns(include_tags)\n if exclude_tags or skip_untagged:\n source = source.without_columns(exclude_tags, skip_untagged=skip_untagged)\n return source", "def init_fva_constraints(mod_, opt_frac=0.1, pfba_fact=1.5, verbose=True):\n if verbose==True:\n print(\"...constraining the base cobra model with FVA + pfba constraint\")\n mod = mod_.copy()\n fva_df = flux_variability_analysis(mod, fraction_of_optimum=opt_frac, pfba_factor=pfba_fact)\n for rxn, row in fva_df.iterrows():\n if abs(row[\"maximum\"] - row[\"minimum\"]) > 1e-09:\n mod.reactions.get_by_id(rxn).lower_bound = row[\"minimum\"]\n mod.reactions.get_by_id(rxn).upper_bound = row[\"maximum\"]\n return mod, fva_df", "def front_column_model_p_gain():", "def cut_standard(X, VARS, xcorr_flow=False):\n \n # Fiducial cuts\n #MINPT = 0.5\n #MAXETA = 2.4\n MINPT = 0.7\n MAXETA = 1.5\n \n \n # Construct cuts\n cuts = []\n names = []\n\n #\n cuts.append( X[:,VARS.index('has_gsf')] == True )\n names.append(f'has_gsf == True')\n #\n cuts.append( X[:,VARS.index('gsf_pt')] > MINPT )\n names.append(f'gsf_pt > {MINPT:0.2f}')\n #\n cuts.append( np.abs(X[:,VARS.index('trk_eta')]) < MAXETA )\n names.append(f'|gsf_eta| < {MAXETA:0.2f}')\n #\n #cuts.append( [(len(X[i,VARS.index('image_clu_eta')]) is not 0) for i in range(X.shape[0])] )\n #names.append(f'len(image_clu_eta) != 0')\n \n \n ind = aux.apply_cutflow(cut=cuts, names=names, xcorr_flow=xcorr_flow)\n return ind", "def populate_sell_trend(dataframe: DataFrame, metadata: dict) -> DataFrame:\n conditions = []\n\n conditions.append(\n ((dataframe['bull'] > 0) & (dataframe['rsi'] > params['bull-sell-rsi-value'])) |\n (~(dataframe['bull'] > 0) & (dataframe['rsi'] > params['bear-sell-rsi-value']))\n )\n\n conditions.append(dataframe['volume'] > 0)\n\n dataframe.loc[\n reduce(lambda x, y: x & y, conditions),\n 'sell'] = 1\n\n return dataframe", "def plot_hist_snfit_meta(self): \n \n self.read_meta()\n self.read_snfit_results()\n\n \n self.diff_x0 = []\n self.diff_x0_err = []\n self.diff_x1 = []\n self.diff_x1_err = [] \n self.diff_c = []\n self.diff_c_err = [] \n self.diff_mb = []\n self.diff_mb_err = [] \n self.diff_cov_x0_x1 = []\n self.diff_cov_x0_c = []\n self.diff_cov_x1_c = []\n self.diff_cov_mb_x1 = []\n self.diff_cov_mb_c = []\n\n for i in range (len(self.sn_name)):\n for j in range (len(self.meta_sn_name_list)):\n if self.sn_name[i] == self.meta_sn_name_list[j]:\n if np.abs(self.mb[i] - self.meta_mb[j]) < 0.0001:\n self.diff_x0.append(self.x0[i] - self.meta_x0[j])\n self.diff_x0_err.append(self.x0_err[i] - self.meta_x0_err[j])\n self.diff_x1.append(self.x1[i] - self.meta_x1[j])\n self.diff_x1_err.append(self.x1_err[i] - self.meta_x1_err[j]) \n self.diff_c.append(self.c[i] - self.meta_c[j])\n self.diff_c_err.append(self.c_err[i] - self.meta_c_err[j]) \n self.diff_mb.append(self.mb[i] - self.meta_mb[j])\n self.diff_mb_err.append(self.mb_err[i] - self.meta_mb_err[j])\n# self.diff_cov_x0_x1.append()\n# self.diff_cov_x0_c.append()\n# self.diff_cov_x1_c.append()\n# self.diff_cov_mb_x1.append()\n# self.diff_cov_mb_c.append()\n 
else:\n print self.x1[i] - self.meta_x1[j], self.sn_name[i],self.meta_sn_name_list[j], self.x1[i], self.meta_x1[j]\n\n\n fig = plt.figure(figsize=(8.,8.)) \n \n gs = gridspec.GridSpec(2, 1) #subplots ratio\n f, (ax0_1, ax0_2) = plt.subplots(2, sharex=True)\n f.subplots_adjust(hspace = 0.5)\n ax0_1 = plt.subplot(gs[0, 0])\n ax0_2 = plt.subplot(gs[1, 0])\n \n ax0_1.hist(self.diff_x0,25,label='$\\Delta$ X0')\n ax0_2.hist(self.diff_x0_err,25,label='$\\Delta$ X0 error')\n ax0_1.legend()\n ax0_2.legend()\n ax0_1.set_ylabel('N')\n ax0_2.set_ylabel('N')\n pdffile = '../sugar_analysis_data/results/x0_plot_meta_snfit.pdf'\n plt.savefig(pdffile, bbox_inches='tight')\n plt.show()\n \n gs = gridspec.GridSpec(2, 1) #subplots ratio\n f, (ax0_1, ax0_2) = plt.subplots(2, sharex=True)\n f.subplots_adjust(hspace = 0.5)\n ax0_1 = plt.subplot(gs[0, 0])\n ax0_2 = plt.subplot(gs[1, 0])\n \n ax0_1.hist(self.diff_x1,25,label='$\\Delta$ X1')\n ax0_2.hist(self.diff_x1_err,25,label='$\\Delta$ X1 error')\n ax0_1.legend()\n ax0_2.legend()\n ax0_1.set_ylabel('N')\n ax0_2.set_ylabel('N')\n pdffile = '../sugar_analysis_data/results/x1_plot_meta_snfit.pdf'\n plt.savefig(pdffile, bbox_inches='tight')\n plt.show()\n \n gs = gridspec.GridSpec(2, 1) #subplots ratio\n f, (ax0_1, ax0_2) = plt.subplots(2, sharex=True)\n f.subplots_adjust(hspace = 0.5)\n ax0_1 = plt.subplot(gs[0, 0])\n ax0_2 = plt.subplot(gs[1, 0])\n \n ax0_1.hist(self.diff_c,25,label='$\\Delta$ Color')\n ax0_2.hist(self.diff_c_err,25,label='$\\Delta$ Color error')\n ax0_1.legend()\n ax0_2.legend()\n ax0_1.set_ylabel('N')\n ax0_2.set_ylabel('N')\n pdffile = '../sugar_analysis_data/results/color_plot_meta_snfit.pdf'\n plt.savefig(pdffile, bbox_inches='tight')\n plt.show()\n\n gs = gridspec.GridSpec(2, 1) #subplots ratio\n f, (ax0_1, ax0_2) = plt.subplots(2, sharex=True)\n f.subplots_adjust(hspace = 0.5)\n ax0_1 = plt.subplot(gs[0, 0])\n ax0_2 = plt.subplot(gs[1, 0])\n \n ax0_1.hist(self.diff_mb,50,label='$\\Delta$ mb')\n ax0_2.hist(self.diff_mb_err,50,label='$\\Delta$ mb error')\n ax0_1.legend()\n ax0_2.legend()\n ax0_1.set_ylabel('N')\n ax0_2.set_ylabel('N')\n pdffile = '../sugar_analysis_data/results/mb_plot_meta_snfit.pdf'\n plt.savefig(pdffile, bbox_inches='tight')\n plt.show()", "def filter_contigs(self, criteria):\n eligible_contigs = self.passed.contigs[self.passed.contigs > 10]\n not_enough_contigs = self.passed.contigs[self.passed.contigs <= 10]\n # TODO Define separate function for this\n med_abs_dev = abs(eligible_contigs - eligible_contigs.median()).mean()\n self.med_abs_devs[\"contigs\"] = med_abs_dev\n # Define separate function for this\n # The \"deviation reference\"\n dev_ref = med_abs_dev * self.contigs\n self.dev_refs[\"contigs\"] = dev_ref\n self.allowed[\"contigs\"] = eligible_contigs.median() + dev_ref\n self.failed[\"contigs\"] = eligible_contigs[\n abs(eligible_contigs - eligible_contigs.median()) > dev_ref\n ].index\n eligible_contigs = eligible_contigs[\n abs(eligible_contigs - eligible_contigs.median()) <= dev_ref\n ]\n eligible_contigs = pd.concat([eligible_contigs, not_enough_contigs])\n eligible_contigs = eligible_contigs.index\n self.passed = self.passed.loc[eligible_contigs]", "def do_qso_split(cat, subdir):\n #Check z_qso split\n oldcond = cat.condition\n high_z = (2.5,3.0,3.5,5.0)\n low_z = (2.0,2.5,3.0,3.5)\n for (high_z_qso, z_qso_split) in zip(high_z, low_z):\n cat.condition = (cat.z_max() < high_z_qso)*(cat.z_max() > z_qso_split)\n cat.plot_omega_dla(label=\"$\"+str(high_z_qso)+\" > z_\\mathrm{QSO} > 
\"+str(z_qso_split)+\"$\")\n plt.ylim(ymin=0)\n plt.legend(loc=0)\n save_figure(path.join(subdir,\"omega_gp_zqso\"+str(cat.lowzcut)))\n plt.clf()\n\n for (high_z_qso, z_qso_split) in zip(high_z, low_z):\n cat.condition = (cat.z_max() < high_z_qso)*(cat.z_max() > z_qso_split)\n cat.plot_line_density(label=\"$\"+str(high_z_qso)+\" > z_\\mathrm{QSO} > \"+str(z_qso_split)+\"$\")\n plt.ylim(ymin=0,ymax=0.15)\n plt.legend(loc=0)\n save_figure(path.join(subdir,\"dndx_gp_zqso\"+str(cat.lowzcut)))\n plt.clf()\n cat.condition = oldcond", "def plot_colour_colour_diagram(params,star_catalog,catalog_header,\n target, source, blend, RC,\n det_idx,cat_idx,close_cat_idx,log):\n\n def calc_colours(g_lc,r_lc,i_lc):\n\n idx1 = np.where( g_lc['mag_err'] > 0.0 )[0]\n idx2 = np.where( r_lc['mag_err'] > 0.0 )[0]\n idx3 = np.where( i_lc['mag_err'] > 0.0 )[0]\n idx = set(idx1).intersection(set(idx2))\n idx = list(idx.intersection(set(idx3)))\n\n gr = g_lc['mag'][idx] - r_lc['mag'][idx] - RC.Egr\n gr_err = np.sqrt(g_lc['mag_err'][idx]**2 + r_lc['mag_err'][idx]**2)\n ri = r_lc['mag'][idx] - i_lc['mag'][idx] - RC.Eri\n ri_err = np.sqrt(r_lc['mag_err'][idx]**2 + i_lc['mag_err'][idx]**2)\n\n return gr, gr_err, ri, ri_err\n\n\n add_source_trail = False\n add_target_trail = True\n add_blend = True\n add_source = True\n add_crosshairs = True\n add_blend = True\n add_rc_centroid = True\n\n tol = 2.0\n\n filters = { 'ip': 'SDSS-i', 'rp': 'SDSS-r', 'gp': 'SDSS-g' }\n\n try:\n\n inst_i = star_catalog['cal_ref_mag_ip'][det_idx]\n inst_r = star_catalog['cal_ref_mag_rp'][det_idx]\n inst_g = star_catalog['cal_ref_mag_gp'][det_idx]\n inst_gr = inst_g - inst_r - RC.Egr\n inst_ri = inst_r - inst_i - RC.Eri\n\n linst_i = star_catalog['cal_ref_mag_ip'][close_cat_idx]\n linst_r = star_catalog['cal_ref_mag_rp'][close_cat_idx]\n linst_g = star_catalog['cal_ref_mag_gp'][close_cat_idx]\n lcal_i = star_catalog['imag'][close_cat_idx]\n lcal_r = star_catalog['rmag'][close_cat_idx]\n lcal_g = star_catalog['gmag'][close_cat_idx]\n linst_gr = linst_g - linst_r - RC.Egr\n linst_ri = linst_r - linst_i - RC.Eri\n\n fig = plt.figure(1,(10,10))\n\n ax = plt.axes()\n\n ax.scatter(inst_gr, inst_ri,\n c='#E1AE13', marker='.', s=1,\n label='Stars within ROME field')\n\n ax.scatter(linst_gr, linst_ri, marker='*', s=4, c='#8c6931',\n label='Stars < '+str(round(tol,1))+'arcmin of target')\n\n if source.gr_0 != None and source.ri_0 != None and add_source:\n\n plt.plot(source.gr_0, source.ri_0,'md',markersize=10, label='Source')\n\n if add_crosshairs:\n plot_crosshairs(fig,source.gr_0, source.ri_0,'m')\n\n if add_source_trail:\n g_lc = source.lightcurves['g']\n r_lc = source.lightcurves['r']\n i_lc = source.lightcurves['i']\n\n (sgr, sgr_err, sri, sri_err) = calc_colours(g_lc,r_lc,i_lc)\n\n plt.errorbar(sgr, sri, yerr = sri_err, xerr = sgr_err,\n color='m',marker='d',markersize=10, label='Source')\n\n if blend.gr_0 != None and blend.ri_0 != None and add_blend:\n #plt.plot(blend.gr_0, blend.ri_0,'bv',markersize=10, label='Blend')\n plt.errorbar(blend.gr_0, blend.ri_0,\n yerr = blend.sig_gr_0, xerr = blend.sig_ri_0,\n color='b',marker='v',markersize=10, label='Blend')\n\n if target.lightcurves['g'] != None and target.lightcurves['r'] != None\\\n and target.lightcurves['i'] != None and add_target_trail:\n\n g_lc = target.lightcurves['g']\n r_lc = target.lightcurves['r']\n i_lc = target.lightcurves['i']\n\n (tgr, tgr_err, tri, tri_err) = calc_colours(g_lc,r_lc,i_lc)\n\n plt.errorbar(tgr, tri, yerr = tri_err, xerr = tgr_err,\n color='k',marker='+',markersize=10, 
alpha=0.4,\n label='Blended target')\n\n plt.errorbar(target.gr_0, target.ri_0,\n yerr = target.sig_ri_0, xerr = target.sig_gr_0,\n color='k',marker='x',markersize=10)\n\n (spectral_type, luminosity_class, gr_colour, ri_colour) = spectral_type_data.get_spectral_class_data()\n\n plot_dwarfs = False\n plot_giants = True\n for i in range(0,len(spectral_type),1):\n\n spt = spectral_type[i]+luminosity_class[i]\n\n if luminosity_class[i] == 'V':\n c = '#8d929b'\n else:\n c = '#8d929b'\n\n if luminosity_class[i] == 'III' and plot_giants:\n\n plt.plot(gr_colour[i], ri_colour[i], marker='s', color=c,\n markeredgecolor='k', alpha=0.5)\n\n plt.annotate(spt, (gr_colour[i], ri_colour[i]-0.1),\n color='k', size=10, rotation=-30.0, alpha=1.0)\n\n if luminosity_class[i] == 'V' and plot_dwarfs:\n\n plt.plot(gr_colour[i], ri_colour[i], marker='s', color=c,\n markeredgecolor='k', alpha=0.5)\n\n plt.annotate(spt, (gr_colour[i],\n ri_colour[i]+0.1),\n color='k', size=10,\n rotation=-30.0, alpha=1.0)\n\n plt.xlabel('SDSS (g-r) [mag]')\n\n plt.ylabel('SDSS (r-i) [mag]')\n\n plot_file = path.join(params['red_dir'],'colour_colour_diagram.pdf')\n\n plt.axis([-1.0,2.0,-1.0,1.0])\n\n plt.grid()\n\n xticks = np.arange(-1.0,2.0,0.1)\n yticks = np.arange(-1.0,1.0,0.1)\n\n ax.set_xticks(xticks, minor=True)\n ax.set_yticks(yticks, minor=True)\n\n\n box = ax.get_position()\n ax.set_position([box.x0, box.y0 + box.height * -0.025,\n box.width, box.height * 0.95])\n\n l = ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.2), ncol=2)\n\n try:\n l.legendHandles[2]._sizes = [50]\n l.legendHandles[3]._sizes = [50]\n except IndexError:\n pass\n\n plt.rcParams.update({'legend.fontsize':18})\n plt.rcParams.update({'font.size':18})\n plt.rc('xtick', labelsize=18)\n plt.rc('ytick', labelsize=18)\n\n plt.savefig(plot_file,bbox_inches='tight')\n\n plt.close(1)\n\n log.info('Colour-colour diagram output to '+plot_file)\n\n except AttributeError:\n\n log.info('Warning: Insufficient data for colour-colour diagram')", "def azureml_main(frame1):\n import pandas as pd\n bins = [0, 2.5, 5, 7.5, 10]\n frame1['wind_cat'] = pd.cut(frame1['wind'], bins)\n return frame1", "def make_source_location_histogram_plots_uvis(data, file_name, ff, im, coordfile, \\\n filt, path_to_cleans=''):\n\tpylab.ion()\n\tif ff == 0:\n\t\tfig = pylab.figure()\n\t\tfig.subplots_adjust(wspace=0.4)\n\telse:\n\t\tpylab.clf()\n\t\t\n\txc,yc = np.loadtxt(coordfile, unpack=True, usecols = (0,1)) \n\t# plot #1 - object position\n\tsz=50.0\n\tx0=np.round(xc)-sz/2.\n\tx1=np.round(xc)+sz/2.\n\ty0=np.round(yc)-sz/2.\n\ty1=np.round(yc)+sz/2.\n\tax1 = pylab.subplot(1,2,1)\n\tax1.imshow(np.log10(im[y0:y1,x0:x1]),interpolation='nearest')\n\tax1.autoscale(axis='both',enable=False)\n\tax1.scatter([xc-x0-1.0], [yc-y0-1.0], marker='x', s=200., color='w')\n\tpylab.title('X = '+str(xc)+' Y = '+str(yc))\n\n\t# plot #2 - background histogram\n\ttmp_image=glob.glob(path_to_cleans + '*back.fits')[0]\n\tbackim = pyfits.getdata(tmp_image)\n\t#--measure back statistics (mean and mode via IRAF)\n\tinitback = iraf.imstatistics(tmp_image+'[0]', fields='mode,stddev', \\\n\t lower = -100, upper = 10000, nclip=7, \\\n\t lsigma=3.0, usigma=3.0, cache='yes', \\\n\t format='no',Stdout=1)\n\t#print 'initback:'\n\t#print initback\n\tif 'INDEF' not in initback[0]:\n\t\tllim = float(initback[0].split(' ')[0]) - 10.0*\\\n\t\t\t\tfloat(initback[0].split(' ')[1])\n\t\tulim = float(initback[0].split(' ')[0]) + 10.0*\\\n\t float(initback[0].split(' ')[1])\n\t\tbackstats=iraf.imstatistics(tmp_image+'[0]', 
fields='mean,mode', \\\n\t lower=llim, upper=ulim, nclip=7,lsigma=3.0, \\\n\t usigma=3.0, cache='yes', format='no',Stdout=1)\n\t\tbackmean=float(backstats[0].split(' ')[0])\n\t\tbackmode=float(backstats[0].split(' ')[1])\n\t\tfbackim= np.ndarray.flatten(backim)\n\t\tgd=np.where((fbackim > llim) & (fbackim < ulim))[0]\n\t\tbackmedian=meanclip(fbackim[gd],maxiter=7,return_median=1)[0]\n\n\t\tax2 = pylab.subplot(1,2,2)\n\t\tpylab.hist(fbackim[gd],log=True)\n\t\tpylab.ylim(0.5,600000)\n\t\tpylab.xlim(-20,20)\n\t\tpylab.plot([backmode,backmode],[0.5,600000],ls='-',color='red',\\\n\t label='mode')\n\t\tpylab.plot([backmedian,backmedian],[0.5,600000],ls='--',color='aqua',\\\n \t label='median')\n\t\tpylab.plot([backmean,backmean],[0.5,600000],ls=':',color='black',\\\n \t label='mean')\n\t\tpylab.legend(loc=2, handletextpad=0.0, borderpad=0.0, frameon=False, \\\n \t handlelength=1.)\n\t\tpylab.title('Histogram of Background Pixels')\n\t\tpylab.xlabel('Background [e-]')\n\t\tpylab.ylabel('Number of Objects')\n\t\tpylab.annotate('chip '+str(data[ff]['chip']), [0.77,0.95], \\\n \t xycoords='axes fraction')\n\t\tpylab.annotate(filt,[0.77,0.80],xycoords='axes fraction')\n\n\t\t\n\tpylab.savefig(file_name.split('.fits')[0]+'_srcloc.png')\n\tpylab.ioff()", "def quality_cut(df): \n\n quality_cut_components_columns = [\n 'glitch_time_cut',\n 'maintenance_cut',\n 'reset_cut',\n 'offset_ion_cut',\n 'chi2_heat_cut',\n 'chi2_ion_cut',\n ]\n \n truth_array = pd.Series(data=True, index=df.index)\n for col in quality_cut_components_columns:\n truth_array = truth_array & df[col]\n \n df['quality_cut'] = truth_array\n\n return None", "def recentering_convolution_example(sev_clause, en, log2, agg_log2=0, bs=1,\n freq_clause='poisson', remove_fuzz=False):\n df, ag = recentering_convolution(sev_clause, freq_clause, en, log2, bs, remove_fuzz)\n\n # update the ag object if agg_log2\n if agg_log2 == 0:\n # just plot\n fig, ax0 = plt.subplots(1, 1, figsize=(\n 3.5, 2.45), constrained_layout=True)\n\n df.plot(ax=ax0, c='C0')\n ax0.set(\n title=f'Shifted, centered, re-indexed aggregate\\nlog2={log2} buckets')\n bit = None\n\n elif agg_log2 > 0:\n ag.update(approximation='exact', log2=agg_log2, bs=bs, padding=0)\n qd(ag)\n print('-'*80)\n # percentiles - help determining log2 needed for hi freq calculation\n qd(pd.Series([ag.agg_sd, ag.q(0.001), ag.q(0.999),\n ag.q(0.999999) - ag.q(0.000001)],\n index=['std dev', 'p001', 'p999', 'range']))\n print('-'*80)\n\n # merge and compare\n fig, axs = plt.subplots(2, 3, figsize=(\n 3 * 3.5, 2 * 2.45), constrained_layout=True)\n ax0, ax1, ax2, ax3, ax4, ax5 = axs.flat\n\n df.plot(ax=ax0, c='C0')\n ax0.set(\n title=f'Shifted, centered, re-indexed aggregate\\nlog2={log2} buckets')\n bit = pd.concat((ag.density_df.p_total, df), axis=1, join='inner')\n bit.plot(ax=ax1)\n ax1.lines[1].set(ls='--')\n ax1.legend(loc='upper right')\n ax1.set(\n title=f'Shifted vs. agg object\\n'\n f'Linear scale; agg object log2 = {ag.log2}')\n bit.plot(ax=ax2, logy=True)\n ax2.lines[1].set(ls='--')\n ax2.legend(loc='upper right')\n ax2.set(title='Shifted vs. 
agg object\\nLog scale')\n\n abs_error = (bit.p_total - bit.a).abs()\n abs_error.plot(ax=ax4, c='C2', logy=True,\n title=f'Abs error\\n'\n f'Max {abs_error.max():.5g}; '\n f'Avg {abs_error.mean():.5g}')\n rel_error = abs_error / bit.p_total\n rel_error = rel_error.loc[bit.p_total > 1e-15]\n rel_error.plot(ax=ax5, c='C2', logy=True,\n title=f'Rel error p_total > 1e-15\\n'\n f'Max {rel_error.max():.5g}; '\n f'Avg {rel_error.mean():.5g}')\n ax3.remove()\n\n else:\n raise ValueError('log2 >= 0!')\n\n return df, ag, bit", "def populateSubPlot(df,\n eps=3,\n min_samples=50,\n fig=None, \n axs=None, \n row=None, \n col=None,\n title='Some Ward',\n img=Image.open('maps/map_detailed_723.jpeg')\n ):\n #assign labels to data and get unique labels\n db_labels, unique_labels = getLabels(df,\n eps=eps, \n min_samples=min_samples)\n \n #set the title\n axs[row,col].set_title(title)\n \n #slap image on background\n axs[row,col].imshow(img, extent=[0, 128, 0, 128])\n\n #for each label and color\n for label, color in zip(unique_labels, colors):\n #places where label matches\n label_arg = np.argwhere(db_labels==label).ravel()\n #reduced version of where labels occur\n df_label_cluster = df.iloc[label_arg]\n #add scatter to plot\n axs[row,col].scatter(df_label_cluster['x'],\n df_label_cluster['y'], label=str(label),\n color=color)", "def do_length_split(cat, subdir):\n #Check z_qso split\n oldcond = cat.condition\n high_z = (0.2,0.4,0.6,0.8,2)\n low_z = (0., 0.2, 0.4, 0.6, 0.8)\n z_diff = cat.z_max() - cat.z_min()\n for (high_z_qso, z_qso_split) in zip(high_z, low_z):\n cat.condition = (z_diff < high_z_qso)*(z_diff > z_qso_split)\n cat.plot_omega_dla(label=str(high_z_qso)+\" > zQSO > \"+str(z_qso_split))\n plt.ylim(ymin=0)\n plt.legend(loc=0)\n save_figure(path.join(subdir,\"omega_gp_zdiff\"))\n plt.clf()\n\n for (high_z_qso, z_qso_split) in zip(high_z, low_z):\n cat.condition = (z_diff < high_z_qso)*(z_diff > z_qso_split)\n cat.plot_line_density(label=str(high_z_qso)+\" > zQSO > \"+str(z_qso_split))\n plt.ylim(ymin=0,ymax=0.1)\n plt.legend(loc=0)\n save_figure(path.join(subdir,\"dndx_gp_zdiff\"))\n plt.clf()\n cat.condition = oldcond", "def get_pre_df(temp_pre_df):\n \n event_time_max = temp_pre_df['event_time'].max()\n cat_dfs = []\n for num in np.arange(0,(1080/2)+1,30)[1:]:\n # making <= null i.e keeping >\n temp_pre_df.loc[temp_pre_df['event_time'] <= int(num), 'event_time'] = np.nan\n for col in ['event_name', 'specialty', 'plan_type']:\n cat_df = temp_pre_df.groupby([\"id\", col]).agg({\"event_time\": 'count'}).unstack(level=col)\n cat_df = cat_df/(event_time_max-num)\n cat_df.columns = ['__'.join(['normChange', col, name, str(int(num))]) for name in cat_df.columns.droplevel()]\n cat_dfs.append(cat_df)\n pre_df = pd.concat(cat_dfs, axis = 1) \n return pre_df.fillna(0)", "def select_data(self, abundance=-8.5, opr=1, temperature=20, tolerance=0.1):\n key = (abundance, opr, temperature, tolerance)\n if key in self._datacache:\n return self._datacache[key]\n else:\n radtab = self.radtab # shorthand\n #tolerance = {-10:0.1, -9.5: 0.3, -9: 0.1, -8:0.1, -8.5: 0.3}[abundance]\n OKtem = radtab['Temperature'] == temperature\n if opr is not None:\n OKopr = radtab['opr'] == opr\n else:\n OKopr = True\n OKabund = np.abs((radtab['log10col'] - radtab['log10dens'] - np.log10(3.08e18)) - abundance) < tolerance\n OK = OKtem * OKopr * OKabund\n\n tau1x = radtab['TauLow'][OK]\n tau2x = radtab['TauUpp'][OK]\n dens = radtab['log10dens'][OK]\n col = radtab['log10col'][OK]\n\n self._datacache[key] = tau1x,tau2x,dens,col\n\n 
return tau1x,tau2x,dens,col", "def main():\n housing = pd.read_csv(\"Data/train_original.csv\")\n housing[\"TotalSF\"] = (\n housing[\"TotalBsmtSF\"] + housing[\"1stFlrSF\"] + housing[\"2ndFlrSF\"]\n )\n training_features, testing_features, training_target, testing_target = impute_dummify_and_split(\n housing, drop_target=False\n )\n\n p_values = [\n (c, pearsonr(training_features[\"SalePrice\"], training_features[c])[1])\n for c in training_features.columns\n ]\n\n p_value_limits = [0.05]\n\n result = []\n ps_and_cols = {}\n\n for p_value_limit in p_value_limits:\n\n high_ps = list(\n map(lambda t: t[0], sorted(p_values, key=lambda t1: t1[1])[:15])\n )\n\n print(training_features[high_ps].corr())\n\n columns = [p[0] for p in p_values if p[1] < p_value_limit]\n\n training_features_restricted = training_features[columns].drop(\n \"SalePrice\", axis=\"columns\"\n )\n\n testing_features_restricted = testing_features[columns].drop(\n \"SalePrice\", axis=\"columns\"\n )\n\n for model in (\n linear_model.Lasso(alpha=2.1),\n linear_model.Ridge(alpha=2.1),\n ):\n\n model.fit(training_features_restricted, training_target)\n\n train_score = model.score(\n training_features_restricted, training_target\n )\n\n test_score = model.score(\n testing_features_restricted, testing_target\n )\n\n name = str(model).split(\"(\")[0]\n\n result = result + [\n (\n \"_2_restrict_features\",\n name,\n \"p value limit: {:.3f}, alpha: 2.1\".format(p_value_limit),\n train_score,\n test_score,\n )\n ]\n\n print(ps_and_cols)\n return training_features[high_ps].corr()", "def __init__(self, ministry, central_only=False, zbins=None, magbins=None,\n catalog_type=['galaxycatalog'], tag=None, CMASS=False,\n lightcone=True, **kwargs):\n\n if zbins is None:\n zbins = np.linspace(ministry.minz, ministry.maxz, 5)\n\n if magbins is None:\n magbins = np.linspace(-25, -16, 50)\n\n self.lightcone = lightcone\n\n MagnitudeMetric.__init__(self, ministry, zbins=zbins, magbins=magbins,\n catalog_type=catalog_type, tag=tag, **kwargs)\n\n self.central_only = central_only\n self.CMASS = CMASS\n \n if central_only:\n self.mapkeys = ['luminosity', 'central']\n else:\n self.mapkeys = ['luminosity']\n\n if self.lightcone:\n self.mapkeys.append('redshift')\n\n if self.CMASS:\n self.mapkeys.append('appmag')\n\n self.aschema = 'galaxyonly'\n\n self.lumcounts = None" ]
[ "0.5334454", "0.5270723", "0.526012", "0.5255001", "0.51794606", "0.51098704", "0.50803125", "0.5008346", "0.49690634", "0.49599707", "0.49506727", "0.49429023", "0.49181986", "0.49170044", "0.4914409", "0.48817256", "0.4856762", "0.48513559", "0.48380238", "0.48282054", "0.48239675", "0.48133162", "0.48073754", "0.47983837", "0.4748099", "0.47433808", "0.47298405", "0.47141623", "0.47028458", "0.4694439", "0.46906862", "0.46904612", "0.46888646", "0.46736932", "0.46593887", "0.465506", "0.46468487", "0.46454757", "0.4638631", "0.46379954", "0.46295074", "0.46272075", "0.4626315", "0.46155262", "0.4613108", "0.4605431", "0.46030536", "0.459683", "0.45908806", "0.4588492", "0.4586043", "0.45843136", "0.45815006", "0.45779133", "0.45769662", "0.45720252", "0.45686322", "0.45679513", "0.4567435", "0.45648304", "0.45642582", "0.45637193", "0.45621127", "0.45603827", "0.45588475", "0.45558718", "0.45542058", "0.45536506", "0.4552505", "0.4552159", "0.45478708", "0.45471144", "0.45461982", "0.4546154", "0.45457467", "0.45435634", "0.4542428", "0.45423612", "0.45407677", "0.45371512", "0.45347908", "0.4533234", "0.45327652", "0.45270228", "0.45253775", "0.452286", "0.4521537", "0.4516186", "0.45151412", "0.4514936", "0.4513795", "0.4511169", "0.45089334", "0.44983035", "0.44915238", "0.44887283", "0.44872016", "0.44860727", "0.44852647", "0.44845775" ]
0.5875681
0
Load and unpickle a pickled pandas.DataFrame from 'pkl_file'. Pickles should not be used because of version compatibility issues.
import pickle

def get_pickle(get_file='simbad_mag_errors.pkl'):
    print("Opening %s and unpickling the DataFrame.." % get_file)
    # Pickle files must be opened in binary mode.
    with open(get_file, 'rb') as opened_file:
        df_unpickled = pickle.load(opened_file)
    print("..Done")
    return df_unpickled
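Given the query's warning about pickle version compatibility, a minimal sketch of a more defensive loader follows. It assumes pandas is available; the Parquet companion file and its naming are hypothetical, and pd.read_parquet requires pyarrow or fastparquet to be installed.

import pickle
import pandas as pd

def load_dataframe(pkl_file='simbad_mag_errors.pkl'):
    # pd.read_pickle wraps pickle.load and tolerates some
    # cross-version pandas pickles better than raw pickle.load.
    try:
        return pd.read_pickle(pkl_file)
    except (pickle.UnpicklingError, ValueError, ModuleNotFoundError):
        # Hypothetical fallback: a version-stable Parquet copy saved
        # alongside the pickle, e.g. 'simbad_mag_errors.parquet'.
        return pd.read_parquet(pkl_file.replace('.pkl', '.parquet'))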
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_pkl(file_name):\n with open(file_name) as fp:\n data = pkl.load(fp)\n return data", "def load_pickle(args):\n with open(args.pickle_name, 'rb') as fh:\n datum = pickle.load(fh)\n\n df = pd.DataFrame.from_dict(datum['labels'])\n\n return df", "def load_pickle_data(filename):\n path = \"../tmp/{}.pckl\".format(filename)\n if os.path.exists(path):\n print(\"LOADING PCKL FILE FROM {}\".format(path))\n f = open(path, 'rb')\n obj = pickle.load(f)\n f.close()\n return obj", "def load_pkl_data(path):\n with open(path, 'rb') as fi:\n data = pickle.load(fi)\n return data", "def load_pkl_file(p):\n pkl_file = open(p, 'rb')\n obj = pickle.load(pkl_file)\n pkl_file.close()\n return obj", "def load_pkl_file(path):\n with open(path, 'rb') as pkl_file:\n return pickle.load(pkl_file)", "def readDataFromPickle(config, filename):\n path = config.picklePath + filename\n\n # if stopid in pickle variable and it is a dataframe-> sort columns\n return pd.read_pickle(path)", "def _read_pkl(self, input_file):\n data = pickle.load(open(input_file, 'rb'))\n return data", "def load_from_pkl(cls, path):\n with open(path, 'rb') as f:\n raw_result, skipped_gts, skipped_dets = pkl.load(f)\n return cls(raw_result, skipped_gts, skipped_dets)", "def load_pkl(path):\r\n f = open(path, 'rb')\r\n try:\r\n rval = cPickle.load(f)\r\n finally:\r\n f.close()\r\n return rval", "def from_pickle(cls, path):\n res = pd.read_pickle(path)\n instance = cls.from_pandas(res)\n\n return instance", "def load_pkl(self, name, file_object=None):\n if file_object:\n f = file_object\n else:\n f = gzip.open(name, 'rb')\n temp = pickle.load(f)\n if temp.ht_version < HT_OLD_COMPAT_VERSION:\n raise ValueError('Version of ' + name + ' is ' + str(temp.ht_version)\n + ' which is not at least ' +\n str(HT_OLD_COMPAT_VERSION))\n # assert temp.ht_version >= HT_COMPAT_VERSION\n params = temp.params\n self.hashbits = temp.hashbits\n self.depth = temp.depth\n if hasattr(temp, 'maxtimebits'):\n self.maxtimebits = temp.maxtimebits\n else:\n self.maxtimebits = _bitsfor(temp.maxtime)\n if temp.ht_version < HT_COMPAT_VERSION:\n # Need to upgrade the database.\n print(\"Loading database version\", temp.ht_version,\n \"in compatibility mode.\")\n # Offset all the nonzero bins with one ID count.\n temp.table += np.array(1 << self.maxtimebits).astype(np.uint32) * (\n temp.table != 0)\n temp.ht_version = HT_VERSION\n self.table = temp.table\n self.ht_version = temp.ht_version\n self.counts = temp.counts\n self.names = temp.names\n self.hashesperid = np.array(temp.hashesperid).astype(np.uint32)\n self.dirty = False\n self.params = params", "def load_pkl(fd_or_filename: Union[str, io.IOBase], **kwargs):\n with as_file_descriptor(fd_or_filename, 'rb') as f:\n try:\n return pickle.load(f, **kwargs)\n except UnicodeDecodeError:\n if 'encoding' in kwargs:\n raise\n return pickle.load(f, encoding='latin1', **kwargs)", "def pklload(path:str):\n pkl = pickle.load(open(path, 'rb'))\n return pkl", "def read(self):\n\n if self.filename.endswith(\".pkl\"):\n logging.debug(\"Loading pickle file %s\", self.filename)\n data = pd.read_pickle(self.filename)\n\n elif self.filename.endswith(\".hdf5\"):\n logging.debug(\"Loading HDF5 file %s\", self.filename)\n with h5py.File(self.filename, \"r\") as data_file:\n\n data = pd.DataFrame(\n {\n column: data_file[\"/haloTrees/%s\" % column].value\n for column in self.columns\n }\n ).set_index(\"nodeIndex\")\n\n # with open(\"./data/cache.pkl\", \"w\") as pickle_file:\n # data.to_pickle(pickle_file)\n\n else:\n raise 
TypeError(\"Unknown filetype %s\" % self.filename)\n\n return data", "def open_pickle(file):\n if file.split('.')[-1] != \"pkl\":\n file += \".pkl\"\n\n with open(file, 'rb') as f:\n return pickle.load(f)", "def load(pkl_filepath):\n try:\n with open(pkl_filepath, \"rb\") as f:\n return pickle.load(f)\n except Exception as e:\n logging.info(\"Could not load pickled SentMessagesDatabase, initializing new one %s\" % e)\n return SentMessagesDatabase()", "def pkl_load(name, path = 'obj'):\n if '.p' not in name:\n name = name + '.pkl'\n path = os.path.join(path, name)\n try:\n obj = pickle.load(open(path, 'rb'))\n except FileNotFoundError:\n obj = None\n return obj", "def pickle_load(path):\n try:\n data = pickle.load(open(path, \"rb\"))\n return data\n except UnpicklingError:\n unix_path = path.replace(\".pkl\", \"_unix.pkl\")\n try:\n data = pickle.load(open(unix_path, \"rb\"))\n return data\n except FileNotFoundError:\n path = _to_unix(path)\n data = pickle.load(open(path, \"rb\"))\n return data", "def to_pickel(pickle_name, dataframe, dir=\"../dataset\"):\n os.chdir(dir)\n pickle.dump(dataframe, open(pickle_name + '.pickle', 'wb'))", "def read_data(filename=DATAFILE):\n df = pd.read_pickle(filename)\n return df", "def _unpickle(filename):\n\n # Create full path for the file.\n file_path = _get_file_path(filename)\n\n print(\"Loading data: \" + file_path)\n\n with open(file_path, mode='rb') as file:\n # In Python 3.X it is important to set the encoding,\n # otherwise an exception is raised here.\n data = pickle.load(file, encoding='bytes')\n\n return data", "def load_pickle(file):\n with open(file, 'rb') as fh:\n datum = pickle.load(fh)\n\n return datum", "def load_pickle(filename):\n\n with open(filename, 'rb') as file:\n if filename.split('.')[-1] == 'dill':\n obj = dill.load(file)\n else:\n obj = pickle.load(file)\n return obj", "def load_pickle(path):\n with open(path, 'rb') as f:\n data = pickle.load(f)\n return data", "def load_data_set_from_pickle(file_name=None):\n if not file_name:\n try:\n file_name = max(glob.glob(os.path.join(__pickled_data_directory__, '*.chars74k-lite.gz')), key=os.path.getctime)\n except ValueError as e:\n log.error('Unable to load data set from file since no pickled files could be found, ')\n return None\n\n log.debug('Loading data set from file: %s' % file_name)\n return unpickle_data(file_name)", "def load_scm_object(pkl_file_path):\n with open(pkl_file_path, \"rb\") as file:\n return pickle.load(file)", "def pload(filename):\n return pickle.load(open(filename, 'rb'))", "def loadPickle(pickle_file):\n print(\"Loading pickle data from file: \"+pickle_file)\n\n data = None\n try:\n with open(pickle_file, \"rb\") as fd:\n data = pickle.load(fd)\n except EOFError:\n pass\n except pickle.UnpicklingError as upe:\n print(\"Failed: Loading Pickle Data\")\n except IOError:\n data = {}\n\n return data", "def load_data(path=DEFAULT_PICKLE_FILE, pickle=True):\n if type(path) is list:\n dfs = [load_data(p, pickle='%s.pickle' % os.path.basename(p))\n for p in path]\n return pd.concat(dfs)\n elif type(path) is str:\n print(\"Loading\", path)\n if os.path.isfile(path):\n df = pd.read_pickle(path)\n else:\n df = parse_directory(path)\n dataset = basename(normpath(path))\n PICKLE_FILE = join(DATA_DIR, '%s.pickle' % dataset)\n if pickle:\n pickle_path = PICKLE_FILE\n if type(pickle) is str:\n pickle_path = pickle\n print(\"Pickling to\", pickle_path)\n df.to_pickle(pickle_path)\n return df", "def load_lsh(filename):\n with open(filename, 'rb') as handle:\n return 
pickle.load(handle)", "def load_object(self, filename):\n with open(filename, 'rb') as inp: # Overwrites any existing file.\n data = pickle.load(inp)\n return data", "def load_data_pickle(PATH, dataset, filename):\n with open(PATH + '/' + dataset + \"_\" + filename + \".pkl\",\"rb\") as f:\n new_data = pickle.load(f)\n\n # print(filename, \"opened\")\n return new_data", "def load_pickle(path):\n assert osp.exists(path)\n # gc.disable()\n with open(path, 'rb') as f:\n ret = pickle.load(f)\n # gc.enable()\n return ret", "def load_pickle(path):\n assert osp.exists(path)\n with open(path, 'r') as f:\n ret = pickle.load(f)\n return ret", "def load_pickled_price_history(self, ticker: str) -> pd.DataFrame:\n try:\n price_history = pd.read_pickle(f\"../data/pickles/{ticker}.pkl\")\n return price_history\n except FileNotFoundError:\n print(f\"no pickle available for {ticker}; falling back to DB\")\n return None", "def load_pickle(filename):\n with open(filename, 'rb') as file:\n obj = pickle.load(file)\n return obj", "def _load_data(df, use_preprocessdata=False, save_path=None):\n if use_preprocessdata:\n try:\n with open(save_path, 'rb') as f:\n data = pickle.load(f)\n except: # noqa\n use_preprocessdata = False\n\n if not use_preprocessdata:\n data = []\n for i in range(len(df)):\n words, pronnoun_index = utils.charpos_to_word_index(df['Text'][i], df['Pronoun-offset'][i], df['Pronoun'][i].split()[0])\n _, A_index = utils.charpos_to_word_index(df['Text'][i], df['A-offset'][i], df['A'][i].split()[0], words=words)\n _, B_index = utils.charpos_to_word_index(df['Text'][i], df['B-offset'][i], df['B'][i].split()[0], words=words)\n data.append((words, [pronnoun_index, A_index, B_index]))\n with open(save_path, 'wb') as f:\n pickle.dump(data, f, protocol=pickle.HIGHEST_PROTOCOL)\n print(\"Data Loaded\")\n return data", "def load_pickle(path):\n with open(path, 'rb') as handle:\n return pickle.load(handle)", "def load_pickle(filename):\n with open(filename, \"rb\") as f:\n obj = pickle.load(f)\n\n return obj", "def load_data_pickle(self, load_full=False):\n self.train = pd.read_pickle('../input/train_mod.pkl')\n self.test = pd.read_pickle('../input/test_mod.pkl')\n if load_full:\n self.train_full = pd.read_pickle('../input/train_full_mod.pkl')", "def load_dataset(path):\n if '.h5' in str(path):\n dataframe = pd.read_hdf(path)\n elif '.pkl' in str(path):\n dataframe = pd.read_pickle(path)\n else:\n print('Wrong file')\n sys.exit()\n\n # Make it multiindex\n dataframe['event'] = dataframe.index\n dataframe = dataframe.set_index(['sample_nr', 'event'])\n dataframe = dataframe.reset_index('event', drop=True)\n dataframe = dataframe.set_index(dataframe.groupby(level=0).cumcount().rename('event'), append=True)\n\n return dataframe", "def unpickle(filename: str) -> dict:\n with open(os.path.join(f\"{ROOT_DIR}/dataset/\", filename), \"rb\") as file:\n dict = pickle.load(file, encoding=\"bytes\")\n return dict", "def load_from_pickle(self):\n if 'data_sets.pckl' in self.expected_pickles:\n self.data_sets = from_file(\n os.path.join(self.logdir, 'data_sets.pckl')\n )\n if 'all_params.pckl' in self.expected_pickles:\n self.all_params = from_file(\n os.path.join(self.logdir, 'all_params.pckl')\n )\n if 'minimiser_info.pckl' in self.expected_pickles:\n self.minimiser_info = from_file(\n os.path.join(self.logdir, 'minimiser_info.pckl')\n )\n if 'labels.pckl' in self.expected_pickles:\n self.labels = from_file(\n os.path.join(self.logdir, 'labels.pckl')\n )", "def load_pickle(filepath):\n logging.info('Loading object 
from pickle: {}'.format(filepath))\n with open(filepath, 'rb') as infile:\n return pickle.load(infile)", "def get_df(df_pkl=None):\n\tif not df_pkl:\n\t\tdata_df = get_df_from_psv()\n\telse:\n\t\tdata_df = pd.read_pickle(df_pkl)\n\tdata_df = data_df[data_df.content.notnull()]\n\treturn data_df", "def load_pickle(path):\n try:\n debug(\"trying to load pickle data\")\n with open(path, mode='rb') as file:\n debug(\"opened file %s for reading\", path)\n return pickle.load(file, encoding='utf-8')\n except (pickle.UnpicklingError, OSError) as err:\n debug(\"error in pickling from %s, error: %s\", path, err)\n return None", "def read_pkl(infile):\n # loading\n if infile is not None:\n logging.info('No pkl provided. Creating a new object')\n pkl = pickle.load(bz2.open(infile, 'r'))\n else:\n logging.info('Reading in: {}'.format(infile))\n # creating new object\n pkl = {'taxonomy' : {}, 'markers' : {}}\n return pkl", "def read_data_model(filename='data/data_model.pkl'):\n\n with open(filename, 'r') as pklfile:\n root = pkl.load(pklfile)\n\n return root", "def loadpickle(fln):\n if not os.path.exists(fln) and os.path.exists(fln + '.gz'):\n gzip = True\n fln += '.gz'\n else:\n try:\n with open(fln, 'rb') as fh:\n try: #Py3k\n return pickle.load(fh, encoding='latin1')\n except TypeError:\n return pickle.load(fh)\n except pickle.UnpicklingError: #maybe it's a gzip?\n gzip = True\n else:\n gzip = False\n if gzip:\n try:\n import zlib\n with open(fln, 'rb') as fh:\n stream = zlib.decompress(fh.read(), 16 + zlib.MAX_WBITS) \n try: #Py3k\n return pickle.loads(stream, encoding='latin1')\n except TypeError:\n return pickle.loads(stream)\n except MemoryError:\n import gzip\n with open(fln) as fh:\n gzh = gzip.GzipFile(fileobj=fh)\n try: #Py3k\n contents = pickle.load(gzh, encoding='latin1')\n except TypeError:\n contents = pickle.load(gzh)\n gzh.close()\n return contents", "def load_cleaned_data(self):\n try:\n self.train = pd.read_pickle('../input/train_clean.pkl')\n self.test = pd.read_pickle('../input/test_clean.pkl')\n except FileNotFoundError:\n self.load_raw_data()", "def pickle_from_file(fname):\n\ttry:\n\t\tfh = open(fname, 'r')\n\t\tdata = cPickle.load(fh)\n\t\tfh.close()\n\texcept:\n\t\t#raise\n\t\tprint \"Loading pickled data failed!\", sys.exc_info()[0]\n\t\tdata = None\n \n\treturn data", "def get_pkl(fname):\n global errtime\n while True:\n try:\n with open(fname, 'rb') as pkl_file:\n pkl_dict = _pickle.load(pkl_file)\n return pkl_dict\n except EOFError as e:\n print 'WARNING: EOFError in file', fname + '.'\n _time.sleep(errtime)", "def restoreData(filename='laue.dat'):\r\n import cPickle\r\n with open(filename, 'rb') as fp:\r\n return cPickle.load(fp)", "def load_df(\n file_name: str, mode: str = \"pandas\", save: bool = True, chunksize: int = 1_000_000\n) -> pd.DataFrame:\n\n file_path = os.path.join(DATA_PATH, file_name)\n\n if mode == \"bz2\":\n keys = [\"quoteID\", \"quotation\", \"speaker\", \"date\", \"numOccurrences\", \"phase\"]\n\n with bz2.open(file_path, \"rb\") as quote_file:\n df = pd.DataFrame(\n [\n dict(zip(keys, map(json.loads(instance).get, keys)))\n for instance in tqdm(quote_file)\n ]\n )\n else:\n if not save:\n print(\"Please enable save option.\")\n return\n\n with pd.read_json(file_path, lines=True, chunksize=chunksize) as df_reader:\n for i, chunk in enumerate(df_reader):\n file_name = file_name.strip(\".json.bz2\")\n pkl_path = os.path.join(PKL_PATH, f\"{file_name}-{i:03d}.pkl\")\n chunk.to_pickle(pkl_path)\n\n if save and not os.path.exists(pkl_path):\n file_name = 
file_name.strip(\".json.bz2\")\n df.to_pickle(os.path.join(PKL_PATH, pkl_path))\n\n return df", "def load_from_pickle(path: str):\r\n if not config.silent:\r\n logger.info(f'Load from {path}')\r\n with open(path, 'rb') as f:\r\n return pickle.load(f)", "def pickle_data(path=PATH_TO_RAW_DATA):\r\n\r\n files = os.listdir(path)\r\n xlsx_files = [path+\"./\"+f for f in files if f[-4:] == 'xlsx']\r\n\r\n print(\"Beginning to read excel sheets...will take a few minutes\")\r\n df_list = [pd.read_excel(f) for f in xlsx_files]\r\n master_df = pd.concat(df_list)\r\n\r\n master_df.to_pickle(path+\"./qc_data.pkl\")", "def read_pk(filename):\n with open(filename, 'rb') as fd:\n ret = pickle.load(fd)\n return ret", "def read_pickle_object_in_file(self):\n outobj = None\n if os.path.exists(self.pickle_file):\n with gzip.open(self.pickle_file, 'rb') as pkl_file:\n outobj = pickle.load(pkl_file)\n return outobj", "def load(file_, name='_pkl', use_cpickle=False):\n file_.seek(0) # To be able to read several objects in one file\n if use_cpickle:\n unpickler = cPickle.Unpickler\n else:\n unpickler = pickle.Unpickler\n with tarfile.open(fileobj=file_, mode='r') as tar_file:\n p = unpickler(\n tar_file.extractfile(tar_file.getmember(name)))\n if '_parameters' in tar_file.getnames():\n p.persistent_load = _PersistentLoad(tar_file)\n return p.load()", "def from_pickle(cls, picklefile):\n return cls(unpickle_file(picklefile))", "def load_data():\n with open('data.pickle', 'rb') as f:\n data = pickle.load(f)\n return data", "def _read_dataset(self, dataset_path):\n dataset = pd.read_pickle(dataset_path)\n return dataset", "def load_obj(name):\n with open('../../data/' + name + '.pkl', 'rb') as f:\n return pickle.load(f)", "def load_obj(name):\n with open('../../data/' + name + '.pkl', 'rb') as f:\n return pickle.load(f)", "def load(fname):\r\n with open(fname, 'rb') as f:\r\n data = pickle.load(f)\r\n return data", "def safe_pickle_load(file_name):\n try:\n f = open(file_name, \"r\")\n try:\n data = pickle.load(f)\n except EOFError:\n data = None\n finally:\n f.close()\n except IOError:\n data = None\n\n return data", "def loadPickle(filepath):\n\tf = open(filepath, 'rb')\n\tobj = pickle.load(f)\n\tf.close()\n\treturn obj", "def pickle_load(path):\n data = pickle.load(open(os.path.join(os.getcwd(), path), 'rb'))\n return data", "def load_data():\n dictionary = corpora.Dictionary.load(app.config['DICTIONARY'])\n matrix = similarities.MatrixSimilarity.load(app.config['MATRIX'])\n model = models.LsiModel.load(app.config['MODEL'])\n df = pd.read_pickle(app.config['DATA_FRAME'])\n return Data(matrix=matrix, model=model, dictionary=dictionary, data_frame=df)", "def from_pickle(pkl):\n assert os.path.exists(pkl), f\"{pkl} not exists\"\n with open(pkl, 'rb') as f:\n vocab = pickle.load(f)\n \n return vocab", "def load_pklgz(filename: str, **kwargs):\n with open_gz(filename, 'rb') as f:\n return load_pkl(f)", "def loadData(dataPathFile):\r\n if dataPathFile[-3:] == 'pkl':\r\n dataBaseDict = pickle.load(open(dataPathFile, 'rb'))\r\n return dataBaseDict\r\n else:\r\n raise Exception('File that is trying to be loaded is not a pickle file\\n')", "def load_pickle(path):\n with open(path, 'rb') as f:\n pickle_file = pickle.load(f)\n return pickle_file", "def load():\n return load_pandas()", "def read_pickle(fp, assertions=None, *args, **kwargs):\n\n return _verify_assertions(pd.read_pickle(fp, *args, **kwargs), assertions)", "def load_object(fpath):\r\n with open(fpath, 'rb') as i:\r\n return pickle.load(i)", "def 
load(filename):\n with open(filename, 'rb') as f:\n return pickle.load(f)", "def deserialize(self):\n with open(self.path+self.name, \"rb\") as pfile:\n dataSet = pickle.load(pfile)\n return dataSet", "def read_pickle(path):\n with open(path, \"rb\") as f:\n data = pickle.load(f)\n\n return data", "def load(filename, mmap_mode=None):\r\n with open(filename, 'rb') as file_handle:\r\n # We are careful to open the file handle early and keep it open to\r\n # avoid race-conditions on renames. That said, if data are stored in\r\n # companion files, moving the directory will create a race when\r\n # joblib tries to access the companion files.\r\n if _read_magic(file_handle) == _ZFILE_PREFIX:\r\n if mmap_mode is not None:\r\n warnings.warn('file \"%(filename)s\" appears to be a zip, '\r\n 'ignoring mmap_mode \"%(mmap_mode)s\" flag passed'\r\n % locals(), Warning, stacklevel=2)\r\n unpickler = ZipNumpyUnpickler(filename, file_handle=file_handle)\r\n else:\r\n unpickler = NumpyUnpickler(filename, file_handle=file_handle,\r\n mmap_mode=mmap_mode)\r\n\r\n try:\r\n obj = unpickler.load()\r\n finally:\r\n if hasattr(unpickler, 'file_handle'):\r\n unpickler.file_handle.close()\r\n return obj", "def loadStuff(path=None):\n\n if path == None:\n print(\"No path specified\")\n return\n\n try:\n pkl_file = open(path, 'rb')\n obj = cPickle.load(pkl_file)\n pkl_file.close()\n print('Data correctly loaded and returned')\n return obj\n\n except IOError as e:\n #print \"I/O error({0}):{1}\".format(e.errno, e.strerror)\n print('I/O error')\n except:\n print(\"Unexpected error\" % sys.exc_info()[0])\n raise", "def load_file(self, filename):\n with open(filename, \"rb\") as pickle_handle:\n return pickle.load(pickle_handle)", "def load(filename):\n with open(filename,'rb') as f:\n return pickle.load(self,f)", "def pickle_load(file_path):\n if not os.path.isfile(file_path):\n return None\n\n with open(file_path, 'rb') as f:\n return pickle.load(f)", "def to_pickles(df, path, split_size=3, inplace=True):\n print(f'shape: {df.shape}')\n \n if inplace==True:\n df.reset_index(drop=True, inplace=True)\n else:\n df = df.reset_index(drop=True)\n gc.collect()\n mkdir_p(path)\n \n kf = KFold(n_splits=split_size)\n for i, (train_index, val_index) in enumerate(tqdm(kf.split(df))):\n df.iloc[val_index].to_pickle(f'{path}/{i:03d}.p')\n return", "def load_pickle(filename):\n try:\n with open(filename, 'rb') as handle:\n obj = joblib.load(handle)\n return obj\n except:\n print(\"File {} not found. 
This is ok if about to generate cov matrices\".format(filename))\n return np.nan", "def from_pickle(input_path):\n with open(input_path, 'rb') as f:\n unpickler = pickle.Unpickler(f)\n return unpickler.load()", "def unpickle_data(file_name):\n infile = open(file_name, \"rb\")\n try:\n data = pickle.load(infile)\n except:\n data = {}\n infile.close()\n\n return data", "def load_from_disk(name):\n shortname = _dumpify(_compress_name(name) + '.pkl')\n print 'load_from_disk(%s)' % shortname\n pkl_file = open(shortname, 'rb')\n object = pickle.load(pkl_file)\n pkl_file.close()\n return object", "def _load_dataset(self, dataset_path, dataframe_path):\n # Load the DataFrame, if it was already pickled before\n if os.path.exists(dataframe_path):\n try:\n return pd.read_pickle(dataframe_path)\n except ValueError:\n pass\n\n # Check if the dataset has labels or not\n json_file = json.loads(open(dataset_path).read())\n if (\n len(\n pd.json_normalize(json_file, self.JSON_RECORD_PATH[:-1]).loc[\n 0, \"answers\"\n ]\n )\n > 0\n ):\n df = self._load_dataset_with_labels(json_file)\n else:\n df = self._load_dataset_no_labels(json_file)\n\n # Reset the index to [0, N]\n df = df.reset_index(drop=True)\n\n # Save the dataframe to a pickle file\n df.to_pickle(dataframe_path)\n\n return df", "def load_from_database(filename,key):\n\t# Opening file\n\tstore = pd.HDFStore(filename)\n\t# getting the df\n\tdata = store[key]\n\t# And its metadata\n\tmetadata = store.get_storer(key).attrs.metadata\n\tstore.close()\n\t# Ok returning the data now\n\treturn data, metadata", "def load_obj(saved_name):\n with open( saved_name + '.pkl', 'rb') as f:\n return pickle.load(f)", "def get_data(pkl_fname, label, sample, replicate, \n incl_curvature=False,\n load_attn1=None, load_attn2=None, \n modelpkl_fname1=None, modelpkl_fname2=None,\n preloadn2v=False,\n out_channels=8, heads=8, negative_slope=0.2, dropout=0.4, \n verbose=True):\n pdfp = os.path.split(pkl_fname)[0]\n \n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n \n if load_attn1 is None and load_attn2 is None and not incl_curvature and preloadn2v is None:\n\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[label]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n del datapkl # clear space\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n # load all edge_feat\n elif load_attn1 is not None and load_attn2 is not None and incl_curvature and preloadn2v is not None:\n # model for DATA EXTRACTION\n ## TODO: clean this up in some other script or fx\n\n # load proper label\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn1]\n if False:\n # 
assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname1\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn = model(d)\n\n del model\n\n # second attention\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn2]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n del datapkl # clear space\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname2\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn2 = model(d)\n\n # update labels\n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n labels = datapkl[label]\n if False:\n label_encoder = 
{v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'cond_labels_encoding.csv'))\n if False:\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels)\n\n # add other edge feats\n F_e = utils.forman_curvature(datapkl['adj'], verbose=True, plot=False)\n n2v = utils.node2vec_dot2edge(datapkl['adj'], \n os.path.join(pdfp,'{}_n2v_{}.txt'.format(sample.split('_')[0], os.path.split(pkl_fname)[1].split('.p')[0])),\n preloaded=preloadn2v)\n edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n torch.tensor(attn2, dtype=float),\n torch.tensor(utils.range_scale(F_e)).reshape(-1,1), \n torch.tensor(utils.range_scale(n2v)).reshape(-1,1)),dim=1)\n d = Data(x=d.x, edge_index=d.edge_index, edge_attr=edge_attr, y=labels)\n del model # extra clean\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n # only load attn1\n elif load_attn1 is not None and load_attn2 is None and not incl_curvature and preloadn2v is None:\n # model for DATA EXTRACTION\n ## TODO: clean this up in some other script or fx\n\n # load proper label\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn1]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n del datapkl # clear space\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname1\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn = model(d)\n\n # update labels\n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n labels = datapkl[label]\n if False:\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'cond_labels_encoding.csv'))\n if False:\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels)\n\n # add other edge feats\n# F_e = utils.forman_curvature(datapkl['adj'], verbose=True, plot=False)\n# n2v = 
utils.node2vec_dot2edge(datapkl['adj'], \n# os.path.join(pdfp,'{}_n2v_{}.txt'.format(sample.split('_')[0], os.path.split(pkl_fname)[1].split('.p')[0])),\n# preloaded=preloadn2v)\n# edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n# torch.tensor(utils.range_scale(F_e)).reshape(-1,1), \n# torch.tensor(utils.range_scale(n2v)).reshape(-1,1)),dim=1)\n edge_attr = torch.tensor(attn, dtype=float) \n d = Data(x=d.x, edge_index=d.edge_index, edge_attr=edge_attr, y=labels)\n del model # extra clean\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n\n # attn2 \n elif load_attn1 is None and load_attn2 is not None and not incl_curvature and preloadn2v is None:\n # second attention\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn2]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n del datapkl # clear space\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname2\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn2 = model(d)\n\n # update labels\n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n labels = datapkl[label]\n if False:\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'cond_labels_encoding.csv'))\n if False:\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels)\n\n # add other edge feats\n# F_e = utils.forman_curvature(datapkl['adj'], verbose=True, plot=False)\n# n2v = utils.node2vec_dot2edge(datapkl['adj'], \n# os.path.join(pdfp,'{}_n2v_{}.txt'.format(sample.split('_')[0], os.path.split(pkl_fname)[1].split('.p')[0])),\n# preloaded=preloadn2v)\n# edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n# torch.tensor(attn2, dtype=float),\n# torch.tensor(utils.range_scale(F_e)).reshape(-1,1), \n# torch.tensor(utils.range_scale(n2v)).reshape(-1,1)),dim=1)\n edge_attr = torch.tensor(attn2, dtype=float)\n d = Data(x=d.x, edge_index=d.edge_index, 
edge_attr=edge_attr, y=labels)\n del model # extra clean \n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n # curvature\n elif load_attn1 is None and load_attn2 is None and incl_curvature and preloadn2v is None:\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[label]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n # add other edge feats\n F_e = utils.forman_curvature(datapkl['adj'], verbose=True, plot=False)\n# n2v = utils.node2vec_dot2edge(datapkl['adj'], \n# os.path.join(pdfp,'{}_n2v_{}.txt'.format(sample.split('_')[0], os.path.split(pkl_fname)[1].split('.p')[0])),\n# preloaded=preloadn2v)\n# edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n# torch.tensor(attn2, dtype=float),\n# torch.tensor(utils.range_scale(F_e)).reshape(-1,1), \n# torch.tensor(utils.range_scale(n2v)).reshape(-1,1)),dim=1)\n edge_attr = torch.tensor(utils.range_scale(F_e)).reshape(-1,1)\n d = Data(x=node_features, edge_index=edge_index, edge_attr=edge_attr, y=labels)\n del node_features,edge_index,labels,edge_attr\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n # n2v\n elif load_attn1 is None and load_attn2 is None and not incl_curvature and preloadn2v is not None:\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[label]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n # add other edge feats\n# F_e = utils.forman_curvature(datapkl['adj'], verbose=True, plot=False)\n n2v = utils.node2vec_dot2edge(datapkl['adj'], \n os.path.join(pdfp,'{}_n2v_{}.txt'.format(sample.split('_')[0], os.path.split(pkl_fname)[1].split('.p')[0])),\n preloaded=preloadn2v)\n# edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n# torch.tensor(attn2, dtype=float),\n# torch.tensor(utils.range_scale(F_e)).reshape(-1,1), \n# torch.tensor(utils.range_scale(n2v)).reshape(-1,1)),dim=1)\n edge_attr = torch.tensor(utils.range_scale(n2v)).reshape(-1,1)\n d = Data(x=node_features, edge_index=edge_index, edge_attr=edge_attr, y=labels)\n del node_features,edge_index,labels,edge_attr\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n # attn1 + attn2\n elif load_attn1 is not None and load_attn2 is not None and not incl_curvature and preloadn2v is None:\n # model for DATA EXTRACTION\n ## TODO: clean this up in some other script or fx\n\n # load proper label\n node_features = 
datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn1]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname1\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn = model(d)\n\n del model\n\n # second attention\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn2]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n del datapkl # clear space\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname2\n 
model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn2 = model(d)\n\n # update labels\n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n labels = datapkl[label]\n if False:\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'cond_labels_encoding.csv'))\n if False:\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels)\n\n # add other edge feats\n edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n torch.tensor(attn2, dtype=float)),dim=1)\n d = Data(x=d.x, edge_index=d.edge_index, edge_attr=edge_attr, y=labels)\n del model # extra clean\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n # attn1 + attn2 + n2v\n elif load_attn1 is not None and load_attn2 is not None and not incl_curvature and preloadn2v is not None:\n # model for DATA EXTRACTION\n ## TODO: clean this up in some other script or fx\n\n # load proper label\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn1]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname1\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn = model(d)\n\n del model\n\n # second attention\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn2]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = 
torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n del datapkl # clear space\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname2\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn2 = model(d)\n\n # update labels\n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n labels = datapkl[label]\n if False:\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'cond_labels_encoding.csv'))\n if False:\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels)\n\n # add other edge feats\n n2v = utils.node2vec_dot2edge(datapkl['adj'], \n os.path.join(pdfp,'{}_n2v_{}.txt'.format(sample.split('_')[0], os.path.split(pkl_fname)[1].split('.p')[0])),\n preloaded=preloadn2v)\n edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n torch.tensor(attn2, dtype=float),\n torch.tensor(utils.range_scale(n2v)).reshape(-1,1)),dim=1)\n d = Data(x=d.x, edge_index=d.edge_index, edge_attr=edge_attr, y=labels)\n del model # extra clean\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n # attn1 + attn2 + curvature\n elif load_attn1 is not None and load_attn2 is not None and incl_curvature and preloadn2v is None:\n # model for DATA EXTRACTION\n ## TODO: clean this up in some other script or fx\n\n # load proper label\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn1]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, 
d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname1\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn = model(d)\n\n del model\n\n # second attention\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn2]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n del datapkl # clear space\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname2\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn2 = model(d)\n\n # update labels\n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n labels = datapkl[label]\n if False:\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'cond_labels_encoding.csv'))\n if False:\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels)\n\n # add other edge feats\n F_e = utils.forman_curvature(datapkl['adj'], verbose=True, plot=False)\n edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n torch.tensor(attn2, dtype=float),\n torch.tensor(utils.range_scale(F_e)).reshape(-1,1)),dim=1)\n d = Data(x=d.x, edge_index=d.edge_index, edge_attr=edge_attr, y=labels)\n del model # extra clean\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n # n2v + curvature\n elif load_attn1 is None and load_attn2 is None and incl_curvature and preloadn2v is not None:\n node_features = datapkl['X']\n if 
isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[label]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n # add other edge feats\n F_e = utils.forman_curvature(datapkl['adj'], verbose=True, plot=False)\n n2v = utils.node2vec_dot2edge(datapkl['adj'], \n os.path.join(pdfp,'{}_n2v_{}.txt'.format(sample.split('_')[0], os.path.split(pkl_fname)[1].split('.p')[0])),\n preloaded=preloadn2v)\n edge_attr = torch.cat((torch.tensor(utils.range_scale(F_e)).reshape(-1,1), \n torch.tensor(utils.range_scale(n2v)).reshape(-1,1)),dim=1)\n d = Data(x=node_features, edge_index=edge_index, edge_attr=edge_attr, y=labels)\n del node_features,edge_index,labels,edge_attr\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n \n # attn1 + curvature\n elif load_attn1 is not None and load_attn2 is None and incl_curvature and preloadn2v is None:\n # model for DATA EXTRACTION\n ## TODO: clean this up in some other script or fx\n\n # load proper label\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn1]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname1\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn = model(d)\n\n del model\n\n\n # update labels\n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n labels = datapkl[label]\n if False:\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = 
labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'cond_labels_encoding.csv'))\n if False:\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels)\n\n # add other edge feats\n F_e = utils.forman_curvature(datapkl['adj'], verbose=True, plot=False)\n edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n torch.tensor(utils.range_scale(F_e)).reshape(-1,1)),dim=1)\n d = Data(x=d.x, edge_index=d.edge_index, edge_attr=edge_attr, y=labels)\n\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n \n # attn1 + n2v\n elif load_attn1 is not None and load_attn2 is None and not incl_curvature and preloadn2v is not None:\n # model for DATA EXTRACTION\n ## TODO: clean this up in some other script or fx\n\n # load proper label\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn1]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname1\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn = model(d)\n\n del model\n\n\n # update labels\n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n labels = datapkl[label]\n if False:\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'cond_labels_encoding.csv'))\n if False:\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels)\n\n # add other edge feats\n n2v = utils.node2vec_dot2edge(datapkl['adj'], \n os.path.join(pdfp,'{}_n2v_{}.txt'.format(sample.split('_')[0], os.path.split(pkl_fname)[1].split('.p')[0])),\n preloaded=preloadn2v)\n edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n torch.tensor(utils.range_scale(n2v)).reshape(-1,1)),dim=1)\n d = Data(x=d.x, edge_index=d.edge_index, edge_attr=edge_attr, y=labels)\n \n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n \n # attn1 + n2v + 
curvature\n elif load_attn1 is not None and load_attn2 is None and incl_curvature and preloadn2v is not None:\n # model for DATA EXTRACTION\n ## TODO: clean this up in some other script or fx\n\n # load proper label\n node_features = datapkl['X']\n if isinstance(node_features, sparse.csr_matrix):\n node_features = torch.from_numpy(node_features.todense()).float()\n else:\n node_features = torch.from_numpy(node_features).float()\n labels = datapkl[load_attn1]\n if False:\n # assume label_encoding is done in pre-processing steps\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'ctype_labels_encoding.csv'))\n if False:\n # labels as pd.Series\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels) # assumes labels as list\n edge_index,_ = utils.scipysparse2torchsparse(datapkl['adj'])\n\n d = Data(x=node_features, edge_index=edge_index, y=labels)\n del node_features,edge_index,labels\n\n # model to grab attn\n class GAT(torch.nn.Module):\n def __init__(self):\n super(GAT, self).__init__()\n self.gat1 = GATConv(d.num_node_features, out_channels=out_channels,\n heads=heads, concat=True, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n self.gat2 = GATConv(out_channels*heads, d.y.unique().size()[0],\n heads=heads, concat=False, negative_slope=negative_slope,\n dropout=dropout, bias=True)\n\n def forward(self, data):\n x, edge_index = data.x, data.edge_index\n x,attn1 = self.gat1(x, edge_index)\n x = F.elu(x)\n x,attn2 = self.gat2(x, edge_index)\n return F.log_softmax(x, dim=1),attn1\n\n\n # load edge_feature \n model = GAT()\n if False:\n # general fname loading?\n model_pkl = glob.glob(os.path.join(pdfp,'*{}{}_{}*.pkl'.format(sample,replicate,load_attn)))[0]\n else:\n model_pkl = modelpkl_fname1\n model.load_state_dict(torch.load(model_pkl, map_location=torch.device('cpu')))\n model.eval()\n\n logsoftmax_out, attn = model(d)\n\n del model\n\n\n # update labels\n with open(pkl_fname,'rb') as f :\n datapkl = pickle.load(f)\n f.close()\n labels = datapkl[label]\n if False:\n label_encoder = {v:i for i,v in enumerate(labels.unique())}\n labels = labels.map(label_encoder)\n pd.DataFrame(label_encoder,index=[0]).T.to_csv(os.path.join(pdfp,'cond_labels_encoding.csv'))\n if False:\n labels = torch.LongTensor(labels.to_numpy())\n else:\n labels = torch.LongTensor(labels)\n\n # add other edge feats\n F_e = utils.forman_curvature(datapkl['adj'], verbose=True, plot=False)\n n2v = utils.node2vec_dot2edge(datapkl['adj'], \n os.path.join(pdfp,'{}_n2v_{}.txt'.format(sample.split('_')[0], os.path.split(pkl_fname)[1].split('.p')[0])),\n preloaded=preloadn2v)\n edge_attr = torch.cat((torch.tensor(attn, dtype=float),\n torch.tensor(utils.range_scale(F_e)).reshape(-1,1), \n torch.tensor(utils.range_scale(n2v)).reshape(-1,1)),dim=1)\n d = Data(x=d.x, edge_index=d.edge_index, edge_attr=edge_attr, y=labels)\n\n if verbose:\n print('\\nData shapes:')\n print(d)\n print('')\n\n else:\n print('Can only load edge feats of a specific entry set type. 
Exiting.')\n exit()\n \n return d", "def load(cls, filename, format=None, mode='rb'):\n format = infer_format(filename, format)\n if not os.path.isfile(filename):\n raise RuntimeError(\"{0!r} not found.\".format(filename))\n if format == 'pkl.gz':\n f = gzip.open(filename, 'rb')\n data = pickle.loads(f.read())\n f.close()\n elif format == 'pkl':\n with io.open(filename, 'rb') as f:\n data = pickle.loads(f.read())\n x = cls(**data)\n return x", "def pickleLoad(filename):\n #Todo: Handle exceptions from pickle\n filehandler = open(\"obj/\" + filename + \".obj\", 'rb')\n object = pickle.load(filehandler)\n return object", "def load_data(file_name):\n with open(file_name, 'rb') as f:\n data = pickle.load(f)\n return data", "def pickle_load(file_name: str) -> Any:\n with open(file_name, 'rb') as file:\n return pickle.load(file)", "def load_model_from_file(self, filename):\n try:\n with open(filename, \"rb\") as infile:\n self.dict_lock.acquire()\n self.levels_dict = pickle.load(infile)\n self.dict_lock.release()\n\n self._reset_view()\n\n except Exception as e: # Failed to load the model\n print(\"Failed to load the model from {filename}\".format(filename=filename))\n print(e)", "def load_model(self, filename):\n\n with open(filename, 'rb') as file:\n model_dict = pickle.load(file)\n\n self.model = model_dict['model']\n self.vectorizer = model_dict['vec']\n self.vectorized_data = model_dict['vec_data']\n self.df_topic_keywords = model_dict['df']" ]
[ "0.7040217", "0.7036856", "0.6931901", "0.68135583", "0.6794832", "0.6742458", "0.67017835", "0.6617142", "0.655173", "0.65279627", "0.6512124", "0.630458", "0.6266877", "0.6249877", "0.6215119", "0.6211559", "0.6184485", "0.61801195", "0.61761963", "0.61551476", "0.61521256", "0.61398476", "0.6122566", "0.61137515", "0.6083972", "0.6046902", "0.5959833", "0.5954626", "0.5954159", "0.59344107", "0.59186727", "0.5913642", "0.5904876", "0.5904391", "0.5900117", "0.58985674", "0.5894265", "0.5893705", "0.58816016", "0.5871747", "0.5863769", "0.585259", "0.58479065", "0.58477545", "0.5840118", "0.58379954", "0.58335376", "0.58295333", "0.5821423", "0.58173823", "0.581716", "0.5800276", "0.57990396", "0.5793373", "0.5789112", "0.5786848", "0.5782352", "0.5776647", "0.5768725", "0.5764799", "0.5758846", "0.57537645", "0.57493955", "0.5742063", "0.5742063", "0.57228327", "0.57224584", "0.5719726", "0.5715377", "0.57019395", "0.5700696", "0.56968594", "0.5680538", "0.5670929", "0.566818", "0.5665558", "0.5660546", "0.56500286", "0.5636379", "0.5633894", "0.5629668", "0.56276", "0.5627352", "0.56269515", "0.56255215", "0.562462", "0.5621293", "0.56176573", "0.5614921", "0.5614163", "0.561192", "0.5609827", "0.5604466", "0.5598585", "0.55877775", "0.55854404", "0.55795455", "0.55779994", "0.55753493", "0.55745244" ]
0.67094475
6
add two pandas.DataFrames together on the 'hip' and 'tycho2_id' columns.
def merge_df(merge_on_df, merge_with_df, merge_column=None): if merge_column is None: merge_column = ['hip', 'tycho2_id'] merge_on_df = merge_on_df.merge(merge_with_df, on=merge_column) return merge_on_df
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __add__(self, other):\n\n if not isinstance(other, Photons):\n raise ValueError('Can only add a Photons object to another Photons object.')\n\n # don't want to modify what is being added\n other = other.copy()\n\n # make column units consistent with self\n other.match_units(self)\n\n # add and /or update observation columns as necessary\n self.add_observations_column()\n other.add_observations_column()\n n_obs_self = len(self.obs_metadata)\n other['n'] += n_obs_self\n\n # re-reference times to the datum of self\n other.set_time_datum(self.time_datum)\n\n # stack the data tables\n photons = _tbl.vstack([self.photons, other.photons])\n\n # leave it to the user to deal with sorting and grouping and dealing with overlap as they see fit :)\n obs_metadata = self.obs_metadata + other.obs_metadata\n obs_times = list(self.obs_times) + list(other.obs_times)\n obs_bandpasses = list(self.obs_bandpasses) + list(other.obs_bandpasses)\n\n return Photons(photons=photons, obs_metadata=obs_metadata, time_datum=self.time_datum, obs_times=obs_times,\n obs_bandpasses=obs_bandpasses)", "def create_tycho_id(tycho2df):\n tycho2df['tycho2_id'] = tycho2df.TYC1.astype(str).str.cat(tycho2df.TYC2.astype(str), sep='-')\\\n .str.cat(tycho2df.TYC3.astype(str), sep='-')\n tycho2df = tycho2df.rename(columns={'HIP': 'hip'})\n return tycho2df", "def add(self, other):\n if not isinstance(other, self.__class__):\n raise ValueError(\n f\"Argument (type {type(other)}) is not a {self.__class__} instance\"\n )\n if len(other.data):\n self.data = pd.concat([self.data, other.data], ignore_index=True)\n self.sort()", "def combine_stats(self, self2):\n if self.covs_ds[\"variable\"] != self2.covs_ds[\"variable\"]:\n raise ValueError(\"Variable names in the two datasets are not the same\")\n\n self.covs_ds[\"num_times\"] += self2.covs_ds[\"num_times\"]\n self.covs_ds[\"sum\"] += self2.covs_ds[\"sum\"]\n self.covs_ds[\"sumsq\"] += self2.covs_ds[\"sumsq\"]\n if 'dstn' in self.covs_ds.dims:\n if self.covs_ds.dims['dstn'] != self2.covs_ds.dims['dstn']:\n raise ValueError(\"Number of distances in the two datasets are not the same\")\n self.covs_ds[self.nam_sumsq_var] += self2.covs_ds[self.nam_sumsq_var]", "def __add__(self, other, inplace=False, **kwargs):\n output = super(HERAData, self).__add__(other, inplace=inplace, **kwargs)\n if inplace:\n output = self\n output._determine_blt_slicing()\n output._determine_pol_indexing()\n if not inplace:\n return output", "def __append_columns(self, new_dataframe):\n self.dataframe = pd.merge(self.dataframe, new_dataframe)", "def __add__(self, other):\n if not isinstance(other, IntColumn):\n raise TypeError(\"Unsupported operand type(s) for +: \"\n \"'IntColumn' and '{}'\".format(other.__class__.__name__))\n\n merged_profile = IntColumn(None)\n BaseColumnPrimitiveTypeProfiler._add_helper(merged_profile, self, other)\n NumericStatsMixin._add_helper(merged_profile, self, other)\n self._merge_calculations(merged_profile.__calculations,\n self.__calculations,\n other.__calculations)\n return merged_profile", "def __iadd__(self, other):\n if not isinstance(other, type(self)):\n raise TypeError(\"Only DFs of the same type can be combined.\")\n self.dfs.extend(other.dfs)\n self.counts.extend(other.counts)\n self._unique = False\n self._original += other._original\n if self.label is None:\n if other.label is not None:\n self.label = other.label\n else:\n if other.label is not None:\n self.label += \"+\" + other.label\n self.tags.update(other.tags)\n self._average = None\n return self", "def 
__add__(self, other):\n sum_ct = ContingencyTable(*(self.table + other.table).tolist())\n return sum_ct", "def merge_arrival_and_completion_time(tests_dataframe):\r\n arrival_time_df = tests_dataframe[['time_test_arrives_lab', 'server_size']]\r\n completion_time_df = tests_dataframe[['completion_time', 'server_size']]\r\n arrival_time_df['add'] = 1\r\n completion_time_df['add'] = -1\r\n arrival_time_df = arrival_time_df.rename(columns={\"time_test_arrives_lab\":\"time\"})\r\n completion_time_df = completion_time_df.rename(columns={\"completion_time\":\"time\"})\r\n union = pd.concat([arrival_time_df, completion_time_df])\r\n union = union.sort_values(by=\"time\")\r\n prev_server_size = 0\r\n for index, row in union.iterrows():\r\n if index == 0:\r\n current_server_size= row['server_size'] + row['add']\r\n prev_server_size = current_server_size\r\n #union['server_size'] = union['server_size'] + union['add']\r\n else:\r\n current_server_size = prev_server_size + row['add'] \r\n prev_server_size = current_server_size\r\n union.at[index,'server_size'] = current_server_size\r\n #union.to_csv('union.csv')\r\n return union", "def concat(self: TAvalancheDataset, other: TAvalancheDataset) -> TAvalancheDataset:\n return self.__class__([self, other])", "def __add__(self, other):\n merged_profile = super().__add__(other)\n\n # struct specific property merging\n merged_profile.row_has_null_count = \\\n self.row_has_null_count + other.row_has_null_count\n merged_profile.row_is_null_count = \\\n self.row_is_null_count + other.row_is_null_count\n merged_profile.hashed_row_dict.update(self.hashed_row_dict)\n merged_profile.hashed_row_dict.update(other.hashed_row_dict)\n\n self_to_other_idx = self._get_and_validate_schema_mapping(self._col_name_to_idx,\n other._col_name_to_idx)\n\n # merge profiles\n for idx in range(len(self._profile)):\n other_idx = self_to_other_idx[idx]\n merged_profile._profile.append(self._profile[idx] +\n other._profile[other_idx])\n\n # schemas are asserted to be identical\n merged_profile._col_name_to_idx = copy.deepcopy(self._col_name_to_idx)\n\n # merge correlation\n if (self.options.correlation.is_enabled\n and other.options.correlation.is_enabled):\n merged_profile.correlation_matrix = self._merge_correlation(other)\n\n # recompute chi2 if needed\n if self.options.chi2_homogeneity.is_enabled and \\\n other.options.chi2_homogeneity.is_enabled:\n\n chi2_mat1 = self.chi2_matrix\n chi2_mat2 = other.chi2_matrix\n n1 = self.total_samples - self.row_is_null_count\n n2 = other.total_samples - other.row_is_null_count\n if n1 == 0:\n merged_profile.chi2_matrix = chi2_mat2\n elif n2 == 0:\n merged_profile.chi2_matrix = chi2_mat1\n elif chi2_mat1 is None or chi2_mat2 is None:\n merged_profile.chi2_matrix = None\n else:\n merged_profile.chi2_matrix = merged_profile._update_chi2()\n\n return merged_profile", "def mergeAggregatedCsvData(self, contexts, obj, aggData1, aggData2):\n return aggData1 + aggData2", "def set_data(self):\n # take care of samples\n patients = self.samples.iloc[:,1].tolist()\n samples = self.samples.iloc[:,0].tolist()\n self.samples = pd.DataFrame(patients,index = samples,columns = ['patient']) # indexed by sample\n #\n # take care of expression data\n cols = self.expression.SYMBOL.tolist() # set new column names to transposed expression_data \n \n new_exp = self.expression.T.ix[1:,:] # transpose\n new_exp.columns = cols\n self.expression = new_exp # add columns\n self.data = pd.merge(self.expression,self.samples,left_index = True,right_index=True) # merged data sets\n 
#pd.merge(df1,df2,how = 'left',left_index=True,right_index=True) # do a left join", "def combine_for_correlation(df1=get_us_ridership(), df2=get_sales_data()):\n df1.index.astype(int)\n df2.index.astype(int)\n temp = pd.concat([df1, df2], axis=1)\n return temp.dropna()", "def merge(df):\n return (df['utterance_t-3'] + df['utterance_t-2'] + df['utterance_t-1'] \\\n + df['utterance_t'])", "def create_Xy_df(X_df, y_df, on_cols):\n return pd.merge(X_df, y_df, how='inner', on=on_cols)", "def add(self, other):\n\n def merge_dicts(d1, d2):\n \"\"\"\n Merge two dictionaries\n\n param d1: dictionary changed in place to have combined values\n type d1: dictionary(key -> set)\n param d2: dictioanry to be merged\n type d2: dictionary(key -> set)\n \"\"\"\n for key,value in d2.items():\n if key not in d1:\n d1[key] = value\n else:\n d1[key] |= value\n \n self.num_documents += other.num_documents\n self.num_expressions += other.num_expressions\n self.global_expressions += other.global_expressions\n self.expressions_with_e += other.expressions_with_e\n self.num_keywords += other.num_keywords\n merge_dicts(self.missing_tags, other.missing_tags)\n merge_dicts(self.problem_files, other.problem_files)", "def data_unification(self, data1, data2):\r\n data = data1 + data2\r\n return data", "def add_features(self, other_features, on=\"time_exchange\"):\n self.data = self.data.join(other_features, on=on).dropna()", "def union_all(x: DataFrame, y: DataFrame) -> DataFrame:\n _check_xy(x, y)\n return bind_rows(x, y, __calling_env=CallingEnvs.REGULAR)", "def merge_id_into_df(df, id_df):\n df = pd.merge(df, id_df, on=['track_id', 'song_id'])\n return df", "def merge_dataframes(df_metrics, df_tweets):\r\n df_tweets = df_tweets.rename(columns={'id': 'tweet_ID'})\r\n df_tweets[['tweet_ID']] = df_tweets[['tweet_ID']].astype('int64')\r\n df_metrics[['tweet_ID']] = df_metrics[['tweet_ID']].astype(\r\n \"float64\").astype('int64')\r\n ans = df_tweets.join(\r\n df_metrics.set_index('tweet_ID'), on='tweet_ID', how='inner').dropna()\r\n return ans", "def __iadd__(self, other):\n self.MergeWith(other)\n return self", "def _add_column(df_main, serie, name):\n df = serie.to_frame(name=name)\n df_main_new = df_main.merge(df, left_on='customerId2', right_index='customerId2', how='left')\n return df_main_new", "def append_row(row: pd.DataFrame, df: pd.DataFrame, to_top=True):\n # return pd.concat([row,df], keys=list(get_player_dict().keys())) # persist player_dict so don't have to call func each time\n return df.append(row)", "def _copy_experimental_conditions_to_second_df(self, df1, df1_cols, df2, df2_cols):\n _cols_ = np.array([df1_cols, df2_cols])\n has_cols = _cols_ != set([])\n exp_cols = _cols_[has_cols]\n if len(exp_cols) == 1: # only one DataFrame has additional columns\n _dfs_ = [df1, df2]\n exp_cols = list(exp_cols[0])\n df_with_cols, df_without_cols = _dfs_[list(has_cols).index(True)], _dfs_[list(has_cols).index(False)]\n exp_cols_only_df = df_with_cols[exp_cols].drop_duplicates()\n num_unique_exp_rows = len(exp_cols_only_df)\n len_df_without_cols = len(df_without_cols)\n\n try:\n expanded_df_without_cols = pd.concat([df_without_cols] * num_unique_exp_rows, ignore_index=True)\n expanded_df_without_cols[exp_cols] = pd.DataFrame(np.repeat(\n exp_cols_only_df.values, len_df_without_cols, axis=0),\n columns=exp_cols)\n return tuple([(expanded_df_without_cols, df_with_cols)[i] for i in _cols_ != set([])]\n + [set(exp_cols), exp_cols_only_df])\n\n except ValueError: # breaks when df_with_out_columns is of len 0.\n return 
tuple([(pd.DataFrame(columns=list(set(exp_cols)|set(df_without_cols.columns))), df_with_cols)[i]\n for i in _cols_ != set([])] + [set(exp_cols), exp_cols_only_df])\n else:\n return self._combine_experimental_conditions(df1, df1_cols, df2, df2_cols)", "def __add__(self, other):\n if not isinstance(other, type(self)):\n raise TypeError(\"Only DFs of the same type can be combined.\")\n\n dfs = self.dfs + other.dfs\n counts = self.counts + other.counts\n return self.__class__(dfs, counts)", "def merge_datasets(self, other):\r\n if isinstance(other, SpatialDataFrame) and \\\r\n other.geometry_type == self.geometry_type:\r\n return pd.concat(objs=[self, other], axis=0)\r\n elif isinstance(other, DataFrame):\r\n return pd.concat(objs=[self, other], axis=0)\r\n elif isinstance(other, Series):\r\n self['merged_datasets'] = other\r\n elif isinstance(other, SpatialDataFrame) and \\\r\n other.geometry_type != self.geometry_type:\r\n raise ValueError(\"Spatial DataFrames must have the same geometry type.\")\r\n else:\r\n raise ValueError(\"Merge datasets cannot merge types %s\" % type(other))", "def concat(self, others):\n table = pd.concat([otr.data for otr in others], ignore_index=True)\n result = self.as_dataframe(table)\n result.sort()\n return result", "def merge_specimen_data(\n self, mouse_df: DataFrame, embryo_df: DataFrame\n ) -> DataFrame:\n specimen_cols = [\n \"_centreID\",\n \"_specimenID\",\n \"_colonyID\",\n \"_isBaseline\",\n \"_productionCentre\",\n \"_phenotypingCentre\",\n \"phenotyping_consortium\",\n ]\n\n # Select the relevant columns for specimen data\n mouse_specimen_df = mouse_df.select(*specimen_cols)\n if embryo_df is not None:\n embryo_specimen_df = embryo_df.select(*specimen_cols)\n\n # Merge both the mouse specimen data and the embryo specimen data in one DataFrame\n specimen_df = mouse_specimen_df.union(embryo_specimen_df)\n else:\n specimen_df = mouse_specimen_df\n return specimen_df", "def __iadd__(self, other):\n\t\t#print(\"iadd\")\t\t\n\t\t# merge other branch\t\t\n\t\tself.graph.update(other.graph)\n\t\tself.bottoms.update(other.bottoms)\n\t\tself.output_shape.update(other.output_shape)\n\t\tlayer_name = \"iadd_{}\".format(len(self.graph))\n\t\tself.graph[layer_name] = layer_name\n\t\tself.bottoms[layer_name] = [self.cur_id, other.cur_id]\n\t\tself.output_shape[layer_name] = self.cur_tensor.size()\n\t\tself.cur_id = layer_name\n\t\t# save memory\n\t\tdel other\t\t\n\t\treturn self", "def add_zeros(df: pd.DataFrame, mI: pd.MultiIndex):\n df_zeros = pd.DataFrame(0, index=mI, columns=df.columns)\n df = pd.merge(df_zeros, df, how=\"outer\", on=mI.names).fillna(0)\n\n columns_x = [sc + \"_x\" for sc in df_zeros.columns]\n df1 = df[columns_x]\n df1.columns = df_zeros.columns\n\n columns_y = [sc + \"_y\" for sc in df_zeros.columns]\n df2 = df[columns_y]\n df2.columns = df_zeros.columns\n\n return df1.add(df2)", "def concat(column_based_table_1: dict[str, list[str]], column_based_table_2: dict[str, list[str]]) -> dict[str, list[str]]:\n combined_data_table: dict[str, list[str]] = {}\n for column in column_based_table_1:\n combined_data_table[column] = column_based_table_1[column]\n keys_list = list(combined_data_table.keys())\n for column in column_based_table_2:\n if column in keys_list:\n column_data = combined_data_table[column]\n column_data_2 = column_based_table_2[column]\n # append to list\n for item in column_data_2:\n column_data.append(item)\n combined_data_table[column] = column_data\n else:\n combined_data_table[column] = column_based_table_2[column]\n return 
combined_data_table", "def add_data(self, df):\n # TODO: improve merging code\n self.data = self.data.append(df, ignore_index=False)\n self.data = self.data[~self.data.index.duplicated(keep='first')]", "def add(q_1: Q, q_2: Q) -> Q:\n\n q_1.check_representations(q_2)\n\n add_q_type = f\"{q_1.q_type}+{q_2.q_type}\"\n\n t_1, x_1, y_1, z_1 = q_1.t, q_1.x, q_1.y, q_1.z\n t_2, x_2, y_2, z_2 = q_2.t, q_2.x, q_2.y, q_2.z\n\n add_q = Q(q_type=add_q_type, representation=q_1.representation)\n add_q.t = t_1 + t_2\n add_q.x = x_1 + x_2\n add_q.y = y_1 + y_2\n add_q.z = z_1 + z_2\n\n return add_q", "def merge_df(self, first_df, second_df):\n self.file = pd.merge(second_df, first_df, on=[(\"NAME\", \"TYPE\")])", "def __add__(self, other):\n\n if isinstance(other, type(self)):\n # always create new fields, since otherwise c = a - b changes a as well!\n p = fields(self)\n p.elec[:] = self.elec + other.elec\n p.magn[:] = self.magn + other.magn\n return p\n else:\n raise DataError(\"Type error: cannot add %s to %s\" % (type(other), type(self)))", "def concat(self, other):\n self.add_rules(other.cliques)\n self.prop_names.update(other.prop_names)", "def __add__(self, other: Any) -> ColumnOperators:\n return self.operate(add, other)", "def add(self,x,y):\n # assert that independent variable is as long as each of the\n # dependent variables\n for ch in self.chs:\n assert len(x) == len(y[ch-1])\n apply(Storage.add, (self,[x,y]))", "def merge(self, other):\n from .dataset import Dataset\n\n if other is None:\n return self.to_dataset()\n else:\n other_vars = getattr(other, 'variables', other)\n coords = merge_coords_without_align([self.variables, other_vars])\n return Dataset._from_vars_and_coord_names(coords, set(coords))", "def join_columns(self, other: \"MultiRegionTimeseriesDataset\") -> \"MultiRegionTimeseriesDataset\":\n if not other.latest_data.empty:\n raise NotImplementedError(\"No support for joining other with latest_data\")\n other_df = other.data_with_fips.set_index([CommonFields.LOCATION_ID, CommonFields.DATE])\n self_df = self.data_with_fips.set_index([CommonFields.LOCATION_ID, CommonFields.DATE])\n other_geo_columns = set(other_df.columns) & set(GEO_DATA_COLUMNS)\n other_ts_columns = (\n set(other_df.columns) - set(GEO_DATA_COLUMNS) - set(TimeseriesDataset.INDEX_FIELDS)\n )\n common_ts_columns = other_ts_columns & set(self.data_with_fips.columns)\n if common_ts_columns:\n # columns to be joined need to be disjoint\n raise ValueError(f\"Columns are in both dataset: {common_ts_columns}\")\n common_geo_columns = list(set(self.data_with_fips.columns) & other_geo_columns)\n # TODO(tom): fix geo columns check, no later than when self.data is changed to contain only\n # timeseries\n # self_common_geo_columns = self_df.loc[:, common_geo_columns].fillna(\"\")\n # other_common_geo_columns = other_df.loc[:, common_geo_columns].fillna(\"\")\n # try:\n # if (self_common_geo_columns != other_common_geo_columns).any(axis=None):\n # unequal_rows = (self_common_geo_columns != other_common_geo_columns).any(axis=1)\n # _log.info(\n # \"Geo data unexpectedly varies\",\n # self_rows=self_df.loc[unequal_rows, common_geo_columns],\n # other_rows=other_df.loc[unequal_rows, common_geo_columns],\n # )\n # raise ValueError(\"Geo data unexpectedly varies\")\n # except Exception:\n # _log.exception(f\"Comparing df {self_common_geo_columns} to {other_common_geo_columns}\")\n # raise\n combined_df = pd.concat([self_df, other_df[list(other_ts_columns)]], axis=1)\n return MultiRegionTimeseriesDataset.from_timeseries_df(\n 
combined_df.reset_index()\n ).append_latest_df(self.latest_data_with_fips.reset_index())", "def merge_table(t1, t2):\r\n input1 = pd.merge(t1, t2, on=\"zip_code\", how=\"inner\")\r\n covid_zip = gpd.GeoDataFrame(input1)\r\n\r\n #change column name\r\n covid_zip.columns = [\"zip_code\", \"covid_cases\", \"time\", \"geometry\"]\r\n return covid_zip", "def __add__(self, other):\n if isinstance(other, NeuralQueryExpression):\n self._check_type_compatibility(self.type_name, other.type_name, 'add')\n provenance = NQExprProvenance(\n operation='add', inner=self.provenance, other=other.provenance)\n return self.context.as_nql(self.tf + other.tf, self.type_name, provenance)\n else:\n # hopefully a constant\n provenance = NQExprProvenance(\n operation='add',\n inner=self.provenance,\n args=(None, other),\n other=NQExprProvenance(operation='constant'))\n return self.context.as_nql(self.tf + other, self.type_name, provenance)", "def merge_both_tables():\n old = Table.read('data/data_table_cartesian_including_tims_stars_with_bg_ols_and_component_overlaps.fits')\n wanted = Table.read('data/scocen_candidates_300k_only_spatial_cut.fits')\n additional = Table.read('data/scocen_candidates_300k_only_spatial_cut_200k_to_determine_bg_ols.fits')\n\n d_old = dict(zip(old['source_id'], old['background_log_overlap']))\n d_add = dict(zip(additional['source_id'], additional['background_log_overlap']))\n d_old.update(d_add)\n dct = d_old\n\n ln_bg_ols = [dct[source_id] for source_id in wanted['source_id']]\n print\n len(ln_bg_ols), len(wanted)\n\n wanted['background_log_overlap'] = ln_bg_ols\n print\n wanted\n\n wanted.write('data/scocen_candidates_300k_only_spatial_cut.fits', overwrite=True, format='fits')", "def merge(self, other):\n if other.n_points != self.n_points:\n raise ValueError(\n 'Deduplicator size mismatch: '\n f'{self.n_points} != {other.n_points}'\n )\n self.data_reduced.extend(other.data_reduced)\n self.data_kd.extend(other.data_kd)", "def combine(self, other) -> None:\n assert self.id_ == other.id_\n assert self.type_ == other.type_\n self.count += other.count", "def __add__(self, other):\n if not isinstance(other, SMTimeSeries):\n raise TypeError(\"NotImplemented Error\")\n ts = self._fsm.get(self._id) + other._fsm.get(other._id)\n return SMTimeSeries(ts._time, ts._value, self._fsm)", "def _merge_row(self, row1, row2):\n\n duprow = list(row1)\n duprow.extend(list(row2))\n row1.clear()\n overlap_map = {}\n\n for body, overlap in duprow:\n if body not in overlap_map:\n overlap_map[body] = 0\n overlap_map[body] += overlap\n\n for body, overlap in overlap_map.items():\n row1.add((body, overlap))", "def combine_position_dataframes(dataframe1, dataframe2):\n\n # check that the dataframes have the same number of columns\n print(\"Dimensions of dataframe1: \", dataframe1.shape)\n print(\"Dimensions of dataframe2: \", dataframe2.shape)\n\n frames = [dataframe1, dataframe2]\n\n combined_dataframe = pandas.concat(frames)\n\n dataframe1.drop(dataframe1.index, inplace=True) # Delete data from dataframe to save memory\n dataframe2.drop(dataframe2.index, inplace=True) # Delete data from dataframe to save memory\n\n # confirm that the dataframes no longer exist (saving memory)\n print(\"Dimensions of dataframe1: \", dataframe1.shape)\n print(\"Dimensions of dataframe2: \", dataframe2.shape)\n\n # check that all rows of both dataframes have been combined into the new dataframe. 
Sort by date and time.\n print(\"Dimensions of combined dataframe: \", combined_dataframe.shape)\n combined_dataframe_sorted = combined_dataframe.sort_values('date_time')\n\n print(\"Sample of combined dataframe: \", combined_dataframe_sorted.sample(10))\n\n return combined_dataframe_sorted", "def add_id(Z, df_with_id):\n assert Z.shape[0] == df_with_id.shape[0]\n assert 'individual_id' in df_with_id.columns\n\n results_df = pd.DataFrame(Z)\n results_df.index = list(df_with_id.index) # make sure the two dataframes have the same index. \n results_df.loc[:, 'individual_id'] = df_with_id.loc[:, 'individual_id'].values # similarly with individual id. \n results_df = move_last_col_to_first(results_df)\n return results_df", "def add_data(self, s1: np.ndarray, s2: np.ndarray):\n if s1.shape[0] != s2.shape[0]:\n raise ValueError('Both data must have the same # frames, not %d vs. %d' %\n (s1.shape[0], s2.shape[0]))\n self._store1.add_data(s1)\n self._store2.add_data(s2)", "def _addMats(X1,X2):\n _checkSize(X1,X2)\n return [ _addVectors(X1[i],X2[i]) for i in range(len(X1))]", "def __iadd__(self, other):\n\n other_data = self._setup_numeric(other)\n self.data[:] = self.data + other_data\n\n return self", "def merge_weather(weather):\n\n weather1 = weather[weather[\"Station\"] == 1]\n weather2 = weather[weather[\"Station\"] == 2]\n\n rows, rows1, rows2 = (weather.shape[0],\n weather1.shape[0],\n weather2.shape[0])\n\n weather = pd.merge(weather1, weather2, on=\"Date\")\n weather.drop([\"Station_x\", \"Station_y\"], axis=1, inplace=True)\n\n newrows = weather.shape[0]\n # sanity check the rows\n assert(rows1 + rows2 == rows)\n assert(rows1 == newrows)\n\n return weather", "def iadd(self, other):\n self._store += other._store\n self._gross += other.gross", "def __iadd__(self, other):\n\n if not isinstance(other, Ulog):\n raise UlogError('Ulog.__iadd__: can' +\n ' only add another Ulog to an Ulog.')\n\n nccd = set(self.utc.keys())\n if set(other.utc.keys()) != nccd:\n raise UlogError('Ulog.__iadd__: CCD numbers of inputs do not match')\n\n for nc in nccd:\n if len(self.x[nc]) != len(other.x[nc]):\n raise UlogError('Ulog.__iadd__: incompatible' +\n ' aperture numbers for CCD ' + nc)\n\n # OK, tests passed, add new arrays onto the end of the old ones\n for nc in nccd:\n\n self.utc[nc] = np.concatenate((self.utc[nc], other.utc[nc]))\n self.tflag[nc] = np.concatenate((self.tflag[nc], other.tflag[nc]))\n self.expose[nc] = np.concatenate((self.expose[nc],other.expose[nc]))\n self.fwhm[nc] = np.concatenate((self.fwhm[nc],other.fwhm[nc]))\n self.beta[nc] = np.concatenate((self.beta[nc],other.beta[nc]))\n\n for nap in range(len(self.x[nc])):\n self.x[nc][nap] = np.concatenate(\n (self.x[nc][nap],other.x[nc][nap]))\n self.y[nc][nap] = np.concatenate(\n (self.y[nc][nap],other.y[nc][nap]))\n self.xm[nc][nap] = np.concatenate(\n (self.xm[nc][nap],other.xm[nc][nap]))\n self.ym[nc][nap] = np.concatenate(\n (self.ym[nc][nap],other.ym[nc][nap]))\n self.exm[nc][nap] = np.concatenate(\n (self.exm[nc][nap],other.exm[nc][nap]))\n self.eym[nc][nap] = np.concatenate(\n (self.eym[nc][nap],other.eym[nc][nap]))\n self.counts[nc][nap] = np.concatenate(\n (self.counts[nc][nap],other.counts[nc][nap]))\n self.sigma[nc][nap] = np.concatenate(\n (self.sigma[nc][nap],other.sigma[nc][nap]))\n self.sky[nc][nap] = np.concatenate(\n (self.sky[nc][nap],other.sky[nc][nap]))\n self.nsky[nc][nap] = np.concatenate(\n (self.nsky[nc][nap],other.nsky[nc][nap]))\n self.nrej[nc][nap] = np.concatenate(\n (self.nrej[nc][nap],other.nrej[nc][nap]))\n 
self.worst[nc][nap] = np.concatenate(\n (self.worst[nc][nap],other.worst[nc][nap]))\n self.eflag[nc][nap] = np.concatenate(\n (self.eflag[nc][nap],other.eflag[nc][nap]))\n return self", "def __add__(self, other):\n tmp = VectorHeat1D(self.size)\n tmp.set_values(self.get_values() + other.get_values())\n return tmp", "def ohe_inverse(df_shap_values):\n\n # Auxiliary list to recreate original shap_values dataframe\n list_shap_original = []\n\n # Regular expression to pick attributes names.\n # Since in our case attributes names are the genomic positions (i.e. an integer number), we use the regex below\n import re\n pattern = \"^\\d+\"\n\n # Auxiliary dictionary to create one pd.DataFrame for each sample, summing the shap values for each attribute.\n # Later, these dataframes will be appended together, resulting in the final df.\n dic={}\n\n # for each sample.\n for i, sample in df_shap_values.iterrows():\n # initialize an empty dictionary, that will contain \"attribute : summed shap values\" for\n # all attributes in this sample.\n dic = {}\n # The code below sums the importances for each category in each attribute in this sample.\n for pos in sample.index:\n attr = re.match(pattern, pos).group()\n if attr not in dic.keys():\n dic[attr] = sample[pos]\n else:\n dic[attr] += sample[pos]\n # Create a df containing only the current sample\n df_sample = pd.DataFrame(dic, index=[i])\n # Append it to a list that will become the full dataframe later\n list_shap_original.append(df_sample)\n\n # Create a DataFrame containing the shap values for the \"original\" attributes.\n shap_original = pd.concat(list_shap_original, axis=0)\n return shap_original", "def concatenate(self, other):\n assert self.same_col_labels_as(other)\n newlabels = list(self.row_labels) + list(other.row_labels)\n return DenseMatrix(np.concatenate([self, other]), newlabels, self.col_labels)", "def join(self, other, on):\n\t\t# check for correct join\n\t\tif not (on in self.headers or on in other.headers):\n\t\t\tprint \"Error: header '{0}' not found in both collections\".format(on)\n\t\t\treturn None\n\n\t\t# create new dataset\n\t\tjoined = Dataset()\n\t\t\n\t\t# fill new dataset with combined data\n\t\tmappedHeaders = joinHeaders(self, other, joined, on)\n\t\tmergeRows(self, other, joined, on, mappedHeaders)\n\t\tjoined.ensureFilled()\n\n\t\t# return newly created dataset\n\t\treturn joined", "def __add__(self, other):\n if (self.m != other.m) or (self.n != other.n):\n raise TypeError(\"Dimensions of matrices does not match\")\n tmp = [[0 for _ in xrange(self.m)] for _ in xrange(self.n)]\n for i in xrange(self.n):\n for j in xrange(self.m):\n tmp[i][j] = self.values[i][j] + other.values[i][j]\n res = []\n for i in tmp:\n res += i\n return simplematrix(self.m, self.n, res)", "def combine_frames (data0, data1, datatype) :\n data0.n_processed += 1\n if (data1 is None) :\n return data0\n if (isinstance(data1, null_data)) :\n if (data1.file_error) :\n data0.n_file_error += 1\n elif (data1.low_signal) :\n data0.n_low_signal += 1\n elif (data1.wrong_bravais) :\n data0.n_wrong_bravais += 1\n elif (data1.wrong_cell) :\n data0.n_wrong_cell += 1\n elif (getattr(data1,\"reason\",None) is not None):\n if str(data1.reason)!=\"\":\n data0.failure_modes[str(data1.reason)] = data0.failure_modes.get(str(data1.reason),0) + 1\n elif repr(type(data1.reason))!=\"\":\n data0.failure_modes[repr(type(data1.reason))] = data0.failure_modes.get(repr(type(data1.reason)),0) + 1\n else:\n data0.failure_modes[\"other reasons\"] = data0.failure_modes.get(\"other 
reasons\",0) + 1\n return data0\n if (data1.accept) :\n data0.n_accepted += 1\n data0.completeness += data1.completeness\n data0.completeness_predictions += data1.completeness_predictions\n data0.summed_N += data1.summed_N\n data0.summed_weight += data1.summed_weight\n data0.summed_wt_I += data1.summed_wt_I\n data0.ISIGI.extend(data1.ISIGI)\n else :\n data0.n_low_corr += 1\n data0.uc_values.add_cell(data1.indexed_cell,\n rejected=(not data1.accept))\n if not data0.params.short_circuit:\n data0.observations.append(data1.n_obs)\n if (data1.n_obs > 0) :\n frac_rejected = data1.n_rejected / data1.n_obs\n data0.rejected_fractions.append(frac_rejected)\n data0.d_min_values.append(data1.d_min)\n data0.corr_values.append(data1.corr)\n data0.wavelength.append(data1.wavelength)\n data0.finished_db_mgr.sequencer += data1.finished_db_mgr.sequencer\n return data0", "def __add__(self, other):\n if isinstance(other, Trit):\n value = (other,)\n else:\n value = tuple(other)\n return Trits(self.trits + value)", "def combine_data_main(data1,data2,lookup,foutput):\n\n # Get the maximum number of ortholog probesets we'll have to append\n max_orthologs = 0\n for probe_set_id in data1.keys():\n max_orthologs = max(max_orthologs,len(lookup(probe_set_id)))\n logging.debug(\"Max_orthologs = %d\" % max_orthologs)\n \n # Write header line\n line = [data1.header()]\n for i in range(1,max_orthologs+1):\n logging.debug(\"Adding header set #%d\" % i)\n for item in data2.header().split('\\t'): line.append(\"%s_%s\" % (item,i))\n foutput.write(\"%s\\n\" % '\\t'.join(line))\n\n # Append data\n for probe_set_id in data1.keys():\n # Build line to output to file\n line = [data1.fetch(probe_set_id)]\n # Get the corresponding ortholog probe set ID(s)\n logging.debug(\"Processing probe set ID %s\" % probe_set_id)\n for ortholog_probe_set_id in lookup(probe_set_id):\n ortholog_data = data2.fetch(ortholog_probe_set_id)\n if ortholog_data is not None:\n line.append(ortholog_data)\n # Write line to file\n foutput.write(\"%s\\n\" % '\\t'.join(line))", "def merge_df_counts(pin, pin2):\n df = pd.read_csv(pin)\n df = add_mutkey(df)\n df = add_codonkey(df)\n # df_syn = df.loc[df[\"mutkey\"].str[0] == df[\"mutkey\"].str[-1]]\n\n df2 = pd.read_csv(pin2)\n df2 = add_codonkey(df2)\n # df2_syn = df.loc[df[\"mutkey\"].str[0] == df[\"mutkey\"].str[-1]]\n df_reps = df.merge(\n df2, left_on=\"codonkey\", right_on=\"codonkey\", suffixes=(\"_r1\", \"_r2\")\n )\n return df_reps", "def merge_quantities(self, first, second):\n dom = self.get_canonical(first)\n add = self.get_canonical(second)\n self._qm.merge(dom, add)\n self.import_cfs(second)", "def add_other_meta_data(self, other: _MetaData) -> None:\n\n for key in other._meta_data_dict.keys():\n self.add_data(key, other._meta_data_dict[key])", "def append_trees(self, other):\n assert self.is_fitted_\n assert other.is_fitted_\n assert isinstance(other, IsolationForest)\n\n if (self._is_extended_) != (other._is_extended_):\n raise ValueError(\"Cannot mix extended and regular isolation forest models (ndim=1).\")\n\n if self.cols_categ_.shape[0]:\n warnings.warn(\"Merging models with categorical features might give wrong results.\")\n\n self._cpp_obj.append_trees_from_other(other._cpp_obj, self._is_extended_)\n self._ntrees += other._ntrees\n\n return self", "def __add__(self,other):\n if self.h != other.h or self.w != other.w:\n raise(ValueError, \"Matrices can only be added if the dimensions are the same\") \n # \n # TODO - your code here\n #\n result = [];\n for i in range(self.h):\n 
result.append([a+b for a,b in zip(self.g[i],other.g[i])]);\n \n return Matrix(result);", "def join_daily_cweeds_wy2_and_wy3(wy2_df, wy3_df):\n assert wy2_df['CWEEDS Format'] == 'WY2'\n assert wy3_df['CWEEDS Format'] == 'WY3'\n assert wy2_df['Time Format'] == wy3_df['Time Format']\n\n time_wy23 = np.hstack([wy2_df['Time'], wy3_df['Time']])\n time_wy23 = np.unique(time_wy23)\n time_wy23 = np.sort(time_wy23)\n\n wy23_df = {}\n wy23_df['Time Format'] = wy3_df['Time Format']\n wy23_df['CWEEDS Format'] = 'WY2+WY3'\n\n # Copy the header info from WY3 dataset :\n\n for key in ['HORZ version', 'Location', 'Province', 'Country',\n 'Station ID', 'Latitude', 'Longitude', 'Time Zone',\n 'Elevation']:\n wy23_df[key] = wy3_df[key]\n\n # Merge the two datasets :\n\n wy23_df['Time'] = time_wy23\n wy23_df['Years'] = np.empty(len(time_wy23)).astype(int)\n wy23_df['Months'] = np.empty(len(time_wy23)).astype(int)\n wy23_df['Days'] = np.empty(len(time_wy23)).astype(int)\n wy23_df['Hours'] = np.empty(len(time_wy23)).astype(int)\n wy23_df['Irradiance'] = np.empty(len(time_wy23)).astype('float64')\n\n for dataset in [wy2_df, wy3_df]:\n indexes = np.digitize(dataset['Time'], time_wy23, right=True)\n for key in ['Years', 'Months', 'Days', 'Hours', 'Irradiance']:\n wy23_df[key][indexes] = dataset[key]\n\n return wy23_df", "def merge_other(self, other, itp_method=\"linear\", dropna=True):\n if not isinstance(other, Series):\n raise ValueError(\"Need pandas Series instance (or objects \"\n \"inheriting from it)\")\n df = DataFrame(dict(s1=self,s2=other)).interpolate(itp_method)\n if dropna:\n df = df.dropna()\n return df.s1, df.s2", "def __add__(self, other):\n new_measure = Measure()\n settings = [\"raw\", \"fil\"]\n\n for rf in settings:\n new_measure.hit1[rf] = (self.hit1[rf] + other.hit1[rf])\n new_measure.hit3[rf] = (self.hit3[rf] + other.hit3[rf])\n new_measure.hit10[rf] = (self.hit10[rf] + other.hit10[rf])\n new_measure.mrr[rf] = (self.mrr[rf] + other.mrr[rf])\n new_measure.mr[rf] = (self.mr[rf] + other.mr[rf])\n return new_measure", "def _merge(dts):\n df = pd.concat(dts)\n\n ma = df.pivot(index='isomir', columns='sample', values='counts')\n ma_mirna = ma\n ma = ma.fillna(0)\n ma_mirna['mirna'] = [m.split(\":\")[0] for m in ma.index.values]\n ma_mirna = ma_mirna.groupby(['mirna']).sum()\n ma_mirna = ma_mirna.fillna(0)\n return ma, ma_mirna", "def combine_fields(self):\n humidity_fields = ['Humidity 8am', 'Humidity noon', 'Humidity 8pm']\n self.df['Relative Humidity'] = np.mean(self.df[humidity_fields], axis=1)", "def merge(self, other):\n\n assert self.ins_addr == other.ins_addr\n assert self.type == other.type\n\n o = self.copy()\n o.targets |= other.targets\n\n return o", "def enrich_dataframe(df, name):\n if(name == 'taux_incidence'):\n df['taux_incidence'] = df['P']*100000/df['pop']\n if(name == 'taux_positivite'):\n df['taux_positivite'] = df['P']/df['T'] * 100\n if(name == 'taux_occupation'):\n df['TO'] = df['TO']*100\n if(name == 'vaccins_vaccines_couv_majeurs'):\n df['couv_complet'] = 100 * df['n_cum_complet'] / df['pop']\n if(name == 'vaccins_vaccines_couv_ado_majeurs'):\n df['couv_complet'] = 100 * df['n_cum_complet'] / df['pop']\n if(name == 'taux_classes_fermees'):\n df['taux_classes'] = 100* df['nombre_classes_fermees'] / df['nombre_total_classes']\n if(name == 'taux_structures_fermees'):\n df['taux_structures'] = 100* df['nombre_structures_fermees'] / df['nombre_total_structures']\n\n \n \n return df", "def combine_phenotypes(df_ph_full, df_ph_perimeter):\n key_cols = ['well', 'tile', 'cell']\n\n 
val_cols = [\n \"dapi_gfp_nuclear_corr\",\n \"dapi_nuclear_int\",\n \"dapi_nuclear_max\",\n \"dapi_nuclear_median\",\n \"gfp_nuclear_int\",\n \"gfp_nuclear_max\",\n \"gfp_nuclear_mean\",\n \"gfp_nuclear_median\",\n \"x\",\n \"y\",\n \"dapi_gfp_cell_corr\",\n \"gfp_cell_mean\",\n \"gfp_cell_median\",\n \"gfp_cell_int\"\n ]\n \n df_ph_perimeter = (df_ph_perimeter\n .set_index(key_cols)[val_cols]\n .rename(columns=lambda x: x + '_perimeter'))\n \n return df_ph_full.join(df_ph_perimeter, on=key_cols)", "def add_matrices(m1, m2): \n output = []\n \n for index in range(len(m1)):\n row_1 = m1[index]\n row_2 = m2[index]\n new_row = []\n for index2 in range(len(row_1)):\n sum = row_1[index2] + row_2[index2]\n new_row.append(sum)\n output.append(new_row)\n return output", "def add_times(self,df,link):\n \n if link not in self.to_concat:\n self.to_concat[link] = []\n self.to_concat[link].append(df)", "def combine_features(feature1, feature2):\n new_feature = Feature()\n new_feature.coord = feature1.coord.copy()\n for x in feature2.coord:\n new_feature.coord.append(x)\n return new_feature", "def __iadd__(self, other):\n raise NotImplementedError(\"Implement this if needed\")", "def concat_dataframe(df_msg: pd.DataFrame, df_time: pd.DataFrame) -> pd.DataFrame:\n\n concated = df_msg.join(df_time.set_index(\"name\"), on=\"name\")\n return concated", "def add_matrices(m1, m2):\n import numpy as np\n shape = np.shape(m1)\n sum_matrix_l = []\n for row, column in enumerate(m1):\n sum_matrix_l.append(m1[row,column]+m2[row,column])\n sum_matrix = np.array(sum_matrix_l)\n return sum_matrix.reshape(shape)", "def __iadd__(self, other):\n #print \"adding \", other, \" to \", self\n # need to do shallow copy, or otherwise smth like \"cm += cm\"\n # would loop forever and exhaust memory eventually\n othersets = copy.copy(other.__sets)\n for set in othersets:\n self.add(*set)#[0], set[1])\n return self", "def __add__(self, other):\n other_data = self._setup_numeric(other)\n new_line = empty_like(self)\n\n if isinstance(other, line):\n other_data = other.data\n else:\n other_data = other\n\n new_line.data[:] = self.data + other_data\n\n return new_line", "def extend(self, other, adapt_conf=True):\n # Check if category metadata match\n if (self.size() > 0) and (other.size() > 0):\n for attr in [\"is_cat_inclusive\", \"is_categorised\"]:\n a, b = getattr(self, attr), getattr(other, attr)\n if a != b:\n raise ConcatenationError(\n f\"Categorisation metadata is different for '{attr}': {a} != {b}\"\n )\n elif other.size() > 0:\n for attr in [\"is_cat_inclusive\", \"is_categorised\"]:\n setattr(self, attr, getattr(other, attr))\n if getattr(self, \"tstep_h\", None) is None:\n self.tstep_h = getattr(other, \"tstep_h\", None)\n else:\n if getattr(other, \"tstep_h\", None) is not None:\n if self.tstep_h != other.tstep_h:\n raise ConcatenationError(\n \"Extending by a TrackRun with different timestep is not allowed\"\n )\n if adapt_conf and other.conf is not None:\n if self.conf is None:\n self.conf = other.conf.copy()\n else:\n for field in self.conf._fields:\n if getattr(self.conf, field) != getattr(other.conf, field):\n setattr(self.conf, field, None)\n self.sources.extend(other.sources)\n\n new_data = pd.concat([self.data, other.data], sort=False)\n new_track_idx = new_data.index.get_level_values(0).to_series()\n new_track_idx = new_track_idx.ne(new_track_idx.shift()).cumsum() - 1\n\n mux = pd.MultiIndex.from_arrays(\n [new_track_idx, new_data.index.get_level_values(1)], names=new_data.index.names\n )\n self.data = 
new_data.set_index(mux)\n\n # Concatenate categories\n if (self.cats is not None) or (other.cats is not None):\n new_cats = pd.concat([self.cats, other.cats], sort=False).fillna(False)\n new_track_idx = new_cats.index.get_level_values(0).to_series()\n new_track_idx = new_track_idx.ne(new_track_idx.shift()).cumsum() - 1\n\n ix = pd.Index(new_track_idx, name=new_cats.index.name)\n self.cats = new_cats.set_index(ix)", "def ConcatDF(train_set, test_set):\n return pd.concat([train_set, test_set], sort=True).reset_index(drop=True)", "def __add__(self, other):\n output = Spectrum(self.wavelengths, self.intensities)\n for wavelength, intensity in other:\n if output[wavelength]:\n output[wavelength] += intensity\n else:\n output[wavelength] = intensity\n return output", "def __add__(self, other):\r\n # Make a defaultdict of defaultdicts, the latter of which returns\r\n # None when an key is not present\r\n merged_data = defaultdict(lambda: defaultdict(lambda: None))\r\n\r\n # We will keep track of all unique sample_ids and metadata headers\r\n # we have seen as we go\r\n all_sample_ids = set()\r\n all_headers = set()\r\n\r\n # add all values from self into the merged_data structure\r\n for sample_id, data in self._metadata.iteritems():\r\n all_sample_ids.add(sample_id)\r\n for header, value in data.iteritems():\r\n all_headers.add(header)\r\n merged_data[sample_id][header] = value\r\n\r\n # then add all data from other\r\n for sample_id, data in other._metadata.iteritems():\r\n all_sample_ids.add(sample_id)\r\n for header, value in data.iteritems():\r\n all_headers.add(header)\r\n # if the two mapping files have identical sample_ids and\r\n # metadata columns but have DIFFERENT values, raise a value\r\n # error\r\n if merged_data[sample_id][header] is not None and \\\r\n merged_data[sample_id][header] != value:\r\n raise ValueError(\"Different values provided for %s for \"\r\n \"sample %s in different mapping files.\"\r\n % (header, sample_id))\r\n else:\r\n merged_data[sample_id][header] = value\r\n\r\n # Now, convert what we have seen into a normal dict\r\n normal_dict = {}\r\n for sample_id in all_sample_ids:\r\n if sample_id not in normal_dict:\r\n normal_dict[sample_id] = {}\r\n\r\n for header in all_headers:\r\n normal_dict[sample_id][header] = \\\r\n merged_data[sample_id][header]\r\n\r\n # and create a MetadataMap object from it; concatenate comments\r\n return self.__class__(normal_dict, self.Comments + other.Comments)", "def merge_with(self, other):\n old2new = {}\n for other_id, other_token in iteritems(other):\n if other_token in self.token2id:\n new_id = self.token2id[other_token]\n else:\n new_id = len(self.token2id)\n self.token2id[other_token] = new_id\n self.dfs[new_id] = 0\n old2new[other_id] = new_id\n try:\n self.dfs[new_id] += other.dfs[other_id]\n except Exception:\n # `other` isn't a Dictionary (probably just a dict) => ignore dfs, keep going\n pass\n try:\n self.num_docs += other.num_docs\n self.num_nnz += other.num_nnz\n self.num_pos += other.num_pos\n except Exception:\n pass\n\n import gensim.models\n return gensim.models.VocabTransform(old2new)", "def __add__ (self, other):\n if self.dimensions == other.dimensions:\n result = []\n for row in zip(self._m, other._m):\n result.append(sum(cell) for cell in zip(*row))\n return Matrix(*result)\n else:\n raise ValueError(\"Matrices must have the same dimensions.\")", "def concatTwoHMMs(hmm1, hmm2):\n hmmOut = {}\n M1,D1 =hmm1['transmat'].shape\n M2,D2 = hmm2['transmat'].shape\n hmmOut['name']=hmm1['name']+hmm2['name']\n 
hmmOut['startprob'] = hmm2['startprob'] * hmm1['startprob'][M1-1]\n hmmOut['startprob'] = np.concatenate((hmm1['startprob'][0:M1-1], hmmOut['startprob']))\n mul = np.reshape(hmm1['transmat'][0:-1, -1], (M1-1, 1)) @ np.reshape(hmm2['startprob'], (1, M2))\n hmmOut['transmat'] = np.concatenate((hmm1['transmat'][0:-1, 0:-1], mul), axis=1)\n tmp = np.concatenate((np.zeros([M2,M1-1]), hmm2['transmat']), axis=1)\n hmmOut['transmat'] = np.concatenate((hmmOut['transmat'], tmp), axis=0)\n hmmOut['means'] = np.vstack((hmm1['means'],hmm2['means']))\n hmmOut['covars'] = np.vstack((hmm1['covars'],hmm2['covars']))\n return hmmOut", "def add(self, other):\n return self._new_rep(self.rep + other.rep)", "def __add__(self, other):\n assert isinstance(other, Solution)\n assert self.dim == other.dim\n assert self.n_obj == other.n_obj\n _ = [self.tolist()] if len(self.shape) == 1 else self.tolist()\n __ = [other.tolist()] if len(other.shape) == 1 else other.tolist()\n return Solution(_ + __, self.fitness.tolist() + other.fitness.tolist(),\n self.n_eval.tolist() + other.n_eval.tolist(), \n # self.index.tolist() + other.index.tolist(),\n var_name=self.var_name, n_obj=self.n_obj, \n verbose=self.verbose)", "def mergeLines(self,firstLineID,secondLineID):\n # 027 Get the lines\n sql=\"select word, total_count, netloc_count, path_count, params_count, query_count, fragment_count from BOW where bow_id=? or bow_id=?;\"\n args=(firstLineID,secondLineID,) \n self.DBcursor.execute(sql,args)\n result = self.DBcursor.fetchall()\n # 027 Combine the lines\n combined=[]\n # 027 Required check if both lines exist.\n if not len(result)==2 or not result[0] or not result[1]:\n self.logger.warning(\"One of input lines (%i,%i) does not exist in result: %s\"%(firstLineID,secondLineID,str(result)))\n else:\n #\n \n for item in itertools.izip(result[0],result[1]):\n # 027 Skips string and None - in both cases keeps original value.\n # 027 Column word is string. No sense to combine.\n if not isinstance(item[1],int):\n combined.append(item[0])\n # 027 If first is int and second null addition is not defined. Using the non-null one.\n elif not item[0]:\n combined.append(item[1])\n else:\n combined.append(item[0]+item[1])\n # 027 Writing changes into the db.\n sql=\"update BOW set total_count=?, netloc_count=?, path_count=?, params_count=?, query_count=?, fragment_count=? where bow_id=?;\"\n # 027 combined[0] is word - not updating.\n args=(combined[1],combined[2],combined[3],combined[4],combined[5],combined[6],firstLineID,)\n self.DBcursor.execute(sql,args)\n sql=\"delete from BOW where bow_id=?;\"\n args=(secondLineID,)\n self.DBcursor.execute(sql,args)\n return combined", "def extend_dataset(intial_df):\n all_data = []\n for i,row in intial_df.iterrows():\n all_data.extend(create_all_combination(row))\n\n extended_results = pd.DataFrame(all_data)\n return extended_results", "def tup_add(t1, t2):\n return tuple(map(operator.add, t1, t2))", "def extend(self, other):\n for x, y in other:\n self.add(x, y)" ]
[ "0.553466", "0.54892987", "0.54729784", "0.53718424", "0.53287685", "0.5248236", "0.5239626", "0.5215545", "0.5187559", "0.5185057", "0.51510555", "0.5100996", "0.50776625", "0.5064464", "0.5058133", "0.50135696", "0.50105774", "0.5002041", "0.4988571", "0.49817595", "0.49551702", "0.49526647", "0.49271837", "0.48994192", "0.4886662", "0.4881248", "0.4879453", "0.48443058", "0.4843166", "0.48325217", "0.48230946", "0.48080915", "0.48048222", "0.477847", "0.4771577", "0.47714612", "0.4770501", "0.47611496", "0.47549403", "0.47547635", "0.47536808", "0.474654", "0.47407085", "0.47245416", "0.4721077", "0.47119418", "0.47075203", "0.46985948", "0.4693041", "0.4692199", "0.4684101", "0.46814665", "0.46550184", "0.46508", "0.46483105", "0.46336576", "0.46304208", "0.4629645", "0.4626335", "0.4618759", "0.4615641", "0.46089557", "0.46081117", "0.46080405", "0.46063522", "0.4605932", "0.46054506", "0.45875755", "0.45874214", "0.458016", "0.45778653", "0.4570559", "0.45691466", "0.45333397", "0.45220035", "0.45204577", "0.45203087", "0.45196688", "0.45117405", "0.4509699", "0.45092684", "0.45083353", "0.45049924", "0.45036393", "0.45027187", "0.44956148", "0.44947964", "0.4493159", "0.44705373", "0.4469967", "0.44661266", "0.4465065", "0.44591817", "0.44579485", "0.44565403", "0.44554773", "0.44485202", "0.4444141", "0.44441086", "0.44425446" ]
0.51093644
11
plot the background stars (HR diagram). The plot is a 2d histogram, for better readability. Only bins with at least 10 stars are shown.
def plot_hr_diag(hr_df, x='B_V', y='M_V', cutoff=0.2, bvcutoff=0.05): plt.figure(figsize=(11., 10.)) print("Plotting background stars..") plt.set_cmap('gray_r') plt.hist2d(hr_df[x].tolist(), hr_df[y].tolist(), (200, 200), norm=LogNorm(), cmin=10) plt.axis([-0.2, 2.35, -3., 7.]) plt.gca().invert_yaxis() plt.xlabel(r'$BT-VT$ (mag)') plt.ylabel(r'$M_{VT}$ (mag)') # Plotting M_{VT} plt.title(r'$\sigma_\pi / \pi < %s, \sigma_{BT-VT}< %s$ mag' % (cutoff, bvcutoff)) print("..Done") return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_histogram(img):\n rgb_hist = rgb_histogram(img)\n plt.figure()\n for color, hist in rgb_hist.items():\n plt.plot(hist, color=color)\n plt.xlim([0, 256])", "def plot_hist(self):\n labels = [self.get_class_str(action, obj)\n for (action, obj, subj, rec, beg, end) in self.action_clips]\n visualize.plot_hist(labels, proportion=True)", "def plotHistogram(a):\n plt.figure(figsize=(10,5))\n plt.subplot(1,2,1)\n plt.imshow(a)\n plt.axis('off')\n histo = plt.subplot(1,2,2)\n histo.set_ylabel('Count')\n histo.set_xlabel('Pixel Intensity')\n n_bins = 30\n plt.hist(a[:,:,0].flatten(), bins= n_bins, lw = 0, color='r', alpha=0.5);\n plt.hist(a[:,:,1].flatten(), bins= n_bins, lw = 0, color='g', alpha=0.5);\n plt.hist(a[:,:,2].flatten(), bins= n_bins, lw = 0, color='b', alpha=0.5);", "def plotHistogram(a):\n plt.figure(figsize=(10,5))\n plt.subplot(1,2,1)\n plt.imshow(a)\n plt.axis('off')\n histo = plt.subplot(1,2,2)\n histo.set_ylabel('Count')\n histo.set_xlabel('Pixel Intensity')\n n_bins = 30\n plt.hist(a[:,:,0].flatten(), bins= n_bins, lw = 0, color='r', alpha=0.5);\n plt.hist(a[:,:,1].flatten(), bins= n_bins, lw = 0, color='g', alpha=0.5);\n plt.hist(a[:,:,2].flatten(), bins= n_bins, lw = 0, color='b', alpha=0.5);", "def plotHistogram(a):\n plt.figure(figsize=(10,5))\n plt.subplot(1,2,1)\n plt.imshow(a)\n plt.axis('off')\n histo = plt.subplot(1,2,2)\n histo.set_ylabel('Count')\n histo.set_xlabel('Pixel Intensity')\n n_bins = 30\n plt.hist(a[:,:,0].flatten(), bins= n_bins, lw = 0, color='r', alpha=0.5);\n plt.hist(a[:,:,1].flatten(), bins= n_bins, lw = 0, color='g', alpha=0.5);\n plt.hist(a[:,:,2].flatten(), bins= n_bins, lw = 0, color='b', alpha=0.5);", "def entries_histogram(turnstile_weather):\n\n plt.figure()\n turnstile_weather[turnstile_weather.rain == 0][\n 'ENTRIESn_hourly'].hist() # your code here to plot a historgram for hourly entries when it is raining\n turnstile_weather[turnstile_weather.rain == 1][\n 'ENTRIESn_hourly'].hist() # your code here to plot a historgram for hourly entries when it is not raining\n return plt", "def plot_hist(self):\n \n plt.figure();\n self.dist_frame.plot(kind='hist',legend=False,orientation='horizontal')", "def plot_star_classes(obj_catalog):\n\n fig = plt.figure(num=None,figsize=(8,8), dpi=100)\n ax = fig.add_subplot(1,1,1)\n\n phot_class = obj_catalog.phot_star_class\n sclass = obj_catalog.star_class\n phot_class_num = np.zeros(obj_catalog.shape[0])\n sclass_num = np.zeros(obj_catalog.shape[0])\n\n star_classes = ['WD',\\\n 'O','O8','O9','OB','B0','B1','B2','B3','B5','B6','B7','B8','B9',\\\n 'A0','A1','A2','A3','A4','A5','A6','A8','A9',\\\n 'F0','F2','F3','F5','F6','F8','F9',\\\n 'G0','G1','G2','G3','G4','G5','G8','G9',\\\n 'K0','K1','K2','K3','K4','K5','K7',\\\n 'M0','M1','M2','M3','M4','M5','M6','M7','M8','M9', \\\n 'L0','L1','L2','L3','L4','L5','L9','Ldwarf', \\\n 'T','other','C']\n print len(star_classes)\n\n star_dict = dict(zip(star_classes,np.arange(len(star_classes))))\n\n # print phot_class.value_counts()\n\n for i in range(len(phot_class)):\n print phot_class[i], star_dict[phot_class[i]], sclass[i],star_dict[sclass[i]]\n phot_class_num[i] = star_dict[phot_class[i]]\n sclass_num[i] = star_dict[sclass[i]]\n\n #ax.plot(sclass_num,phot_class_num,'.')\n\n cmap = plt.cm.Blues\n cmap.set_bad('0.85',1.0)\n\n cax = plt.hist2d(sclass_num,phot_class_num, bins=65,range = [[0,65], [0,65]], norm = LogNorm(), cmap=cmap, zorder=0)\n cbar = plt.colorbar(ticks=[1,5,10,15,20,25,30,40])\n cbar.ax.set_yticklabels([1,5,10,15,20,25,30,40],fontsize=12)\n\n 
ax.plot(np.arange(65),np.arange(65),'r')\n\n plt.xticks(np.arange(len(star_classes)),star_classes,fontsize=8,rotation='vertical')\n plt.yticks(np.arange(len(star_classes)),star_classes,fontsize=8)\n\n plt.grid(True)\n return plt", "def hist(data):\n\n fig = plt.figure()\n ax1 = fig.add_subplot(111)\n plt.hold(True)\n for x in xrange(len(data[:,0,0])):\n counts, edges = np.histogram(data[x,:,:],bins=100)\n centers = [(edges[i]+edges[i+1])/2.0 for i,v in enumerate(edges[:-1])]\n ax1.plot(centers,counts)\n plt.hold(False)\n\n plt.show(block=False)\n\n # return fig", "def plot_bacteria_hist(folder, depth=6, mid_quantile=False):\n\n # Get the stool dataset and discretize it\n ds = parser.get_dataset()\n ds = compute_relative_values(ds)\n t = Tree(ds)\n ds = t.dataset_at_depth(depth)\n\n # Get header names to priint on the plots\n headers = ds[0][2:]\n\n for index, header in enumerate(headers):\n\n node = t.node_for_clade_name(header)\n abundances = t.abundance_column_in_subtree(node)\n abundances = [round(x,3) for x in abundances]\n\n if mid_quantile:\n abundances.sort()\n abundances = abundances[int(len(abundances)*0.25): -int(len(abundances)*0.25)]\n\n xlabel('Relative abundance')\n ylabel('Bin size')\n\n title_text = header.replace('/','-').replace('|', '-')\n title(title_text)\n binwidth = 0.001\n bins, bin_sizes, patches = hist(abundances, bins=np.arange(min(abundances), max(abundances) + binwidth, binwidth), color='#0066FF')\n\n # Write discretized values\n threshold, discretized_abundances = discretize_row(abundances, maxent_discretization_splitter)\n _0 = '0: ' + str(len([x for x in discretized_abundances if x == 0]))\n _1 = '1: ' + str(len([x for x in discretized_abundances if x == 1]))\n\n text_x = 0.7\n\n smaples_text = 'Samples: %d' % len(abundances)\n figtext(text_x, 0.85, smaples_text, fontsize=10)\n\n threshold_text = 'Splitter: %f' % threshold\n figtext(text_x, 0.82, threshold_text, fontsize=10)\n figtext(text_x, 0.79, _0, fontsize=10)\n figtext(text_x, 0.76, _1, fontsize=10)\n\n # Draw threshold line\n max_bin = len(abundances)\n if len(bins) != 0:\n max_bin = max(bins)\n\n a, b = [threshold, threshold], [0, max_bin]\n plot(a, b, c='r')\n\n grid(True)\n\n # Write max and avg\n # max_abundance = 'max: %f' % max(abundances)\n # avg_abundance = 'avg: %f' % (sum(abundances) / float(len(abundances)))\n # figtext(text_x, 0.76, max_abundance, fontsize=10)\n # figtext(text_x, 0.73, avg_abundance, fontsize=10)\n\n # write variance\n # variance = 'var: %f' % tvar(abundances)\n # figtext(text_x, 0.70, variance, fontsize=10)\n\n # Save fig to folder\n if not (os.path.exists(folder)):\n os.makedirs(folder)\n file_name = os.path.join(folder, title_text)\n print 'Hist: ', file_name\n savefig(file_name)\n\n close()", "def plot_loss(self):\n #x = [k for k in range(self.rep)]\n loss = self.min_list[:,0]//100 #For clarity\n #plt.plot(x,self.min_list[:,0])\n plt.hist(loss,density=True)\n plt.xlabel(self.list_name + '_loss//100')\n plt.ylabel('Frequency')\n #plt.xticks(range(8),[0,250,500,750,1000,1250,1500,1750])\n plt.title('Distribution of '+self.list_name+'_loss ('+str(self.rep)+' iterations)')\n plt.savefig('img/stats/'+self.list_name+'_lossFrequency_'+self.model_name+'.png')\n plt.show()", "def getHistogram( self, img):\n bins = 256\n range_scale = [0,254]\n nivel_transparencia = 0.5\n plt.hist(img.ravel(),bins,range_scale, label=\"histogram\", alpha=nivel_transparencia);\n plt.legend(loc='upper right')\n plt.show()", "def create_histogram(self, i):\n # styling\n 
sns.set(style=\"whitegrid\")\n font = {'weight': 'normal'}\n plt.rc('font', **font)\n plt.rc('axes', labelsize=25) # fontsize of the x and y labels\n plt.rc('xtick', labelsize=25) # fontsize of the tick labels\n plt.rc('ytick', labelsize=25)\n fig, ax = plt.subplots(1, 1, figsize=(5, 5), dpi=100)\n try:\n if self.dtype_is_object() or self.num_of_values() <= 15:\n if self.num_of_values() > 15:\n data = pd.to_numeric(self.data, errors='coerce')\n plot = sns.distplot(data.dropna())\n else:\n plot = sns.countplot(self.remove_nan_values())\n else:\n plot = sns.distplot(self.remove_nan_values())\n plot.set(xlabel='', ylabel='')\n except Exception:\n plt.text(0.5, 0.5, f'Unable to plot', ha='center', va='center', transform=ax.transAxes, fontsize=16)\n if not os.path.isdir('hist_images'):\n os.mkdir('hist_images')\n plt.savefig(f'hist_images/histogram{i}.png', bbox_inches='tight')\n plt.close()\n plt.clf()", "def plotYields(data,signal=None,backgrounds=[],bins=[]):\n print \n if not bins:\n center = [i+0.5 for i,d in enumerate(data)] # pseudo-data points for making histogram\n bins = [i for i in range( len(data)+1 )] # pseudo-binning\n else:\n center = [ 0.5*(b+bins[i+1]) for i,b in enumerate(bins) if i<len(bins)-1]\n data = np.array(data)\n\n # stack the backgrounds on top of each other in the plot\n nbckgs = len(backgrounds)\n labels = ['background {0}'.format(i) for i in range(nbckgs)]\n weights = list(backgrounds)\n bincenters = [ list(center) for _ in range(nbckgs)]\n\n # stack the signal on top of the backgrounds\n if signal is not None:\n # 'signal' is what we want to unfold, e.g., ttbar\n labels += ['signal']\n weights += [list(signal)]\n bincenters += [list(center)]\n\n # plot backgrounds & signal\n d,bb,pp = plt.hist(bincenters,weights=weights,stacked=True,\n histtype='stepfilled',label=labels,\n edgecolor='k',bins=bins)\n\n # plot the data as error bars\n plt.errorbar(center,data,color='k',\n fmt='o',yerr=np.sqrt(data),\n label='Data')\n\n plt.ylim(ymin=0,ymax=plt.ylim()[1]*1.6) # scale the y-axis to accommodate the legend\n plt.legend()\n plt.xlabel(\"Distribution\")\n plt.ylabel(\"Events\")\n\n return", "def het_hist(het_check_df: pd.DataFrame):\n\n fig = plt.figure(figsize=(8,6))\n plt.hist(het_check_df['het_rate'])\n plt.axvline(het_check_df['low_limit'][0], c='red', ls='--')\n plt.axvline(het_check_df['up_limit'][0], c='red', ls='--')\n plt.xlabel(\"Heterozygosity Rate\")\n plt.ylabel(\"Number of Samples\")\n plt.title(\"Heterozygosity Distribution of All Samples\\n (< {:.3f} or > {:.3f} are removed)\".format(het_check_df['low_limit'][0], het_check_df['up_limit'][0]))\n return fig", "def plotblackbody(_zband, _yband, _jband, _hband, _kband, _parallax, _perr):\n # Set pyplot style to be consistent within the program\n plt.style.use('seaborn-whitegrid')\n # Import raw data to plot Hertzsprung-Russell diagram\n _hrdata = inithr('hr.dat')\n # Determine distance in parsecs\n _distance = 1 / np.tan(_parallax * 10**-3)\n _derr = (_perr * 10**-3) / ((_parallax * 10**-3)**2)\n # Create single data array with all bands\n _bands = [_zband, _yband, _jband, _hband, _kband]\n _lambda = [0.9, 1.02, 1.22, 1.63, 2.2]\n # Set up empty arrays for each star\n _largestar = np.zeros((1, 2))\n _smallstar = np.zeros((1, 2))\n\n # Determine the spectral flux density from the large star\n i = 0\n while i < 5:\n # Determine the maximum and minimum values of the observed band\n _max, _min = lightcurve.maxminvals(_bands[i])\n # The large star uses the maximum flux value (smallest magnitude)\n _largestar = 
np.append(_largestar, np.array([_lambda[i], (magtoflux(_min, i))], ndmin=2), axis=0)\n i += 1\n # Delete first empty row of the array\n _largestar = np.delete(_largestar, 0, axis=0)\n\n # Determine the spectral flux density from the small star\n i = 0\n while i < 5:\n # Determine the maximum and minimum values of the observed band\n _max, _min = lightcurve.maxminvals(_bands[i])\n # Smaller star flux value is combined value minus the large star\n _smallstar = np.append(_smallstar, np.array([_lambda[i], (magtoflux(_max, i) -\n magtoflux(_min, i))], ndmin=2), axis=0)\n i += 1\n # Delete the first empty row of the array\n _smallstar = np.delete(_smallstar, 0, axis=0)\n\n # Determine the luminosity and effective temperature of each star\n _luma, _lumaerr, _wiena = getwientemp(_largestar, _distance, _derr, 1)\n _lumb, _lumberr, _wienb = getwientemp(_smallstar, _distance, _derr, 2)\n\n # Calculate luminosities in solar units\n _solluma = _luma / (3.828*10**26)\n _sollumb = _lumb / (3.828*10**26)\n _lumaerr = _lumaerr / (3.828*10**26)\n _lumberr = _lumberr / (3.828*10**26)\n\n # Calculate masses using the mass/luminosity relation in solar mass units\n # N.B. only works as an approximation for main sequence stars, giants and dwarfs are not sutiable for this\n # approximation\n _solmassa = np.power(_solluma, 1/3.5)\n _solmassaerr = ((_solmassa * (1/3.5) * _lumaerr) / _solluma)**2\n _solmassb = np.power(_sollumb, 1/3.5)\n _solmassberr = ((_solmassb * (1 / 3.5) * _lumberr) / _sollumb) ** 2\n\n # Calculate stellar radius in solar radii using the relationship between luminosity, surface area and temperature\n _solrada = np.sqrt(_solluma / np.power(_wiena / 5778, 4))\n _solradb = np.sqrt(_sollumb / np.power(_wienb / 5778, 4))\n _solradaerr = ((_solrada * 0.5 * _lumaerr) / _solluma)**2\n _solradberr = ((_solradb * 0.5 * _lumberr) / _sollumb)**2\n\n # Output determined values to the screen and write to file\n print('Values for the large star:')\n print('Effective temperature: ' + str(round_sig(_wiena)))\n print('Solar luminosities: ' + str(round_sig(_solluma)) + ', error: ' + str(round_sig(_lumaerr)))\n print('Solar radii: ' + str(round_sig(_solrada)) + ', error: ' + str(round_sig(_solradaerr)))\n print('Solar masses: ' + str(round_sig(_solmassa)) + ', error: ' + str(round_sig(_solmassaerr)))\n print('-----------------------------------------------------')\n print('Values for the small star:')\n print('Effective temperature: ' + str(round_sig(_wienb)))\n print('Solar luminosities: ' + str(round_sig(_sollumb)) + ', error: ' + str(round_sig(_lumberr)))\n print('Solar radii: ' + str(round_sig(_solradb)) + ', error: ' + str(round_sig(_solradberr)))\n print('Solar masses: ' + str(round_sig(_solmassb)) + ', error: ' + str(round_sig(_solmassberr)))\n\n # Convert from luminosity to magnitude in solar units\n _luma = -2.5 * np.log10(_luma / (3.0128 * 10**28))\n _lumb = -2.5 * np.log10(_lumb / (3.0128 * 10**28))\n\n # Plot Hertzsprung-Russell diagram using provided array\n plt.scatter(_hrdata[:, 1], _hrdata[:, 0], s=0.5)\n # Plot determined values for each star\n plt.scatter(_wiena, _luma, s=16, c='red', label='Larger Star')\n plt.scatter(_wienb, _lumb, s=16, c='green', label='Smaller Star')\n # Set the x and y axis limits to sensible values\n plt.legend()\n plt.xlim(3000, 10000)\n plt.ylim(-10, 20)\n # Invert both axes as convention\n plt.gca().invert_xaxis()\n plt.gca().invert_yaxis()\n # Save figure to current folder\n plt.savefig('hr.png')\n # Display to screen\n plt.show()", "def plot_histograms(p_hist, 
p_hbins, title, figure_path=None):\n\n base_fig_size = 7\n h_fig = base_fig_size\n w_fig = base_fig_size * 4\n\n fig = plt.figure(figsize=(w_fig, h_fig))\n fig.suptitle(title)\n iplot = 0\n\n p_Nx, p_Ny = np.amax(p_hbins, axis=1) + 1\n\n p_hist = np.reshape(p_hist, (4, p_Ny, p_Nx))\n\n iplot += 1\n p_plot_title = 'Spectral BEHistogram Amp (log10 of counts)'\n p_plot = fig.add_subplot(1, 4, iplot, title=p_plot_title)\n p_im = p_plot.imshow(np.rot90(np.log10(p_hist[0])), interpolation='nearest')\n p_plot.axis('tight')\n fig.colorbar(p_im, fraction=0.1)\n\n iplot += 1\n p_plot_title = 'Spectral BEHistogram Phase (log10 of counts)'\n p_plot = fig.add_subplot(1, 4, iplot, title=p_plot_title)\n p_im = p_plot.imshow(np.rot90(np.log10(p_hist[1])), interpolation='nearest')\n p_plot.axis('tight')\n fig.colorbar(p_im, fraction=0.1)\n\n iplot += 1\n p_plot_title = 'Spectral BEHistogram Real (log10 of counts)'\n p_plot = fig.add_subplot(1, 4, iplot, title=p_plot_title)\n p_im = p_plot.imshow(np.rot90(np.log10(p_hist[2])), interpolation='nearest')\n p_plot.axis('tight')\n fig.colorbar(p_im, fraction=0.1)\n\n iplot += 1\n p_plot_title = 'Spectral BEHistogram Imag (log10 of counts)'\n p_plot = fig.add_subplot(1, 4, iplot, title=p_plot_title)\n p_im = p_plot.imshow(np.rot90(np.log10(p_hist[3])), interpolation='nearest')\n p_plot.axis('tight')\n fig.colorbar(p_im, fraction=0.1)\n\n if figure_path:\n plt.savefig(figure_path, format='png')\n\n return fig", "def plot_hist(df, num_bins=8):\n df.hist(figsize=(24, 20), bins=num_bins)\n plt.axes", "def stars(self, magnitude=20):\n # Get the stars that are visible within this chart.\n thestars = []\n for s in self.hip_stars:\n if not s: continue\n hip_id, mag, ra, dec, bv = s\n if mag>magnitude: continue\n if dec<min(self.inner_dec, self.outer_dec): continue\n if dec>max(self.inner_dec, self.outer_dec): continue\n thestars.append(s)\n # This should sort them by increasing magnitude (brightest first).\n thestars.sort(key=lambda a:a[1])\n if not thestars: return\n # Set the least bright magnitude.\n self.dimmest_mag = math.floor(thestars[-1][1])\n # Create the star group.\n star_g = self.make_element(self.centered, 'g', (\n 'stroke', 'none'), ('fill', 'black'), (\n 'clip-path', 'url(#innerClipPath)'))\n for hip_id, mag, ra, dec, bv in thestars:\n x, y = self.radec2xy(ra, dec)\n self.make_element(star_g, 'circle', (\n 'cx', x), ('cy', y), ('r', self.starsize(hip_id)))", "def plot_random_schedules(scores):\n\n\tplt.hist(scores, bins = len(scores))\n\tplt.ylabel(\"Score\")\n\tplt.xlabel(\"Times\")\n\tplt.title(\"Histogram random schedules\")\n\tplt.show()", "def histogram_plot(val_addr):\n pos = np.zeros((val_addr.shape[0],2))\n for i in range(val_addr.shape[0]):\n if i%1e6==0:\n print(i)\n temp=val_addr[i]\n if temp<25:\n if temp<=4:\n pos[i,0]=np.sqrt(3)\n pos[i,1] = temp-2.\n elif temp<=8:\n pos[i,0]=np.sqrt(3)/2.\n pos[i,1] = temp-6.5 \n elif temp<=13:\n pos[i,0]=0\n pos[i,1] = temp-11. 
\n elif temp<=17:\n pos[i,0]=-np.sqrt(3)/2.\n pos[i,1] = temp-15.5 \n elif temp<=23:\n pos[i,0]=-np.sqrt(3)\n pos[i,1] = temp-20.\n plt.hexbin(pos[:,1],pos[:,0],gridsize=(4,2))\n plt.colorbar()\n plt.show()\n return(pos)", "def histogram(values, title, fig_size=(4,3), path=None):\n plt.clf()\n f, ax = plt.subplots(1, figsize=fig_size)\n ax.hist(values, bins=60)\n ax.set_title(title)\n f.tight_layout()\n if(path != None):\n f.savefig(path+'/hist_'+title+'.png')", "def rho_plot2(self, pred=None):\n axRect = [0.1446, 0.2150, 0.7604, 0.7100]\n # plt.figure(22, figsize = (8.5, 11), dpi=300)\n fig, ax = plt.subplots()\n if pred is not None:\n self.rho_sub_plot(ax, axRect, pred=pred)\n else:\n self.rho_sub_plot(ax, axRect)", "def interactive_hess(gr,g):\n def plot(size=100):\n fig,ax = plt.subplots()\n fig.set_size_inches(8,6)\n ax.hexbin(gr, g, gridsize=size, bins='log', cmap='inferno', label=\"Relative stellar density\")\n ax.set_title(\"HESS DIAGRAM, gridsize={0:d}\".format(size), fontsize = 15)\n ax.set_xlabel(r\"$g-r$\",fontsize = 25)\n ax.set_ylabel(r\"$g$\",fontsize = 25)\n ax.legend(loc='upper left')\n ax.set_ylim(ax.get_ylim()[::-1])\n plt.show()\n interact(plot, size=(50,300,1),continuous_update=False);", "def pixel_ts_distribution(self):\n fig,ax = plt.subplots(figsize=(8,6))\n bins = np.linspace(0,25,501)\n tsvec=self.tsmap.vec\n ax.hist(tsvec, bins, log=True, histtype='step', lw=2, cumulative=-1, label='data');\n # make array corresponding to the hist\n h = np.histogram(tsvec, bins, )[0]\n x = bins[:-1]\n yh = sum(h)-h.cumsum() \n f = lambda x: np.exp(-x/2)\n ye=6e5*f(x)\n ax.plot(x, ye, '-g', lw=2, label='exp(-TS/2)')\n ax.fill_between(x,yh,ye,where=x>5, facecolor='red', alpha=0.6)\n plt.setp(ax, xscale='linear', xlabel='TS', ylim=(1,None), ylabel='# greater than TS')\n ax.legend()\n ax.set_title('Cumulative distribution of single-pixel TS values for {}'.format(self.skymodel),\n fontsize=14)\n ax.grid(True, alpha=0.5) \n fig.set_facecolor('white')\n return fig", "def histogram(list):\n for i in range(0,len(list)):#go over the number in the list\n print('*'*list[i])", "def replot_shadow_half( ilh, val, xdata, ydata, color, alpha ):\n shadow_list = get_shadow_list( val, num_lower, num_higher )\n if shadow_list[ilh]:\n l2shadow[ilh] = ax['plot'].plot( xdata, ydata[:,shadow_list[ilh]], '-', color=color, lw=1, alpha=alpha )", "def hist(self):\r\n plt.hist(self.data_array, bins='auto', density=False, facecolor='b')\r\n plt.title(self.column_name)\r\n plt.savefig(self.column_name + \".svg\")\r\n plt.close()", "def draw_histogram(m, rolls, width):\n # Count the number of rolls of each side and store them in a list\n roll_count = []\n for i in list(range(1, m+1)):\n count = rolls.count(i)\n roll_count.append(count)\n\n # Finding the the maximum count from the list\n max_count = max(roll_count)\n\n print(\"Frequency Histogram: \" + str(m) + \"-sided Die\")\n\n # Counting the number of same outputs and then scaling these outputs\n # to the given width\n for j in list(range(1, m+1)):\n count = rolls.count(j)\n scaled_count = round(count*(width/max_count))\n print(str(j)+'.', end='')\n print('#'*scaled_count, end='')\n print('-'*(width-scaled_count))", "def histogram(arr, xlbl, xrng=None, nbins=20, alpha=1.):\n if xrng is None:\n xrng = (np.min(arr),np.max(arr))\n p = figure(plot_width=600, plot_height=400)\n # Histogram\n hist, edges = np.histogram(arr, range=xrng, density=True, bins=nbins)\n p.quad(top=hist, bottom=0, left=edges[:-1], right=edges[1:], fill_color='blue', alpha=alpha)\n # 
Label\n p.xaxis.axis_label = xlbl\n # Show\n show(p)", "def drawHist(data, xLabel, unit, binSize, title):\n mean = np.mean(data)\n median = np.median(data)\n mode = stats.mode(data)[0].astype(float)\n \n q1, q3 = np.percentile(data, [25, 75])\n iqr = q3 - q1\n sigma = np.std(data)\n \n \n bins = np.arange(min(data), max(data) + 1, binSize)\n plt.style.use('dark_background')\n fig, ax = plt.subplots(figsize=(12,7))\n plt.hist(data, bins=bins, histtype='bar') \n plt.title(title)\n plt.xlabel(xLabel + \" \" + unit)\n plt.ylabel('count')\n ymax = ax.get_ylim()[1]\n ax.vlines(mean, 0, ymax, color='red', label='mean')\n ax.vlines(mean-sigma, 0, ymax, color='red', linestyle='--', \n label='mean +/- std')\n ax.vlines(mean+sigma, 0, ymax, color='red', linestyle='--')\n plt.legend()\n plt.show()\n \n print(\"Einheit: \", unit)\n print(\"Minimum: \", round(data.min(),3))\n print(\"Maximum: \", round(data.max(),3))\n print(\"Mittelwert: \", round(mean,3))\n print(\"Median: \", round(median,3))\n print(\"Modus: \", round(mode[0],3))\n print(\"Standardabweichung: \", round(sigma, 3))\n print(\"1. Quartil: \", round(q1,3))\n print(\"3. Quartil: \", round(q3,3))\n print(\"Quartilsdifferenz: \", round(iqr,3))", "def visualize(self):\n self.dataFrame.hist()\n plt.show()", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):", "def plot_histograms(top, bot, edges, resolution, *, ax=None):\n if ax is None:\n ax = plt.gca()\n\n ax.hlines(y=0,\n xmin=0,\n xmax=1,\n linestyle='dashed',\n color='black',\n alpha=0.2)\n ax.bar(edges, top, width=resolution)\n ax.bar(edges, -bot, width=resolution)\n # Set some sensible defaults - these can be overridden after the fact,\n # since we return the axes object\n ax.set_xlim((-0.05, 1.05))\n ax.set_xlabel('Predicted Probability')\n height = max(abs(x) for x in ax.get_ylim())\n ax.set_ylim((-height, height))\n ax.set_ylabel('Count')\n return ax", "def show_histogram(im):\n\n if im.ndim == 2:\n # Input image is single channel\n plt.hist(im.flatten(), 256, range=(0, 250), fc='k')\n plt.show()\n\n elif im.ndim == 3:\n # Input image is three channels\n fig = plt.figure()\n fig.add_subplot(311)\n plt.hist(im[..., 0].flatten(), 256, range=(0, 250), fc='b')\n fig.add_subplot(312)\n plt.hist(im[..., 1].flatten(), 256, range=(0, 250), fc='g')\n fig.add_subplot(313)\n plt.hist(im[..., 2].flatten(), 256, range=(0, 250), fc='r')\n plt.show()", "def plot_histogram(self,ax=None,**kwargs):\n if not ax:\n fig = plt.figure()\n ax = fig.add_subplot(111)\n probs,bins,patches = ax.hist(self.scores_list,normed=True,label=\"Sample\",**kwargs)\n ax.vlines(self.xhat,*ax.get_ylim(),label='Mean',color='r')\n ax.legend()\n return ax,probs,bins", "def plotHClustProportions(figh, Z, resDf, alpha_col='pvalue', alpha=0.05, colors=None, ann='N', xLim=None, maxY=None, min_count=20):\n\n nCategories = len(resDf['observed'].iloc[0])\n if colors is None:\n colors = sns.color_palette('Set1', n_colors=nCategories)\n labels = resDf['labels'].iloc[0]\n \n dend = sch.dendrogram(Z, no_plot=True,\n color_threshold=None,\n link_color_func=lambda lid: hex(lid),\n above_threshold_color='FFFFF')\n figh.clf()\n axh = plt.axes((0.05, 0.07, 0.8, 0.8), facecolor='w')\n\n lowestY = None\n annotateCount = 0\n for xx, yy, hex_cid in zip(dend['icoord'], dend['dcoord'], dend['color_list']):\n cid = int(hex_cid, 16)\n xx = np.array(xx) / 10\n axh.plot(xx, yy, zorder=1, lw=0.5, color='k', alpha=1)\n\n N = np.sum(resDf.loc[cid, 'observed'])\n if alpha is None or resDf.loc[cid, alpha_col] <= alpha and N > min_count:\n obs 
= np.asarray(resDf.loc[cid, 'observed_prop'])\n obs = obs / np.sum(obs)\n L = (xx[2] - xx[1])\n xvec = L * np.concatenate(([0.], obs, [1.]))\n curX = xx[1]\n for i in range(len(obs)):\n c = colors[i]\n axh.plot([curX, curX + L*obs[i]],\n yy[1:3],\n color=c,\n lw=10,\n solid_capstyle='butt')\n curX += L*obs[i]\n if ann == 'N':\n s = '%1.0f' % N\n elif ann == 'CID':\n s = cid\n elif ann == 'alpha':\n if resDf.loc[cid, alpha_col] < 0.001:\n s = '< 0.001'\n else:\n s = '%1.3f' % resDf.loc[cid, alpha_col]\n if not ann == '':# and annotateCount < annC:\n xy = (xx[1] + L/2, yy[1])\n # print(s,np.round(xy[0]), np.round(xy[1]))\n annotateCount += 1\n axh.annotate(s,\n xy=xy,\n size='x-small',\n horizontalalignment='center',\n verticalalignment='center')\n if lowestY is None or yy[1] < lowestY:\n lowestY = yy[1]\n yl = axh.get_ylim()\n if not lowestY is None:\n yl0 = 0.9*lowestY\n else:\n yl0 = yl[0]\n if not maxY is None:\n yl1 = maxY\n else:\n yl1 = yl[1]\n axh.set_ylim((yl0, yl1))\n \n axh.set_yticks(())\n if not xLim is None:\n if xLim[1] is None:\n xl1 = axh.get_xlim()[1]\n xLim = (xLim[0], xl1)\n axh.set_xlim(xLim)\n else:\n xLim = axh.get_xlim()\n\n xt = [x for x in range(0, Z.shape[0]) if x <= xLim[1] and x>= xLim[0]]\n xt = xt[::len(xt) // 10]\n # xtl = [x//10 for x in xt]\n axh.set_xticks(xt)\n # axh.set_xticklabels(xtl)\n legh = axh.legend([plt.Rectangle((0,0), 1, 1, color=c) for c in colors],\n labels,\n loc='upper left', bbox_to_anchor=(1, 1))", "def overlay_histogram_phred(df, path, settings):\n df[\"phredIdentity\"] = -10 * np.log10(1 - (df[\"percentIdentity\"] / 100))\n df[\"phredIdentity\"][np.isinf(df[\"phredIdentity\"])] = 60\n\n palette = (\n settings[\"colors\"] if settings[\"colors\"] else cycle(plotly.colors.DEFAULT_PLOTLY_COLORS)\n )\n\n hist_phred = Plot(\n path=path + \"NanoComp_OverlayHistogram_PhredScore.html\",\n title=\"Histogram of Phred scores\",\n )\n\n hist_phred.html, hist_phred.fig = plot_overlay_histogram(\n df, palette, \"phredIdentity\", hist_phred.title, bins=20, density=True\n )\n\n hist_phred.save(settings)\n\n return hist_phred", "def plot_hitstogram_graph(data_values, title,\r\n number_of_keys,\r\n max_val,\r\n file_in):\r\n\r\n # bins = max(data_values)\r\n # pylab.hist(data_values, facecolor='blue')\r\n pylab.hist(data_values, facecolor='green', alpha=0.6)\r\n pylab.grid(True)\r\n pylab.title(title + \"_histogram\")\r\n pylab.xlabel('number in cluster')\r\n pylab.ylabel('Count')\r\n pylab.savefig(file_in + \"_\" + title + '_histogram.png')\r\n plt.close()\r\n pylab.close()\r\n os.chdir('..')", "def _plot_psth_flat(self, sigma=5, figsize = (15, 8)):\n\t\n\t\tgaus_filt = sp.ndimage.gaussian_filter1d\n\t\tall_resp = gaus_filt(self.conditions_hist_mean.flatten(), sigma)\n\t\t\n\t\tfig = plt.figure(figsize=figsize)\n\t\tax = fig.add_subplot(1, 1, 1)\n\t\t\n\t\tax.plot(all_resp, linestyle='-', color='0.28')\n\t\t\n\t\tn_con = self.parameters['conditions']\n\t\tcon_mark = np.arange(0, (self.bins.size -1) * n_con, self.bins.size -1)\n\t\t\t\t\n\t\tax.xaxis.set_ticks(con_mark)\n\n\t\ttry:\n\t\t\tax.xaxis.set_ticklabels(self.cond_label)\n\t\texcept:\n\t\t\tax.xaxis.set_ticklabels(np.unique(self.marker_codes))\n\t\t\n\t\tfreq_label = np.round(ax.get_yticks() * (1/self.bin_width),\n\t\t\t\t\t\t\t decimals = 1)\n\t\tax.set_yticklabels(freq_label)\n\t\tax.set_ylabel('Frequency')\n\t\t\n\t\tfor label in ax.xaxis.get_majorticklabels():\n\t\t\tlabel.set_horizontalalignment('left')\n\t\t\t\n\t\tax.set_xlim(0, (self.bins.size -1) * n_con)\n\t\t\n\t\t# bug with macosx 
backend\n# plt.tight_layout()\n\t\tplt.subplots_adjust(hspace=0.45)", "def draw_heaters(ax, windtunnel):\n draw_heater(ax, windtunnel.heater_l)\n draw_heater(ax, windtunnel.heater_r)", "def histogram(self, mask=None, extrema=None):\r\n uni, counts = self._getcolors()\r\n return [l for l in counts]", "def plot_spectrumxichange(self):\n countgood = 0 ; countbad = 0\n for idata in self.datarg:\n if idata[-1, 0] == 1.: \n self.fig.axes[0].plot(idata[0:,0], idata[0: ,1] ,'b') \n countgood += 1\n print countgood , 'good solution'\n else: \n self.fig.axes[0].plot(idata[0:,0], idata[0: ,1] ,'r') \n print countbad, 'bad solution'\n countbad += 1\n print 'We found %g good solutions and %g tda startdistributions that broke down before xi = 1, we hope that\\'s what you expected' %(countgood,countbad)\n #Create custom artistsr[goodline,badline],['solution','breakdown']\n goodline = pl.Line2D((0,1),(0,0), color='b') \n badline = pl.Line2D((0,1),(0,0), color='r')\n self.layout(self.reader.depvar['depvar'] , r'energy spectrum (a.u.)' , tit = r'All tda start distributions $\\xi$' , legendhand = [goodline , badline] , legendlab = ['solution', 'breakdown'] )\n self.savefig('xispec')", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):\n pylab.hist(values, bins = numBins)\n pylab.xlabel(xLabel)\n pylab.ylabel(yLabel)\n if not title == None:\n pylab.title(title)\n pylab.show()", "def plotPosteriors(posteriors):\n for i,p in enumerate(posteriors):\n plt.hist(p,bins=20,histtype='stepfilled',alpha=0.5,\n density=True,label='Bin {0}'.format(i))\n plt.legend()\n plt.ylabel(\"Probability\")\n plt.xlabel(\"Posterior\")\n\n return", "def draw_pitch_histogram(snd, title=None):\n \n pitch = snd.to_pitch()\n # If desired, pre-emphasize the sound fragment before calculating the spectrogram\n pre_emphasized_snd = snd.copy()\n pre_emphasized_snd.pre_emphasize()\n spectrogram = pre_emphasized_snd.to_spectrogram(window_length=0.03, maximum_frequency=8000)\n \n # definitions for the axes\n left, width = 0.12, 0.55\n bottom, height = 0.1, 0.65\n spacing = 0.105\n \n rect_specto = [left, bottom, width, height]\n rect_histy = [left + width + spacing, bottom, 0.2, height]\n \n plt.figure()\n \n ax_specto = plt.axes(rect_specto)\n ax_specto.tick_params(direction='in', right=True)\n ax_histy = plt.axes(rect_histy)\n ax_histy.tick_params(direction='in', labelleft=False)\n \n # draw_spectrogram(spectrogram)\n dynamic_range = 70\n X, Y = spectrogram.x_grid(), spectrogram.y_grid()\n sg_db = 10 * np.log10(spectrogram.values)\n ax_specto.pcolormesh(X, Y, sg_db, vmin=sg_db.max() - dynamic_range, cmap='afmhot')\n ax_specto.set_xlim([snd.xmin, snd.xmax])\n ax_specto.set_ylim([spectrogram.ymin, spectrogram.ymax])\n ax_specto.set_xlabel(\"time [s]\")\n ax_specto.set_ylabel(\"frequency [Hz]\")\n \n ax_pitch = ax_specto.twinx()\n \n # draw_pitch(pitch)\n # Extract selected pitch contour, and\n # replace unvoiced samples by NaN to not plot\n pitch_values = pitch.selected_array['frequency']\n pitch_values_max = pitch_values.max()\n pitch_values[pitch_values==0] = np.nan\n ax_pitch.plot(pitch.xs(), pitch_values, 'o', markersize=5, color='w')\n ax_pitch.plot(pitch.xs(), pitch_values, 'o', markersize=2)\n ax_pitch.grid(False)\n ax_pitch.set_ylim(0, pitch.ceiling)\n ax_pitch.set_ylabel(\"fundamental frequency [Hz]\")\n \n # Histogram\n # now determine nice limits by hand:\n binwidth = 5\n lim = np.ceil(pitch_values_max / binwidth) * binwidth\n \n bins = np.arange(-lim, lim + binwidth, binwidth)\n ax_histy.hist(pitch_values, bins=bins, 
orientation='horizontal', facecolor= u'b', edgecolor = u'b')\n ax_histy.set_ylim(ax_pitch.get_ylim())\n \n plt.title(title)\n \n plt.show()", "def hist(self,geo,pfile):\n\n # Create histogram of box data, rounding to nearest integers if temperature\n boxdata = self.img.flatten()\n imin = int(round(min(boxdata))) - 1\n imax = int(round(max(boxdata))) + 1\n ni = imax-imin+1 # number of bins to plot\n h = np.zeros(ni,dtype=int) # initialise with zeros\n for val in boxdata: # assign each image value to a bin\n i = int(round(val)) - imin \n h[i] += 1\n n = sum(h) # total number of values binned\n h = h * 100.0/n # convert no.in bins to %frequency\n plt.figure(WINDOW_HIST,figsize=(4,4))\n plt.clf()\n # Create title for histogram plot\n ttl = self.desc + '\\n' + \\\n 'Box: X=' + str(self.ix-self.mbox) + ':' \\\n + str(self.ix) + ':' \\\n + str(self.ix+self.mbox) + \\\n ', Y=' + str(self.iy-self.mbox) + ':' \\\n + str(self.iy) + ':' \\\n + str(self.iy+self.mbox)\n plt.title(ttl)\n plt.ylabel(\"% Frequency\")\n tdisp = self.label in ( 'T9', 'T10', 'TS' )\n if tdisp: plt.xlabel(\"Pixel Temperature [K]\")\n else: plt.xlabel(\"Pixel Value [0:255]\")\n xval = np.arange(imin,imax+1,dtype=int)\n # Set colour of histogram according to channel\n plt.bar(xval,h,color=plot_colours.get(self.label,'gray'))\n x0,x1 = plt.xlim()\n y0,y1 = plt.ylim()\n boxmean = np.mean(boxdata)\n boxsd = np.std(boxdata)\n midpix = self.img[self.mbox,self.mbox]\n plt.plot( boxmean+[0,0], [y0,y1], ':', color='black' )\n plt.errorbar ( boxmean, 0.9*y1, xerr=boxsd, color='black', \n capsize=4 )\n plt.plot ( midpix, 0.9*y1, 's', color='black', \n markerfacecolor='none' ) \n plt.tight_layout()\n if boxmean > 0.5 * ( x1 + x0 ): xt = x0 + 0.4 * ( x1 - x0 )\n else: xt = x0 + 0.95*(x1-x0)\n yt = y0 + 0.95*(y1-y0)\n yd = 0.05*(y1-y0)\n text = 'Mean = {:6.2f}'.format(boxmean)\n plt.text(xt,yt,text,ha=\"right\")\n yt -= yd\n text = 'S.D. 
= {:6.2f}'.format(boxsd)\n plt.text(xt,yt,text,ha=\"right\")\n yt -= yd\n text = 'NPix = {:6n}'.format(n)\n plt.text(xt,yt,text,ha=\"right\")\n yt -= yd\n if tdisp: text = 'MidPix = {:6.2f}'.format(midpix)\n else: text = 'MidPix = {:6n}'.format(midpix)\n plt.text(xt,yt,text,ha=\"right\")\n if geo.cal:\n lat,lon,zen = geo.locate(self.ix,self.iy) \n text = 'Lat = {:6.2f}'.format(lat)\n yt -= yd\n plt.text(xt,yt,text,ha=\"right\") \n text = 'Lon = {:6.2f}'.format(lon)\n yt -= yd\n plt.text(xt,yt,text,ha=\"right\") \n if pfile: \n file = input ( \"Save to file (<CR>=hist.pdf): \" ) or \"hist.pdf\"\n plt.savefig(file)", "def histogram(data, title, path):\n plt.hist(data,\n bins=60)\n plt.xticks(size=22)\n plt.yticks(size=22)\n plt.title(title,\n fontsize=30)\n plt.savefig(path)\n plt.clf()", "def plot_pIdent_hist(d, outfile):\n\n print('Plotting...')\n # Set the colors\n bar_color = '#2171b5'\n gridM = '#bdbdbd'\n gridm = '#d9d9d9'\n alpha = 0.6\n\n\n # Build the plot\n fig, ax = plt.subplots(figsize=(7, 10))\n\n '''\n # Plot titles\n ax.set_title(\n f'Histogram of Percent Identity\\nof Read Alignments',\n fontsize=20, y=1.02\n )\n '''\n\n # Plot labels\n ax.set_xlabel(\n 'Number of Base Pairs Aligned',\n fontsize=14, fontweight='bold'\n )\n ax.set_ylabel(\n 'Percent Identity of Alignment',\n fontsize=14, fontweight='bold'\n )\n\n # Set plot/grid style\n ax.minorticks_on()\n ax.tick_params(\n which='minor', axis='both', left=False, bottom=False\n )\n ax.tick_params(\n which='major', axis='both',\n left=False, bottom=True,\n size=4, width=2, tickdir='in',\n labelsize=11, zorder=10\n )\n ax.xaxis.grid(\n which=\"minor\", color=gridm, linestyle='--',\n linewidth=1, alpha=0.6, zorder=1\n )\n ax.xaxis.grid(\n which=\"major\", color=gridM, linestyle='--',\n linewidth=1.5, alpha=0.4, zorder=1\n )\n ax.set_axisbelow(True)\n for spine in ax.spines.values(): spine.set_linewidth(2)\n\n # Plot the data\n ax.barh(\n d['ys'],\n d['xs'],\n align='center',\n height=0.9,\n color=bar_color,\n alpha=alpha,\n )\n\n # Set plot axis ranges\n #ax.set_xlim(left=0, right=int((max(d['xs'])+min(d['xs']))))\n\n # adjust layout, save, and close\n plt.gca().invert_yaxis()\n fig.set_tight_layout(True)\n plt.savefig(outfile)\n plt.close()\n\n print('\\n\\nComplete success space cowboy! 
Hold on to your boots.\\n\\n')", "def _plot_inset_histogram_for_attributes_diagram(\n figure_object, num_examples_by_bin,\n bar_face_colour=DEFAULT_HISTOGRAM_FACE_COLOUR,\n bar_edge_colour=DEFAULT_HISTOGRAM_EDGE_COLOUR,\n bar_edge_width=DEFAULT_HISTOGRAM_EDGE_WIDTH):\n\n error_checking.assert_is_integer_numpy_array(num_examples_by_bin)\n error_checking.assert_is_numpy_array(num_examples_by_bin, num_dimensions=1)\n error_checking.assert_is_geq_numpy_array(num_examples_by_bin, 0)\n num_forecast_bins = len(num_examples_by_bin)\n error_checking.assert_is_geq(num_forecast_bins, 2)\n\n example_frequency_by_bin = (\n num_examples_by_bin.astype(float) / numpy.sum(num_examples_by_bin)\n )\n\n forecast_bin_edges = numpy.linspace(0., 1., num=num_forecast_bins + 1)\n forecast_bin_width = forecast_bin_edges[1] - forecast_bin_edges[0]\n forecast_bin_centers = forecast_bin_edges[:-1] + forecast_bin_width / 2\n\n inset_axes_object = figure_object.add_axes([\n INSET_HISTOGRAM_LEFT_EDGE, INSET_HISTOGRAM_BOTTOM_EDGE,\n INSET_HISTOGRAM_WIDTH, INSET_HISTOGRAM_HEIGHT\n ])\n\n inset_axes_object.bar(\n forecast_bin_centers, example_frequency_by_bin, forecast_bin_width,\n color=plotting_utils.colour_from_numpy_to_tuple(bar_face_colour),\n edgecolor=plotting_utils.colour_from_numpy_to_tuple(bar_edge_colour),\n linewidth=bar_edge_width\n )\n\n max_y_tick_value = rounder.floor_to_nearest(\n 1.05 * numpy.max(example_frequency_by_bin),\n INSET_HISTOGRAM_Y_TICK_SPACING\n )\n num_y_ticks = 1 + int(numpy.round(\n max_y_tick_value / INSET_HISTOGRAM_Y_TICK_SPACING\n ))\n y_tick_values = numpy.linspace(0., max_y_tick_value, num=num_y_ticks)\n\n pyplot.xticks(INSET_HISTOGRAM_X_TICKS, axes=inset_axes_object)\n pyplot.yticks(y_tick_values, axes=inset_axes_object)\n inset_axes_object.set_xlim(0., 1.)\n inset_axes_object.set_ylim(0., 1.05 * numpy.max(example_frequency_by_bin))", "def hist2d(ax, x, y, sigs=[1], color=\"k\", pcolor=\"grey\", *args, **kwargs):\n\n extent = kwargs.get(\"extent\", None)\n if extent is None:\n extent = [[x.min(), x.max()], [y.min(), y.max()]]\n\n bins = 45\n linewidths = 0.8\n\n # Instead of this, create a color map with the peak color.\n\n if pcolor != \"grey\":\n # print(pcolor)\n r,g,b = pcolor\n # print(r, g, b)\n\n # Make our custom intensity scale\n dict_cmap = {'red':[(0.0, r, r),\n (1.0, 1.0, 1.0)],\n\n 'green': [(0.0, g, g),\n (1.0, 1.0, 1.0)],\n\n 'blue': [(0.0, b, b),\n (1.0, 1.0, 1.0)]}\n\n cmap = LSC(\"new\", dict_cmap)\n else:\n cmap = cm.get_cmap(\"gray\")\n\n cmap._init()\n\n # The only thing he's changing here is the alpha interpolator, I think\n\n # He's saying that we will have everything be black, and change alpha from 1 to 0.0\n\n # cmap._lut[:-3, :-1] = 0.\n cmap._lut[:-3, -1] = np.linspace(1, 0, cmap.N)\n\n # N is the number of levels in the colormap\n # Dunno what _lut is\n # look up table\n # Is he setting everything below some value to 0?\n\n\n X = np.linspace(extent[0][0], extent[0][1], bins + 1)\n # Y = np.linspace(extent[1][0], extent[1][1], bins + 1)\n Y = np.logspace(np.log10(extent[1][0]), np.log10(extent[1][1]), bins + 1)\n\n try:\n H, X, Y = np.histogram2d(x.flatten(), y.flatten(), bins=(X, Y))\n except ValueError:\n raise ValueError(\"It looks like at least one of your sample columns \"\n \"have no dynamic range. 
You could try using the \"\n \"`extent` argument.\")\n\n # V = 1.0 - np.exp(-0.5 * np.array([1.0, 2.0, 3.0]) ** 2)\n V = 1.0 - np.exp(-0.5 * np.array(sigs) ** 2)\n #V = 1.0 - np.exp(-0.5 * np.arange(0.5, 2.1, 0.5) ** 2)\n Hflat = H.flatten()\n inds = np.argsort(Hflat)[::-1]\n Hflat = Hflat[inds]\n sm = np.cumsum(Hflat)\n sm /= sm[-1]\n\n for i, v0 in enumerate(V):\n try:\n V[i] = Hflat[sm <= v0][-1]\n except:\n V[i] = Hflat[0]\n\n X1, Y1 = 0.5 * (X[1:] + X[:-1]), 0.5 * (Y[1:] + Y[:-1])\n X, Y = X[:-1], Y[:-1]\n\n # Plot the contours\n ax.pcolor(X, Y, H.max() - H.T, cmap=cmap)\n ax.contour(X1, Y1, H.T, V, colors=color, linewidths=linewidths)\n\n # ax.set_xlim(extent[0])\n # ax.set_ylim(extent[1])", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):\r\n pylab.hist(values, bins = numBins)\r\n pylab.xlabel(xLabel)\r\n pylab.ylabel(yLabel)\r\n if title != None:\r\n pylab.title(title)\r\n pylab.show()", "def calculateHistogram(self):\n \n # Define color map\n colors = [ (255,0,0),(0,255,0),(0,0,255) ]\n # Define empty image to plot histogram in\n plot_to_fill = np.zeros((280,400,3))\n # Define bins of the histogram\n bins = np.arange(256).reshape(256,1)\n \n # Boucle sur les canaux\n for channel, color in enumerate(colors):\n # Calcul de l'histogramme\n hist_item = cv2.calcHist(self.frame,[channel],None,[256],[0,256])\n # Normalisation\n cv2.normalize(hist_item,hist_item,0,255,cv2.NORM_MINMAX)\n # Conversion\n hist = np.int32(np.around(hist_item))\n pts = np.int32(np.column_stack((bins, hist)))\n cv2.polylines(plot_to_fill, [pts], False, color)\n # Mettre dans le bon sens\n histplot = np.flipud(plot_to_fill)\n histplot = np.uint8(histplot)\n \n # Conversion en objet QPixelMap\n self.histplot_qpix = self.convertToQPixelmap(histplot)", "def plot_img_hist(img, num_plot, title):\n # Imagen\n plt.subplot(num_plot)\n plt.imshow(img, cmap='gray')\n plt.title(title)\n # Histograma\n plt.subplot(num_plot + 4)\n plt.hist(img_as_float(img).ravel(), bins=256)\n plt.xlim(0, 1)", "def pm_histogram(fig, ax, data, title, dwarf_pmra=None, dwarf_pmdec=None, cut=None, colorbar=True, append_title=\"\"):\n if cut is not None:\n ra, dec, pmra, pmdec, parallax, = cut_on_parallax(*data, cut)\n else:\n ra, dec, pmra, pmdec, parallax, _ = data\n\n # bin data from gaia in 2d histogram\n bound = 5\n bins = np.linspace(-bound, bound, num=20*bound)\n counts, xedges, yedges, im = ax.hist2d(pmra, pmdec, bins=(bins, bins), vmin=0, cmap='gnuplot')\n print(title, str(counts.max()))\n title = fix_names(title)\n # plot pm motion of dwarf from simbad\n if dwarf_pmra is not None:\n dwarf_pmra, dwarf_pmdec = fix_pms(title, dwarf_pmra, dwarf_pmdec)\n ax.plot(dwarf_pmra, dwarf_pmdec, marker='X', markersize=10, color='xkcd:white', alpha=1)\n\n ax.set_title(title + append_title)\n ax.set_xlabel(r\"Right ascension proper motion [mas/yr])\")\n ax.set_ylabel(r\"Declination proper motion [mas/yr]\")\n\n cbar = colorbar_for_subplot(fig, ax, cm.gnuplot, image=im)\n cbar.ax.set_ylabel(\"Bin counts\", rotation=270, labelpad=10)\n\n return counts, xedges, yedges, im", "def __init__(self, **kwargs): \n self.kwargs = kwargs\n\n # pretty figure up\n prettyplot() \n pretty_colors = prettycolors() \n \n self.fig = plt.figure(1) \n self.sub = self.fig.add_subplot(111) \n\n self.hist_max = 0.0", "def histogram_print(lista):\n divisor = ((lista[0])[1])//50\n def slashout(value):\n \"\"\"a function that converts a number into the appropriate\n number of slashes\"\"\"\n intvalue = value//divisor\n slashes = \"#\" * intvalue\n return 
slashes\n\n for item in lista:\n print(\"{} \\t\\t {}\".format(item[0], slashout(item[1])))", "def cat_hist(val, rug, side, shade, ax, **shade_kwargs):\n bins = get_bins(val)\n _, binned_d, _ = histogram(val, bins=bins)\n\n bin_edges = np.linspace(np.min(val), np.max(val), len(bins))\n heights = np.diff(bin_edges)\n centers = bin_edges[:-1] + heights.mean() / 2\n\n if rug and side == \"both\":\n side = \"right\"\n\n if side == \"right\":\n left = None\n elif side == \"left\":\n left = -binned_d\n elif side == \"both\":\n left = -0.5 * binned_d\n\n ax.barh(centers, binned_d, height=heights, left=left, alpha=shade, **shade_kwargs)\n return binned_d", "def visualize_raw_dat_ratings(self, dat):\n\n # Ratings Plots\n print('Plots for Ratings Data')\n\n # Shown as binned ratings\n bins = [0,1,2,3,4,5]\n pd.cut(dat['Average User Rating'], bins).value_counts().plot(kind = 'bar')\n plt.xlabel('Scores')\n plt.ylabel('Count')\n plt.title('Scores Histogram')\n plt.show()\n\n #Show all ratings\n plt.hist(dat['Average User Rating'].dropna())\n plt.xlabel('Scores')\n plt.ylabel('Count')\n plt.title('Scores Histogram')\n plt.show()", "def plot_comparison_GHR(data, data1):\n # Loads the different datasets\n runs = data[data.columns[0]]\n distance = data[data.columns[1]]\n\n runs1 = data1[data1.columns[0]]\n distance1 = data1[data1.columns[1]]\n\n # Forms the histogram\n plt.plot(runs, distance, label=\"Simulated Annealing\")\n plt.plot(runs1, distance1, color = 'orange', label=\"Hillclimber\")", "def plot_histogram(self,**kwargs):\n axes = []\n for i in range(self.score_length):\n fig = plt.figure()\n scores = np.array([s[i] for s in self.scores_list])\n probs,bins,patches = plt.hist(scores,label=\"Sample {}\".format(self.labels[i]), **kwargs)\n plt.vlines(self.xhat,fig.get_axes().get_ylim(),label='Mean',color='r')\n plt.legend()\n axes.append(fig.get_axes())\n return axes", "def plot_frequency(self, x='age') -> None:\n data = PreprocessData.impute(self.data)\n strokers = data[data['stroke'] == 1]\n print(strokers.head())\n fig = plt.figure()\n sns.distplot(strokers[x], norm_hist=False, kde=False,\n hist_kws=dict(edgecolor='black', linewidth=2),\n color='green')\n sns.despine(fig=fig, top=True, right=True)\n plt.ylabel('Stroke Frequency')\n plt.title('Distribution of stroke incidence by {}'.format(x))\n\n if self.savefig:\n fname = os.path.join(stroke_assessment.HIST_PLOTS_DIR, f'{x}.png')\n plt.savefig(fname, dpi=300, bbox_inches='tight')\n else:\n plt.show()", "def superpixel_plot(im,seg,title = \"Superpixels\"):\n clust = np.unique(seg)\n mapper_dict = {i: im[seg == i].mean(axis = 0)/255. 
for i in clust}\n\n seg_img = np.zeros((seg.shape[0],seg.shape[1],3))\n for i in clust:\n seg_img[seg == i] = mapper_dict[i]\n \n plot_image(seg_img,title)\n \n return", "def histogram(ratings, min_rating=None, max_rating=None):\r\n if min_rating is None:\r\n min_rating = min(ratings)\r\n if max_rating is None:\r\n max_rating = max(ratings)\r\n num_ratings = int(max_rating - min_rating + 1)\r\n hist_ratings = [0 for x in range(num_ratings)]\r\n for r in ratings:\r\n hist_ratings[r - min_rating] += 1\r\n return hist_ratings", "def plot(self):\n\t\tself.plotOfHeatingCurrent().plot()", "def super_hist(self, data_list, alpha=0.5, log_scale=True, bins=45):\r\n\r\n fig, _ = mp.subplots(1, 1, figsize=(15, 10), constrained_layout=True)\r\n\r\n names = []\r\n for data in data_list:\r\n plot_data = data[data.Day_First_N_Infections != \"None\"]\r\n column_data = plot_data[\"Day_First_N_Infections\"].values\r\n sns.distplot(column_data,\r\n kde=False,\r\n bins=bins,\r\n hist_kws={\r\n \"linewidth\": 1,\r\n \"alpha\": alpha,\r\n \"edgecolor\": 'black',\r\n \"log\": log_scale\r\n })\r\n\r\n mp.legend(loc='upper left', fontsize=20)\r\n mp.xlabel(\"Days from outbreak to case number \" + str(data_list[0].N) +\r\n \" in county\",\r\n fontsize=18)\r\n mp.ylabel(\"Frequency\", fontsize=18)\r\n\r\n fig.savefig(\"hist_N\" + str(data_list[0].N) + \"_\" + \"_\".join(names) +\r\n \".png\")", "def img_histogram(img):\n\n plt.figure()\n\n if len(img.shape) > 2:\n\n plt.subplot(3,1,1)\n plt.hist(img[:,:,0].ravel(),bins=range(257),color='b')\n plt.title('Image Histogram')\n plt.legend('Blue')\n plt.xlabel('Pixel Values')\n plt.ylabel('Frequency')\n\n plt.subplot(3,1,2)\n plt.hist(img[:,:,1].ravel(),bins=range(257),color='g')\n plt.legend('Green')\n plt.xlabel('Pixel Values')\n plt.ylabel('Frequency')\n\n plt.subplot(3,1,3)\n plt.hist(img[:,:,2].ravel(),bins=range(257),color='r')\n plt.legend('Red')\n plt.xlabel('Pixel Values')\n plt.ylabel('Frequency')\n\n plt.ion()\n plt.show()\n\n else:\n\n plt.hist(img[:,:].ravel(),bins=range(257))\n plt.title('Image Histogram - Grayscale')\n plt.xlabel('Pixel Values')\n plt.ylabel('Frequency')\n\n plt.ion()\n plt.show()", "def plot_hist(delays):\n fig = plt.figure()\n ax = fig.add_subplot(111)\n alldelays = []\n for _s in delays.keys():\n alldelays += delays[_s]\n if len(alldelays) > 0:\n n, bins, patches = ax.hist(alldelays, bins=np.arange(0, 30, 0.5),\n color='green', histtype='bar', rwidth=1.0)\n ax.set_xlabel('Envelope delays [s]')\n med = np.median(alldelays)\n percentile25 = scoreatpercentile(alldelays, 25)\n percentile75 = scoreatpercentile(alldelays, 75)\n ax.text(0.6, 0.80, 'Median: %.1f s' % (med), horizontalalignment='left',\n transform=ax.transAxes, color='black')\n ax.text(0.6, 0.75, '25th percentile: %.1f s' % (percentile25), horizontalalignment='left',\n transform=ax.transAxes, color='black')\n ax.text(0.6, 0.70, '75th percentile: %.1f s' % (percentile75), horizontalalignment='left',\n transform=ax.transAxes, color='black')\n plt.show()", "def histogram(ratings, min_rating=None, max_rating=None):\n if min_rating is None:\n min_rating = min(ratings)\n if max_rating is None:\n max_rating = max(ratings)\n num_ratings = int(max_rating - min_rating + 1)\n hist_ratings = [0 for x in range(num_ratings)]\n for r in ratings:\n hist_ratings[r - min_rating] += 1\n return hist_ratings", "def histogram(ratings, min_rating=None, max_rating=None):\n if min_rating is None:\n min_rating = min(ratings)\n if max_rating is None:\n max_rating = max(ratings)\n num_ratings = 
int(max_rating - min_rating + 1)\n hist_ratings = [0 for x in range(num_ratings)]\n for r in ratings:\n hist_ratings[r - min_rating] += 1\n return hist_ratings", "def histogram(ratings, min_rating=None, max_rating=None):\n if min_rating is None:\n min_rating = min(ratings)\n if max_rating is None:\n max_rating = max(ratings)\n num_ratings = int(max_rating - min_rating + 1)\n hist_ratings = [0 for x in range(num_ratings)]\n for r in ratings:\n hist_ratings[r - min_rating] += 1\n return hist_ratings", "def summaryPlot(df):\n import datetime as dt\n import matplotlib.pyplot as plt\n import matplotlib as mpl\n import numpy as np\n import pandas as pd\n from numpy import array\n import matplotlib.patches as mpatches\n import seaborn as sns\n from matplotlib.pyplot import figure\n\n class color:\n # Allows for bolded and underlined text\n BOLD = \"\\033[1m\"\n UNDERLINE = \"\\033[4m\"\n END = \"\\033[0m\"\n\n # Reads df and fills empty values\n df.index = pd.to_datetime(df.date)\n df = df.drop(\"date\", axis=1)\n df_all = df.resample(\"1D\")\n df_all = df_all.fillna(method=\"ffill\")\n\n dataPoints = [\"pm25\", \"co\", \"so2\", \"pm10\", \"o3\", \"no2\", \"nox\", \"wd\", \"ws\"]\n\n i = 0\n sub = 1\n while i < 9:\n # Plots line and histogram plots for ecery polutant\n # in the correct location based on subplot\n plt.figure(1, figsize=(50, 50))\n plt.subplot(9, 2, sub)\n sub = sub + 1\n a = df_all[dataPoints[i]].plot.line(color=\"gold\")\n a.axes.get_xaxis().set_visible(False)\n a.yaxis.set_label_position(\"left\")\n plt.ylabel(dataPoints[i], fontsize=75, bbox=dict(facecolor=\"whitesmoke\"))\n # print(df['pm25'].max())\n\n plt.subplot(9, 2, sub)\n sub = sub + 1\n plt.hist(df_all[dataPoints[i]], bins=50, color=\"green\")\n i = i + 1\n i = 0\n while i < 9:\n # Calculates statistics\n nDf = df[dataPoints[i]]\n missing = nDf.isna().sum() + sum(n < 0 for n in nDf)\n minVal = nDf.min()\n maxVal = nDf.max()\n meanVal = nDf.mean()\n medianVal = nDf.median()\n percentile = nDf.quantile(0.95)\n print(\"---------------\")\n print(color.BOLD + color.UNDERLINE + dataPoints[i] + color.END)\n print(\"min = \" + str(0))\n print(\"max = \" + str(maxVal))\n print(\"missing = \" + str(missing))\n print(\"mean = \" + str(meanVal))\n print(\"median = \" + str(medianVal))\n print(\"95th percentile = \" + str(percentile))\n i = i + 1", "def draw_histogram(self):\n\n hframe = self.dpar.latest_frame[::4,::4]\n\n gcy_shrink = 0.8\n gcy_offset = (1. 
- gcy_shrink)/2.\n \n gcurve_x = [0, self.dpar.iwindow[0][0]/100., self.dpar.iwindow[0][1]/100., 1.]\n gcurve_y = [gcy_offset, gcy_offset, 1.-gcy_offset, 1.-gcy_offset]\n\n hist, bin_edges = np.histogram(hframe.flatten(), bins=HIST_NBINS, range=(0., self.camera.pixel_maxval))\n hcurve_y = np.array(hist).astype(float) / float(max(hist))\n #hcurve_x = np.array(bin_edges).astype(float)[0:-1] / 256.\n hcurve_x = np.arange(HIST_NBINS) / HIST_NBINS\n\n\n self.hist_canvas.gain_trace.set_data(gcurve_x, gcurve_y)\n for hb,hy in zip(self.hist_canvas.hist_bars, hcurve_y):\n hb.set_height(hy)\n\n self.hist_canvas.draw()", "def DrawBands(self, count):\n value = self.little[0]\n mobile_average = float(sum([float(self.little[i])\n for i in range(len(self.little))])) / float(self.period)\n standard_derivation = sqrt(sum([pow(self.little[i] - mobile_average, 2)\n for i in range(len(self.little))]) / self.period)\n upper_band = mobile_average + (standard_derivation * self.sd_coef)\n lower_band = mobile_average - (standard_derivation * self.sd_coef)\n self.upper.insert(0, upper_band)\n self.lower.insert(0, lower_band)\n if len(self.upper) >= self.period:\n self.upper.pop()\n if len(self.lower) >= self.period:\n self.lower.pop()\n if count >= self.period:\n for i in range(len(self.little) - 1):\n self.canvas.create_line((i * self.incr / 1.725) + self.incr * 4,\n self.height - self.incr * 4 + (self.little[i] - 1) * 5000 - 200,\n (i * self.incr / 1.725) + self.incr * 4 + self.incr / 1.725,\n self.height - self.incr * 4 + (self.little[i + 1] - 1) * 5000 - 200,\n fill = \"#FFFF00\", width = 2)\n for i in range(len(self.upper) - 1):\n self.canvas.create_line((i * self.incr / 1.635) + self.incr * 4,\n self.height - self.incr * 4 + (self.upper[i] - 1) * 5000 - 200,\n (i * self.incr / 1.635) + self.incr * 4 + self.incr / 1.635,\n self.height - self.incr * 4 + (self.upper[i + 1] - 1) * 5000 - 200,\n fill = \"#FF6600\", width = 3)\n self.canvas.create_line((i * self.incr / 1.635) + self.incr * 4,\n self.height - self.incr * 4 + (self.lower[i] - 1) * 5000 - 200,\n (i * self.incr / 1.635) + self.incr * 4 + self.incr / 1.635,\n self.height - self.incr * 4 + (self.lower[i + 1] - 1) * 5000 - 200,\n fill = \"#FF0000\", width = 3)", "def PlotLogPHistogram(lag):\n (n,bins) = pylab.mlab.hist(P(lag), bins=100, normed=True)\n binCenters = bins + (bins[1]-bins[0])/2.\n pylab.plot(binCenters, scipy.log(n+1.e-4))", "def print_stars():\n for i in range(2):\n for j in range(35):\n print(\"*\", end = '')\n print('')", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):\r\n # TODO\r\n pylab.hist(values, bins = numBins)\r\n pylab.xlabel(xLabel)\r\n pylab.ylabel(yLabel)\r\n if title != None:\r\n pylab.title(title)\r\n pylab.show()", "def plot_histogram(self) -> None:\n\n if self.data:\n plt.hist(self.data)\n plt.title(\"Histogram of data\")\n plt.xlabel(\"data\")\n plt.ylabel(\"count\")\n else:\n raise ValueError(\"Histogram cannot be generated as no\\\n data has been provided\")", "def plot_spectrums_overlapping(\n self, energies_dict: dict, title:str,\n figsize: tuple = (16,12)\n ) -> None:\n \n plt.figure(figsize=figsize)\n plt.title(title)\n for key in energies_dict.keys():\n spectre, bins = np.histogram(energies_dict[key],range = (0,202),bins = 3000)\n \n plt.plot(bins[:-1],spectre)\n plt.xlabel(\"Energy (keV)\")\n plt.ylabel(\"Counts\")", "def plot_regime_diagram_background_L19(\n ax=None,\n ):\n if ax is None:\n ax = plt.gca()\n # range of power\n xpr = [-1, 1]\n ypr = [-3, 3]\n # range\n xlims = [10**i for i in xpr]\n 
ylims = [10**i for i in ypr]\n # background following Fig. 3 of Belcher et al., 2012\n nx = 500\n ny = 500\n xx = np.logspace(xpr[0], xpr[1], nx)\n yy = np.logspace(ypr[0], ypr[1], ny)\n zz1 = np.zeros([nx, ny])\n zz2 = np.zeros([nx, ny])\n zz3 = np.zeros([nx, ny])\n for i in np.arange(nx):\n for j in np.arange(ny):\n zz1[i,j] = 2*(1-np.exp(-0.5*xx[i]))\n zz2[i,j] = 0.22*xx[i]**(-2)\n zz3[i,j] = 0.3*xx[i]**(-2)*yy[j]\n zz = zz1 + zz2 + zz3\n\n rz_ST = zz1/zz\n rz_LT = zz2/zz\n rz_CT = zz3/zz\n fr = np.ones(zz.shape) * 7\n cfrac = 0.25\n fr[(rz_LT<cfrac) & (rz_CT<cfrac)] = 1\n fr[(rz_ST<cfrac) & (rz_CT<cfrac)] = 2\n fr[(rz_ST<cfrac) & (rz_LT<cfrac)] = 3\n fr[(rz_ST>=cfrac) & (rz_LT>=cfrac) & (rz_CT<cfrac)] = 4\n fr[(rz_ST>=cfrac) & (rz_CT>=cfrac) & (rz_LT<cfrac)] = 5\n fr[(rz_LT>=cfrac) & (rz_CT>=cfrac) & (rz_ST<cfrac)] = 6\n color_list = ['firebrick','forestgreen','royalblue','gold','orchid','turquoise','w']\n cb_ticks = [0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5]\n cmap, norm = from_levels_and_colors(cb_ticks, color_list)\n ax.contourf(xx, yy, np.transpose(fr), cmap=cmap, norm=norm)\n ax.contour(xx, yy, np.transpose(fr), colors='darkgray')\n ax.set_xlim(xlims)\n ax.set_ylim(ylims)\n ax.set_xscale('log')\n ax.set_yscale('log')\n ax.set_xlabel('La$_t$')\n ax.set_ylabel('$h/L_L$')\n ax.set_aspect(aspect=1/3)\n ax.text(0.11, 4e-3, 'Langmuir', bbox=dict(boxstyle=\"square\",ec='k',fc='w'))\n ax.text(3, 4e-3, 'Shear', bbox=dict(boxstyle=\"square\",ec='k',fc='w'))\n ax.text(0.13, 1e2, 'Convection', bbox=dict(boxstyle=\"square\",ec='k',fc='w'))", "def get_extended_hist(img, sid_bin_edges):\n extended_bin_edges = np.append(sid_bin_edges.numpy(), float('inf'))\n img_hist, _ = np.histogram(img, bins=extended_bin_edges)\n return img_hist", "def np_hsv_hue_histogram(h):\n figure = plt.figure()\n canvas = figure.canvas\n _, _, patches = plt.hist(h, bins=360)\n plt.title(\"HSV Hue Histogram, mean=%3.1f, std=%3.1f\" % (np.mean(h), np.std(h)))\n\n bin_num = 0\n for patch in patches:\n rgb_color = colorsys.hsv_to_rgb(bin_num / 360.0, 1, 1)\n patch.set_facecolor(rgb_color)\n bin_num += 1\n\n canvas.draw()\n w, h = canvas.get_width_height()\n np_hist = np.fromstring(canvas.get_renderer().tostring_rgb(), dtype=np.uint8).reshape(h, w, 3)\n plt.close(figure)\n util.np_info(np_hist)\n return np_hist", "def histograma_colorido(imagem, intervalo=(0, 256)):\n \n color = ('b','g','r')\n \n fig, ax = plt.subplots(3,1, figsize=(12,8))\n \n for i,col in enumerate(color):\n histr = cv2.calcHist([imagem],[i],None,[intervalo[1]],[intervalo[0],intervalo[1]])\n ax[i].plot(histr, color = col)\n ax[i].set_xlim([intervalo[0],intervalo[1]])\n# plt.plot(histr,color = col)\n# plt.xlim([intervalo[0],intervalo[1]])\n plt.show()", "def histograma(p):\n img = read_img(p)\n show_histograma(img.reshape((-1)))", "def DrawHistograms(Histograms, ran, title, xlabel, ylabel, Save=False,Normalize=True,DrawTitle=False, t_sleep=0):\n canvas = rt.TCanvas('canvas','canvas',600,600)\n\tif DrawTitle: \n\t\tcanvas.SetTitle(title)\n\telse:\n\t\trt.gStyle.SetOptTitle(0)\n\t\n\thistlist = []\n if len(Histograms) > 1:\n rt.gStyle.SetOptStat(0)#something is wrong with this\n legend = rt.TLegend(0.9,0.9,0.65,0.75)\n for nr, Histogram in enumerate(Histograms):\n\t\thistlist.append(Histogram[0])\n\t\tif len(Histogram)>2:\n\t\t\thistlist[nr].SetLineColor(Histogram[2])\n\t\telse:\n\t\t\tif nr < 3:\n \t\t \thistlist[nr].SetLineColor(nr+2)\n\t\t\telse:\n\t\t\t\thistlist[nr].SetLineColor(nr+3)\n if nr == 0:\n\t\t\tif DrawTitle: histlist[nr].SetTitle(title)\n 
histlist[nr].GetXaxis().SetTitle(xlabel)\n histlist[nr].GetYaxis().SetTitle(ylabel)\n histlist[nr].GetYaxis().SetTitleOffset(1.5)\n if Normalize:\n \thistlist[nr].DrawNormalized()\n else:\n histlist[nr].Draw()\n else:\n if Normalize:\n histlist[nr].DrawNormalized(\"SAME\")\n else:\n histlist[nr].Draw(\"SAME\")\n if len(Histograms)>1:\n legend.AddEntry(histlist[nr],Histogram[1])\n if len(Histograms)>1: \n\t\t#rt.gStyle.SetOptStat(0)#something is wrong with this\n\t\tlegend.Draw()\n if Save: canvas.SaveAs(\"Thesis_Plots/\"+title+\".png\")\n sleep(t_sleep)", "def _hist(xs, bins=100, range=None, stats=('entries', 'mean', 'rms'),\n xylabels = (), stats_xypos=(0.1, 0.7),\n *args, **kargs):\n if (range==None):\n range = (np.min(xs), np.max(xs))\n cc = hst.hist(xs, bins=bins, range=range, *args, **kargs);\n if (not stats):\n return cc\n ys, xedges = np.histogram(xs, bins, range=range)\n ns = len(xs)\n sel = np.logical_and(xs >= range[0], xs <= range[1])\n nos, mean, rms = len(xs[sel]), np.mean(xs[sel]), np.std(xs[sel])\n epsilon = (1.*nos)/(1.*ns)\n ss = ''\n if ('total entries') in stats:\n ss += 'total entries {0:d} \\n'.format(ns)\n if ('entries') in stats:\n ss += 'entries {0:d} \\n'.format(nos)\n if ('mean') in stats:\n ss += 'mean {0:.3f} \\n'.format(mean)\n if ('rms') in stats:\n ss += 'rms {0:.3f} \\n'.format(rms)\n xp, yp = _xypos(xedges, ys, xf=stats_xypos[0], yf=stats_xypos[1])\n ##plt.set_label(ss)\n # plt.gca().set_label(ss)\n # plt.legend()\n plt.text(xp, yp, ss)\n return cc", "def example():\n # --- input data filename ---\n infile = os.path.join(os.path.dirname(__file__),\n '../../ExampleDataFiles/MarsTopo719.shape')\n coeffs, lmax = pysh.shio.shread(infile)\n\n # --- plot grid ---\n grid = pysh.expand.MakeGridDH(coeffs, csphase=-1)\n fig_map = plt.figure()\n plt.imshow(grid)\n\n # ---- compute spectrum ----\n ls = np.arange(lmax + 1)\n pspectrum = pysh.spectralanalysis.spectrum(coeffs, unit='per_l')\n pdensity = pysh.spectralanalysis.spectrum(coeffs, unit='per_lm')\n\n # ---- plot spectrum ----\n fig_spectrum, ax = plt.subplots(1, 1)\n ax.set_xscale('log')\n ax.set_yscale('log')\n ax.set_xlabel('degree l')\n ax.grid(True, which='both')\n\n ax.plot(ls[1:], pspectrum[1:], label='power per degree l')\n ax.plot(ls[1:], pdensity[1:], label='power per degree l and order m')\n\n ax.legend()\n\n fig_map.savefig('SHRtopography_mars.png')\n fig_spectrum.savefig('SHRspectrum_mars.png')\n print('mars topography and spectrum saved')\n\n # plt.show()", "def draw_spectrum(meas, x, rel=False, scale=True, **kwargs):\n\n if rel:\n s0 = meas.spec(meas.spec.central)\n s = meas.spec(x)\n line = draw_point_hist(100*(s/s0-1), **kwargs)\n plt.ylabel('Relative offset percentage')\n if scale:\n rescale_plot()\n\n else:\n line = draw_point_hist(meas.spec(x), **kwargs)\n plt.ylabel('Spctral value / bin')\n if scale:\n rescale_plot()\n plt.ylim(ymin=0)\n\n plt.xlabel('Bin')\n\n return line", "def plot_pixel_intensity(image, path='./pixel_intensity_before_normalization.png'):\n\n plt.figure(figsize=(10, 5))\n plt.subplot(1, 2, 1)\n plt.imshow(image)\n plt.axis('off')\n histo = plt.subplot(1, 2, 2)\n histo.set_ylabel('Count')\n histo.set_xlabel('Pixel Intensity')\n n_bins = 30\n plt.hist(image[:, :, 0].flatten(), bins=n_bins, lw=0, color='r', alpha=0.5)\n plt.hist(image[:, :, 1].flatten(), bins=n_bins, lw=0, color='g', alpha=0.5)\n plt.hist(image[:, :, 2].flatten(), bins=n_bins, lw=0, color='b', alpha=0.5)\n plt.savefig(path)\n plt.show()", "def plot_spectrum(sims, noise=False, maxtime=240):\n logging.log(15, 
\"starte plotting\")\n #ein Spektrum mit max 30 Chroms, gemeinsame Zeitenliste erstellen\n if len(sims) < 30:\n spectrum = [0,maxtime]\n #evtl Rauschen hinzufuegen\n if noise:\n for i in range(int(sims[0].number*len(sims)/10)):\n spectrum.append(random.uniform(0, maxtime))\n for sim in sims:\n for t in sim.times:\n if sim.pd[0] < 250:\n spectrum.append(t)\n hist, bins = np.histogram(spectrum, bins= maxtime, normed = True)\n offset = bins[1:]-bins[:-1]\n plt.plot(bins[:-1]+offset, hist, \"k\")\n #plt.ylim((0, 0.3))\n plt.xlim((0, maxtime))\n plt.xlabel(\"Retentionszeit/s\")\n plt.ylabel(\"Intensität\")\n title = \"Spektrum\"\n if noise:\n title += \" mit Rauschen\"\n plt.suptitle(title)\n plt.show()", "def plot_hypnogram(eeg, stages, srate):\r\n \r\n fig,ax1 = plt.subplots() #Needed for the multiple y-axes\r\n \r\n #Use the specgram function to draw the spectrogram as usual\r\n y_lim = 40;\r\n plt.specgram(eeg/np.sum(eeg),NFFT=512,Fs=srate)\r\n\r\n #Label your x and y axes and set the y limits for the spectrogram\r\n ax1.set_ylim((0,y_lim))\r\n ax1.set_xlim((0,len(eeg)/srate))\r\n plt.title ('Hypnogram')\r\n ax1.set_xlabel('Time in Seconds')\r\n ax1.set_ylabel('Frequency in Hz')\r\n \r\n ax2 = ax1.twinx() #Necessary for multiple y-axes\r\n \r\n #Use ax2.plot to draw the hypnogram. Be sure your x values are in seconds\r\n #HINT: Use drawstyle='steps' to allow step functions in your plot\r\n ax2.plot(np.arange(0,len(stages))*30,stages,drawstyle='steps')\r\n\r\n #Label your right y-axis and change the text color to match your plot\r\n ax2.set_ylabel('NREM Stages',color='b')\r\n\r\n \r\n #Set the limits for the y-axis \r\n ax2.set_ylim(0.5,3.5)\r\n ax2.set_xlim((0,len(eeg)/srate))\r\n #Only display the possible values for the stages\r\n ax2.set_yticks(np.arange(1,4))\r\n \r\n #Change the left axis tick color to match your plot\r\n for t1 in ax2.get_yticklabels():\r\n t1.set_color('b')\r\n \r\n #Title your plot \r", "def scan2plot(datafolder, start, end, first, last,\n theta_range, theta_bins, chi_range, chi_bins,\n gamma, delta,\n ci, cj, w, h, SDD, pxl_size, ph, d5i=None,\n fraction=1):\n chi_bins = int(chi_bins) # make sure the input is an integer\n theta_bins = int(theta_bins) # make sure the input is an integer\n chi_ax = np.linspace(chi_range[0], \n chi_range[1], chi_bins) # init chi axis\n tth_ax = np.linspace(theta_range[0], \n theta_range[1], theta_bins) # init 2th axis\n int_bin = np.zeros((chi_bins, theta_bins)) # init intensity plot\n tth_weight = np.zeros(theta_bins) # init weight normalization\n # (i.e., the number of times a certain bin has been filled)\n for i in range(first, last + 1):\n print(\"delta = \" + str(delta[i - start]) + \", gamma = \" + \n str(gamma[i - start]) + \n \", status: \" + str(i - start) + \"/\" + \n str(last - first)) # print info on current status\n fname = finder(\"*\" + str(i) + \n \".tif\", datafolder).find() # find image with index i\n with Image.open(fname) as img:\n tth_map, chi_map, PL = angle_maps(gamma[i - start], \n delta[i - start], \n ci, cj, w, h, \n SDD, pxl_size, ph) # angle calculations\n det_img = np.array(img) # convert image to numpy array\n if d5i.any() != None:\n det_img = det_img/(d5i[i - start]) # normalize data to monitor\n det_img /= PL # correct by Lorentz-pol.\n # data binning:\n for j in range(int(h/2*(1-fraction)), int(h/2*(1+fraction))):\n for k in range(int(w/2*(1-fraction)), int(w/2*(1+fraction))):\n # find bin on the 2th axis\n idx = closest(tth_ax, np.rad2deg(tth_map[j][k]))\n # find bin on the chi axis\n jdx = 
closest(chi_ax, np.rad2deg(chi_map[j][k]))\n # fill bin\n int_bin[jdx][idx] += det_img[j][k]\n # every time a bin is filled add 1 to the weight function\n tth_weight[idx] += 1\n print(\"Done!\")\n return tth_ax, chi_ax, int_bin, tth_weight", "def plot_hist(xdata, ylabels):\n fig = plt.figure(figsize=(6,3))\n #plt.hist(xdata, 2, normed=True)\n #plt.show()\n\n ##weights = np.ones_like(xdata)/float(len(xdata))\n ##plt.hist(xdata, bins=100, weights=weights)\n ##plt.show()\n counter = 0\n for dataset in xdata:\n density, bins = np.histogram(dataset, bins=100, density=True)\n unity_density = density/density.sum()\n bincenters = 0.5*(bins[1:]+bins[:-1])\n plt.plot(bincenters, unity_density, label=ylabels[counter])\n counter += 1\n plt.legend(loc='lower right', frameon=False, numpoints=1)\n plt.show()", "def showHistogram(x,y,txt,**options):\n plot2d_system = pyformex.cfg['gui/plot2d']\n\n if plot2d_system == 'gnuplot':\n if not utils.hasModule('gnuplot'):\n error(\"You do not have the Python Gnuplot module installed.\\nI can not draw the requested plot.\")\n return\n \n import Gnuplot\n maxlen = min(len(x),len(y))\n data = Gnuplot.Data(x[:maxlen],y[:maxlen],title=txt, with_='histeps') \n g = Gnuplot.Gnuplot(persist=1)\n g.title('pyFormex histogram: %s' % txt)\n g.plot(data)\n \n elif plot2d_system == 'qwt':\n pass\n #from PyQt4.Qwt5.qplt import *", "def print_TRT_cell_histograms(samples_df,cfg_set_tds):\r\n \r\n fig_hist, axes = plt.subplots(3, 2)\r\n fig_hist.set_size_inches(12, 15)\r\n\r\n ## Analyse distribution of ranks\r\n \"\"\"\r\n nw = np.sum(np.logical_and(samples_df[\"RANKr\"]>=12, samples_df[\"RANKr\"]<15))\r\n ng = np.sum(np.logical_and(samples_df[\"RANKr\"]>=15, samples_df[\"RANKr\"]<25))\r\n ny = np.sum(np.logical_and(samples_df[\"RANKr\"]>=25, samples_df[\"RANKr\"]<35))\r\n nr = np.sum(np.logical_and(samples_df[\"RANKr\"]>=35, samples_df[\"RANKr\"]<=40))\r\n print(\" The number of Cells with TRT Rank w is: %s\" % nw)\r\n print(\" The number of Cells with TRT Rank g is: %s\" % ng)\r\n print(\" The number of Cells with TRT Rank y is: %s\" % ny)\r\n print(\" The number of Cells with TRT Rank r is: %s\" % nr)\r\n pw = patches.Rectangle((1.2, 65000), 0.3, 10000, facecolor='w')\r\n pg = patches.Rectangle((1.5, 65000), 1, 10000, facecolor='g')\r\n py = patches.Rectangle((2.5, 65000), 1, 10000, facecolor='y')\r\n pr = patches.Rectangle((3.5, 65000), 0.5, 10000, facecolor='r')\r\n axes[0,0].add_patch(pw); axes[0,0].add_patch(pg); axes[0,0].add_patch(py); axes[0,0].add_patch(pr)\r\n axes[0,0].annotate(str(nw),(1.35,70000),(1.25,90500),ha='center',va='center',color='k',arrowprops={'arrowstyle':'->'}) #,arrowprops={arrowstyle='simple'}\r\n axes[0,0].annotate(str(ng),(2,70000),ha='center',va='center',color='w') \r\n axes[0,0].annotate(str(ny),(3,70000),ha='center',va='center',color='w')\r\n axes[0,0].annotate(str(nr),(3.75,70000),ha='center',va='center',color='w') \r\n \"\"\"\r\n axes[0,0] = plot_band_TRT_col(axes[0,0],samples_df[\"RANKr\"],65000,10000,arrow_start=90500)\r\n samples_df[\"RANKr\"] = samples_df[\"RANKr\"]/10.\r\n samples_df[\"RANKr\"].hist(ax=axes[0,0],bins=np.arange(0,4.25,0.25),facecolor=(.7,.7,.7),alpha=0.75,grid=True)\r\n axes[0,0].set_xlabel(\"TRT rank\")\r\n axes[0,0].set_title(\"TRT Rank Distribution\")\r\n \r\n samples_df[\"area\"].hist(ax=axes[0,1],bins=np.arange(0,650,50),facecolor=(.7,.7,.7),alpha=0.75,grid=True)\r\n axes[0,1].set_xlabel(\"Cell Area [km$^2$]\")\r\n axes[0,1].set_title(\"Cell Size Distribution\")\r\n \r\n samples_df[\"date\"] = 
samples_df[\"date\"].astype(np.datetime64)\r\n \r\n samples_df[\"date\"].groupby(samples_df[\"date\"].dt.month).count().plot(kind=\"bar\",ax=axes[1,0],facecolor=(.7,.7,.7),\r\n alpha=0.75,grid=True)\r\n #axes[1,0].set_xlabel(\"Months\")\r\n axes[1,0].set_xlabel(\"\")\r\n axes[1,0].set_xticklabels([\"Apr\",\"May\",\"Jun\",\"Jul\",\"Aug\",\"Sep\"],rotation=45)\r\n axes[1,0].set_title(\"Monthly Number of Cells\")\r\n\r\n samples_df[\"date\"].groupby([samples_df[\"date\"].dt.month,\r\n samples_df[\"date\"].dt.day]).count().plot(kind=\"bar\",\r\n ax=axes[1,1],facecolor=(.7,.7,.7),alpha=0.75,edgecolor=(.7,.7,.7),grid=True)\r\n axes[1,1].get_xaxis().set_ticks([])\r\n axes[1,1].set_xlabel(\"Days over period\")\r\n axes[1,1].set_title(\"Daily Number of Cells\")\r\n \r\n samples_df[\"date\"].groupby(samples_df[\"date\"]).count().hist(ax=axes[2,0],bins=np.arange(0,150,10),\r\n facecolor=(.7,.7,.7),alpha=0.75,grid=True)\r\n axes[2,0].set_xlabel(\"Number of cells\")\r\n axes[2,0].set_title(\"Number of cells per time step\")\r\n \r\n #samples_df[\"date\"].loc[samples_df[\"RANKr\"]>=1].groupby(samples_df[\"date\"]).count().hist(ax=axes[2,1],bins=np.arange(0,65,5),\r\n # facecolor=(.7,.7,.7),alpha=0.75,grid=True)\r\n #axes[2,1].set_xlabel(\"Number of cells\")\r\n #axes[2,1].set_title(\"Number of cells (TRT Rank >= 1)\\n per time step\")\r\n axes[2,1].axis('off')\r\n \r\n fig_hist.savefig(os.path.join(cfg_set_tds[\"fig_output_path\"],u\"TRT_Histogram.pdf\"))", "def rose_plot(ax, angles, bins=16, density=None, offset=0, lab_unit=\"degrees\",\n start_zero=False, fill=False, color='white', max_count=None,\n max_size=None, smooth=False, mean=None, sigma=None, **param_dict):\n # Wrap angles to [-pi, pi)\n radians = np.array([math.radians(angle) for angle in angles])\n radians = (radians + np.pi) % (2*np.pi) - np.pi\n\n # Set bins symetrically around zero\n if start_zero:\n # To have a bin edge at zero use an even number of bins\n if bins % 2:\n bins += 1\n bins = np.linspace(-np.pi, np.pi, num=bins+1)\n\n # Bin data and record counts\n count, bin = np.histogram(radians, bins=bins)\n if smooth:\n smoothedCount = np.zeros(len(count))\n for i in np.arange(0,len(count),1):\n if i == len(count)-1:\n smoothedCount[i] = (count[i-1]+count[i]+count[0]) / 3.\n else:\n smoothedCount[i] = (count[i-1]+count[i]+count[i+1]) / 3.\n count = smoothedCount\n maxCount = np.max(count)\n\n # Compute width of each bin\n widths = np.diff(bin)\n\n # By default plot density (frequency potentially misleading)\n# maxArea = None\n if density is None or density is True:\n # Area to assign each bin\n if max_size and max_count:\n area = count / max_size\n else:\n area = count / radians.size\n # Calculate corresponding bin radius\n radius = (area / np.pi)**.5\n else:\n radius = count\n ax.bar(bin[:-1], radius, zorder=1, align='edge', width=widths,\n edgecolor='C0', fill=fill, linewidth=1, color=color)\n if max_count and max_size:\n if density is None or density is True:\n max_area = max_count / max_size\n max_radius = (max_area / np.pi)**.5\n else:\n max_radius = max_count\n ax.bar(0,max_radius,width=0.001)\n\n if mean is not None:\n maxR = np.max(radius)*1.05\n ax.bar([math.radians(mean),math.radians(mean+180)],[maxR,maxR],width=0.01,linewidth=5,color='red')\n if sigma is not None:\n maxR = np.max(radius)*1.05\n ax.bar([math.radians(mean+sigma),math.radians(mean+sigma+180)],[maxR,maxR],width=0.01,linewidth=5,color='black')\n ax.bar([math.radians(mean-sigma),math.radians(mean-sigma+180)],[maxR,maxR],width=0.01,linewidth=5,color='black')\n\n # 
Set the direction of the zero angle\n ax.set_theta_offset(offset)\n\n # Remove ylabels, they are mostly obstructive and not informative\n ax.set_yticks([])\n\n if lab_unit == \"radians\":\n label = ['$0$', r'$\\pi/4$', r'$\\pi/2$', r'$3\\pi/4$',\n r'$\\pi$', r'$5\\pi/4$', r'$3\\pi/2$', r'$7\\pi/4$']\n ax.set_xticklabels(label)\n return maxCount,radians.size", "def missingness_hist(missfile: str=\"plink\"):\n imiss_file = missfile+\".imiss\"\n lmiss_file = missfile+\".lmiss\"\n imiss = pd.read_csv(imiss_file, delimiter=\" \", skipinitialspace=True)\n lmiss = pd.read_csv(lmiss_file, delimiter=\" \", skipinitialspace=True)\n\n fig, ax = plt.subplots(1,2, figsize=(16,6), sharex=True)\n imiss.hist(column='F_MISS', ax=ax[0])\n ax[0].axvline(0.2, c='red', linestyle='--')\n ax[0].set_xlabel(\"Proportion of missing SNPs\")\n ax[0].set_ylabel(\"Number of individuals\")\n ax[0].set_title(\"Proportion of missing SNPs per individual \\n (> 0.2 are removed)\")\n lmiss['F_MISS'].hist(ax=ax[1])\n ax[1].axvline(0.2, c='red', linestyle='--')\n ax[1].set_xlabel(\"Proportion of individuals with missing SNPs\")\n ax[1].set_ylabel(\"Number of SNPs\")\n ax[1].set_title(\"Proportion of missing individuals per SNP \\n (> 0.2 are removed)\")\n return fig", "def hist_2_panel(path: str, outfile: str, kernel: str, s_above=5):\n sns.set(style=\"white\", color_codes=True, font_scale=1)\n fig, axes = plt.subplots(1, 2, figsize=(10, 5))\n fig.suptitle(SUB_TITLE, y=0.97)\n plt.subplots_adjust(wspace=0.3)\n\n sig = np.load('{}/sig_{}.npy'.format(path, kernel))\n is_peak = sig > s_above\n sig_finite_flat = sig[np.isfinite(sig)].flatten()\n\n bins = 20\n\n axes[0].hist(sig_finite_flat, bins=bins)\n axes[0].set_title('%e pixels' % len(sig_finite_flat))\n axes[0].set_ylabel('number of pixels')\n\n axes[1].hist(sig_finite_flat, bins=bins, density=True)\n axes[1].set_title('sig > %0.1f= %d pixels' % (s_above, np.sum(is_peak)))\n axes[1].set_ylabel('normalized density of pixels')\n\n # norm distribution\n mu, variance = 0., 1.\n sigma = np.sqrt(variance)\n xmin, xmax = sig_finite_flat.min(), sig_finite_flat.max()\n x = np.linspace(mu + xmin * sigma, mu + xmax * sigma, 100)\n axes[1].plot(x, stats.norm.pdf(x, mu, sigma), lw=3)\n axes[1].set_xlim([xmin, 10])\n axes[1].set_ylim([1e-10, 1])\n\n for u in range(2):\n axes[u].set_xlabel('significance in %s' % kernel)\n axes[u].set_yscale('log')\n\n _filename = \"{}-{}.png\".format(outfile, kernel)\n plt.savefig(_filename, bbox_inches='tight', dpi=100)", "def histogram(self):\r\n channel = self.ui.channel_selection.itemData(self.ui.channel_selection.currentIndex())\r\n\r\n #create a window, the reference must be stored, because the window\r\n #gets destroyed when its reference is garbage collected\r\n #make plotWindow a list and append to that if multiple windows should be possible\r\n title = \"histogram of {:s} channel\".format(self.ui.channel_selection.currentText())\r\n self.plotWindow = pyguitools.SimplePlotWindow(name = title)\r\n self.plotWindow.ax1.hist(self.npImg[self.ui.y0.value():self.ui.y1.value(),\r\n self.ui.x0.value():self.ui.x1.value(), \r\n channel].flatten(),\r\n bins=self.settings[\"histogramm bins\"],\r\n range=(self.settings[\"histogramm min\"],self.settings[\"histogramm max\"]))\r\n self.plotWindow.ax1.set_xlim(self.settings[\"histogramm min\"],self.settings[\"histogramm max\"]) \r\n self.plotWindow.show()", "def _histogram_with_spread(self):\n complexity_hist = np.bincount(\n self.epoch.array_annotations['complexity'])\n num_bins = (self.t_stop - 
self.t_start).rescale(\n self.bin_size.units).item() / self.bin_size.item()\n num_bins = round_binning_errors(num_bins, tolerance=self.tolerance)\n time_hist = np.zeros(num_bins, dtype=int)\n\n start_bins = (self.epoch.times - self.t_start).rescale(\n self.bin_size.units).magnitude / self.bin_size.item()\n stop_bins = (self.epoch.times + self.epoch.durations - self.t_start\n ).rescale(self.bin_size.units\n ).magnitude / self.bin_size.item()\n\n if self.sampling_rate is not None:\n shift = (.5 / self.sampling_rate / self.bin_size).simplified.item()\n # account for the first bin not being shifted in the epoch creation\n # if the shift would move it past t_start\n if self.epoch.times[0] == self.t_start:\n start_bins[1:] += shift\n else:\n start_bins += shift\n stop_bins += shift\n\n start_bins = round_binning_errors(start_bins, tolerance=self.tolerance)\n stop_bins = round_binning_errors(stop_bins, tolerance=self.tolerance)\n\n for idx, (start, stop) in enumerate(zip(start_bins, stop_bins)):\n time_hist[start:stop] = \\\n self.epoch.array_annotations['complexity'][idx]\n\n time_hist = neo.AnalogSignal(\n signal=np.expand_dims(time_hist, axis=1),\n sampling_period=self.bin_size, units=pq.dimensionless,\n t_start=self.t_start)\n\n empty_bins = (self.t_stop - self.t_start - self.epoch.durations.sum())\n empty_bins = empty_bins.rescale(self.bin_size.units\n ).magnitude / self.bin_size.item()\n empty_bins = round_binning_errors(empty_bins, tolerance=self.tolerance)\n complexity_hist[0] = empty_bins\n\n return time_hist, complexity_hist" ]
[ "0.5921788", "0.5898935", "0.586834", "0.586834", "0.586834", "0.5814485", "0.575388", "0.5714343", "0.5698721", "0.56886125", "0.5656541", "0.5636839", "0.56319624", "0.5586074", "0.55594313", "0.5537156", "0.54848033", "0.54779595", "0.5466038", "0.54587734", "0.5457407", "0.54543346", "0.54367083", "0.54366755", "0.5418318", "0.54174066", "0.54023594", "0.5377258", "0.53762317", "0.5368388", "0.5363484", "0.53587973", "0.53518116", "0.5336387", "0.5334526", "0.53305286", "0.530573", "0.5298229", "0.5290074", "0.5241797", "0.52388746", "0.5235029", "0.5228331", "0.52265", "0.52066135", "0.5203339", "0.5203188", "0.5198341", "0.51978886", "0.51933455", "0.51929766", "0.5191049", "0.5182825", "0.51797557", "0.51740587", "0.51619416", "0.5149034", "0.51479596", "0.5146527", "0.51450807", "0.51425767", "0.5137868", "0.512952", "0.5124446", "0.51150787", "0.5106526", "0.51046324", "0.5103646", "0.5099399", "0.5099399", "0.5099399", "0.5094952", "0.5093112", "0.50887233", "0.50878733", "0.5086154", "0.5085738", "0.50815684", "0.50741076", "0.50721633", "0.5069523", "0.5068971", "0.50652033", "0.5062905", "0.5062248", "0.50475425", "0.50471133", "0.50462776", "0.50413233", "0.5039924", "0.50372684", "0.5036911", "0.5035469", "0.5029101", "0.5024265", "0.50189424", "0.50119895", "0.50117224", "0.50104153", "0.5010177" ]
0.66867584
0
Parent function of get_variable_stars. Sequentially select 'variableTypes' variable stars and plot them on the HR diagram.
def plot_variable_stars(variablesdf, variabletype=None, x='B_V', y='M_V'): if variabletype is None: variabletype = ['CEP', 'BCEP', 'BCEPS', 'DSCT', 'SR', 'SRA', 'SRB', 'SRC', 'SRD', 'RR', 'RRAB', 'RRC', 'GDOR', 'SPB', 'M', 'LPV', 'roAp'] markers = ['^', 'D', 'D', 'v', 's', 'D', 'D', 'D', 'D', 's', 'D', 'D', 'D', 'o', 'p', 'o', 'o'] colors = ['k', 'k', 'k', '#00c000', 'r', 'r', 'r', 'r', 'r', 'm', 'm', 'm', '#00c0ff', (1, .7, 0), 'w', 'w', 'r'] sizes = [50, 40, 40, 40, 50, 40, 40, 40, 40, 50, 50, 50, 40, 40, 45, 40, 40] labels = ['', "BCEP, BCEPS", '', 'DSCT', 'SR', "SRA, SRB, SRC, SRD", '', '', '', 'RR', "RRAB, RRC", '', 'GDOR', 'SPB', '', 'LPV', 'roAp'] for i in range(len(variabletype)): if i in [2, 6, 7, 8, 11]: my_label = None else: my_label = "%s" % labels[i] plt.scatter(variablesdf[x].loc[variablesdf.loc[:, 'Type'] == variabletype[i]], variablesdf[y] .loc[variablesdf.loc[:, 'Type'] == variabletype[i]], facecolor=colors[i], marker=markers[i], s=sizes[i], label=my_label, edgecolor='k') print "plotting %s as %s%s" % (variabletype[i], colors[i], markers[i]) return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_variable_stars(df_data, df_variables_names, variabletype=None):\n if variabletype is None:\n variabletype = ['CEP', 'BCEP', 'BCEPS', 'DSCT', 'SR', 'SRA', 'SRB', 'SRC', 'SRD', 'RR', 'RRAB', 'RRC',\n 'GDOR', 'SPB', 'M', 'LPV']\n\n print \"Selecting variable stars..\"\n # create a string \"var_type\" of variabletype separated by or ('|').\n # var_type = \"|\".join(variabletype)\n # check if var_type is contained in Type (any or all, partial or not)\n # are_variables = df_variables_names[df_variables_names.Type.str.contains(var_type) == True] # fails with \"is True\"\n # are_variables.Type = are_variables.Type.str.replace(\".*BCEP.*\", \"BCEP\") # rename all types containing 'BCEP'\n are_variables = df_variables_names[df_variables_names.Type.isin(variabletype)]\n types_df = are_variables[['hip', 'tycho2_id', 'source_id', 'Type', 'Name']]\n print \"..Done\"\n print \"Preparing subselection of initial DataFrame..\"\n print \"..Making Hipparcos list..\"\n hip_list = are_variables.hip.tolist()\n hip_list = np.array(hip_list)\n hip_list = hip_list[~np.isnan(hip_list)] # remove the nans\n hip_list = list(hip_list)\n print \"..Making Tycho2 list..\"\n tycho2_list = are_variables.tycho2_id.tolist()\n tycho2_list = np.array(tycho2_list)\n tycho2_list = tycho2_list[tycho2_list != 'nan'] # tycho2 is str\n tycho2_list = list(tycho2_list)\n print \"..Done\\n----------\"\n\n print \"Getting Hipparcos and Tycho variable objects..\"\n hip_objects = df_data[df_data.hip.isin(hip_list)]\n hip_objects = pd.merge(hip_objects, types_df, on='hip', how='inner')\n if 'tycho2_id_y' in hip_objects.columns:\n hip_objects = hip_objects.drop('tycho2_id_y', axis=1)\n hip_objects = hip_objects.rename(columns={'hip_x': 'hip', 'tycho2_id_x': 'tycho2_id'})\n\n tycho_objects = df_data[df_data.tycho2_id.isin(tycho2_list)]\n tycho_objects = pd.merge(tycho_objects, types_df, on='tycho2_id', how='inner')\n if 'hip_y' in tycho_objects.columns:\n tycho_objects = tycho_objects.drop('hip_y', axis=1)\n tycho_objects = tycho_objects.rename(columns={'hip_x': 'hip', 'tycho2_id_x': 'tycho2_id'})\n print \"..Done\\n----------\"\n\n print \"Getting roAp stars from file..\"\n # roAP_names.csv contains tycho2_id names of roAp stars\n with open('roAP/roAP_names.csv') as roAP_file:\n roap_objects_list = roAP_file.readlines()\n roap_objects_list = [line.rstrip() for line in roap_objects_list]\n roap_objects = df_data[df_data.tycho2_id.isin(roap_objects_list)]\n column_number = len(roap_objects.columns)\n roap_objects.insert(column_number, 'Type', 'roAp')\n print \"..Done\\n----------\"\n\n variable_df = pd.concat([hip_objects, tycho_objects, roap_objects], axis=0, ignore_index=True)\n variable_df.source_id = variable_df.source_id.fillna(-9999).astype(int)\n\n return variable_df", "def plot_data_types(self, variable, **kwargs):\n return self.visualizer.plot_data_types(variable, **kwargs)", "def update_graph_type(variable_dropdown_x, variable_dropdown_y):\n\n options = {\n \"violin\": {\"label\": \"Violin\", \"value\": 1},\n \"scatter\": {\"label\": \"Scatter\", \"value\": 2},\n \"bar\": {\"label\": \"Bar\", \"value\": 3},\n \"pie\": {\"label\": \"Pie\", \"value\": 4},\n # \"box\": {\"label\": \"Box\", \"value\": 5,},\n }\n\n if variable_dropdown_x is None:\n return [], None, True, \"Select a graph type\"\n\n graph_selection_list = []\n\n if variable_dropdown_y is None:\n # Only one variable selected\n field_id = variable_dropdown_x\n value_type = get_field_type(field_id)\n\n supported_graphs = value_type.supported_graphs\n\n for 
option_key in options:\n option = options[option_key]\n graph_type = option[\"value\"]\n if graph_type in supported_graphs:\n graph_selection_list.append(option)\n\n else:\n # Both variables selected\n # Logic is:\n # If the x-axis variable is continuous, integer, date or time:\n # If the y-axis variable is continuous or integer:\n # You can use scatter plot\n # Else if x-axis variable is categorical:\n # If the y-axis variable is continuous or integer:\n # You can use violin plot, box plot\n x_value_type = get_field_type(str(variable_dropdown_x))\n y_value_type = get_field_type(str(variable_dropdown_y))\n\n if (\n x_value_type == ValueType.INTEGER\n or x_value_type == ValueType.CONT\n or x_value_type == ValueType.DATE\n or x_value_type == ValueType.TIME\n ):\n if y_value_type == ValueType.INTEGER or y_value_type == ValueType.CONT:\n graph_selection_list.append(options[\"scatter\"])\n\n elif x_value_type == ValueType.CAT_SINGLE or x_value_type == ValueType.CAT_MULT:\n if y_value_type == ValueType.INTEGER or y_value_type == ValueType.CONT:\n # graph_selection_list.append(options[\"box\"])\n graph_selection_list.append(options[\"violin\"])\n\n if len(graph_selection_list) == 0:\n return graph_selection_list, None, True, \"No supported graph types\"\n\n return (\n graph_selection_list,\n graph_selection_list[0][\"value\"],\n False,\n \"Select a graph type\",\n )", "def stars(self, magnitude=20):\n # Get the stars that are visible within this chart.\n thestars = []\n for s in self.hip_stars:\n if not s: continue\n hip_id, mag, ra, dec, bv = s\n if mag>magnitude: continue\n if dec<min(self.inner_dec, self.outer_dec): continue\n if dec>max(self.inner_dec, self.outer_dec): continue\n thestars.append(s)\n # This should sort them by increasing magnitude (brightest first).\n thestars.sort(key=lambda a:a[1])\n if not thestars: return\n # Set the least bright magnitude.\n self.dimmest_mag = math.floor(thestars[-1][1])\n # Create the star group.\n star_g = self.make_element(self.centered, 'g', (\n 'stroke', 'none'), ('fill', 'black'), (\n 'clip-path', 'url(#innerClipPath)'))\n for hip_id, mag, ra, dec, bv in thestars:\n x, y = self.radec2xy(ra, dec)\n self.make_element(star_g, 'circle', (\n 'cx', x), ('cy', y), ('r', self.starsize(hip_id)))", "def create_plot(x_var, y_var):\r\n\r\n FILE_PATH = 'application/star_data.csv'\r\n TARGET_VAR = 'star_type'\r\n SIZE_VAR = 'r_clipped'\r\n WIDTH = 1000\r\n HEIGHT = 600\r\n\r\n # Get the data\r\n df = pd.read_csv(FILE_PATH)\r\n fig = px.scatter(df, x=x_var, y=y_var, color=TARGET_VAR, size=SIZE_VAR, \r\n width=WIDTH, height=HEIGHT)\r\n graphJSON = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)\r\n\r\n return graphJSON", "def get_safety_vars_plot(self):\n if 'safety_vars_stats' not in self.stats:\n raise ValueError('No safety vars statistics present in this evaluator.')\n\n safety_vars = self.stats['safety_vars_stats'][0].keys()\n n_plots = len(safety_vars)\n fig, axes = plt.subplots(n_plots, 1, figsize=(8, 6 * n_plots))\n\n for idx, var in enumerate(safety_vars):\n series = collections.defaultdict(list)\n for ep in self.stats['safety_vars_stats']:\n for stat in ep[var]:\n series[stat].append(ep[var][stat])\n ax = axes[idx]\n for stat in ['min', 'max']:\n ax.plot(np.squeeze(np.array(series[stat])), label=stat)\n x = range(len(series['mean']))\n\n mean = np.squeeze(np.array(series['mean']))\n std_dev = np.squeeze(np.array(series['std_dev']))\n ax.plot(x, mean, label='Value')\n ax.fill_between(\n range(len(series['mean'])), mean - std_dev, mean + std_dev, 
alpha=0.3)\n ax.set_title('Stats for {}'.format(var))\n ax.legend()\n ax.spines['top'].set_visible(False)\n\n ax.xaxis.set_ticks_position('bottom')\n ax.set_xlabel('Episode #')\n ax.set_ylabel('Magnitude')\n ax.plot()\n return fig", "def plotvars_core(gs, data, plotfun=vis.plot_r, plot_radars=True,\n projection=PROJECTION, **kws):\n trans = ccrs.PlateCarree()\n axd = dict(ker=plt.subplot(gs[0, 0], projection=projection),\n kum=plt.subplot(gs[0, 1], projection=projection),\n van=plt.subplot(gs[1, 0], projection=projection),\n com=plt.subplot(gs[1, 1], projection=projection))\n for key in ['ker', 'kum']:\n axd[key].set_xticks([])\n for key in ['kum', 'com']:\n axd[key].set_yticks([])\n ax_cb = plt.subplot(gs[:, -1])\n for key in NAMES.keys():\n ax = axd[key]\n ax.set_ymargin(0)\n ax.set_xmargin(0)\n plotfun(data[I_RADAR[key]], ax=ax, cax=ax_cb, transform=trans, **kws)\n ax.set_title(NAMES[key])\n ax.coastlines(resolution='10m')\n if plot_radars:\n if key != 'com':\n RADAR[key].draw_marker(ax=ax, transform=trans)\n else:\n for radarkey in ['ker', 'kum', 'van']:\n RADAR[radarkey].draw_marker(ax=ax, transform=trans)\n return axd", "def read_stars(self):\n if self.hip_stars: return\n all_stars = list(hipparcos.stars())\n self.hip_stars = [None]*(max(s[0] for s in all_stars)+1)\n for s in all_stars: self.hip_stars[s[0]] = s", "def draw_points(stars, errors, ax):\n\n # Open the file of common star names to HIP numbers. Store this in names dictionary.\n names = {}\n with open(os.environ['HOKU_PROJECT_PATH'] + '/data/star-names.dat') as names_f:\n names_f.readline()\n\n for line in names_f:\n n, h = line.split(',')[0].strip(), line.split(',')[1].strip()\n names.update({h: n})\n\n # Plot clean data set as black.\n for star in stars:\n if quiver_flag:\n ax.quiver(0, 0, 0, star[0], star[1], star[2], arrow_length_ratio=0.0000001, alpha=0.2)\n ax.scatter(star[0], star[1], star[2], marker='*', color='k', s=100)\n\n if str(int(star[3])) in names:\n ax.text(star[0], star[1], star[2], names[str(int(star[3]))])\n else:\n ax.text(star[0], star[1], star[2], 'HIP{}'.format(int(star[3])))\n\n # Plot error models with specified colors.\n for model in errors:\n for error in model:\n if quiver_flag:\n ax.quiver(0, 0, 0, error[0], error[1], error[2], arrow_length_ratio=0.0000001, alpha=0.2)\n ax.scatter(error[0], error[1], error[2], marker='*', color=error[4])\n ax.text(error[0], error[1], error[2], 'ERR{}'.format(int(error[3])))", "def SetStars(self):\r\n\t\tstartype = [self._iconstars[\r\n\t\t\tself.CalcStar(starnum,\\\r\n\t\t\t\tself._configtmp[\"imagerating\"],\r\n\t\t\t\tself._configtmp[\"userrating\"])]\\\r\n\t\t\tfor starnum in range(1,6)]\r\n\t\tself.bitmapButton1Star.SetBitmapLabel(startype[0])\r\n\t\tself.bitmapButton2Star.SetBitmapLabel(startype[1])\r\n\t\tself.bitmapButton3Star.SetBitmapLabel(startype[2])\r\n\t\tself.bitmapButton4Star.SetBitmapLabel(startype[3])\r\n\t\tself.bitmapButton5Star.SetBitmapLabel(startype[4])", "def plot_star_classes(obj_catalog):\n\n fig = plt.figure(num=None,figsize=(8,8), dpi=100)\n ax = fig.add_subplot(1,1,1)\n\n phot_class = obj_catalog.phot_star_class\n sclass = obj_catalog.star_class\n phot_class_num = np.zeros(obj_catalog.shape[0])\n sclass_num = np.zeros(obj_catalog.shape[0])\n\n star_classes = ['WD',\\\n 'O','O8','O9','OB','B0','B1','B2','B3','B5','B6','B7','B8','B9',\\\n 'A0','A1','A2','A3','A4','A5','A6','A8','A9',\\\n 'F0','F2','F3','F5','F6','F8','F9',\\\n 'G0','G1','G2','G3','G4','G5','G8','G9',\\\n 'K0','K1','K2','K3','K4','K5','K7',\\\n 
'M0','M1','M2','M3','M4','M5','M6','M7','M8','M9', \\\n 'L0','L1','L2','L3','L4','L5','L9','Ldwarf', \\\n 'T','other','C']\n print len(star_classes)\n\n star_dict = dict(zip(star_classes,np.arange(len(star_classes))))\n\n # print phot_class.value_counts()\n\n for i in range(len(phot_class)):\n print phot_class[i], star_dict[phot_class[i]], sclass[i],star_dict[sclass[i]]\n phot_class_num[i] = star_dict[phot_class[i]]\n sclass_num[i] = star_dict[sclass[i]]\n\n #ax.plot(sclass_num,phot_class_num,'.')\n\n cmap = plt.cm.Blues\n cmap.set_bad('0.85',1.0)\n\n cax = plt.hist2d(sclass_num,phot_class_num, bins=65,range = [[0,65], [0,65]], norm = LogNorm(), cmap=cmap, zorder=0)\n cbar = plt.colorbar(ticks=[1,5,10,15,20,25,30,40])\n cbar.ax.set_yticklabels([1,5,10,15,20,25,30,40],fontsize=12)\n\n ax.plot(np.arange(65),np.arange(65),'r')\n\n plt.xticks(np.arange(len(star_classes)),star_classes,fontsize=8,rotation='vertical')\n plt.yticks(np.arange(len(star_classes)),star_classes,fontsize=8)\n\n plt.grid(True)\n return plt", "def radial_graph(self):\n \n if self['M_RADIAL']['intens'] != None:\n name = self['name']\n id = self._getGraphId()\n figname = 'RADIAL_%s.eps' % id\n sxlabel = 'Pixel Radius' ; sylabel = 'Intens' \n title = 'Radial profile, %s' % (name,)\n y = self['M_RADIAL']['intens']\n x = self['M_RADIAL']['radii']\n xy = ((x,y),)\n Plot(xy,figname,sxlabel,sylabel,title)\n self['figures']['radial'] = figname\n else : pass", "def plot_shape(self, theta=0):\n x = np.zeros(self.nz)\n y_re = np.zeros(self.nz)\n y_ri = np.zeros(self.nz)\n for i in range(0, self.nz):\n x[i] = i * self.dz\n y_re[i] = self.re[i][theta]\n y_ri[i] = self.ri[i][theta]\n p = figure(\n title=\"Shapes of stator and rotor along Z; Theta=\" + str(theta),\n x_axis_label=\"Points along Z\",\n y_axis_label=\"Radial direction\",\n )\n p.line(x, y_re, line_width=2, color=\"red\")\n p.line(x, y_ri, line_width=2, color=\"blue\")\n return p", "def listofstars():\n a = []\n for star in Star.select():\n a.append(star.name)\n return a", "def plot(self, dis_type,diameter=\"*\",thickness=\"*\", loglog=False):\n if dis_type not in self.dis_types:\n print(\"Type %s does not exist, please check it\" % dis_type)\n return\n if diameter != \"*\" and (diameter not in self.diameters):\n print(\"Diameter %s does not exist, please check it\" % diameter)\n return\n if thickness != \"*\" and (thickness not in self.thicknesses):\n print(\"thickness %s does not exist, please check it\" % thickness)\n return\n fig = plt.figure()\n ax = fig.add_subplot(111)\n ax.set_title('%s' % self.plotTypes[dis_type])\n if diameter != \"*\":\n if thickness != \"*\":\n ax.set_title('%s , diameter = %s nm, thickness = %s nm' % (self.plotTypes[dis_type],diameter,thickness))\n else:\n ax.set_title('%s , diameter = %s nm' % (self.plotTypes[dis_type],diameter))\n \n if (thickness != \"*\" and diameter == \"*\"):\n ax.set_title('%s , thickness = %s nm' % (self.plotTypes[dis_type],thickness))\n\n for diam in sorted(self.distrs[dis_type]):\n if (diam==diameter and diameter!=\"*\") or diameter==\"*\":\n for thick in sorted(self.distrs[dis_type][diam]):\n if (thick==thickness and thickness!=\"*\") or thickness==\"*\":\n d = self.distrs[dis_type][diam][thick]\n if thickness==\"*\" and diameter==\"*\":\n lb = \" d= %s nm, t= %s nm\" % (diam,thick)\n else:\n if diameter==\"*\":\n lb = \"d= %s nm\" % (diam)\n else:\n lb = \"t= %s nm\" % (thick)\n ax.plot(d.x, d.y, label=lb)\n \n ax.legend(numpoints=1,loc=4)\n ax.grid(True)\n # Here we need to explicity say to show the plot\n 
plt.show()", "def set_plot_props(self):\n \n if self.type == \"gas\":\n self.marker = \"v\"\n self.color = \"cyan\"\n \n elif self.type == \"cluster\":\n self.marker = \"o\"\n self.color = \"maroon\"\n \n elif self.type == \"spiral\":\n self.marker = \"*\"\n self.color = \"green\"\n \n elif self.type == \"loop\":\n self.marker = \"o\"\n self.color = \"maroon\"\n \n elif self.type == \"giant\":\n self.marker = \"s\"\n self.color = \"red\"\n \n return", "def aperture_photometry(self, x_stars, y_stars, aperture, background):\n print '--------------------------------------------------------------------- aperture_photometry'\n\n #--- CONSTANTS ---#\n gain = 0.73 # Gain of camera: electrons pr ADU (ADU = counts from object)- ajust to camera!\n ron = 3.3 # Read out noise - ajust to camera!\n con = 25 # Magnitude constant\n\n #--- PHOTOMETRY ---#\n # Find fluxes:\n N = len(x_stars) # Number of stellar objects \n flux_star = zeros((self.n,N))\n SNR_i = zeros((self.n,N))\n for i in range(self.n): # Loop over all images: if timeseries is available\n for j in range(N): # Loop over all stars and find flux: using same aperture size\n flux_sky, n_star_pix, flux_star[i][j] = self.aperture(self.LF_i[i], x_stars[j], y_stars[j],\\\n aperture, background)\n SNR_i[i][j] = self.SNR(flux_sky, n_star_pix, flux_star[i][j], gain, ron)\n\n #--- FINAL CORRECTIONS ---#\n print flux_star, flux_sky, SNR_i\n return flux_star, SNR_i", "def geom_type(self): # -> str:\n ...", "def plot_rfs(self):\n self.xe = self.data['XE']\n self.ye = self.data['YE']\n# self.IE = self.data['IE']\n self.Var = self.data['Var']\n std = np.sqrt(np.mean(self.Var))\n fig = plt.gcf()\n ax = plt.gca()\n ax.set_xlim((np.min(self.xe), np.max(self.xe)))\n ax.set_ylim((np.min(self.ye), np.max(self.ye)))\n for xe, ye in zip(self.xe, self.ye):\n circ = plt.Circle((xe, ye), std, color='b', alpha=0.4)\n fig.gca().add_artist(circ)", "def createGraphics(self):\r\n\r\n def variableColor(variable):\r\n if variable.type.startswith(('Float', 'Real')):\r\n return QColor.fromRgb(26, 77, 179)\r\n elif variable.type.startswith(('Enumeration', 'Int', 'UInt')):\r\n return QColor.fromRgb(179, 77, 26)\r\n elif variable.type == 'Boolean':\r\n return QColor.fromRgb(255, 0, 255)\r\n elif variable.type == 'String':\r\n return QColor.fromRgb(26, 114, 16)\r\n elif variable.type == 'Binary':\r\n return QColor.fromRgb(81, 81, 81)\r\n else:\r\n return QColor.fromRgb(0, 0, 0)\r\n\r\n inputVariables = []\r\n outputVariables = []\r\n maxInputLabelWidth = 0\r\n maxOutputLabelWidth = 0\r\n\r\n textItem = QGraphicsTextItem()\r\n fontMetrics = QFontMetricsF(textItem.font())\r\n\r\n for variable in self.modelDescription.modelVariables:\r\n if variable.causality == 'input':\r\n inputVariables.append(variable)\r\n elif variable.causality == 'output':\r\n outputVariables.append(variable)\r\n\r\n for variable in inputVariables:\r\n maxInputLabelWidth = max(maxInputLabelWidth, fontMetrics.width(variable.name))\r\n\r\n for variable in outputVariables:\r\n maxOutputLabelWidth = max(maxOutputLabelWidth, fontMetrics.width(variable.name))\r\n\r\n from math import floor\r\n\r\n scene = QGraphicsScene()\r\n self.ui.graphicsView.setScene(scene)\r\n group = QGraphicsItemGroup()\r\n scene.addItem(group)\r\n group.setPos(200.5, -50.5)\r\n lh = 15 # line height\r\n\r\n w = max(150., maxInputLabelWidth + maxOutputLabelWidth + 20)\r\n h = max(50., 10 + lh * max(len(inputVariables), len(outputVariables)))\r\n\r\n block = QGraphicsRectItem(0, 0, w, h, group)\r\n block.setPen(QColor.fromRgb(0, 0, 
0))\r\n\r\n pen = QPen()\r\n pen.setWidthF(1)\r\n\r\n font = QFont()\r\n font.setPixelSize(10)\r\n\r\n # inputs\r\n y = floor((h - len(inputVariables) * lh) / 2 - 2)\r\n for variable in inputVariables:\r\n text = QGraphicsTextItem(variable.name, group)\r\n text.setDefaultTextColor(QColor.fromRgb(0, 0, 0))\r\n text.setFont(font)\r\n text.setX(3)\r\n text.setY(y)\r\n\r\n polygon = QPolygonF([QPointF(-8, y + 7.5), QPointF(-1, y + 11), QPointF(-8, y + 14.5)])\r\n\r\n path = QPainterPath()\r\n path.addPolygon(polygon)\r\n path.closeSubpath()\r\n contour = QGraphicsPathItem(path, group)\r\n contour.setPen(QPen(Qt.NoPen))\r\n contour.setBrush(variableColor(variable))\r\n pen = QPen()\r\n pen.setColor(variableColor(variable))\r\n pen.setJoinStyle(Qt.MiterJoin)\r\n contour.setPen(pen)\r\n\r\n y += lh\r\n\r\n # outputs\r\n y = floor((h - len(outputVariables) * lh) / 2 - 2)\r\n for variable in outputVariables:\r\n text = QGraphicsTextItem(variable.name, group)\r\n text.setDefaultTextColor(QColor.fromRgb(0, 0, 0))\r\n text.setFont(font)\r\n text.setX(w - 3 - text.boundingRect().width())\r\n text.setY(y)\r\n\r\n polygon = QPolygonF([QPointF(w + 1, y + 7.5), QPointF(w + 8, y + 11), QPointF(w + 1, y + 14.5)])\r\n\r\n path = QPainterPath()\r\n path.addPolygon(polygon)\r\n path.closeSubpath()\r\n contour = QGraphicsPathItem(path, group)\r\n contour.setPen(QPen(Qt.NoPen))\r\n contour.setBrush(variableColor(variable))\r\n pen = QPen()\r\n pen.setColor(variableColor(variable))\r\n pen.setJoinStyle(Qt.MiterJoin)\r\n contour.setPen(pen)\r\n\r\n y += lh", "def SNR(self, flux_sky, n_pix_star, flux_star, gain, ron):\n SNR = (gain*flux_star/sqrt(gain*flux_star + n_pix_star*gain*flux_sky + n_pix_star*ron**2)) \n return SNR", "def __init__(self, stars_x, stars_y, stars_f):\n self.xpos = stars_x\n self.ypos = stars_y\n self.flux = stars_f\n\n return", "def __init__(self, temperatures, daytypes, consumptions, nb_days, nb_particles, sigma2, kappa, u_heat):\n self.temperatures = temperatures\n self.daytypes = daytypes\n self.consumptions = consumptions\n self.nb_days = nb_days\n self.nb_particles = nb_particles\n self.sigma2 = sigma2\n self.kappa = kappa\n self.u_heat = u_heat\n #Var init\n self.s = np.zeros((nb_days, nb_particles)) \n self.g_heat = np.zeros((nb_days, nb_particles))\n #sigma_s and sigma_g are fixed\n self.sigma_s_star_2 = np.zeros((1, nb_particles)) \n self.sigma_g_star_2 = np.zeros((1, nb_particles))\n self.x_season = np.zeros((1, nb_particles))\n self.x_heat = np.zeros((1, nb_particles))\n self.x = np.zeros((1, nb_particles))\n self.w = np.zeros((1, nb_particles))", "def Mstar_function(**kwargs):\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n if not p.xlim:\n p.xlim = np.array([1e10,1e13])\n\n df_all = pd.read_pickle(p.d_data + 'galaxy_selection/all_z0_galaxies')\n Mstar = df_all['M_star_caesar'].values\n\n logM_star = np.log10(Mstar)\n dM = 0.25\n N_gal = len(np.where((Mstar > Mstar.min()) & (Mstar < (Mstar.min() + dM)))[0])\n logM_star_bin = np.arange(logM_star.min(), logM_star.max(), dM)\n logM_star_bin_c = logM_star_bin[0:-1] + (logM_star_bin[1]-logM_star_bin[0])/2\n\n N_gal_array = np.zeros(len(logM_star_bin)-1)\n\n # Number of galaxies in each stellar mass bin\n for i in range(len(logM_star_bin)-1):\n N_gal_array[i] = len(np.where((logM_star > logM_star_bin[i]) & (logM_star < (logM_star_bin[i+1])))[0])\n\n # Corresponding volume density of galaxies\n n_gal_array = N_gal_array / (p.box_size)**3 # number of galaxies per Mpc^3\n\n fig, ax = plt.subplots()\n hb = 
ax.plot(logM_star_bin_c, np.log10(n_gal_array))\n ax.set_ylabel('$\\log\\Phi$ [Mpc$^{-3}$]')\n ax.set_xlabel('log Stellar Mass [M$_{\\odot}$]')\n ax.set_ylim([-7,0.2])\n plt.tight_layout()\n plt.show()", "def get_variables_of_type(self, variable_type):\n if isinstance(variable_type,str):\n variable_key = variable_type\n else:\n #it is a class\n variable_key = variable_type.__name__\n return self._var_kinds[variable_key]", "def parse(self):\n\n special_vars = {'amplification', 'copy number loss', \n 'epigenetic silencing', 'overexpression'}\n\n special_terms = ['dna binding domain', 'egfrv', 'truncating mutation',\n 'fusion', 'mutation', 'deletion', 'duplication', 'insertion',\n 'hypermethylation']\n\n var = self.var.lower()\n\n # Check if the stop sign '*' in the variation\n if '*' in var:\n self.stop_sign = True\n \n # Type \"exact match with special pre-difined variations\"\n if var in special_vars:\n self.type = var\n return\n \n # Type \"with special term\"\n for term in special_terms:\n if term in var:\n self.type = term\n return\n\n # Type \"point\": A123B or A123* or A123\n if re.match('^[a-z][0-9]+[a-z|*]?$', var):\n split = re.split('[0-9]+', var)\n self.type = 'point'\n self.start_amino = split[0]\n self.end_amino = split[1]\n s = re.search('[0-9]+', var)\n self.pos = int(s.group())\n return\n\n # Type \"del/ins/trunc/splice/dup/fs\": A123del or A123_B234del\n for suffix in ['del', 'ins', 'trunc', 'splice', 'dup', 'fs']:\n if suffix in var:\n self.type = self.alias_dict.get(suffix, suffix)\n self._parse_suffix(var, suffix)\n return\n\n print('[INFO] variation cannot be parsed: %s' % self.var)", "def format_variable(self, variablesReference, filter=None, start=None, count=None, format=None):\n\n # format is ignored, TODO?\n\n vs = None if start is None or start == 0 else start\n es = None if count is None or count == 0 else count\n\n var, name, tt, parent = self.scope_assign[variablesReference]\n\n # print(str(var) + \", \" + str(name) + \", \" + str(tt))\n\n is_slotted = False\n\n if not isinstance(var, dict) and not isinstance(var, list):\n if hasattr(var, \"__dict__\"):\n var = var.__dict__\n else:\n is_slotted = True\n\n # print (str(var))\n\n if not is_slotted and isinstance(var, dict):\n if filter is not None and filter == \"indexed\":\n return []\n keys = sorted(var.keys())\n elif not is_slotted:\n if filter is not None and filter == \"named\":\n return []\n keys = range(len(var))\n elif is_slotted:\n keys = dir(var)\n\n if \"self\" in keys:\n keys.remove(\"self\")\n keys = [\"self\"] + keys\n\n # print (str(keys))\n\n it = 0\n total = 0\n variables = []\n for vkey in keys:\n if vs is None or it >= vs:\n var_ref = self.scope_var_id\n if is_slotted:\n value = getattr(var, vkey)\n else:\n value = var[vkey]\n\n vardesc = {}\n variables.append(vardesc)\n\n vardesc[\"name\"] = vkey\n vardesc[\"value\"] = str(value)\n vardesc[\"type\"] = str(type(value))\n # vardesc[\"presentationHint\"] # TODO!!!\n vardesc[\"evaluateName\"] = vkey\n vardesc[\"variablesReference\"] = var_ref\n\n vv_inner = value\n vv_slotted = False\n if not isinstance(vv_inner, dict) and not isinstance(vv_inner, list):\n if hasattr(vv_inner, \"__dict__\"):\n vv_inner = vv_inner.__dict__\n else:\n vv_slotted = True\n\n if not vv_slotted and isinstance(vv_inner, dict):\n vardesc[\"namedVariables\"] = len(vv_inner.keys())\n elif not vv_slotted:\n vardesc[\"indexedVariables\"] = len(vv_inner)\n else:\n vardesc[\"namedVariables\"] = len(dir(vv_inner))\n\n self.scope_assign[var_ref] = (value, vkey, str(type(value)), 
var)\n\n self.scope_var_id += 1\n total += 1\n it += 1\n if es is not None and total >= es:\n break\n\n return variables", "def _parse_stars_according_to_image(self, starClipSigma=40.0):\n if issubclass(self.imageType, ReducedScience):\n # Check if all the images were corrected to Airmass 0.0\n if np.sum([img.airmass for img in self.imageList]) > 0:\n raise ValueError('All images in the imageList must be corrected to airmass=0.0 before combining')\n\n # Compute the star masks for this image stack.\n starMask = self._construct_star_mask()\n\n else:\n starMask = False\n starClipSigma = 0\n\n return starMask, starClipSigma", "def analyse_type(self):\n \n t = \" \" # Holder string\n \n data_base = '/local/duman/SIMULATIONS/many_polymers_5/density_0.2/kappa_'\n path = data_base + str(self.k) + '/fp_' + str(self.f) + '/CLUSTER/avg_size.txt'\n if os.path.exists(path):\n data = np.loadtxt(path, dtype=float)\n else:\n data = 10.\n self.cs = data\n print path\n print data\n if data < 12.:\n t = \"gas\"\n elif data > 200.:\n t = \"giant\"\n else:\n t = \"cluster\"\n\n self.type = t # Type of point\n \n return", "def generate_plot(self):\r\n\t\tx, y = zip(*[p.p for p in self.universe])\r\n\t\tself.ax.cla()\r\n\t\tself.ax.plot(x, y, '.')\r\n\t\tself.ax.set_title('Universe at time: %d' % self.universe.time)\r\n\t\tself.ax.set_xlim([P_MU-4*P_STD, P_MU+4*P_STD])\r\n\t\tself.ax.set_ylim([P_MU-4*P_STD, P_MU+4*P_STD])", "def compute_luminosity(sed_star, types, verbose=False):\n lam = sed_star['lam']\n fstar1 = 1e23 * sed_star[\"fstar1\"]\n\n sed_star1_si = fluxToJy(fstar1, lam * 1e-6, 1, True)\n f_sed_star1 = interp1d(lam, fstar1)\n f_star1_vis = f_sed_star1(0.5)\n D = 1 * pc\n L1 = (4 * np.pi * (D * 1e-2) ** 2) * \\\n ip.trapz(sed_star1_si, lam) / const.L_sun.value\n L2 = ratio_vis = None\n\n if len(types) == 2:\n fstar2 = 1e23 * sed_star[\"fstar2\"]\n sed_star2_si = fluxToJy(fstar2, lam * 1e-6, 1, True)\n L2 = (4 * np.pi * (D * 1e-2) ** 2) * \\\n ip.trapz(sed_star2_si, lam) / const.L_sun.value\n f_sed_star2 = interp1d(lam, fstar2)\n f_star2_vis = f_sed_star2(0.5)\n ratio_vis = f_star2_vis / f_star1_vis\n\n if len(types) == 1:\n if verbose:\n cprint(\"\\nLtot = %2.1f Lsun\" % (L1), color=\"magenta\")\n return L1\n elif len(types) == 2:\n if verbose:\n cprint(\"\\nLtot = %2.1f + %2.1f = %2.1f Lsun\" %\n (L1, L2, L1 + L2), color=\"magenta\",)\n cprint(\"ratio[0.5mu] = %2.2f\" % ratio_vis, color=\"magenta\")\n return L1 + L2", "def set_stars():\n prod_id = int(request.vars.prod_id)\n logger.info(\"changing stars on prod_id {%s}\" %prod_id)\n rating = int(request.vars.rating)\n logger.info(\"auth.user from api: %s\"%auth.user.email )\n db.stars.update_or_insert(\n (db.stars.prod_id == prod_id) & (db.stars.user_email == auth.user.email),\n prod_id = prod_id,\n user_email = auth.user.email,\n rating = rating\n )\n new_avg = calc_avg_rating(prod_id)\n return response.json(dict(new_avg=new_avg))", "def _variance_symbol(self, variable):\n return Symbol(\"e_{0}\".format(variable))", "def visit_Var(self, node: Var) -> VarSymbol:\n\n var_name = node.value\n var_symbol = self.symbol_table.get_token(var_name)\n var_symbol.type = DoubleType()\n\n return var_symbol", "def get_variables(self, shader_type=None):\n if not shader_type:\n return self.variables\n else:\n return [var for (_, var) in self.variables.iteritems() \\\n if var['shader_type'] == shader_type]", "def Diagnostic_plot1(self, v=False):\n\n # sort modes by frequency (radial order)\n ds.mode_id.sort_values(['f0'], axis=0, ascending=True, inplace=True)\n\n # 
SNR values after smoothing/interpolating at radial mode freqs\n u = np.full(len(ds.mode_id), -99) # unsmoothed\n s1 = np.full(len(ds.mode_id), -99) # after Gaussian smoothing\n s2 = np.full(len(ds.mode_id), -99) # after uniform smoothing\n s3 = np.full(len(ds.mode_id), -99) # after linear interpolation\n\n for idx, f in ds.mode_id.iterrows():\n width = abs(f['w0']) # width to convolve/interpolate over\n\n # smooth by convolving with Guassian\n smoo = star.Conv(self.snr, width)\n\n # smooth with uniform filter\n smoo2 = ndim.filters.uniform_filter1d(self.snr, size=int(np.around(width)))\n\n # smooth by interpolating\n bins = np.arange(0., self.ds.freq[-1], width) # rebin data to get highest SNR\n smoo3 = np.interp(bins, self.ds.freq, self.snr) # SNR values at these freqs\n\n index = np.abs(self.ds.freq-f['f0']).argmin() # use the frequency closest to mode\n if v:\n print(self.ds.freq[index], self.snr[index])\n print('before smoo', self.snr[index])\n print('smoo1', smoo[index])\n print('smoo2', smoo2[index])\n print('smoo3', smoo3[np.abs(bins-f['f0']).argmin()], '\\n')\n\n u[idx] = self.snr[index]\n s1[idx] = smoo[index]\n s2[idx] = smoo2[index]\n s3[idx] = smoo3[np.abs(bins-f['f0']).argmin()]\n\n fig = plt.figure(figsize=(12, 18))\n plt.rc('font', size=26)\n plt.plot(self.ds.mode_id['f0'], u, label=r'unsmoothed')\n plt.plot(self.ds.mode_id['f0'], s1, label=r'Smoothed with 1D Gaussian')\n plt.plot(self.ds.mode_id['f0'], s2, label=r'Smoothed with uniform filter')\n plt.plot(self.ds.mode_id['f0'], s3, label=r'Smoothed by interpolating')\n plt.xlabel(r'$\\nu / \\mu$Hz')\n plt.ylabel(r'SNR')\n plt.legend(loc='upper right')\n plt.show()\n fig.savefig(os.getcwd() + os.sep + 'DetTest1_plots' + os.sep +'DetTest_Diagnostic_plot1_' + self.ds.epic + '.pdf')\n #sys.exit()", "def plot(dsname, wdir = './', width = 1000.0, dt = 5.0*yt.units.Myr, fields = all_fields,\n thickness = 20.0, outdir = './enrichment_plots_kpc'):\n\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n\n gal = Galaxy(dsname, wdir = wdir)\n data = gal.df\n\n @derived_field(name=\"logNO\", units=\"\")\n def _logNO(field, data):\n return np.log10(data['N_Abundance'] / data['O_Abundance'])\n gal.ds.add_field((\"gas\", \"logNO\"), function=_logNO, units=\"\")\n\n make_filtered_field(gal.ds, 'logNO', ['O_Fraction','N_Fraction'])\n make_filtered_field(gal.ds, 'O_over_H', ['O_Fraction'])\n make_filtered_field(gal.ds, 'N_over_O', ['O_Fraction','N_Fraction'])\n# def _logNO_filtered(field,data):\n# x = data[('gas','logNO')]\n#\n# f1 = data[('gas','O_Fraction')]\n# f2 = data[('gas','N_Fraction')]\n#\n# x[ (f1 < tol) + (f2 < tol)] = np.nan\n#\n# return x\n# gal.ds.add_field(('gas','logNO_filtered'), function = _logNO_filtered, units = \"\")\n\n M = data['birth_mass']\n t_o = data['creation_time'].convert_to_units('Myr')\n MS_lifetime = data[('io','particle_model_lifetime')].to('Myr')\n MS_death = t_o + MS_lifetime\n px = (data['particle_position_x'] - gal.ds.domain_center[0]).to('pc')\n py = (data['particle_position_y'] - gal.ds.domain_center[1]).to('pc')\n pz = (data['particle_position_z'] - gal.ds.domain_center[2]).to('pc')\n\n recent_death = (MS_death > gal.ds.current_time - dt) * (MS_death <= gal.ds.current_time + 0.001*yt.units.Myr)\n alive = MS_death > gal.ds.current_time + 0.001*yt.units.Myr\n\n AGB = M < 8.0\n massive_star = (M > 8.0) * (M < 25.0)\n\n boxdim = np.array([width*1.25,width*1.25,thickness])*yt.units.pc\n region = gal.ds.box(gal.ds.domain_center - boxdim*0.5, gal.ds.domain_center + boxdim*0.5)\n\n proj = 
yt.ProjectionPlot(gal.ds, 'z', fields,\n weight_field = 'number_density', data_source = region, width = (width,'pc'))\n\n if 'number_density' in fields:\n proj.set_unit('number_density','cm**(-3)')\n proj.set_cmap('number_density','viridis')\n proj.set_zlim('number_density',1.0E-4,200.0)\n\n if 'O_over_H_filtered' in fields:\n proj.set_cmap('O_over_H_filtered','cubehelix')\n proj.set_log('O_over_H_filtered', False)\n proj.set_zlim('O_over_H_filtered', -5, 1)\n proj.set_colorbar_label('O_over_H_filtered', r'[O/H]')\n\n if 'N_over_O_filtered' in fields:\n proj.set_cmap('N_over_O_filtered','PRGn')\n proj.set_log('N_over_O_filtered',False)\n proj.set_zlim('N_over_O_filtered',-2,2)\n proj.set_colorbar_label('N_over_O_filtered', r'[N/O]')\n\n if 'logNO' in fields:\n proj.set_cmap('logNO','PRGn')\n proj.set_log('logNO',False)\n proj.set_zlim('logNO',-2,0.5)\n proj.set_colorbar_label('logNO', r'log( N / O )')\n\n if 'logNO_filtered' in fields:\n proj.set_cmap('logNO_filtered','PRGn')\n proj.set_log('logNO_filtered',False)\n proj.set_zlim('logNO_filtered',-2,0.5)\n proj.set_colorbar_label('logNO_filtered', r'log( N / O )')\n\n if 'Temperature' in fields:\n proj.set_cmap('Temperature', 'RdYlBu_r')\n proj.set_log('Temperature',True)\n proj.set_zlim('Temperature',10.0, 1.0E7)\n proj.set_colorbar_label('Temperature', r'Temperature (K)')\n\n if 'G_o' in fields:\n proj.set_cmap('G_o', 'cubehelix')\n proj.set_log('G_o', True)\n proj.set_zlim('G_o',0.05, 100.0)\n proj.set_colorbar_label('G_o', r'ISRF (G$_{\\rm o}$)')\n\n if 'Q0_flux':\n proj.set_cmap('Q0_flux', 'magma')\n proj.set_log('Q0_flux',True)\n proj.set_zlim('Q0_flux',1.0E-6, 1.0E-1)\n proj.set_colorbar_label('Q0_flux', r'HI Ionizing Radiation (s$^{-1}$)')\n\n Mstar = np.sum(gal.df['particle_mass'][ gal.df['particle_type'] == 11]).to('Msun')\n time = gal.ds.current_time.to('Myr')\n# proj.annotate_title(r\"Time = %1.1f Myr M$_{*}$ = %2.2E M$_{\\odot}$\"%(time.value,Mstar.value))\n proj.set_font( {'size' : 32} )\n proj.save(outdir + '/') # necessary\n\n\n dt = 5.0 * yt.units.Myr\n # buffer around image. otherwise points plotted near edge of image my run a little outside\n # viewing area, causing weird shifts in plotting. 
Not sure how to control this otherwise\n buffer = 15.0 # in pc\n in_image = (np.abs(pz) <= boxdim[2]*0.5) *\\\n (np.abs(px) <= (width*0.5 - buffer)) *\\\n (np.abs(py) <= (width*0.5 - buffer))\n\n pp = {}\n pp['massive_star_winds'] = in_image * alive * massive_star\n pp['AGB_winds'] = in_image * recent_death * AGB\n pp['SN'] = in_image * recent_death * massive_star\n #pp['other_stars'] = in_image * alive * (np.logical_not(pp['massive_star_winds']))\n\n for k in list(proj.plots.keys()):\n image = proj.plots[k]\n\n #\n # Now select and annotate the points we want\n #\n for s in list(pp.keys()):\n if np.size(px[pp[s]].value) > 0:\n print(np.size(px[pp[s]]), 'Particles in ', s, px[pp[s]], py[pp[s]])\n image.axes.scatter(px[pp[s]].value,py[pp[s]].value, s = ps[s], marker = markers[s], color = colors[s])\n else:\n print('No particles in ', s)\n\n# proj.refresh()\n# proj.hide_axes()\n proj.save(outdir + '/') # necessary\n\n if 'N_over_O' in fields:\n vmin,vmax = -2,2\n x = proj.plots['N_over_O']\n x.image.set_norm( MidpointNormalize(midpoint= 0.5*(vmin+vmax), vmin=vmin,vmax=vmax))\n x.cb.set_norm(MidpointNormalize(midpoint=0.5*(vmin+vmax),vmin=vmin,vmax=vmax))\n x.cb.update_normal(x.image)\n x.save(outdir + '/' + str(gal.ds) + '_Projection_z_N_over_O_number_density.png')\n\n if 'logNO' in fields:\n vmin, vmax = -2, 0.25\n x = proj.plots['logNO']\n x.image.set_norm( MidpointNormalize(midpoint= 0.0, vmin=vmin,vmax=vmax))\n x.cb.set_norm(MidpointNormalize(midpoint=0.0, vmin=vmin,vmax=vmax))\n x.cb.update_normal(x.image)\n x.save(outdir + '/' + str(gal.ds) + '_Projection_z_logNO_number_density.png')\n\n del(proj)\n del(gal)\n\n return", "def draw_star(x=0,y=0,radius=10):\n cx = x\n cy = y+radius\n bx = cx * math.cos(2*math.pi/3) - ( cy * math.sin(2*math.pi/3) )\n by = cx * math.sin(2*math.pi/3) + ( cy * math.cos(2*math.pi/3) )\n ax = cx * math.cos(4*math.pi/3) - ( cy * math.sin(4*math.pi/3) )\n ay = cx * math.sin(4*math.pi/3) + ( cy * math.cos(4*math.pi/3) )\n my_turtle.penup()\n my_turtle.goto(cx, cy)\n my_turtle.pendown()\n my_turtle.goto(bx, by)\n my_turtle.goto(ax, ay)\n my_turtle.goto(cx, cy)\n my_turtle.penup()\n cy = y-radius\n bx = cx * math.cos(2*math.pi/3) - ( cy * math.sin(2*math.pi/3) )\n by = cx * math.sin(2*math.pi/3) + ( cy * math.cos(2*math.pi/3) )\n ax = cx * math.cos(4*math.pi/3) - ( cy * math.sin(4*math.pi/3) )\n ay = cx * math.sin(4*math.pi/3) + ( cy * math.cos(4*math.pi/3) )\n my_turtle.penup()\n my_turtle.goto(cx, cy)\n my_turtle.pendown()\n my_turtle.goto(bx, by)\n my_turtle.goto(ax, ay)\n my_turtle.goto(cx, cy)\n my_turtle.penup()", "def _sig_stars(val):\n star = \"\"\n if 0 <= val < 0.001:\n star = \"***\"\n elif 0.001 <= val < 0.01:\n star = \"**\"\n elif 0.01 <= val < 0.05:\n star = \"*\"\n elif 0.05 <= val < 0.1:\n star = \".\"\n return star", "def create_star(rk_settings, screen, stars, star_number, row_number):\r\n\tstar = Star(rk_settings, screen)\r\n\tstar_width = star.rect.width\r\n\tstar.x = star_width + 2 * star_width * star_number\r\n\tstar.rect.x = star.x\r\n\tstar.rect.y = star.rect.height + 2 * star.rect.height * row_number\r\n\tstars.add(star)", "def dyn_flareplots(df, folderpath, dyn_list, itype, flare_template = False):\n os.makedirs(folderpath, exist_ok = True)\n colors_auld = ['#800000', '#860000', '#8c0000', '#930000', '#990000', '#9f0000', '#a60000', '#ac0000', '#b20000', '#b90000', '#bf0000', '#c50000', '#cc0000', '#d20000', '#d80000', '#df0000', '#e50000', '#eb0000', '#f20000', '#f80000', '#ff0000', '#ff0700', '#ff0e00', '#ff1500', '#ff1c00', '#ff2300', 
'#ff2a00', '#ff3100', '#ff3800', '#ff3f00', '#ff4600', '#ff4d00', '#ff5400', '#ff5b00', '#ff6200', '#ff6900', '#ff7000', '#ff7700', '#ff7e00', '#ff8500', '#ff8c00', '#ff9100', '#ff9700', '#ff9d00', '#ffa300', '#ffa800', '#ffae00', '#ffb400', '#ffba00', '#ffbf00', '#ffc500', '#ffcb00', '#ffd100', '#ffd600', '#ffdc00', '#ffe200', '#ffe800', '#ffed00', '#fff300', '#fff900', '#ffff00', '#f2ff00', '#e5ff00', '#d8ff00', '#ccff00', '#bfff00', '#b2ff00', '#a5ff00', '#99ff00', '#8cff00', '#7fff00', '#72ff00', '#66ff00', '#59ff00', '#4cff00', '#3fff00', '#33ff00', '#26ff00', '#19ff00', '#0cff00', '#00ff00', '#0afc0a', '#15fa15', '#1ff81f', '#2af62a', '#34f434', '#3ff13f', '#49ef49', '#54ed54', '#5eeb5e', '#69e969', '#74e674', '#7ee47e', '#89e289', '#93e093', '#9ede9e', '#a8dba8', '#b3d9b3', '#bdd7bd', '#c8d5c8', '#d3d3d3']\n colors_ylorrd = ['#800026', '#850026', '#8a0026', '#8f0026', '#940026', '#990026', '#9e0026', '#a30026', '#a80026', '#ad0026', '#b20026', '#b70026', '#bd0026', '#c00225', '#c30424', '#c60623', '#c90822', '#cc0a21', '#d00d21', '#d30f20', '#d6111f', '#d9131e', '#dc151d', '#df171c', '#e31a1c', '#e51e1d', '#e7221e', '#e9271f', '#eb2b20', '#ed2f21', '#ef3423', '#f13824', '#f33c25', '#f54126', '#f74527', '#f94928', '#fc4e2a', '#fc532b', '#fc582d', '#fc5d2e', '#fc6330', '#fc6831', '#fc6d33', '#fc7234', '#fc7836', '#fc7d37', '#fc8239', '#fc873a', '#fd8d3c', '#fd903d', '#fd933e', '#fd9640', '#fd9941', '#fd9c42', '#fd9f44', '#fda245', '#fda546', '#fda848', '#fdab49', '#fdae4a', '#feb24c', '#feb54f', '#feb853', '#febb56', '#febf5a', '#fec25d', '#fec561', '#fec864', '#fecc68', '#fecf6b', '#fed26f', '#fed572', '#fed976', '#feda79', '#fedc7d', '#fede80', '#fedf84', '#fee187', '#fee38b', '#fee48e', '#fee692', '#fee895', '#fee999', '#feeb9c', '#ffeda0', '#ffeea3', '#fff0a7', '#fff1ab', '#fff3ae', '#fff4b2', '#fff6b6', '#fff7b9', '#fff9bd', '#fffac1', '#fffcc4', '#fffdc8', '#ffffcc']\n colors_inferno = ['#000003', '#000004', '#000006', '#010007', '#010109', '#01010B', '#02010E', '#020210', '#030212', '#040314', '#040316', '#050418', '#06041B', '#07051D', '#08061F', '#090621', '#0A0723', '#0B0726', '#0D0828', '#0E082A', '#0F092D', '#10092F', '#120A32', '#130A34', '#140B36', '#160B39', '#170B3B', '#190B3E', '#1A0B40', '#1C0C43', '#1D0C45', '#1F0C47', '#200C4A', '#220B4C', '#240B4E', '#260B50', '#270B52', '#290B54', '#2B0A56', '#2D0A58', '#2E0A5A', '#300A5C', '#32095D', '#34095F', '#350960', '#370961', '#390962', '#3B0964', '#3C0965', '#3E0966', '#400966', '#410967', '#430A68', '#450A69', '#460A69', '#480B6A', '#4A0B6A', '#4B0C6B', '#4D0C6B', '#4F0D6C', '#500D6C', '#520E6C', '#530E6D', '#550F6D', '#570F6D', '#58106D', '#5A116D', '#5B116E', '#5D126E', '#5F126E', '#60136E', '#62146E', '#63146E', '#65156E', '#66156E', '#68166E', '#6A176E', '#6B176E', '#6D186E', '#6E186E', '#70196E', '#72196D', '#731A6D', '#751B6D', '#761B6D', '#781C6D', '#7A1C6D', '#7B1D6C', '#7D1D6C', '#7E1E6C', '#801F6B', '#811F6B', '#83206B', '#85206A', '#86216A', '#88216A', '#892269', '#8B2269', '#8D2369', '#8E2468', '#902468', '#912567', '#932567', '#952666', '#962666', '#982765', '#992864', '#9B2864', '#9C2963', '#9E2963', '#A02A62', '#A12B61', '#A32B61', '#A42C60', '#A62C5F', '#A72D5F', '#A92E5E', '#AB2E5D', '#AC2F5C', '#AE305B', '#AF315B', '#B1315A', '#B23259', '#B43358', '#B53357', '#B73456', '#B83556', '#BA3655', '#BB3754', '#BD3753', '#BE3852', '#BF3951', '#C13A50', '#C23B4F', '#C43C4E', '#C53D4D', '#C73E4C', '#C83E4B', '#C93F4A', '#CB4049', '#CC4148', '#CD4247', '#CF4446', '#D04544', '#D14643', '#D24742', '#D44841', 
'#D54940', '#D64A3F', '#D74B3E', '#D94D3D', '#DA4E3B', '#DB4F3A', '#DC5039', '#DD5238', '#DE5337', '#DF5436', '#E05634', '#E25733', '#E35832', '#E45A31', '#E55B30', '#E65C2E', '#E65E2D', '#E75F2C', '#E8612B', '#E9622A', '#EA6428', '#EB6527', '#EC6726', '#ED6825', '#ED6A23', '#EE6C22', '#EF6D21', '#F06F1F', '#F0701E', '#F1721D', '#F2741C', '#F2751A', '#F37719', '#F37918', '#F47A16', '#F57C15', '#F57E14', '#F68012', '#F68111', '#F78310', '#F7850E', '#F8870D', '#F8880C', '#F88A0B', '#F98C09', '#F98E08', '#F99008', '#FA9107', '#FA9306', '#FA9506', '#FA9706', '#FB9906', '#FB9B06', '#FB9D06', '#FB9E07', '#FBA007', '#FBA208', '#FBA40A', '#FBA60B', '#FBA80D', '#FBAA0E', '#FBAC10', '#FBAE12', '#FBB014', '#FBB116', '#FBB318', '#FBB51A', '#FBB71C', '#FBB91E', '#FABB21', '#FABD23', '#FABF25', '#FAC128', '#F9C32A', '#F9C52C', '#F9C72F', '#F8C931', '#F8CB34', '#F8CD37', '#F7CF3A', '#F7D13C', '#F6D33F', '#F6D542', '#F5D745', '#F5D948', '#F4DB4B', '#F4DC4F', '#F3DE52', '#F3E056', '#F3E259', '#F2E45D', '#F2E660', '#F1E864', '#F1E968', '#F1EB6C', '#F1ED70', '#F1EE74', '#F1F079', '#F1F27D', '#F2F381', '#F2F485', '#F3F689', '#F4F78D', '#F5F891', '#F6FA95', '#F7FB99', '#F9FC9D', '#FAFDA0', '#FCFEA4']\n colors_magma = ['#000003', '#000004', '#000006', '#010007', '#010109', '#01010B', '#02020D', '#02020F', '#030311', '#040313', '#040415', '#050417', '#060519', '#07051B', '#08061D', '#09071F', '#0A0722', '#0B0824', '#0C0926', '#0D0A28', '#0E0A2A', '#0F0B2C', '#100C2F', '#110C31', '#120D33', '#140D35', '#150E38', '#160E3A', '#170F3C', '#180F3F', '#1A1041', '#1B1044', '#1C1046', '#1E1049', '#1F114B', '#20114D', '#221150', '#231152', '#251155', '#261157', '#281159', '#2A115C', '#2B115E', '#2D1060', '#2F1062', '#301065', '#321067', '#341068', '#350F6A', '#370F6C', '#390F6E', '#3B0F6F', '#3C0F71', '#3E0F72', '#400F73', '#420F74', '#430F75', '#450F76', '#470F77', '#481078', '#4A1079', '#4B1079', '#4D117A', '#4F117B', '#50127B', '#52127C', '#53137C', '#55137D', '#57147D', '#58157E', '#5A157E', '#5B167E', '#5D177E', '#5E177F', '#60187F', '#61187F', '#63197F', '#651A80', '#661A80', '#681B80', '#691C80', '#6B1C80', '#6C1D80', '#6E1E81', '#6F1E81', '#711F81', '#731F81', '#742081', '#762181', '#772181', '#792281', '#7A2281', '#7C2381', '#7E2481', '#7F2481', '#812581', '#822581', '#842681', '#852681', '#872781', '#892881', '#8A2881', '#8C2980', '#8D2980', '#8F2A80', '#912A80', '#922B80', '#942B80', '#952C80', '#972C7F', '#992D7F', '#9A2D7F', '#9C2E7F', '#9E2E7E', '#9F2F7E', '#A12F7E', '#A3307E', '#A4307D', '#A6317D', '#A7317D', '#A9327C', '#AB337C', '#AC337B', '#AE347B', '#B0347B', '#B1357A', '#B3357A', '#B53679', '#B63679', '#B83778', '#B93778', '#BB3877', '#BD3977', '#BE3976', '#C03A75', '#C23A75', '#C33B74', '#C53C74', '#C63C73', '#C83D72', '#CA3E72', '#CB3E71', '#CD3F70', '#CE4070', '#D0416F', '#D1426E', '#D3426D', '#D4436D', '#D6446C', '#D7456B', '#D9466A', '#DA4769', '#DC4869', '#DD4968', '#DE4A67', '#E04B66', '#E14C66', '#E24D65', '#E44E64', '#E55063', '#E65162', '#E75262', '#E85461', '#EA5560', '#EB5660', '#EC585F', '#ED595F', '#EE5B5E', '#EE5D5D', '#EF5E5D', '#F0605D', '#F1615C', '#F2635C', '#F3655C', '#F3675B', '#F4685B', '#F56A5B', '#F56C5B', '#F66E5B', '#F6705B', '#F7715B', '#F7735C', '#F8755C', '#F8775C', '#F9795C', '#F97B5D', '#F97D5D', '#FA7F5E', '#FA805E', '#FA825F', '#FB8460', '#FB8660', '#FB8861', '#FB8A62', '#FC8C63', '#FC8E63', '#FC9064', '#FC9265', '#FC9366', '#FD9567', '#FD9768', '#FD9969', '#FD9B6A', '#FD9D6B', '#FD9F6C', '#FDA16E', '#FDA26F', '#FDA470', '#FEA671', '#FEA873', '#FEAA74', '#FEAC75', 
'#FEAE76', '#FEAF78', '#FEB179', '#FEB37B', '#FEB57C', '#FEB77D', '#FEB97F', '#FEBB80', '#FEBC82', '#FEBE83', '#FEC085', '#FEC286', '#FEC488', '#FEC689', '#FEC78B', '#FEC98D', '#FECB8E', '#FDCD90', '#FDCF92', '#FDD193', '#FDD295', '#FDD497', '#FDD698', '#FDD89A', '#FDDA9C', '#FDDC9D', '#FDDD9F', '#FDDFA1', '#FDE1A3', '#FCE3A5', '#FCE5A6', '#FCE6A8', '#FCE8AA', '#FCEAAC', '#FCECAE', '#FCEEB0', '#FCF0B1', '#FCF1B3', '#FCF3B5', '#FCF5B7', '#FBF7B9', '#FBF9BB', '#FBFABD', '#FBFCBF']\n colors_ylgnbl = ['#081d58', '#0a1e5d', '#0c2062', '#0f2267', '#11246c', '#142671', '#162876', '#182a7b', '#1b2c80', '#1d2e85', '#20308a', '#22328f', '#253494', '#243795', '#243b97', '#243e99', '#24429a', '#23459c', '#23499e', '#234c9f', '#2350a1', '#2253a3', '#2257a4', '#225aa6', '#225ea8', '#2162aa', '#2166ac', '#206aae', '#206fb0', '#1f73b2', '#1f77b4', '#1f7bb6', '#1e80b8', '#1e84ba', '#1d88bc', '#1d8cbe', '#1d91c0', '#2094c0', '#2397c0', '#269ac1', '#299dc1', '#2ca0c1', '#2fa3c2', '#32a6c2', '#35a9c2', '#38acc3', '#3bafc3', '#3eb2c3', '#41b6c4', '#46b7c3', '#4bb9c2', '#50bbc1', '#55bdc1', '#5abfc0', '#60c1bf', '#65c3be', '#6ac5be', '#6fc7bd', '#74c9bc', '#79cbbb', '#7fcdbb', '#85cfba', '#8bd1b9', '#91d4b9', '#97d6b8', '#9dd8b8', '#a3dbb7', '#a9ddb6', '#afdfb6', '#b5e2b5', '#bbe4b5', '#c1e6b4', '#c7e9b4', '#caeab3', '#cdebb3', '#d0ecb3', '#d3eeb3', '#d6efb2', '#daf0b2', '#ddf1b2', '#e0f3b2', '#e3f4b1', '#e6f5b1', '#e9f6b1', '#edf8b1', '#eef8b4', '#f0f9b7', '#f1f9bb', '#f3fabe', '#f4fac1', '#f6fbc5', '#f7fcc8', '#f9fccb', '#fafdcf', '#fcfdd2', '#fdfed5', '#ffffd9']\n colors_grorrd = ['#800026', '#850026', '#8a0026', '#8f0026', '#940026', '#990026', '#9e0026', '#a30026', '#a80026', '#ad0026', '#b20026', '#b70026', '#bd0026', '#c00225', '#c30424', '#c60623', '#c90822', '#cc0a21', '#d00d21', '#d30f20', '#d6111f', '#d9131e', '#dc151d', '#df171c', '#e31a1c', '#e51e1d', '#e7221e', '#e9271f', '#eb2b20', '#ed2f21', '#ef3423', '#f13824', '#f33c25', '#f54126', '#f74527', '#f94928', '#fc4e2a', '#fc532b', '#fc582d', '#fc5d2e', '#fc6330', '#fc6831', '#fc6d33', '#fc7234', '#fc7836', '#fc7d37', '#fc8239', '#fc873a', '#fd8d3c', '#fd903d', '#fd933e', '#fd9640', '#fd9941', '#fd9c42', '#fd9f44', '#fda245', '#fda546', '#fda848', '#fdab49', '#fdae4a', '#feb24c', '#feb54f', '#feb853', '#febb56', '#febf5a', '#fec25d', '#fec561', '#fec864', '#fecc68', '#fecf6b', '#fed26f', '#fed572', '#fed976', '#feda79', '#fedc7d', '#fede80', '#fedf84', '#fee187', '#fee38b', '#fee48e', '#fee692', '#fee895', '#fee999', '#feeb9c', '#ffeda0', '#fbeaa4', '#f7e8a8', '#f4e6ac', '#f0e4b1', '#ece2b5', '#e9e0b9', '#e5ddbd', '#e1dbc2', '#ded9c6', '#dad7ca', '#d6d5ce', '#d3d3d3']\n colors = colors_grorrd\n for dyn in dyn_list:\n\n # Select top interactions based on its mean frequency. 
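# (Editor's note, a sketch of the color lookup below assuming the frequency
# column holds percentages in [0, 100]: with color_len = len(colors) - 1,
# x = 100 maps to colors[0] (dark red) and x = 0 maps to colors[-1] (light
# grey), i.e. the list is indexed in reverse so frequent contacts get hot colors.)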
Also assign color based on mean value\n color_len = len(colors) -1\n df_clust = df.filter(items = [dyn, 'APosition1', 'APosition2', 'BPosition1', 'BPosition2','CPosition1', 'CPosition2','FPosition1', 'FPosition2',])\n df_clust['color'] = df_clust[dyn].apply(lambda x: colors[color_len-round(x*color_len/100)]) #There are 101 colors available in the list\n\n #Filter top 20 in df_clust\n df_clust = df_clust.nlargest(20, dyn)\n\n # 'Edge' entry for json file\n df_dict = pd.DataFrame(columns = [\"name1\", \"name2\", \"frames\"])\n df_dict['name1'] = df_clust['APosition1'] \n df_dict['name2'] = df_clust['APosition2']\n df_dict['frames'] = [[1]]*len(df_dict)\n df_dict['color'] = df_clust['color']\n df_dict['value'] = df_clust[dyn]\n edges = df_dict.to_dict(orient=\"records\")\n\n # Appending edges to flare plot template, if any submitted\n if flare_template:\n flare_template['edges'] = edges\n jsondict = flare_template\n else:\n jsondict = { 'edges' : edges }\n\n #'Edge' multi-entries, based on the 4 GPCR nomenclatures\n for letter in ['A', 'B', 'C', 'F']:\n df_dict = pd.DataFrame(columns = [\"name1\", \"name2\", \"frames\"])\n df_dict['name1'] = df_clust[letter+'Position1'] \n df_dict['name2'] = df_clust[letter+'Position2']\n df_dict['frames'] = [[1]]*len(df_dict)\n df_dict['color'] = df_clust['color']\n df_dict['value'] = df_clust[dyn]\n letter_edges = df_dict.to_dict(orient=\"records\")\n\n #Appending edges (update the dict in place so earlier letters are not overwritten)\n if flare_template:\n flare_template[letter+'edges'] = letter_edges\n jsondict = flare_template\n else:\n jsondict[letter+'edges'] = letter_edges\n\n #Writing json\n jsonpath = folderpath + dyn + \"_top.json\"\n with open(jsonpath, 'w') as jsonfile:\n dump(jsondict, jsonfile, ensure_ascii=False, indent = 4)", "def planetStardata():\n return planetAndStar()", "def find_constant_stars(xmatch, phot_data, log):\n\n # Identify the overall primary reference dataset:\n ref_dset_idx = np.where(xmatch.datasets['primary_ref'] == 1)[0]\n ref_datacode = xmatch.datasets['dataset_code'][ref_dset_idx]\n\n # Fetch the indices of the images from this dataset in the photometry table\n image_index = np.where(xmatch.images['dataset_code'] == ref_datacode)[0]\n\n # Extract the timeseries photometry for this dataset:\n (mag_col, merr_col) = field_photometry.get_field_photometry_columns('corrected')\n ref_phot = np.zeros((phot_data.shape[0],len(image_index),2))\n ref_phot[:,:,0] = phot_data[:,image_index,mag_col]\n ref_phot[:,:,1] = phot_data[:,image_index,merr_col]\n\n # Evaluate the photometric scatter of all stars, and select those with\n # the lowest scatter for the brightest quartile of stars.\n (mean_mag, mean_magerr) = calc_weighted_mean_no_qc(ref_phot)\n rms = calc_weighted_rms_no_qc(ref_phot, mean_mag)\n\n # Function identifies constant stars as those with an RMS in the lowest\n # 1 - 25% of the set.
 This excludes both stars with high scatter and those\n # with artificially low scatter due to having few measurements.\n rms_range = rms.max() - rms.min()\n min_cut = rms.min() + (rms_range)*0.01 # lower cut at 1% of the range, per the comment above\n max_cut = rms.min() + (rms_range)*0.25\n\n constant_stars = np.where((rms >= min_cut) & (rms <= max_cut))[0]\n\n log.info('Identified '+str(len(constant_stars))\n +' stars with RMS between '+str(round(min_cut,3))+' and '+str(round(max_cut,3))\n +'mag to use for the normalization')\n\n return constant_stars", "def plot_params(variable,context, custom_plot_params=None) :\n\n defaults = { \n 'contours' : 1 ,\n 'color' :'temp_19lev',\n }\n\n per_variable = {}\n # --> Adding the default plot params\n per_variable.update(atmos_plot_params.dict_plot_params)\n per_variable.update(ocean_plot_params.dict_plot_params)\n if centerspecs : \n # --> Then, add the plot params specific to the centers\n per_variable.update(atmos_plot_params_centerspecs.dict_plot_params)\n per_variable.update(ocean_plot_params_centerspecs.dict_plot_params)\n # --> If needed, adding a custom dictionary of plot params\n if custom_plot_params:\n per_variable.update(custom_plot_params)\n #\n rep=defaults.copy()\n if variable in per_variable : \n var_entry=per_variable[variable]\n for cont in [ 'default', context ] :\n if cont in var_entry : rep.update(var_entry[cont])\n return rep", "def background(self):\n sun = graphics.Circle(graphics.Point(200, 310), 50)\n sun.setFill('yellow')\n sun.draw(self.win)\n \n earth = graphics.Circle(graphics.Point(40, 250), 30)\n earth.setFill('blue')\n earth.draw(self.win)\n continent = graphics.Circle(graphics.Point(30, 265), 10)\n continent.setFill('green')\n continent.draw(self.win)\n cont_2 = graphics.Circle(graphics.Point(30, 235), 10)\n cont_2.setFill('green')\n cont_2.draw(self.win)\n cont_3 = graphics.Circle(graphics.Point(55, 245), 10)\n cont_3.setFill('green')\n cont_3.draw(self.win)\n \n stars = graphics.Circle(graphics.Point(250, 250), 5)\n stars.setFill('white')\n stars.draw(self.win)\n star1 = graphics.Circle(graphics.Point(100, 250), 5)\n star1.setFill('white')\n star1.draw(self.win)\n star2 = graphics.Circle(graphics.Point(150, 150), 5)\n star2.setFill('white')\n star2.draw(self.win)\n star3 = graphics.Circle(graphics.Point(50, 100), 5)\n star3.setFill('white')\n star3.draw(self.win)\n star3 = graphics.Circle(graphics.Point(100, 50), 5)\n star3.setFill('white')\n star3.draw(self.win)\n star4 = graphics.Circle(graphics.Point(250, 80), 5)\n star4.setFill('white')\n star4.draw(self.win)\n star4 = graphics.Circle(graphics.Point(200, 60), 5)\n star4.setFill('white')\n star4.draw(self.win)", "def generate_var_scatter(self):\n pass", "def starsProject(stars, visit):\n names=['x','y','radius']\n types=[float,float,float]\n xtemp,ytemp = gnomonic_project_toxy(np.radians(stars['ra']),np.radians(stars['decl']),\n visit['ra'], visit['dec'])\n # Rotate the field using the visit rotSkyPos.
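# (Editor's sketch, assuming rotSkyPos is in radians: the two lines below apply
# the 2-D rotation x' = cos(t)*x + sin(t)*y, y' = -sin(t)*x + cos(t)*y, i.e. a
# clockwise rotation of the gnomonic-projected field by t = rotSkyPos.)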
Hope I got that sign right...\n # Hopefully this can be replaced with some cameraGeom stuff.\n sin_rot = np.sin(visit['rotSkyPos'])\n cos_rot = np.cos(visit['rotSkyPos'])\n stars['x'] = cos_rot*xtemp + sin_rot*ytemp\n stars['y'] = -1.*sin_rot*xtemp+cos_rot*ytemp\n\n # XXX-temp try not rotating\n #stars['x'] = xtemp\n #stars['y'] = ytemp\n\n stars['radius'] = (stars['x']**2+stars['y']**2)**0.5\n return stars", "def star_rating(table, record_id, splitstars=False):\n import uuid\n id = uuid.uuid4()\n row=db(db.plugin_wiki_rating.tablename==table)(db.plugin_wiki_rating.record_id==record_id).select().first()\n rating = row.rating if row else 0\n callback = URL('plugin_wiki', 'star_rate', args = [table,record_id])\n incr = 0.5 if splitstars else 1\n return TAG[''](DIV(_id='star'+str(id),_class='rating'),\n SCRIPT(\"jQuery(document).ready(function(){jQuery('%(uid)s').rating('%(callback)s',{increment:%(incr)s, maxvalue:5, curvalue:%(rating)s});});\" % dict(uid='#star'+str(id), callback=callback,incr=incr, rating=rating)))", "def _declaration_variable(self, node: ET.Element):\n # variable names\n variables_and_values = self.transform_all_subnodes(\n self.get_one(node, './variables'), skip_empty=True,\n ignored={'entity-decl-list__begin', 'entity-decl-list','attr-spec' })\n if not variables_and_values:\n _LOG.error('%s', ET.tostring(node).decode().rstrip())\n raise SyntaxError('at least one variable expected in variables list')\n variables = [var for var, _ in variables_and_values]\n # base type of variables\n base_type = self.transform_one(self.get_one(node, './type'))\n\n # dimensionality information (only for array types)\n dimensions_node = node.find('./dimensions')\n variable_dimensions = [getattr(var, 'fortran_metadata', {}).get('dimensions', None)\n for var in variables]\n has_variable_dimensions = any([_ is not None for _ in variable_dimensions])\n if has_variable_dimensions and not self._split_declarations:\n raise NotImplementedError('inline dimensions not implemented yet')\n if dimensions_node is not None and has_variable_dimensions:\n raise SyntaxError(\n 'declaration dimension data as well as per-variable dimension data present')\n if dimensions_node is not None:\n dimensions = self.transform_one(dimensions_node)\n assert len(dimensions) >= 1\n self.ensure_import('static_typing', 'st')\n annotation = make_st_ndarray(base_type, dimensions)\n annotations = [annotation for _ in variables]\n elif has_variable_dimensions:\n self.ensure_import('static_typing', 'st')\n annotations = [base_type if _ is None else make_st_ndarray(base_type, _)\n for _ in variable_dimensions]\n else:\n annotations = [base_type for _ in variables]\n\n # initial values\n if dimensions_node is not None:\n values = [None if val is None else make_numpy_constructor('array', val, base_type)\n for _, val in variables_and_values]\n elif has_variable_dimensions:\n assert len(variables_and_values) == len(variable_dimensions)\n values = [None if val is None\n else (val if dim is None else make_numpy_constructor('array', val, base_type))\n for (_, val), dim in zip(variables_and_values, variable_dimensions)]\n else:\n values = [val for _, val in variables_and_values]\n\n metadata = {'is_declaration': True}\n intent_node = node.find('./intent')\n if intent_node is not None:\n metadata['intent'] = intent_node.attrib['type']\n\n attributes = ('allocatable', 'asynchronous', 'external', 'intrinsic', 'optional',\n 'parameter', 'pointer', 'protected', 'save', 'target', 'value', 'volatile')\n for attribute in attributes:\n if 
node.find('./attribute-{}'.format(attribute)) is not None:\n metadata['is_{}'.format(attribute)] = True\n\n if metadata:\n metadata_node = horast_nodes.Comment(\n value=ast.Str(' Fortran metadata: {}'.format(repr(metadata))), eol=False)\n\n _handled = {'variables', 'type', 'dimensions', 'intent'}\n extra_results = self.transform_all_subnodes(node, ignored={\n 'type-declaration-stmt'} | _handled | {'attribute-{}'.format(_) for _ in attributes})\n if extra_results:\n _LOG.warning('ignoring additional information in the declaration:\\n%s', extra_results)\n\n if not self._split_declarations:\n raise NotImplementedError()\n assignments = [{\"name\":var, \"type\":ann, \"value\":val}\n for var, ann, val in zip(variables, annotations, values)]\n if metadata:\n new_assignments = []\n for assignment in assignments:\n assignment.update({\"metadata\":metadata})\n new_assignments.append(assignment)\n new_assignments.append(metadata_node)\n assignments = new_assignments\n\n return assignments", "def line_SFR(**kwargs):\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n if p.line == 'CO(2-1)': p.select = 'Zsfr'\n\n GR = glo.global_results(sim_run=p.sim_run,nGal=p.nGal)\n \n marker = 'o'\n if p.sim_run == p.sim_runs[0]: marker = '^'\n\n L_line = getattr(GR,'L_'+p.line+'_sun')#[380:400]#[0:100]\n SFR = getattr(GR,'SFR')#[380:400]#[0:100]\n M_star = getattr(GR,'M_star')#[380:400]#[0:100]\n # G0_mw = getattr(GR,'F_FUV_mw')#[0:100]\n Zsfr = getattr(GR,'Zsfr')#[380:400]#[0:100]\n R_gas = getattr(GR,'R2_gas')#[380:400]#[0:100]\n M_H2 = getattr(GR,'M_H2_R2_gas')#[380:400]#[0:100]\n # if 'CO' in p.line: p.select = 'Sigma_M_H2'\n\n # Take only MS galaxies?\n if p.MS == True:\n indices = aux.select_salim18(GR.M_star,GR.SFR)\n L_line = L_line[indices]\n SFR = SFR[indices]\n M_star = M_star[indices]\n Zsfr = Zsfr[indices]\n R_gas = R_gas[indices]\n M_H2 = M_H2[indices]\n print('With MS selection criteria: only %i galaxies' % (len(L_line)))\n\n # Just selection of galaxies\n #SFR = SFR[0:10]\n #Zsfr = Zsfr[0:10]\n #R_gas = R_gas[0:10]\n #M_H2 = M_H2[0:10]\n #L_line = L_line[0:10]\n #M_star = M_star[0:10]\n\n SFR = SFR[L_line > 0]\n M_star = M_star[L_line > 0]\n Zsfr = Zsfr[L_line > 0]\n R_gas = R_gas[L_line > 0]\n M_H2 = M_H2[L_line > 0]\n L_line = L_line[L_line > 0]\n print('%i data points ' % (len(L_line)))\n\n lSFR = np.log10(SFR)\n lL_line = np.log10(L_line)\n\n\n # plt.plot(np.log10(M_star),np.log10(SFR),'o')\n # s = aseg\n\n labs = {'_100Mpc_M10':'Mach=10 power-law',\\\n '_100Mpc_arepoPDF_CMZ':'SIGAME v3',\\\n '_25Mpc_arepoPDF_M51':'SIGAME v3 (Simba-25)',\\\n '_100Mpc_arepoPDF_M51':'SIGAME v3 (Simba-100)'}\n lab = labs[p.sim_run+p.table_ext]\n\n if p.add:\n ax = p.ax\n else:\n fig,ax = plt.subplots(figsize=(8,6))\n\n if p.select == 'Sigma_M_H2':\n Sigma_M_H2 = M_H2/(np.pi*R_gas**2)/1e6 # per pc^-2\n m = ax.scatter(lSFR[np.argsort(Sigma_M_H2)],lL_line[np.argsort(Sigma_M_H2)],marker=marker,s=14,\\\n c=np.log10(Sigma_M_H2[np.argsort(Sigma_M_H2)]),vmin=-2.5,vmax=2.2,label=lab,alpha=0.5,zorder=10)\n p.vmin = np.log10(Sigma_M_H2.min())\n p.vmax = np.log10(Sigma_M_H2.max())\n if p.cb:\n cbar = plt.colorbar(m,ax=ax)\n cbar.set_label(label=r'log $\\Sigma_{H2}$ [M$_{\\odot}$/pc$^2$]',size=15)\n if p.select == 'M_star':\n m = ax.scatter(lSFR[np.argsort(M_star)],lL_line[np.argsort(M_star)],marker=marker,s=8,\\\n c=np.log10(M_star[np.argsort(M_star)]),vmin=-2.5,vmax=2.2,label=lab,alpha=0.5,zorder=10)\n # Just one galaxy\n # m = ax.scatter(lSFR,lL_line,marker=marker,s=15,\\\n # 
c=np.log10(Sigma_M_H2),vmin=-2.5,vmax=2.2,label=lab,alpha=1,zorder=10)\n if p.cb:\n cbar = plt.colorbar(m,ax=ax)\n cbar.set_label(label=r'log $M_{star}$ [M$_{\\odot}$]',size=15)\n if p.select == 'Zsfr':\n print('min and max Zsfr in sims: ',Zsfr.min(),Zsfr.max())\n p.vmin = np.log10(0.01)\n p.vmax = np.log10(3)\n m = ax.scatter(lSFR,lL_line,marker=marker,s=20,\\\n c=np.log10(Zsfr),label=lab,alpha=0.6,zorder=10,vmin=p.vmin,vmax=p.vmax)\n if p.cb:\n cbar = plt.colorbar(m,ax=ax)\n cbar.set_label(label=r'log $\\langle Z\\rangle_{\\mathrm{SFR}}$ [Z$_{\\odot}$]',size=15)\n if p.select == 'F_FUV_mw':\n m = ax.scatter(lSFR,lL_line,marker=marker,s=20,\\\n c=np.log10(G0_mw),label=lab,alpha=0.6,zorder=10)\n if p.cb:\n cbar = plt.colorbar(m,ax=ax)\n cbar.set_label(label='log ' + getlabel('G0'),size=15)\n if p.select == 'f_HII':\n f_HII[f_HII == 0] = np.min(f_HII[f_HII > 0])\n m = ax.scatter(lSFR[np.argsort(f_HII)],lL_line[np.argsort(f_HII)],marker=marker,s=20,\\\n c=np.log10(f_HII[np.argsort(f_HII)]),label=lab,alpha=0.6,zorder=10)\n if p.cb:\n cbar = plt.colorbar(m,ax=ax)\n cbar.set_label(label='log HII region fraction',size=15)\n\n\n # Label galaxies?\n # for i in range(len(SFR)):\n # if SFR[i] > 0:\n # ax.text(SFR[i],L_line[i],'G%i' % GR.gal_num[i],fontsize=7)\n\n if p.add_obs:\n if (p.select == 'Zsfr') | (p.select == 'Sigma_M_H2'): \n add_line_SFR_obs(p.line,L_line,ax,select=p.select,vmin=p.vmin,vmax=p.vmax)\n else:\n add_line_SFR_obs(p.line,L_line,ax,select=p.select)\n\n ax.set_xlabel('log ' + getlabel('SFR'))\n ax.set_ylabel('log ' + getlabel(p.line))\n handles,labels = ax.get_legend_handles_labels()\n handles = np.flip(handles)\n labels = np.flip(labels)\n if ('CO' in p.line) | ('[OI]' in p.line): \n ax.legend(handles,labels,loc='upper left',fontsize=7,frameon=True,framealpha=0.5)\n else:\n ax.legend(handles,labels,loc='lower right',fontsize=7,frameon=True,framealpha=0.5)\n if not p.xlim: p.xlim = np.array([-3,4])\n if not p.ylim: \n p.ylim = [np.median(lL_line) - 5,np.median(lL_line) + 3]\n if p.line == '[OI]63': p.ylim = [np.median(lL_line) - 5,np.median(lL_line) + 4]\n if 'CO' in p.line: p.ylim = [np.median(lL_line) - 4,np.median(lL_line) + 4]\n\n ax.set_xlim(p.xlim)\n ax.set_ylim(p.ylim)\n ax.grid(ls='--')\n\n if p.savefig & (not p.add):\n if not os.path.isdir(p.d_plot + 'luminosity/'): os.mkdir(p.d_plot + 'luminosity/') \n plt.savefig(p.d_plot + 'luminosity/%s_SFR.png' % p.line, format='png', dpi=300)", "def simbad_brightstars(image_file=\"../nro_maps/12CO_20161002_FOREST-BEARS_spheroidal_xyb_grid7.5_0.099kms.fits\",\n brighter_than='G0', extra_criteria=\"(ra < 84.4 | dec < -6.66)\", otypes=\"Star\",\n replace_ra='hourangle', replace_dec='deg', add_sptype_letter_column=True,\n output=None, output_format='fits'):\n try:\n wcs = WCS(image_file).celestial #Drop non-celestial axes (like velocity and stokes). 
\n except:\n raise ValueError(\"image_file must be a fits image or cube with wcs in header.\")\n\n footprint = wcs.calc_footprint()\n\n \n ### ra_min/max, dec_min/max need to be in degrees.\n ### In the fits headers I have they are, but this may not always be true.\n ###\n ra_min, ra_max = footprint[:,0].min(), footprint[:,0].max()\n dec_min, dec_max = footprint[:,1].min(), footprint[:,1].max()\n\n s = Simbad()\n s.add_votable_fields('sptype')\n\n if extra_criteria:\n stars = s.query_criteria(\"ra > {} & ra < {} & dec > {} & dec < {} & sptypes < {} & {}\".format(\n ra_min, ra_max, dec_min, dec_max, brighter_than, extra_criteria), otypes=otypes)\n else:\n stars = s.query_criteria(\"ra > {} & ra < {} & dec > {} & dec < {} & sptypes < {}\".format(\n ra_min, ra_max, dec_min, dec_max, brighter_than), otypes=otypes)\n\n stars_coord = coord.SkyCoord(stars['RA'], stars['DEC'], unit=(u.hourangle, u.deg))\n\n if replace_ra:\n stars.replace_column('RA', Column(stars_coord.ra, name='RA', unit=replace_ra))\n if replace_dec:\n stars.replace_column('DEC', Column(stars_coord.dec, name='DEC', unit=replace_dec))\n\n if add_sptype_letter_column:\n stars.add_column(Column([sptype[0] for sptype in stars['SP_TYPE'].astype('str')], name='SP_LETTER', unit='str'))\n\n if output:\n stars.write(output, format=output_format)\n else:\n return stars", "def getSolRatioTimes( self, var, type = \"all\" ):\n\n values = self.getSolRatioData( var, type )\n return values[1]", "def line_sSFR(**kwargs):\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n GR = glo.global_results()\n \n L_line = getattr(GR,'L_'+p.line+'_sun')#[0:100]\n SFR = getattr(GR,'SFR')#[0:100]\n Zsfr = getattr(GR,'Zsfr')#[0:100]\n R_gas = getattr(GR,'R2_gas')#[0:100]\n M_H2 = getattr(GR,'M_H2_R2_gas')#[0:100]\n M_star = getattr(GR,'M_star')#[0:100]\n\n # Take only MS galaxies?\n if p.select == '_MS':\n indices = aux.select_salim18(GR.M_star,GR.SFR)\n L_line = L_line[indices]\n SFR = SFR[indices]\n Zsfr = Zsfr[indices]\n R_gas = R_gas[indices] # also filter the remaining arrays so they stay aligned\n M_H2 = M_H2[indices]\n M_star = M_star[indices]\n print('With MS selection criteria: only %i galaxies' % (len(L_line)))\n\n SFR = SFR[L_line > 0]\n Zsfr = Zsfr[L_line > 0]\n R_gas = R_gas[L_line > 0]\n M_H2 = M_H2[L_line > 0]\n M_star = M_star[L_line > 0]\n sSFR = SFR/M_star\n L_line = L_line[L_line > 0]\n\n print('%i data points ' % (len(L_line)))\n\n labs = {'_M10':'Mach=10 power-law',\\\n '_arepoPDF_dim':'AREPO parametric PDF with extinction',\\\n '_arepoPDF':'AREPO parametric PDF'}\n lab = labs[p.table_ext]\n\n if p.add:\n ax = p.ax\n else:\n fig,ax = plt.subplots(figsize=(8,6))\n\n if p.select == 'Sigma_M_H2':\n Sigma_M_H2 = M_H2/(np.pi*R_gas**2)\n m = ax.scatter(sSFR[np.argsort(Sigma_M_H2)],L_line[np.argsort(Sigma_M_H2)],marker='o',s=20,\\\n c=np.log10(Sigma_M_H2[np.argsort(Sigma_M_H2)]),vmin=3.5,label=lab,alpha=0.6,zorder=10)\n cbar = plt.colorbar(m,ax=ax)\n cbar.set_label(label=r'log $\Sigma_{H2}$ [M$_{\odot}$/kpc$^2$]',size=15)\n else:\n m = ax.scatter(sSFR,L_line,marker='o',s=20,\\\n c=Zsfr,label=lab,alpha=0.6,zorder=10)\n cbar = plt.colorbar(m,ax=ax)\n cbar.set_label(label=r'$\langle Z\rangle_{\mathrm{SFR}}$ [Z$_{\odot}$]',size=15)\n\n if p.add_obs:\n add_line_sSFR_obs(p.line,L_line,ax,select=p.select)\n\n ax.set_xscale('log')\n ax.set_yscale('log')\n ax.set_xlabel(getlabel('sSFR'))\n ax.set_ylabel(getlabel(p.line))\n handles,labels = ax.get_legend_handles_labels()\n handles = np.flip(handles)\n labels = np.flip(labels)\n # ax.legend(handles,labels,loc='upper left',fontsize=7)\n ax.legend(handles,labels,loc='lower 
right',fontsize=7,frameon=True,framealpha=0.5) \n print(np.min(sSFR),np.max(sSFR))\n if not p.xlim: p.xlim = 10.**np.array([-13,-7])\n if not p.ylim: \n p.ylim = [np.median(L_line)/1e6,np.median(L_line)*1e4]\n ax.set_xlim(p.xlim)\n ax.set_ylim(p.ylim)\n ax.grid(ls='--')\n\n if p.savefig & (not p.add):\n if not os.path.isdir(p.d_plot + 'luminosity/'): os.mkdir(p.d_plot + 'luminosity/') \n plt.savefig(p.d_plot + 'luminosity/%s_sSFR.png' % p.line, format='png', dpi=300)", "def _create_galaxy(self):\n # Make a star.\n star = Star(self)\n stars_width, stars_height = star.rect.size\n # Fill galaxy across the screen\n available_space_x = self.settings.screen_width - (2 * stars_width)\n number_stars_x = available_space_x // (2 * stars_width)\n # Determine the number of rows of stars that fit on the screen.\n ship_height = self.ship.rect.height\n available_space_y = (self.settings.screen_height - (3 * stars_height) - ship_height)\n number_rows = available_space_y // (2 * stars_height)\n # Create the full galaxy of stars.\n for row_number in range(number_rows):\n # Create the first row of stars.\n for stars_number in range(number_stars_x):\n self._create_stars(stars_number, row_number)", "def show_variables(self):\r\n\r\n variablelist = [(x_temp,self.variables[x_temp]) for x_temp in sorted(self.variables.keys())]\r\n display.noteprint(('/C/ '+labels.VARIABLES.upper(), EOL.join([x_temp[0]+BLANK\r\n +COLON+BLANK\r\n +abridge(str(x_temp[1]),40)\r\n for x_temp in variablelist])))", "def update(self):\n self.plot.draw()\n \n func=str(self.edit1b.currentText())\n if self.win.test()==0:\n x=np.linspace(0,10,200)\n elif self.win.test()==1:\n x=np.linspace(0,0.40,200)\n \n pattern1=r'Steel'\n pattern2=r'Aluminium'\n pattern3=r'[\\d]+'\n \n if (func!='Comparison Chart'):\n self.edit2b.setDisabled(False)\n self.edit3b.setDisabled(False)\n self.edit4b.setDisabled(False)\n if (func=='Quenched/Tempered Steel'):\n alpha = 0.0025\n elif (func=='Annealed Steel'):\n alpha = 0.01\n elif (func=='Steel (input Su)'):\n S = str(self.edit2b.text())\n if (self.win.test()==0):\n S = str(float(S)/6.895)\n alpha = notch.alpha(eval(S))\n elif (func=='Aluminium Alloy 356.0 as cast'):\n rho = 0.08\n elif (func=='Aluminium Alloy 6061'):\n rho = 0.025\n elif (func=='Aluminium Alloy 7075'):\n rho = 0.015\n elif (func=='Material dropdown'):\n pass\n \n y1=[]\n if re.search(pattern1,func):\n Su=notch.su_s(alpha)\n if (self.win.test()==0):\n Su = Su*6.895\n for i in range(len(x)):\n y1.append(notch.nsp(alpha,x[i],self.win.test()))\n y=np.asarray(y1)\n if (re.search(pattern3,str(self.edit3b.text()))):\n r=eval(str(self.edit3b.text()))\n self.edit4b.setText(str(notch.nsp(alpha,r,self.win.test())))\n elif re.search(pattern2,func):\n Su=notch.su_a(rho)\n if (self.win.test()==0):\n Su = Su*6.895\n for i in range(len(x)):\n y1.append(notch.nsn(rho,x[i],self.win.test()))\n y=np.asarray(y1)\n if (re.search(pattern3,str(self.edit3b.text()))):\n r=eval(str(self.edit3b.text()))\n self.edit4b.setText(str(notch.nsn(rho,r,self.win.test())))\n \n self.edit2b.setText(str(Su))\n func1 = 'Steel (Su='+str(self.edit2b.text())+')'\n if (func!='Steel (input Su)'):\n self.plot.redraw(x,y,func, self.xlabel)\n elif (func=='Steel (input Su)'):\n self.plot.redraw(x,y,func1, self.xlabel)\n \n elif (func=='Comparison Chart'):\n self.edit2b.setText(\"\")\n self.edit2b.setDisabled(True)\n self.edit3b.setText(\"\")\n self.edit3b.setDisabled(True)\n self.edit4b.setText(\"\")\n self.edit4b.setDisabled(True)\n self.plot.draw_comp(self.xlabel, self.win.test())", "def 
_variable_types(self):\n return self._variable_single_types + self._variable_array_types", "def list(show=0):\n global stars_\n if len(stars_) == 0:\n print \"No stars have been selected, go use 'stars()'\"\n return\n if show == 0:\n i=0\n for s in stars_:\n i=i+1\n print i,s[0],s[1],s[2],s[3]\n else:\n if show > 0 and show <= len(stars_):\n s = stars_[show-1]\n print show,s[0],s[1],s[2],s[3]\n else:\n print \"Bad star index\"", "def plot_variant_positions(strains):\n if strains.lower() == 'all':\n strains = None\n strains = get_required_strains(strains)\n gd_data = []\n with database.make_connection() as connection:\n for strain in strains:\n hits = r.table(TABLE).filter(lambda row: row['StrainID'].match(\n strain)).pluck('Position', 'Class').run(connection)\n feat = []\n for hit in hits:\n cur = hit['Position']\n feat.append(misc.create_feature(cur, cur, hit['Class'], strand=None))\n gd_data.append(feat)\n imaging.plot_SNPs(gd_data, strains)", "def plot_missing_values(self, variable, **kwargs):\n return self.visualizer.plot_missing_values(variable, **kwargs)", "def spherical_galaxy_orbit(\n orbit_x,\n orbit_y,\n orbit_z,\n N_stars=100,\n sigma_r=1,\n orbit_visible=False,\n orbit_line_interpolate=5,\n N_star_orbits=10,\n color=[255, 220, 200],\n size_star=1,\n scatter_kwargs={},\n):\n if orbit_line_interpolate > 1:\n x = np.linspace(0, 1, len(orbit_x))\n x_smooth = np.linspace(0, 1, len(orbit_x) * orbit_line_interpolate)\n kind = 'quadratic'\n orbit_x_line = scipy.interpolate.interp1d(x, orbit_x, kind)(x_smooth)\n orbit_y_line = scipy.interpolate.interp1d(x, orbit_y, kind)(x_smooth)\n orbit_z_line = scipy.interpolate.interp1d(x, orbit_z, kind)(x_smooth)\n else:\n orbit_x_line = orbit_x\n orbit_y_line = orbit_y\n orbit_z_line = orbit_z\n line = ipv.plot(orbit_x_line, orbit_y_line, orbit_z_line, visible=orbit_visible)\n x = np.repeat(orbit_x, N_stars).reshape((-1, N_stars))\n y = np.repeat(orbit_y, N_stars).reshape((-1, N_stars))\n z = np.repeat(orbit_z, N_stars).reshape((-1, N_stars))\n xr, yr, zr = np.random.normal(0, scale=sigma_r, size=(3, N_stars)) # +\n r = np.sqrt(xr ** 2 + yr ** 2 + zr ** 2)\n\n for i in range(N_stars):\n a = np.linspace(0, 1, x.shape[0]) * 2 * np.pi * N_star_orbits\n xo = r[i] * np.sin(a)\n yo = r[i] * np.cos(a)\n zo = a * 0\n xo, yo, zo = np.dot(_randomSO3(), [xo, yo, zo])\n # print(x.shape, xo.shape)\n x[:, i] += xo\n y[:, i] += yo\n z[:, i] += zo\n\n sprite = ipv.scatter(\n x, y, z, texture=radial_sprite((64, 64), color), marker='square_2d', size=size_star, **scatter_kwargs\n )\n with sprite.material.hold_sync():\n sprite.material.blending = pythreejs.BlendingMode.CustomBlending # pylint: disable=no-member\n sprite.material.blendSrc = pythreejs.BlendFactors.SrcColorFactor # pylint: disable=no-member\n sprite.material.blendDst = pythreejs.BlendFactors.OneFactor # pylint: disable=no-member\n sprite.material.blendEquation = 'AddEquation'\n sprite.material.transparent = True\n sprite.material.depthWrite = False\n sprite.material.alphaTest = 0.1\n return sprite, line", "def createGraph(self):\n self.measurements(45,50,10)\n avg = self.readFile(\"avg.pickle\")\n table = []\n for a in avg:\n table.append((a[0], a[1], a[2], a[3], a[4], \"Boolean\"))\n table.append((a[0], a[1], a[2], a[5], a[6], \"Fractional\"))\n table.append((a[0], a[1], a[2], a[7], a[8], \"Hierarchical\"))\n df = pd.DataFrame(table)\n df.columns = [\"nPages\", \"nCentroids\", \"Time\", \"Mean\", \"Std\", \"Type\"]\n print(df)\n sns.set(style = 'darkgrid')\n sns.lmplot(x = \"nCentroids\", y = \"Mean\", 
col = \"Type\", hue=\"Type\", data = df)\n #sns.lmplot(x = \"nPages\", y = \"Mean\", col = \"Type\", hue=\"Type\", data = df)\n #sns.scatterplot(x = \"nCentroids\", y = \"Mean\", size = \"Time\", hue = \"Type\", sizes = (20, 200), data = df)\n #sns.scatterplot(x = \"nPages\", y = \"Mean\", size = \"Time\", hue = \"Type\", sizes = (20, 200), data = df)\n plt.show()", "def plot_global(type):\n click.echo(click.style(\n \"Generating Plot....\", fg='cyan', bold='true'))\n plot_time_series.TimeSeriesPloTs.plot_global(type)\n click.echo(click.style(\n \"Done....\", fg='green', bold='true'))", "def var(self, _type):\n return Slot()", "def variable_type(self, variable): # pragma: no cover\n raise NotImplementedError('Implemented in child class')", "def create_star():\n if config.S_LIST == []:\n sitem = scene.Star(randint(2, common.COLS-2), randint(2, common.R1_R))\n config.S_LIST.append(sitem)\n elif randint(0, 5) == 1:\n sitem = scene.Star(randint(2, common.COLS-2), randint(2, common.R1_R))\n config.S_LIST.append(sitem)", "def _create_stars(self, stars_number, row_number):\n star = Star(self)\n stars_width, stars_height = star.rect.size\n star.x = stars_width + 2 * stars_width * stars_number\n star.rect.x = star.x\n star.rect.y = star.rect.height + 2 * star.rect.height * row_number\n self.stars.add(star)", "def print_stars():\n for i in range(2):\n for j in range(35):\n print(\"*\", end = '')\n print('')", "def velocity_dispersion(galaxy_names, content, time, xlabel, ylabel):\r\n # Setting figure\r\n fig = plt.figure()\r\n ax = fig.add_subplot(111)\r\n ax.set_xlabel(xlabel)\r\n ax.set_ylabel(ylabel)\r\n picName = \"\"\r\n\r\n for galaxy_name in galaxy_names:\r\n # Read file\r\n fileName = '/home/humar50/RING/RUNS/' + galaxy_name + '/DUMPS/' + galaxy_name + '/data_sigma/sigma_v' + content + '.out'\r\n # times, distance from center, velocity dispersion in x, y and z\r\n rtimes, rdist, rsr, rst, rsz = np.loadtxt(fileName, usecols=(0, 1, 5, 6, 7), unpack=True)\r\n\r\n dist, sr, st, sz = [], [], [], [] # lists that will contain only the data at the time specified\r\n for i in range(0, len(rtimes)):\r\n if rtimes[i] == time:\r\n dist.append(rdist.tolist()[i]), sr.append(rsr.tolist()[i]), st.append(rst.tolist()[i]), sz.append(rsz.tolist()[i])\r\n\r\n # Plot\r\n if content == 'g':\r\n title = 'Gas particles at ' + str(time) + ' Gyr'\r\n elif content == 's':\r\n title = 'Star particles at ' + str(time) + ' Gyr'\r\n\r\n marker = marker_styles[galaxy_names.index(galaxy_name)]\r\n ax.scatter(dist, sr, color='tab:red', marker=marker, alpha=0.7, label=galaxy_name + ', $\\sigma_r$')\r\n ax.scatter(dist, st, color='g', alpha=0.7, marker=marker, label=galaxy_name + ', $\\sigma_t$')\r\n ax.scatter(dist, sz, color='mediumblue', alpha=0.7, marker=marker, label=galaxy_name + ', $\\sigma_z$')\r\n picName += (galaxy_name + '-')\r\n\r\n ax.set_title(title)\r\n plt.legend()\r\n fig.tight_layout()\r\n\r\n # Save figure\r\n fig.savefig(picName + content + \"_velocity-distribution_\" + str(time) + \"_Gyr.png\", dpi=500)\r\n # plt.show()\r\n plt.cla()\r\n plt.close()", "def __init__(self):\n self.tape_tag = None\n self.independentVariableShapeList = []\n self.dependentVariableShapeList = []", "def taxonomy_plot(self,seasons):\n print('Formatting data.')\n no_of_ideograms=self.taxonomy_files()\n location=self.place.capitalize()+'-'+str(self.year)\n if seasons==True:\n seasons=self.weather.seasons(self.place)\n print('Done')\n self.conf.taxo_conf(no_of_ideograms, location, self.start_level, self.plot_level, seasons)", "def 
plot_variables(labels, plot, data):\n # Create individual figures\n fig = subplots.make_subplots(rows=1, cols=1)\n for var in labels:\n if plot == 0:\n counts = data[var].value_counts()\n fig.append_trace(go.Bar(x=counts, y=counts.index, orientation='h'), 1, 1)\n elif plot == 1:\n fig.append_trace(ff.create_distplot([list(data[var])], ['distplot'])['data'][0], 1, 1)\n fig.append_trace(ff.create_distplot([list(data[var])], ['distplot'])['data'][1], 1, 1)\n elif plot == 2:\n fig.add_trace(go.Box(x=list(data[data[\"Score\"] == \"good\"][var]), name=\"Good\", hoverinfo=\"x\", marker_color='mediumturquoise'))\n fig.add_trace(go.Box(x=list(data[data[\"Score\"] == \"bad\"][var]), name=\"Bad\", hoverinfo=\"x\", marker_color='darkorange'))\n else:\n raise ValueError(\"plot number must be 0, 1, or 2\")\n # Create buttons for drop down menu\n buttons = []\n for i, label in enumerate(labels):\n if plot == 0:\n visibility = [i == j for j in range(len(labels))]\n else:\n visibility = [j//2 == i for j in range(2*len(labels))]\n button = dict(\n label=label,\n method='update',\n args=[{'visible': visibility},\n {'title': label}])\n buttons.append(button)\n updatemenus = list([\n dict(active=-1,\n x=1.06, y=1.27,\n buttons=buttons\n )\n ])\n # Setup layout\n if plot == 0:\n fig['layout']['title'] = \"Distribution of categorical and discrete variables:\"\n fig.update_traces(marker_color='rgb(158,202,225)', marker_line_color='rgb(8,48,107)',\n marker_line_width=1.5, opacity=0.7)\n elif plot == 1:\n fig['layout']['title'] = \"Distribution of continuous variables:\"\n fig.update_traces(marker_color='rgb(112, 125, 188)', opacity=0.8)\n elif plot == 2:\n fig['layout']['title'] = \"Boxplot of continuous variables by score:\"\n fig['layout']['showlegend'] = False\n fig['layout']['updatemenus'] = updatemenus\n iplot(fig, config={\"displayModeBar\": False})", "def plot_parameter(self, parm):\n # If user wants to plot density, make sure it exists\n if parm == 'density' and 'density' not in self.ds.data_vars:\n self.insert_density()\n \n if parm == 'theta' and 'theta' not in self.ds.data_vars:\n self.insert_potential_density()\n \n if parm == 'N' and 'N' not in self.ds.data_vars:\n self.insert_buoyancy_frequency()\n \n # Use xarray to plot this parameter\n self.ds[parm].plot(y=self.ztsp[0])\n if plt.ylim()[0] <= 0:\n plt.gca().invert_yaxis()\n plt.tight_layout()\n \n plt.show()", "def getStarRating(waveHeight, windDir, avgWind, tideHeight):\n\n starRating = 0\n\n # wave height\n if waveHeight > 2:\n starRating += 4\n elif waveHeight > 1.6:\n starRating += 3\n elif waveHeight > 1.4:\n starRating += 2\n elif waveHeight > 1.2:\n starRating += 1\n\n # wind direction\n if windDir >= 270 or windDir <= 30:\n starRating += 1\n\n # wind strength\n if avgWind < 15:\n starRating += 1\n\n # tide\n if tideHeight < 1.2:\n starRating += 1\n elif tideHeight > 2.2:\n starRating = 1\n\n # check upper bound of 5 stars\n if starRating > 5:\n starRating = 5\n elif waveHeight < 1:\n starRating = 0\n\n return starRating", "def make_top_stars():\n topStars = {'1': {'amm': [{'name': 'G1_1', 'x': 2509.0, 'y': 555.0},\n {'name': 'G1_2', 'x': 2294.0, 'y': 1638.5}],\n 'ben': [{'name': 'G1_1', 'x': 2534.5, 'y': 563.5},\n {'name': 'G1_2', 'x': 2320.0, 'y': 1643.0}],\n 'jlu': [{'name': 'G1_1', 'x': 2532.0, 'y': 561.0},\n {'name': 'G1_2', 'x': 2317.5, 'y': 1644.0}]},\n '2': {'amm': [{'name': 'G2_1', 'x': 1659.0, 'y': 1595.4},\n {'name': 'G2_2', 'x': 507.0, 'y': 1394.5}],\n 'ben': [{'name': 'G2_1', 'x': 1665.5, 'y': 1602.0},\n {'name': 'G2_2', 'x': 
513.5, 'y': 1401.0}],\n 'jlu': [{'name': 'G2_1', 'x': 1663.0, 'y': 1599.5},\n {'name': 'G2_2', 'x': 511.0, 'y': 1398.0}]},\n '3': {'amm': [{'name': 'G3_1', 'x': 507.0, 'y': 2344.0},\n {'name': 'G3_2', 'x': 1819.0, 'y': 2877.5}],\n 'ben': [{'name': 'G3_1', 'x': 522.0, 'y': 2386.5},\n {'name': 'G3_2', 'x': 1828.5, 'y': 2933.0}],\n 'jlu': [{'name': 'G3_1', 'x': 519.0, 'y': 2384.0},\n {'name': 'G3_2', 'x': 1826.0, 'y': 2930.5}]},\n '4': {'amm': [{'name': 'G4_1', 'x': 3470.0, 'y': 2781.5},\n {'name': 'G4_2', 'x': 2441.0, 'y': 3481.0}],\n 'ben': [{'name': 'G4_1', 'x': 3509.0, 'y': 2834.0},\n {'name': 'G4_2', 'x': 2469.3, 'y': 3519.5}],\n 'jlu': [{'name': 'G4_1', 'x': 3505.0, 'y': 2831.5},\n {'name': 'G4_2', 'x': 2467.0, 'y': 3516.9}]}}\n\n users = ['amm', 'ben', 'jlu']\n frames = ['58', '59', '60']\n chips = ['1', '2', '3', '4']\n\n root = 'S20121230S00'\n for user in users:\n for frame in frames:\n for chip in chips:\n starlist = '{0}{1}_{2}_{3}.lis'.format(root, frame,\n chip, user)\n\n move_stars_up(starlist, topStars[chip][user])\n\n return", "def _get_vars(symbol: Union[str, int]) -> str:\n if isinstance(symbol, str):\n return {\n 'circle': 'var b1=n.round(t,2);',\n 'square': 'var b1=n.round(t,2);',\n 'diamond': 'var b1=n.round(t*1.3,2);',\n 'hexagram': 'var b1=n.round(t,2);var b2=n.round(t/2,2);var b3=n.round(t*Math.sqrt(3)/2,2);'\n }[symbol]\n return {\n 37: 'var d1=n.round(t*1.2,2);var d2=n.round(t*1.6,2);var d3=n.round(t*0.8,2);',\n 38: 'var d1=n.round(t*1.2,2);var d2=n.round(t*1.6,2);var d3=n.round(t*0.8,2);',\n 39: 'var d1=n.round(t*1.2,2);var d2=n.round(t*1.6,2);var d3=n.round(t*0.8,2);',\n 40: 'var d1=n.round(t*1.2,2);var d2=n.round(t*1.6,2);var d3=n.round(t*0.8,2);',\n 34: 'var d1=n.round(t,2);',\n 33: 'var d1=n.round(t*1.4,2);',\n 35: 'var d1=n.round(t*1.2,2);var d2=n.round(t*0.85,2);',\n 36: 'var d1=n.round(t/2,2);var d2=n.round(t,2);'\n }[symbol]", "def plotScopes12p(day,shot):\n axs = []\n for p in range(4):\n if p == 0:\n ax = plt.subplot(4,3,3*p+1)\n plt.title(\"day %d, shot %d, scope 1\"%(day,shot))\n else:\n plt.subplot(4,3,3*p+1,sharex=ax)\n\n x = findReadData(day,1,p+1,shot)\n plt.plot(x.Time,x.Ampl)\n plt.ylabel(\"channel %d\"%(p+1))\n fudgePlotLimits(x.Time,x.Ampl)\n if p==3:\n plt.xlabel(\"time ($\\mu$s)\")\n\n plt.subplot(4,3,3*p+2,sharex=ax)\n if p == 0:\n plt.title(\"day %d, shot %d, scope 2\"%(day,shot))\n x = findReadData(day,2,p+1,shot)\n plt.plot(x.Time,x.Ampl)\n plt.ylabel(\"channel %d\"%(p+1))\n fudgePlotLimits(x.Time,x.Ampl)\n if p==3:\n plt.xlabel(\"time ($\\mu$s)\")\n\n plt.subplot(4,3,3*p+3,sharex=ax)\n if p == 0:\n plt.title(\"day %d, shot %d, scope 3\"%(day,shot))\n x = findReadData(day,3,p+1,shot)\n plt.plot(x.Time,x.Ampl)\n plt.ylabel(\"channel %d\"%(p+1))\n fudgePlotLimits(x.Time,x.Ampl)\n if p==3:\n plt.xlabel(\"time ($\\mu$s)\")", "def example():\n station = \"KFTG\"\n product = 'N0Q'\n start = datetime(2020,5,16,0,0)\n\n file_list,LatLonBox = query_radar_data(station,product,start,\n minute_delta=30,hour_delta=0,day_delta=0)\n radar_plot(station,save_path,product,start,file_list,LatLonBox,save=False,show=True,index=0)", "def legendValue(requestContext, seriesList, *valueTypes):\n def last(s):\n \"Work-around for the missing last point\"\n v = s[-1]\n if v is None:\n return s[-2]\n return v\n\n valueFuncs = {\n 'avg': lambda s: safeDiv(safeSum(s), safeLen(s)),\n 'total': safeSum,\n 'min': safeMin,\n 'max': safeMax,\n 'last': last\n }\n system = None\n if valueTypes[-1] in ('si', 'binary'):\n system = valueTypes[-1]\n valueTypes = valueTypes[:-1]\n 
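# (Editor's note, hypothetical call: legendValue(ctx, series, 'avg', 'max', 'si')
# tags each series name with one entry per requested statistic; with no
# 'si'/'binary' system the tag is "(type: value)", otherwise fixed-width
# columns with unit suffixes from format_units are appended instead.)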
for valueType in valueTypes:\n valueFunc = valueFuncs.get(valueType, lambda s: '(?)')\n if system is None:\n for series in seriesList:\n series.name += \" (%s: %s)\" % (valueType, valueFunc(series))\n else:\n for series in seriesList:\n value = valueFunc(series)\n formatted = None\n if value is not None:\n formatted = \"%.2f%s\" % format_units(value, system=system)\n series.name = \"%-20s%-5s%-10s\" % (series.name, valueType, formatted)\n return seriesList", "def _update_stars(self):\n self._check_galaxy_edges()\n self.stars.update()\n if pygame.sprite.spritecollideany(self.ship, self.stars):\n self._ship_hit()\n self._check_stars_bottom()", "def board_stars(self):\r\n return BoardStars(self)", "def plot(var):\n # MAYBE WE CAN STILL DO SOMETHING WITH THIS HERE\n # total_dead = len(train_data[\"Survived\"] == 0)\n # total_survived = len(train_data[\"Survived\"] == 1)\n # died = train_data[train_data[\"Survived\"] == 0][var].value_counts() / total_dead\n # survived = train_data[train_data[\"Survived\"] == 1][var].value_counts() / total_survived\n sns.set()\n sns.set_color_codes(\"pastel\")\n\n # order bars for family size variable\n if var == \"FamSize\":\n sns.barplot(x=var, y=\"Survived\", data=train_data, color=\"b\",\\\n capsize=.1, errwidth=.7, order=[\"alone\", 1, 2, 3, \"4 or more\"]).\\\n tick_params(labelsize=18)\n else:\n sns.barplot(x=var, y=\"Survived\", data=train_data, color=\"b\",\\\n capsize=.1, errwidth=1.1).tick_params(labelsize=18)\n\n # plot style properties\n ax = plt.gca()\n\n for ax in plt.gcf().axes:\n x = ax.get_xlabel()\n y = ax.get_ylabel()\n ax.set_xlabel(x, fontsize=20)\n ax.set_ylabel(y, fontsize=20)\n\n plt.title(\"Ratio of survivors for variable \" + str(var), fontsize=22)\n t = ax.title\n t.set_position([.5, 1.05])\n plt.ylim([0, 1])\n plt.subplots_adjust(bottom=.15, left=.15)\n plt.savefig(\"results/survived_\" + str(var) + \".png\", bbox_inches=\"tight\")\n\n plt.show()", "def visualize_type():\n\t\n\t#grab our parsed data\n\tdata_file = parse(MY_FILE, \",\")\n\t\n\t#make a new variable, counter, from iterating through each line of\n\t#data in parsed data, and count how many incidents happen by category\n\tcounter = Counter(item[\"Category\"] for item in data_file)\n\t\n\t#set the labels which are based on the keys of our counter\n\t#since order doesn't matter, we can just use counter.keys()\n\tlabels = tuple(counter.keys())\n\t\n\t#set exactly where the labels should hit the x-axis\n\txlocations = np.arange(len(labels)) + 0.5\n\t\n\t#width of each bar that will be plotted\n\twidth = 0.5\n\t\n\t#assign data to a bar plot\n\tplt.bar(xlocations, counter.values(), width=width)\n\t\n\t#assign labels and tick location to x-axis\n\tplt.xticks(xlocations + width /2, labels, rotation=90)\n\t\n\t#give more room to the x-axis so the labels aren't cut off\n\tplt.subplots_adjust(bottom=0.4)\n\t\n\t#make the overall graph/figure larger\n\tplt.rcParams['figure.figsize'] = 12, 8\n\t\n\t#save the graph\n\tplt.savefig(\"type.png\")\n\t\n\t#close the plot figure\n\tplt.clf()", "def __init__(self, img, header, starobj, halosize=40, padsize=40, mask=None, hscmask=None):\n Celestial.__init__(self, img, mask, header=header)\n if hscmask is not None:\n self.hscmask = hscmask\n self.name = 'star'\n self.scale_bar_length = 3\n # Trim the image to star size\n # starobj should at least contain x, y, (or ra, dec) and \n # Position of a star, in numpy convention\n x_int = int(starobj['x'])\n y_int = int(starobj['y'])\n dx = -1.0 * (starobj['x'] - x_int)\n dy = -1.0 * (starobj['y'] - y_int)\n 
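# (Editor's note: x_int/y_int above are the integer pixel of the star and dx/dy
# the negated sub-pixel remainders, e.g. x = 10.3 gives x_int = 10, dx = -0.3;
# the cutout below is centred on the integer pixel and no shift is applied yet.)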
halosize = int(halosize)\n # Make padded image to deal with stars near the edges\n padsize = int(padsize)\n ny, nx = self.image.shape\n im_padded = np.zeros((ny + 2 * padsize, nx + 2 * padsize))\n im_padded[padsize: ny + padsize, padsize: nx + padsize] = self.image\n # Star itself, but no shift here.\n halo = im_padded[y_int + padsize - halosize: y_int + padsize + halosize + 1, \n x_int + padsize - halosize: x_int + padsize + halosize + 1]\n self._image = halo\n self.shape = halo.shape\n self.cen_xy = [x_int, y_int]\n self.dx = dx\n self.dy = dy \n # Flux\n self.flux = starobj['flux']\n self.fluxann = starobj['flux_ann']\n\n if hasattr(self, 'mask'):\n im_padded = np.zeros((ny + 2 * padsize, nx + 2 * padsize))\n im_padded[padsize: ny + padsize, padsize: nx + padsize] = self.mask\n # Mask itself, but no shift here.\n halo = (im_padded[y_int + padsize - halosize: y_int + padsize + halosize + 1, \n x_int + padsize - halosize: x_int + padsize + halosize + 1])\n self._mask = halo\n \n if hasattr(self, 'hscmask'):\n im_padded = np.zeros((ny + 2 * padsize, nx + 2 * padsize))\n im_padded[padsize: ny + padsize, padsize: nx + padsize] = self.hscmask\n # Mask itself, but no shift here.\n halo = (im_padded[y_int + padsize - halosize: y_int + padsize + halosize + 1, \n x_int + padsize - halosize: x_int + padsize + halosize + 1])\n self.hscmask = halo", "def plot_psd(self):\n from .error import show_error\n from ..backend.viz_raw import \\\n _plot_topomap, _plot_matrix, _plot_all_psd\n\n if self.plotType == 'Topomap':\n try:\n _plot_topomap(self)\n except ValueError:\n show_error(\n 'No coordinates for topomap have been initialized:(')\n self.ui.selectPlotType.setCurrentIndex(0)\n\n if self.plotType == 'Matrix':\n _plot_matrix(self)\n\n if self.plotType == 'All PSD':\n _plot_all_psd(self)", "def plotSpectrum(inp,xrng=[],yrng=[],xlabel='',ylabel='',xlog=False,ylog=False,grid=False,\n legend=[],legend_location='upper right',fontscale=1,legend_fontscale=1,title='',\n color='k',colormap=None,linestyle='-',linewidth=1.5,alpha=1.,\n show_noise=True,color_noise='k',linestyle_noise='-',linewidth_noise=1.5,alpha_noise=0.5,\n comparison=None,color_comparison='grey',linestyle_comparison='-',linewidth_comparison=1.5,alpha_comparison=1,\n residual=False,color_residual='m',linestyle_residual='-',linewidth_residual=1.5,alpha_residual=0.5,\n telluric=False,color_telluric='grey',linestyle_telluric='-',linewidth_telluric=1.5,alpha_telluric=0.2,\n features=[],mdwarf=False,ldwarf=False,tdwarf=False,young=False,binary=False,nsamples=100,\n band=[],band_color='k',band_alpha=0.2,band_label='',band_label_position='bottom',band_width=0.1,\n show_zero=True,stack=0.,zeropoint=0.,color_zero='k',linestyle_zero=':',linewidth_zero=1.5,alpha_zero=0.3,\n inset=False,inset_xrange=[],inset_yrange=[],inset_position=[0.65,0.60,0.20,0.20],inset_features=False,\n output='',multiplot=False,multipage=False,layout=[1,1],figsize=[],tight=True,\n interactive=False,**kwargs):\n\n# keyword parameters (for backward compatibility)\n for k in ['showZero','showzero']: show_zero=kwargs.get(k,show_zero)\n for k in ['showNoise','noise','uncertainty','shownoise','showuncertainty','show_uncertainty']: show_noise=kwargs.get(k,show_noise)\n\n for k in ['line_style','lineStyle','ls','linestyles','line_styles']: linestyle=kwargs.get(k,linestyle)\n for k in ['line_width','lineWidth','width','lw','linewidths','line_widths']: linewidth=kwargs.get(k,linewidth)\n for k in ['colors','colour','colours']: color=kwargs.get(k,color)\n for k in 
['colorScheme','color_scheme','colorscheme','colorMap','color_map']: colormap=kwargs.get(k,colormap)\n\n for k in ['colornoise','colorNoise','colorUnc','coloruncertainty','color_uncertainty','colorUncertainty']: color_noise=kwargs.get(k,color_noise)\n for k in ['linestylenoise','line_style_noise','linestyleNoise']: linestyle_noise=kwargs.get(k,linestyle_noise)\n for k in ['linewidthnoise','linewidthNoise','line_width_noise']: linewidth_noise=kwargs.get(k,linewidth_noise)\n for k in ['alphanoise','alphaNoise']: alpha_noise=kwargs.get(k,alpha_noise)\n\n for k in ['colorzero','colorZero']: color_zero=kwargs.get(k,color_zero)\n for k in ['linestylezero','line_style_zero','linestyleZero']: linestyle_zero=kwargs.get(k,linestyle_zero)\n for k in ['linewidthzero','linewidthZero','line_width_zero']: linewidth_zero=kwargs.get(k,linewidth_zero)\n for k in ['alphazero','alphaZero']: alpha_zero=kwargs.get(k,alpha_zero)\n\n for k in ['colorcomparison','colorComparison']: color_comparison=kwargs.get(k,color_comparison)\n for k in ['linestyleComparison','line_style_comparison','linestylecomparison']: linestyle_comparison=kwargs.get(k,linestyle_comparison)\n for k in ['linewidthcomparison','linewidthComparison','line_width_comparison']: linewidth_comparison=kwargs.get(k,linewidth_comparison)\n for k in ['alphacomparison','alphaComparison']: alpha_comparison=kwargs.get(k,alpha_comparison)\n\n for k in ['colorresidual','colorResidual']: color_residual=kwargs.get(k,color_residual)\n for k in ['linestyleresidual','line_style_residual','linestyleResidual']: linestyle_residual=kwargs.get(k,linestyle_residual)\n for k in ['linewidthresidual','linewidthResidual','line_width_residual']: linewidth_residual=kwargs.get(k,linewidth_residual)\n for k in ['alpharesidual','alphaResidual']: alpha_residual=kwargs.get(k,alpha_residual)\n\n for k in ['bands']: band=kwargs.get(k,band)\n if len(band) == 2 and isinstance(band[0],list) == False: band = [band]\n for k in ['bandcolors','bandcolor','band_colors']: band_color=kwargs.get(k,band_color)\n for k in ['bandalphas','band_alphas','bandalpha']: band_alpha=kwargs.get(k,band_alpha)\n for k in ['band_labels','bandlabel','bandlabels']: band_label=kwargs.get(k,band_label)\n for k in ['band_label_positions','bandlabelposition','bandlabelpositions']: band_label_position=kwargs.get(k,band_label_position)\n for k in ['bandwidth','bandwidths','band_widths']: band_width=kwargs.get(k,band_width)\n for par in [band_color,band_alpha,band_label,band_label_position,band_width]:\n if not isinstance(par,list): par = [par]*len(band)\n if len(par) < len(band): par.extend([par[-1] for x in range(len(band)-len(par))])\n\n for k in ['legends','label','labels']: legend=kwargs.get(k,legend)\n if not isinstance(legend,list): legend = [legend]\n for k in ['legendfontscale','legendFontscale']: legend_fontscale=kwargs.get(k,legend_fontscale)\n legend_fontscale=legend_fontscale*fontscale\n for k in ['legendLocation','legendlocation','labelLocation','labellocation','label_location']: legend_location=kwargs.get(k,legend_location)\n\n for k in ['xrange','x_range','wave_range','wrange','wrng']: xrng=kwargs.get(k,xrng)\n if not isinstance(xrng,list): xrng = [xrng]\n for k in ['yrange','y_range','flux_range','frange','frng']: yrng=kwargs.get(k,yrng)\n if not isinstance(yrng,list): yrng = [yrng]\n\n for k in ['multilayout','multiLayout','multi_layout']: layout=kwargs.get(k,layout)\n for k in ['file','filename']: output=kwargs.get(k,output)\n if not isinstance(output,str): output=''\n filetype = '.pdf'\n if 
output!='': filetype=output.split('.')[-1]\n\n if comparison != None and isinstance(comparison,splat.Spectrum) == False and isinstance(comparison,list) == False: \n print('plotSpectrum() Warning: comparison spectrum should be a splat Spectrum object, you passed {}'.format(comparison))\n comparison = None\n\n# some plotting constants\n xlabel_default = 'Wavelength'\n ylabel_deafult = 'Flux'\n\n# telluric bands in micron\n telluric_bands = [[1.1,1.2]*u.micron,[1.3,1.5]*u.micron,[1.75,2.0]*u.micron]\n\n# assign features by group\n if not isinstance(features,list): features = [features]\n if ldwarf==True or mdwarf==True: features.extend(['k','na','feh','tio','co','h2o','h2'])\n if tdwarf==True: features.extend(['k','ch4','h2o','h2'])\n if young==True: features.extend(['vo'])\n if binary==True: features.extend(['sb'])\n\n# clean repeats in features while maintaining order - set does not do this\n if len(features)>0:\n fea = []\n for i in features:\n if i not in fea: fea.append(i)\n features = fea\n\n\n# if a list is passed, use this list\n splist = copy.deepcopy(inp)\n if isinstance(splist,list) == False: splist = [splist]\n \n# set up for multiplot\n if len(splist) == 1: multiplot = False\n \n# array of lists => force multiplot\n elif len(splist) > 1 and isinstance(splist[0],list) == True: multiplot = True\n else: pass\n\n# reformat array of spectra of multiplot is used (i.e., user forgot to set)\n if multiplot == True and isinstance(splist[0],splat.Spectrum):\n splist = [[s] for s in splist]\n\n elif multiplot == False and isinstance(splist[0],splat.Spectrum):\n splist = [splist]\n \n# flatten array if multiplot is not set\n elif multiplot == False and isinstance(splist[0],list) and len(splist) > 1:\n splist = [[item for sublist in splist for item in sublist]] # flatten\n else: pass\n\n# total number of spectra - use to assign default legends\n allsps = [item for sublist in splist for item in sublist] # Total number of spectra\n if len(legend) == 0: legend=[sp.name for sp in allsps]\n if len(legend) < len(allsps):\n legend.extend([allsps[i].name for i in range(len(legend),len(allsps)-len(legend))])\n \n\n# now run a loop through the input subarrays\n plt.close('all')\n\n# set up here for multiple file output\n nplot = 1\n if multipage == True or multiplot == True:\n nplot = layout[0]*layout[1]\n numpages = int(len(splist) / nplot) + 1\n if (len(splist) % nplot == 0):\n numpages -= 1\n fig = []\n \n if multipage == True and filetype.lower() == 'pdf':\n pdf_pages = PdfPages(output)\n \n if multipage == False:\n if len(splist) > 1:\n filebase = output.replace('.{}'.format(filetype),'')\n files = [filebase+'{}.'.format(i+1)+filetype for i in numpy.arange(len(splist))]\n else:\n files = [output]\n\n pg_n = 0 # page counter\n plt_n = 0 # plot per page counter\n lg_n = 0 # legend per plot counter\n\n for plts,sp in enumerate(splist):\n# set specific plot parameters\n if not isinstance(sp[0],splat.Spectrum):\n raise ValueError('\\nInput to plotSpectrum has wrong format:\\n\\n{}\\n\\n'.format(sp[0]))\n\n# set up plotting defaults for the list of spectra - REPLACE THIS\n if not isinstance(zeropoint,list): zeropoint = [zeropoint]*len(sp)\n if len(zeropoint) < len(sp): zeropoint.extend([zeropoint[-1] for x in range(len(sp)-len(zeropoint))])\n if not isinstance(color,list): color = [color]*len(sp)\n if len(color) < len(sp): color.extend([color[-1] for x in range(len(sp)-len(color))])\n if not isinstance(linestyle,list): linestyle = [linestyle]*len(sp)\n if len(linestyle) < len(sp): 
linestyle.extend([linestyle[-1] for x in range(len(sp)-len(linestyle))])\n if not isinstance(linewidth,list): linewidth = [linewidth]*len(sp)\n if len(linewidth) < len(sp): linewidth.extend([linewidth[-1] for x in range(len(sp)-len(linewidth))])\n if not isinstance(alpha,list): alpha = [alpha]*len(sp)\n if len(alpha) < len(sp): alpha.extend([alpha[-1] for x in range(len(sp)-len(alpha))])\n if not isinstance(color_noise,list): color_noise = [color_noise]*len(sp)\n if len(color_noise) < len(sp): color_noise.extend([color_noise[-1] for x in range(len(sp)-len(color_noise))])\n if not isinstance(linestyle_noise,list): linestyle_noise = [linestyle_noise]*len(sp)\n if len(linestyle_noise) < len(sp): linestyle_noise.extend([linestyle_noise[-1] for x in range(len(sp)-len(linestyle_noise))])\n if not isinstance(linewidth_noise,list): linewidth_noise = [linewidth_noise]*len(sp)\n if len(linewidth_noise) < len(sp): linewidth_noise.extend([linewidth_noise[-1] for x in range(len(sp)-len(linewidth_noise))])\n if not isinstance(alpha_noise,list): alpha_noise = [alpha_noise]*len(sp)\n if len(alpha_noise) < len(sp): alpha_noise.extend([alpha_noise[-1] for x in range(len(sp)-len(color_noise))])\n if not isinstance(color_comparison,list): color_comparison = [color_comparison]*len(sp)\n if len(color_comparison) < len(sp): color_comparison.extend([color_comparison[-1] for x in range(len(sp)-len(color_comparison))])\n if not isinstance(linestyle_comparison,list): linestyle_comparison = [linestyle_comparison]*len(sp)\n if len(linestyle_comparison) < len(sp): linestyle_comparison.extend([linestyle_comparison[-1] for x in range(len(sp)-len(linestyle_comparison))])\n if not isinstance(linewidth_comparison,list): linewidth_comparison = [linewidth_comparison]*len(sp)\n if len(linewidth_comparison) < len(sp): linewidth_comparison.extend([linewidth_comparison[-1] for x in range(len(sp)-len(linewidth_comparison))])\n if not isinstance(alpha_comparison,list): alpha_comparison = [alpha_comparison]*len(sp)\n if len(alpha_comparison) < len(sp): alpha_comparison.extend([alpha_comparison[-1] for x in range(len(sp)-len(alpha_comparison))])\n\n# settings that work if the spectrum was read in as legitmate Spectrum object\n try:\n xlabel = kwargs.get('xlabel','{} ({})'.format(sp[0].wave_label,sp[0].wave.unit))\n ylabel = kwargs.get('ylabel','{} ({})'.format(sp[0].flux_label,sp[0].flux.unit))\n except:\n xlabel = kwargs.get('xlabel',xlabel_default)\n ylabel = kwargs.get('ylabel',ylabel_default)\n# initial plot range\n bound = [numpy.nanmin(sp[0].wave.value),numpy.nanmax(sp[0].wave.value)]\n ymax = [numpy.nanquantile(s.flux.value,0.98) for s in sp]\n bound.extend(numpy.array([-0.02,1.3])*numpy.nanmax(ymax)+\\\n numpy.array([numpy.nanmin(zeropoint),numpy.nanmax(zeropoint)+stack*(len(sp)-1)]))\n\n# set colormap if provided\n if colormap != None:\n values = numpy.arange(len(sp))\n color_map = plt.get_cmap(colormap)\n norm = colmap.Normalize(vmin=0, vmax=1.0*values[-1])\n scalarMap = cm.ScalarMappable(norm=norm, cmap=color_map)\n for i in range(len(sp)): color[i] = scalarMap.to_rgba(values[i])\n\n# GENERATE PLOTS\n if multiplot == True or multipage == True:\n plt_n = plts % nplot\n if (plt_n == 0):\n fig.append(plt.figure())\n pg_n += 1\n ax = fig[pg_n-1].add_subplot(layout[0], layout[1], plt_n+1)\n \n# plotting a single plot with all spectra\n else:\n plt.close('all')\n plt_n = 0\n fig = []\n if len(figsize)>0: fig.append(plt.figure(figsize=figsize))\n else: fig.append(plt.figure())\n ax = fig[0].add_subplot(111)\n \n for ii, a in 
enumerate(sp):\n# zeropoint and stack\n flx = [i+zeropoint[ii] for i in a.flux.value]\n if stack > 0: flx = [f + (len(sp)-ii-1)*stack for f in flx]\n ax.plot(a.wave.value,flx,color=color[ii],linestyle=linestyle[ii], lw=linewidth[ii], alpha=alpha[ii], zorder = 10, label = legend[lg_n]) \n\n# add comparison\n if comparison != None:\n# zeropoint and stack\n cflx = [i+zeropoint[ii] for i in comparison.flux.value]\n if stack > 0: cflx = [f + (len(sp)-ii-1)*stack for f in cflx]\n ax.plot(comparison.wave.value,cflx,color=color_comparison[ii],linestyle=linestyle_comparison[ii], lw=linewidth_comparison[ii], alpha=alpha_comparison[ii], zorder = 10)\n \n# add residual\n if residual == True and len(sp) == 2:\n # Save flux values from first spectrum\n if ii == 0:\n flx0 = [f - (len(sp)-ii-1)*stack for f in flx]\n \n # Subtract fluxes and plot\n elif ii == 1:\n res = [flx0[f_n] - f for f_n, f in enumerate(flx)]\n ax.plot(a.wave.value, res, alpha = alpha_residual[ii], color = color_residual[ii], linsetyle=linestyle_residual[ii], lw=linewidth_residual[ii])\n \n # Fix bound[2] if residual goes below 0\n if numpy.nanmin(res) < bound[2]:\n b0 = numpy.argmin(a.wave.value[a.wave.value > bound[0]])\n b1 = numpy.argmax(a.wave.value[a.wave.value < bound[1]])\n bound[2] = numpy.nanmin(res[b0:b1])\n\n# noise\n if show_noise == True:\n ns = [i+zeropoint[ii] for i in a.noise.value]\n ax.plot(a.wave.value,ns,color=color_noise[ii],linestyle=linestyle_noise[ii],alpha=alpha_noise[ii], lw=linewidth_noise[ii], zorder = 10)\n\n# zeropoint\n if show_zero == True:\n ze = numpy.ones(len(a.flux))*zeropoint[ii]\n ax.plot(a.wave.value,ze,color=color[ii],linestyle=linestyle_zero,alpha=alpha_zero,lw=linewidth_zero, zorder = 10)\n\n# save maximum flux among all spectra for plotting\n# THIS IS VERY SLOW AND IT WOULD BE BETTER TO FIND AN ALTERNATE APPROACH\n if len(features)>0:\n f = interp1d(a.wave,flx,bounds_error=False,fill_value=0.)\n if ii == 0: \n wvmax = numpy.linspace(bound[0],bound[1],nsamples)\n flxmax = numpy.array(f(wvmax))\n else: flxmax = numpy.maximum(flxmax,numpy.array(f(wvmax)))\n\n# legend counter\n lg_n = lg_n + 1 # Increment legend\n\n\n# label features\n# THIS NEEDS TO BE FIXED WITH GRETEL'S STUFF\n if len(features) > 0:\n yoff = 0.02*(bound[3]-bound[2]) # label offset\n fontsize = int((10-numpy.nanmin([(layout[0]*layout[1]-1),6]))*fontscale)\n for ftr in features:\n ftr = ftr.lower()\n if ftr in FEATURE_LABELS:\n ftrc = checkDict(ftr,FEATURE_LABELS)\n if ftrc != False:\n for ii,waveRng in enumerate(FEATURE_LABELS[ftrc]['wavelengths']):\n wRng = waveRng.to(sp[0].wave.unit).value\n# features must be contained in plot range (may change this)\n if numpy.nanmin(wRng) > bound[0] and numpy.nanmax(wRng) < bound[1]:\n wfeature = numpy.where(numpy.logical_and(wvmax >= numpy.nanmin(wRng),wvmax <= numpy.nanmax(wRng)))\n if len(wvmax[wfeature]) == 0: wfeature = numpy.argmax(numpy.absolute(wvmax-numpy.nanmedian(wRng)))\n y = numpy.nanmax(flxmax[wfeature])+yoff\n flxmax[wfeature] = flxmax[wfeature]+3.*yoff\n\n if FEATURE_LABELS[ftrc]['type'] == 'band':\n ax.plot(wRng,[y+yoff]*2,color='k',linestyle='-')\n ax.plot([wRng[0]]*2,[y,y+yoff],color='k',linestyle='-')\n ax.text(numpy.mean(wRng),y+1.5*yoff,FEATURE_LABELS[ftrc]['label'],horizontalalignment='center',fontsize=fontsize)\n else:\n for w in wRng: ax.plot([w]*2,[y,y+yoff],color='k',linestyle='-')\n ax.text(numpy.mean(wRng),y+1.5*yoff,FEATURE_LABELS[ftrc]['label'],horizontalalignment='center',fontsize=fontsize)\n bound[3] = 
numpy.nanmax([numpy.nanmax(flxmax)+2.*yoff,bound[3]])\n\n# add grid\n if grid == True: ax.grid() \n\n# axis labels \n fontsize = (numpy.round(numpy.max([13./((layout[0]*layout[1])**0.33),5]))) * fontscale\n legend_fontsize = (13-numpy.min([(layout[0]*layout[1]-1),8])) * legend_fontscale\n ax.set_xlabel(xlabel, fontsize = fontsize)\n ax.set_ylabel(ylabel, fontsize = fontsize)\n ax.tick_params(axis='x', labelsize=fontsize)\n ax.tick_params(axis='y', labelsize=fontsize)\n\n# add title\n if title!='': ax.set_title(title)\n\n# log scale?\n if kwargs.get('xlog',False): ax.set_xscale('log',nonposx='clip')\n if kwargs.get('ylog',False): ax.set_yscale('log',nonposy='clip')\n\n# place legend\n if len(legend) > 0:\n if legend_location == 'outside':\n box = ax.get_position()\n ax.set_position([box.x0, box.y0 + box.height * 0.15, box.width * 0.7, box.height * 0.7])\n ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), prop={'size':legend_fontsize})\n else:\n ax.legend(loc=legend_location, prop={'size':legend_fontsize})\n bound[3] = bound[3]+0.1*(bound[3]-bound[2]) # extend axis for in-plot legends\n\n# overplot telluric absorption\n if telluric == True:\n yoff = 0.02*(bound[3]-bound[2]) # label offset\n for waveRng in telluric_bands:\n wR = waveRng.to(sp[0].wave.unit).value\n rect = patches.Rectangle((wR[0],bound[2]),wR[1]-wR[0],bound[3]-bound[2],facecolor=color_telluric,alpha=alpha_telluric,color=color_telluric)\n ax.add_patch(rect)\n ax.text(numpy.mean(wR),bound[2]+3*yoff,r'$\\oplus$',horizontalalignment='center',fontsize=fontsize)\n\n# overplot color swaths for pre-specified bands\n if len(band) > 0:\n for i,b in enumerate(band):\n if not isinstance(b,list): \n try: b = [float(b)-0.5*band_width,float(b)+0.5*band_width]\n except:\n print('\\nWarning: plotSpectrum bands variables should be array of 2-element arrays; you passed {}'.format(band))\n b = [0.,0.]\n rect = patches.Rectangle((b[0],bound[2]),b[1]-b[0],bound[3]-bound[2],facecolor=band_color[i],color=band_color[i],alpha=band_alpha[i])\n ax.add_patch(rect)\n if band_label_position[i].lower() == 'top':\n ax.text(numpy.mean(b),bound[3]-3*yoff,band_label[i],horizontalalignment='center',fontsize=fontsize)\n elif band_label_position[i].lower() == 'middle':\n ax.text(numpy.mean(b),0.5*(bound[2]+bound[3]),band_label[i],horizontalalignment='center',fontsize=fontsize)\n else:\n ax.text(numpy.mean(b),bound[2]+3*yoff,band_label[i],horizontalalignment='center',fontsize=fontsize)\n\n# place inset - RIGHT NOW ONLY SETTING LIMITS WITH FIRST SPECTRUM IN LIST\n if inset == True and len(inset_xrange) == 2:\n ax_inset = fig[pg_n-1].add_axes(inset_position) #, axisbg='white')\n bound2 = inset_xrange\n if len(inset_yrange) == 0:\n b0 = numpy.argmax(sp[0].wave.value > bound2[0])\n b1 = numpy.argmin(sp[0].wave.value < bound2[1])\n inset_yrange = [numpy.nanmin(sp[0].flux.value[b0:b1]),numpy.nanmax(sp[0].flux.value[b0:b1])]\n bound2.extend(inset_yrange)\n db = (bound2[3]-bound2[2])\n bound2[2] = bound2[2]-0.05*db\n bound2[3] = bound2[3]+0.05*db\n ax_inset.axis(bound2)\n inset_fontsize = fontsize*0.7\n\n for ii,a in enumerate(sp):\n flx = [i+zeropoint[ii] for i in a.flux.value]\n ax_inset.plot(a.wave.value,flx,color=colors[ii],linestyle=linestyle[ii],linewidth=linewidth[ii],alpha=alpha[ii]) \n ax_inset.set_xlabel('')\n ax_inset.set_ylabel('')\n ax_inset.tick_params(axis='x', labelsize=inset_fontsize)\n ax_inset.tick_params(axis='y', labelsize=inset_fontsize)\n# ax_inset.legend()\n\n# inset feature labels\n if len(inset_features) > 0:\n yoff = 
0.05*(bound2[3]-bound2[2])\n for ftr in inset_features:\n ftrc = checkDict(ftr,FEATURE_LABELS)\n if ftrc != False:\n for ii,waveRng in enumerate(FEATURE_LABELS[ftrc]['wavelengths']):\n wRng = waveRng.to(sp[0].wave.unit).value\n if (numpy.min(wRng) > bound2[0] and numpy.max(wRng) < bound2[1]):\n wfeature = numpy.where(numpy.logical_and(wvmax >= numpy.nanmin(wRng),wvmax <= numpy.nanmax(wRng)))\n if len(wvmax[wfeature]) == 0: wfeature = numpy.argmax(numpy.absolute(wvmax-numpy.nanmedian(wRng)))\n y = numpy.nanmax(flxmax[wfeature])+yoff\n flxmax[wfeature] = flxmax[wfeature]+3.*yoff\n \n if FEATURE_LABELS[ftrc]['type'] == 'band':\n ax_inset.plot(wR,[y+yoff]*2,color='k',linestyle='-')\n ax_inset.plot([wR[0]]*2,[y,y+yoff],color='k',linestyle='-')\n ax_inset.text(numpy.mean(wR),y+2*yoff,FEATURE_LABELS[ftrc]['label'],horizontalalignment='center',fontsize=inset_fontsize)\n else:\n for w in waveRng:\n ax_inset.plot([w]*2,[y,y+yoff],color='k',linestyle='-')\n ax_inset.text(numpy.mean(wR),y+2*yoff,FEATURE_LABELS[ftrc]['label'],horizontalalignment='center',fontsize=inset_fontsize)\n waveRng = [wR[0]-0.02,wR[1]+0.02] # for overlap\n \n# update offset\n if len(inset_features) > 0: bound2[3] = numpy.nanmax([bound2[3],numpy.nanmax(flxmax)+5.*yoff])\n ax_inset.axis(bound2)\n\n# finalize bounding\n if len(xrng) > 0: bound[0:2] = xrng\n if len(yrng) > 0: bound[2:4] = yrng\n if isUnit(bound[0]): bound = [x.value for x in bound]\n ax.axis(bound)\n \n# save to file or display\n# ERROR HERE - CHECK WHAT FILES\n if multipage == False:\n if files[plts] != '' and (plts % nplot == 3 or plts == len(splist)-1):\n if kwargs.get('tight',True) == True: \n plt.savefig(files[plts], bbox_inches='tight')\n else:\n plt.savefig(files[plts])\n if output == '' and not kwargs.get('web',False):\n plt.show()\n if (kwargs.get('interactive',False) != False): plt.ion()\n else: plt.ioff()\n\n\n# save figures in multipage format and write off pdf file\n if multipage == True: \n for pg_n in numpy.arange(numpages):\n# fig[pg_n].text(0.5, 0.04, xlabel, ha = 'center', va = 'center')\n# fig[pg_n].text(0.06, 0.5, ylabel, ha = 'center', va = 'center', rotation = 'vertical')\n fig[pg_n].tight_layout\n fig[pg_n].suptitle(title, fontsize = int(14*fontsize), fontweight = 'bold')\n pdf_pages.savefig(fig[pg_n])\n if filetype.lower() == 'pdf':\n pdf_pages.close()\n\n plt.clf()\n return fig", "def set_real_star(self, star):\n self.real_star = star", "def plot_posteriors(self, variants=[]):\n if variants != []:\n for var in variants:\n if var not in self.posteriors.keys():\n raise ValueError(('Variants must only be a value in '\n 'bucket_col_name'))\n self._plot_posteriors(variants)", "def variables(self):\n for name in self._nodes:\n if isinstance(self._nodes[name], RandomVariable):\n yield name", "def plot(self, num_levels=10):\n if num_levels == -1:\n num_levels = len(self.energies())\n print(self.energies(num_levels))\n figure(figsize=(20, 5))\n subplot(1, num_levels + 1, 1)\n self.plot_potential()\n #xlabel('$\\phi$')\n for ii, psi2D in enumerate(self.get_2Dpsis(num_levels)):\n subplot(1, num_levels + 1, ii + 2)\n #imshow(psi2D.real,extent=(self.x[0],self.x[-1],self.y[0],self.y[-1]),interpolation=\"None\",aspect='auto')\n imshow(psi2D.real, interpolation=\"None\", aspect='auto')\n xlabel(ii)", "def setTypes(self):\n\n integers = []\n floats = [\n 'S',\n 'Pinj',\n 'coreRadFrac',\n 'qBG',\n 'lqCN',\n 'lqCF',\n 'lqPN',\n 'lqPF',\n 'fracPN',\n 'fracPF',\n 'fracCN',\n 'fracCF',\n 'fracUI',\n 'fracUO',\n 'fracLI',\n 'fracLO',\n 'fG',\n ]\n\n\n for var in 
integers:\n if (getattr(self, var) is not None) and (~np.isnan(float(getattr(self, var)))):\n try:\n setattr(self, var, tools.makeInt(getattr(self, var)))\n except:\n print(\"Error with input file var \"+var+\". Perhaps you have invalid input values?\")\n log.info(\"Error with input file var \"+var+\". Perhaps you have invalid input values?\")\n for var in floats:\n if var is not None:\n if (getattr(self, var) is not None) and (~np.isnan(float(getattr(self, var)))):\n try:\n setattr(self, var, tools.makeFloat(getattr(self, var)))\n except:\n print(\"Error with input file var \"+var+\". Perhaps you have invalid input values?\")\n log.info(\"Error with input file var \"+var+\". Perhaps you have invalid input values?\")\n\n return", "def plot_gene(adata, ax, x, y, type='gene', x_test=None, x_mean=None, x_cov=None, x_grad=None,\n scatter_kwgs=None):\n\n # basic scatter plot\n if scatter_kwgs is not None:\n ax = scv.pl.scatter(adata, x=x, y=y, ax=ax, show=False, **scatter_kwgs)\n else:\n ax = scv.pl.scatter(adata, x=x, y=y, ax=ax, show=False)\n ax.set_title(\" \")\n\n if x_test is not None:\n # add smoothed expression valued\n if x_mean is not None:\n ax.plot(x_test, x_mean, '-', color='orange', lw=3,\n label='Smoothed {} expression values'.format(type))\n # add covariance\n if x_cov is not None:\n ax.fill_between(x_test.flatten(), x_mean - np.sqrt(np.diag(x_cov)),\n x_mean + np.sqrt(np.diag(x_cov)),\n alpha=0.5, color='k')\n # add the derivative\n if x_grad is not None:\n ax.plot(x_test, x_grad, '--', color='orange', lw=3,\n label='Derivative of gene expression')\n\n ax.set_ylabel('{} expression'.format(type), fontsize=10)\n ax.set_xticks([])\n plt.legend(fontsize=10)", "def star_graph():\n pylon_graph = graph.graph()\n idx = pylon_graph.add_unique_node(ORIGIN, \"base\")\n star_list = pylon_graph.add_star_to_node(idx, 6)\n pylon_graph.connect_nodes(star_list)\n pylon_graph.save_graph(\"star\")\n return pylon_graph", "def plot_profiles(snap, profs, dust_species_to_plot, debug=False):\n print('Plotting profiles...')\n\n units = {\n 'position': 'au',\n 'gas_velocity_radial_analytical': 'dimensionless',\n 'dust_velocity_radial_analytical': 'dimensionless',\n 'velocity_radial_numerical': 'dimensionless',\n }\n p = profs['gas'][0]\n\n if debug:\n num_dust = snap.num_dust_species\n ax = p.plot(x='radius', y=['velocity_pressure', 'velocity_visc'], units=units)\n y = ['gas_velocity_radial']\n y += [f'dust_velocity_radial_{idx+1:03}' for idx in range(num_dust)]\n ax = p.plot(x='radius', y=y, units=units)\n ax.legend().remove()\n\n fig, ax = plt.subplots()\n\n # Plot \"analytical\" radial drift velocity / velocity pressure component\n p.plot(\n x='radius',\n y='gas_velocity_radial_analytical',\n units=units,\n color='black',\n label='',\n ax=ax,\n )\n y = [f'dust_velocity_radial_analytical_{idx+1:03}' for idx in dust_species_to_plot]\n p.plot(x='radius', y=y, units=units, label='', ax=ax)\n colors = [line.get_color() for line in ax.lines[1:]]\n\n # Plot \"numerical\" radial drift velocity / velocity pressure component\n p.plot(\n x='radius',\n y='velocity_radial_numerical',\n units=units,\n color='black',\n linestyle='',\n marker='o',\n markersize=4,\n fillstyle='none',\n label='gas',\n std='shading',\n ax=ax,\n )\n profs_to_plot = [\n prof for idx, prof in enumerate(profs['dust']) if idx in dust_species_to_plot\n ]\n for species, prof, color in zip(dust_species_to_plot, profs_to_plot, colors):\n label = f'{snap.properties[\"grain_size\"][species].to(\"cm\"):.1f~P}'\n prof.plot(\n x='radius',\n 
y='velocity_radial_numerical',\n units=units,\n color=color,\n linestyle='',\n marker='o',\n markersize=4,\n fillstyle='none',\n label=label,\n std='shading',\n ax=ax,\n )\n\n ax.set_ylabel(r'$v_R / |v_P|$')\n ax.grid()\n\n textstr = f't = {snap.properties[\"time\"].to(\"years\").m:.0f} years'\n bbox = dict(boxstyle='round', facecolor='white', edgecolor='grey', alpha=0.8)\n ax.text(\n 0.97,\n 0.97,\n textstr,\n transform=ax.transAxes,\n horizontalalignment='right',\n verticalalignment='top',\n bbox=bbox,\n )\n ax.legend(framealpha=0.8, edgecolor='grey')\n\n return ax", "def star(request):\n account = models.Account.current_user_account\n account.user_has_selected_nickname() # This will preserve account.fresh.\n if account.stars is None:\n account.stars = []\n keyid = request.issue.key.id()\n if keyid not in account.stars:\n account.stars.append(keyid)\n account.put()\n return respond(request, 'issue_star.html', {'issue': request.issue})", "def plot_variables(labels, plot, data):\n # Create individual figures\n fig = subplots.make_subplots(rows=1, cols=1)\n for var in labels:\n if plot == 0:\n counts = data[var].value_counts()\n fig.append_trace(go.Bar(x=counts, y=counts.index, orientation='h'), 1, 1)\n elif plot == 1:\n fig.append_trace(ff.create_distplot([list(data[var])], ['distplot'])['data'][0], 1, 1)\n fig.append_trace(ff.create_distplot([list(data[var])], ['distplot'])['data'][1], 1, 1)\n else:\n raise ValueError(\"plot number must be 0, 1\")\n # Create buttons for drop down menu\n buttons = []\n for i, label in enumerate(labels):\n if plot == 0:\n visibility = [i == j for j in range(len(labels))]\n else:\n visibility = [j//2 == i for j in range(2*len(labels))]\n button = dict(\n label=label,\n method='update',\n args=[{'visible': visibility},\n {'title': label}])\n buttons.append(button)\n updatemenus = list([\n dict(active=-1,\n x=1.06, y=1.27,\n buttons=buttons\n )\n ])\n # Setup layout\n if plot == 0:\n fig['layout']['title'] = \"Distribution of categorical and discrete variables:\"\n fig.update_traces(marker_color='rgb(158,202,225)', marker_line_color='rgb(8,48,107)',\n marker_line_width=1.5, opacity=0.7)\n elif plot == 1:\n fig['layout']['title'] = \"Distribution of continuous variables:\"\n fig.update_traces(marker_color='rgb(112, 125, 188)', opacity=0.8)\n elif plot == 2:\n fig['layout']['title'] = \"Boxplot of continuous variables by score:\"\n fig['layout']['showlegend'] = False\n fig['layout']['updatemenus'] = updatemenus\n iplot(fig, config={\"displayModeBar\": False})", "def CreateNorthSymbol(ntype=GVNORTHSYM1,color1=(0.0,0.0,0.0,1.0),\n color2=(1.0,1.0,1.0,1.0),scale=1.0,symbol_manager=None):\n\n if symbol_manager is None:\n sm=gview.GvSymbolManager()\n else:\n sm=symbol_manager\n\n cstr1=gvogrfs.gv_to_ogr_color(color1)\n if len(cstr1) < 9:\n cstr1=cstr1+\"FF\"\n\n cstr2=gvogrfs.gv_to_ogr_color(color2)\n if len(cstr2) < 9:\n cstr2=cstr2+\"FF\"\n\n sstr = str(scale).replace('.','_')\n\n refname=ntype+cstr1[1:]+cstr2[1:]+sstr\n if ntype==GVNORTHSYM1: \n shape=gview.GvShape(type=gview.GVSHAPE_AREA)\n shape.set_node(1.0*scale,-2.6*scale,node=0)\n shape.set_node(0.0,-0.8*scale,node=1)\n shape.set_node(-1.0*scale,-2.6*scale,node=2)\n shape.set_node(0.0,2.6*scale,node=3)\n shape.set_node(1.0*scale,-2.6*scale,node=4)\n shape.set_property('_gv_ogrfs','PEN(c:'+cstr1+');BRUSH(c:'+\\\n cstr2+')')\n sm.inject_vector_symbol(refname,shape)\n\n return (refname,sm)", "def plot_containment_vs_energy(\n self, fractions=[0.68, 0.95], thetas=Angle([0, 1], \"deg\"), ax=None\n ):\n import 
matplotlib.pyplot as plt\n\n ax = plt.gca() if ax is None else ax\n\n energy = energy_logspace(self.energy_lo[0], self.energy_hi[-1], 100)\n\n for theta in thetas:\n for fraction in fractions:\n radius = self.containment_radius(energy, theta, fraction)\n label = f\"{theta.deg} deg, {100 * fraction:.1f}%\"\n ax.plot(energy.value, radius.value, label=label)\n\n ax.semilogx()\n ax.legend(loc=\"best\")\n ax.set_xlabel(\"Energy (TeV)\")\n ax.set_ylabel(\"Containment radius (deg)\")", "def doParametersOfInterest(self):\n\n self.modelBuilder.doVar(\"Rdy[1.,0.0,10.0]\");\n self.modelBuilder.doVar(\"Rbk[1.,0.0,10.0]\");\n self.modelBuilder.doVar(\"Rqcd_emu[1,0.0,10.0]\");\n self.modelBuilder.doSet(\"POI\",\"Rbk,Rdy,Rqcd_emu\")", "def ShapeVar(name):\n return TypeVar(name, kind=Kind.ShapeVar)" ]
[ "0.6919843", "0.5500659", "0.5284528", "0.5282017", "0.49810436", "0.49508908", "0.49358875", "0.47995615", "0.47526926", "0.4750297", "0.4654779", "0.4653055", "0.46524152", "0.46244058", "0.46109277", "0.45994213", "0.45680085", "0.45665252", "0.45277017", "0.4504224", "0.44967166", "0.44825602", "0.44634634", "0.44269055", "0.4426561", "0.4409574", "0.4406664", "0.44041947", "0.4397185", "0.4381367", "0.43763986", "0.43657085", "0.4363257", "0.43616712", "0.4355397", "0.43382013", "0.43233448", "0.4316523", "0.4306278", "0.42985672", "0.42956075", "0.42921025", "0.42860547", "0.428559", "0.42841098", "0.42837924", "0.42741182", "0.42716935", "0.42715612", "0.42610174", "0.4255601", "0.42452174", "0.4241909", "0.42368418", "0.42316726", "0.42305443", "0.42279032", "0.42225656", "0.42171893", "0.42133537", "0.42129794", "0.42104915", "0.42038083", "0.42027298", "0.4202336", "0.41938278", "0.4170596", "0.41704097", "0.41670585", "0.41633597", "0.41604564", "0.415987", "0.41564545", "0.4156138", "0.41501328", "0.4147672", "0.4147156", "0.4145694", "0.4141405", "0.41363758", "0.41233897", "0.4120755", "0.41147292", "0.4112108", "0.41061005", "0.40945658", "0.4093882", "0.4092829", "0.40879813", "0.40859124", "0.40855175", "0.40836605", "0.40751696", "0.40666807", "0.4063491", "0.40632075", "0.40611893", "0.40594062", "0.4056992", "0.40558895" ]
0.76934016
0
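The three fields just above (negative_scores, document_score, document_rank) fit together; the sketch below spells out one plausible reading, assuming document_rank counts how many negatives outscore the positive document — an inference from the values shown, not something the dump itself documents.

# Minimal consistency check; the three scores stand in for the full
# negative_scores list printed above (assumed ranking rule, not documented).
negative_scores = [0.6919843, 0.5500659, 0.40558895]
document_score = 0.76934016

# document_rank == number of negatives scoring above the document,
# so 0 means the positive document ranks first for its query.
document_rank = sum(s > document_score for s in negative_scores)
assert document_rank == 0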
Child function of plot_variable_stars. Process the DataFrame to select only the stars marked with one of the 'var_type' variable-star types.
def get_variable_stars(df_data, df_variables_names, variabletype=None):
    if variabletype is None:
        variabletype = ['CEP', 'BCEP', 'BCEPS', 'DSCT', 'SR', 'SRA', 'SRB', 'SRC', 'SRD', 'RR', 'RRAB', 'RRC',
                        'GDOR', 'SPB', 'M', 'LPV']
    print "Selecting variable stars.."
    # create a string "var_type" of variabletype separated by or ('|').
    # var_type = "|".join(variabletype)
    # check if var_type is contained in Type (any or all, partial or not)
    # are_variables = df_variables_names[df_variables_names.Type.str.contains(var_type) == True]  # fails with "is True"
    # are_variables.Type = are_variables.Type.str.replace(".*BCEP.*", "BCEP")  # rename all types containing 'BCEP'
    are_variables = df_variables_names[df_variables_names.Type.isin(variabletype)]
    types_df = are_variables[['hip', 'tycho2_id', 'source_id', 'Type', 'Name']]
    print "..Done"
    print "Preparing subselection of initial DataFrame.."
    print "..Making Hipparcos list.."
    hip_list = are_variables.hip.tolist()
    hip_list = np.array(hip_list)
    hip_list = hip_list[~np.isnan(hip_list)]  # remove the nans
    hip_list = list(hip_list)
    print "..Making Tycho2 list.."
    tycho2_list = are_variables.tycho2_id.tolist()
    tycho2_list = np.array(tycho2_list)
    tycho2_list = tycho2_list[tycho2_list != 'nan']  # tycho2 is str
    tycho2_list = list(tycho2_list)
    print "..Done\n----------"
    print "Getting Hipparcos and Tycho variable objects.."
    hip_objects = df_data[df_data.hip.isin(hip_list)]
    hip_objects = pd.merge(hip_objects, types_df, on='hip', how='inner')
    if 'tycho2_id_y' in hip_objects.columns:
        hip_objects = hip_objects.drop('tycho2_id_y', axis=1)
    hip_objects = hip_objects.rename(columns={'hip_x': 'hip', 'tycho2_id_x': 'tycho2_id'})
    tycho_objects = df_data[df_data.tycho2_id.isin(tycho2_list)]
    tycho_objects = pd.merge(tycho_objects, types_df, on='tycho2_id', how='inner')
    if 'hip_y' in tycho_objects.columns:
        tycho_objects = tycho_objects.drop('hip_y', axis=1)
    tycho_objects = tycho_objects.rename(columns={'hip_x': 'hip', 'tycho2_id_x': 'tycho2_id'})
    print "..Done\n----------"
    print "Getting roAp stars from file.."
    # roAP_names.csv contains tycho2_id names of roAp stars
    with open('roAP/roAP_names.csv') as roAP_file:
        roap_objects_list = roAP_file.readlines()
    roap_objects_list = [line.rstrip() for line in roap_objects_list]
    roap_objects = df_data[df_data.tycho2_id.isin(roap_objects_list)]
    column_number = len(roap_objects.columns)
    roap_objects.insert(column_number, 'Type', 'roAp')
    print "..Done\n----------"
    variable_df = pd.concat([hip_objects, tycho_objects, roap_objects], axis=0, ignore_index=True)
    variable_df.source_id = variable_df.source_id.fillna(-9999).astype(int)
    return variable_df
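For context, a minimal sketch of how this selection step is typically driven before handing the result to the plotting routine; the CSV file names below are placeholders (only the columns the function actually uses matter), and numpy/pandas are assumed to be imported in the function's module.

import numpy as np  # required by the function body (np.array, np.isnan)
import pandas as pd

# Hypothetical input files; any source providing the columns used above
# ('hip', 'tycho2_id', 'source_id', 'Type', 'Name') would do. The call
# also expects the 'roAP/roAP_names.csv' file referenced in the body.
df_data = pd.read_csv('tgas_data.csv')
df_variables_names = pd.read_csv('variables.csv')

# Restrict the selection to Cepheid-like classes instead of the default list.
variable_df = get_variable_stars(df_data, df_variables_names,
                                 variabletype=['CEP', 'BCEP', 'BCEPS'])

# The returned frame carries a 'Type' column, which the parent routine
# plot_variable_stars uses to assign one marker/color per class.
print(variable_df['Type'].value_counts())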
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_variable_stars(variablesdf, variabletype=None, x='B_V', y='M_V'):\n if variabletype is None:\n variabletype = ['CEP', 'BCEP', 'BCEPS', 'DSCT', 'SR', 'SRA', 'SRB', 'SRC', 'SRD', 'RR', 'RRAB', 'RRC', 'GDOR',\n 'SPB', 'M', 'LPV', 'roAp']\n markers = ['^', 'D', 'D', 'v', 's', 'D', 'D', 'D', 'D', 's', 'D', 'D', 'D', 'o', 'p', 'o', 'o']\n colors = ['k', 'k', 'k', '#00c000', 'r', 'r', 'r', 'r', 'r', 'm', 'm', 'm', '#00c0ff', (1, .7, 0), 'w', 'w', 'r']\n sizes = [50, 40, 40, 40, 50, 40, 40, 40, 40, 50, 50, 50, 40, 40, 45, 40, 40]\n labels = ['', \"BCEP, BCEPS\", '', 'DSCT', 'SR', \"SRA, SRB, SRC, SRD\", '', '', '', 'RR', \"RRAB, RRC\", '', 'GDOR',\n 'SPB', '', 'LPV', 'roAp']\n for i in range(len(variabletype)):\n if i in [2, 6, 7, 8, 11]:\n my_label = None\n else:\n my_label = \"%s\" % labels[i]\n plt.scatter(variablesdf[x].loc[variablesdf.loc[:, 'Type'] == variabletype[i]], variablesdf[y]\n .loc[variablesdf.loc[:, 'Type'] == variabletype[i]], facecolor=colors[i], marker=markers[i],\n s=sizes[i], label=my_label, edgecolor='k')\n print \"plotting %s as %s%s\" % (variabletype[i], colors[i], markers[i])\n return", "def plot_data_types(self, variable, **kwargs):\n return self.visualizer.plot_data_types(variable, **kwargs)", "def select_variables(df, dtype=\"numeric\"):\n numerics = ['int16', 'int32', 'int64', 'float16', 'float32', 'float64']\n if dtype == \"numeric\":\n subset = df.copy().select_dtypes(include = numerics)\n else:\n subset = df.copy().select_dtypes(include != numerics)\n return(subset)", "def create_plot(x_var, y_var):\r\n\r\n FILE_PATH = 'application/star_data.csv'\r\n TARGET_VAR = 'star_type'\r\n SIZE_VAR = 'r_clipped'\r\n WIDTH = 1000\r\n HEIGHT = 600\r\n\r\n # Get the data\r\n df = pd.read_csv(FILE_PATH)\r\n fig = px.scatter(df, x=x_var, y=y_var, color=TARGET_VAR, size=SIZE_VAR, \r\n width=WIDTH, height=HEIGHT)\r\n graphJSON = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)\r\n\r\n return graphJSON", "def plot_selected(df, title='title', columns=[], shouldNormalize=True, symbol='any stock'):\n # df = df[columns][start_index:end_index]\n # df = df.loc[start_index:end_index, columns]\n df = df.loc[:, columns]\n ylabel = \"Price\"\n normal = \"un normalized\"\n if shouldNormalize:\n df = normalize(df.loc[:, ['Close', 'sma200']])\n ylabel = \"%\"\n normal = \"normalized\"\n # print('df.shape in plot=',df.shape)\n plot_data(df, title=title, ylabel=ylabel)", "def plot_selected(df, title='title', columns=[], shouldNormalize = True, symbol='any stock'):\n #df = df[columns][start_index:end_index]\n #df = df.loc[start_index:end_index, columns]\n df = df.loc[:, columns]\n ylabel=\"Price\"\n normal = \"un normalized\"\n if shouldNormalize:\n df = normalize(df.loc[:,['Close', 'sma200']])\n ylabel = \"%\"\n normal = \"normalized\"\n #print('df.shape in plot=',df.shape)\n plot_data(df, title=title, ylabel=ylabel)", "def plot_type_of_topic(data_frame: pb.DataFrame) -> None:\n plt.interactive(False)\n plt.figure()\n data_frame.plot(kind='bar', x= data_frame['TopicID'])\n plt.show()", "def update_graph_type(variable_dropdown_x, variable_dropdown_y):\n\n options = {\n \"violin\": {\"label\": \"Violin\", \"value\": 1},\n \"scatter\": {\"label\": \"Scatter\", \"value\": 2},\n \"bar\": {\"label\": \"Bar\", \"value\": 3},\n \"pie\": {\"label\": \"Pie\", \"value\": 4},\n # \"box\": {\"label\": \"Box\", \"value\": 5,},\n }\n\n if variable_dropdown_x is None:\n return [], None, True, \"Select a graph type\"\n\n graph_selection_list = []\n\n if variable_dropdown_y is None:\n # Only one 
variable selected\n field_id = variable_dropdown_x\n value_type = get_field_type(field_id)\n\n supported_graphs = value_type.supported_graphs\n\n for option_key in options:\n option = options[option_key]\n graph_type = option[\"value\"]\n if graph_type in supported_graphs:\n graph_selection_list.append(option)\n\n else:\n # Both variables selected\n # Logic is:\n # If the x-axis variable is continuous, integer, date or time:\n # If the y-axis variable is continuous or integer:\n # You can use scatter plot\n # Else if x-axis variable is categorical:\n # If the y-axis variable is continuous or integer:\n # You can use violin plot, box plot\n x_value_type = get_field_type(str(variable_dropdown_x))\n y_value_type = get_field_type(str(variable_dropdown_y))\n\n if (\n x_value_type == ValueType.INTEGER\n or x_value_type == ValueType.CONT\n or x_value_type == ValueType.DATE\n or x_value_type == ValueType.TIME\n ):\n if y_value_type == ValueType.INTEGER or y_value_type == ValueType.CONT:\n graph_selection_list.append(options[\"scatter\"])\n\n elif x_value_type == ValueType.CAT_SINGLE or x_value_type == ValueType.CAT_MULT:\n if y_value_type == ValueType.INTEGER or y_value_type == ValueType.CONT:\n # graph_selection_list.append(options[\"box\"])\n graph_selection_list.append(options[\"violin\"])\n\n if len(graph_selection_list) == 0:\n return graph_selection_list, None, True, \"No supported graph types\"\n\n return (\n graph_selection_list,\n graph_selection_list[0][\"value\"],\n False,\n \"Select a graph type\",\n )", "def plotvars_core(gs, data, plotfun=vis.plot_r, plot_radars=True,\n projection=PROJECTION, **kws):\n trans = ccrs.PlateCarree()\n axd = dict(ker=plt.subplot(gs[0, 0], projection=projection),\n kum=plt.subplot(gs[0, 1], projection=projection),\n van=plt.subplot(gs[1, 0], projection=projection),\n com=plt.subplot(gs[1, 1], projection=projection))\n for key in ['ker', 'kum']:\n axd[key].set_xticks([])\n for key in ['kum', 'com']:\n axd[key].set_yticks([])\n ax_cb = plt.subplot(gs[:, -1])\n for key in NAMES.keys():\n ax = axd[key]\n ax.set_ymargin(0)\n ax.set_xmargin(0)\n plotfun(data[I_RADAR[key]], ax=ax, cax=ax_cb, transform=trans, **kws)\n ax.set_title(NAMES[key])\n ax.coastlines(resolution='10m')\n if plot_radars:\n if key != 'com':\n RADAR[key].draw_marker(ax=ax, transform=trans)\n else:\n for radarkey in ['ker', 'kum', 'van']:\n RADAR[radarkey].draw_marker(ax=ax, transform=trans)\n return axd", "def get_safety_vars_plot(self):\n if 'safety_vars_stats' not in self.stats:\n raise ValueError('No safety vars statistics present in this evaluator.')\n\n safety_vars = self.stats['safety_vars_stats'][0].keys()\n n_plots = len(safety_vars)\n fig, axes = plt.subplots(n_plots, 1, figsize=(8, 6 * n_plots))\n\n for idx, var in enumerate(safety_vars):\n series = collections.defaultdict(list)\n for ep in self.stats['safety_vars_stats']:\n for stat in ep[var]:\n series[stat].append(ep[var][stat])\n ax = axes[idx]\n for stat in ['min', 'max']:\n ax.plot(np.squeeze(np.array(series[stat])), label=stat)\n x = range(len(series['mean']))\n\n mean = np.squeeze(np.array(series['mean']))\n std_dev = np.squeeze(np.array(series['std_dev']))\n ax.plot(x, mean, label='Value')\n ax.fill_between(\n range(len(series['mean'])), mean - std_dev, mean + std_dev, alpha=0.3)\n ax.set_title('Stats for {}'.format(var))\n ax.legend()\n ax.spines['top'].set_visible(False)\n\n ax.xaxis.set_ticks_position('bottom')\n ax.set_xlabel('Episode #')\n ax.set_ylabel('Magnitude')\n ax.plot()\n return fig", "def 
generate_var_scatter(self):\n pass", "def plot_histplots(\n df: pd.DataFrame,\n var_type: str = \"quant\",\n drop_cols: list = None,\n figsize=(15, 20),\n sub_col=3,\n ticksize=15,\n div: int = 1,\n subplot=True,\n) -> sns.histplot:\n\n assert var_type == \"quant\" or \"qual\", \"var_type has to be either 'quant' or 'qual'.\"\n\n def print_error():\n print(f\"Input var_type: {var_type} is invalid.\")\n print(\"Valide var_type can only be 'quant' or 'qual'.\")\n return\n\n def print_col():\n print(f\"Number of {var_type}itaive columns: {df.shape[1]}\")\n return\n\n def create_fig():\n # create figure and axes based on the number of columns of the dataframe\n _, axes = plt.subplots(\n ceil(len(df.columns) / sub_col), sub_col, figsize=figsize\n )\n y = 0 # set counter\n return axes, y\n\n if not subplot:\n # plt.figure(figsize=figsize)\n if var_type == \"quant\":\n sns.histplot(x=df)\n elif var_type == \"qual\":\n sns.histplot(y=df)\n else:\n print_error()\n\n else:\n # drop unnecessary columns\n if drop_cols:\n df = df.drop(drop_cols, axis=1)\n\n # create relative dataframe according to the var_type\n if var_type == \"quant\":\n # keep only quantitative features\n df = create_quanti_df(df)\n print_col()\n axes, y = create_fig()\n # plot histplot for each column of data\n for col in df.columns:\n i, j = divmod(y, sub_col)\n # sns.histplot(x=df[col], ax=axes[i, j]).set_title(col, fontsize=20)\n sns.histplot(x=df[col][: int(len(df) / div)], ax=axes[i, j]).set_title(\n col, fontsize=20\n )\n y += 1\n elif var_type == \"qual\":\n # keep only qualitatve features\n df = create_quali_df(df)\n print_col()\n axes, y = create_fig()\n # plot histplot for each column of data\n for col in df.columns:\n i, j = divmod(y, sub_col)\n ax = axes[i, j]\n sns.histplot(y=df[col], ax=ax)\n ax.set_title(col, fontsize=20)\n ax.tick_params(axis=\"y\", which=\"major\", labelsize=ticksize)\n y += 1\n else:\n print_error()\n\n plt.tight_layout()\n plt.show()\n return", "def check_is_plottable(self, var):\n self.plot_button.disabled = False # important to enable button once disabled\n data = self.data[var[0]]\n self.plot_button.disabled = len(data.dims) <= 1", "def star_rating(table, record_id, splitstars=False):\n import uuid\n id = uuid.uuid4()\n row=db(db.plugin_wiki_rating.tablename==table)(db.plugin_wiki_rating.record_id==record_id).select().first()\n rating = row.rating if row else 0\n callback = URL('plugin_wiki', 'star_rate', args = [table,record_id])\n incr = 0.5 if splitstars else 1\n return TAG[''](DIV(_id='star'+str(id),_class='rating'),\n SCRIPT(\"jQuery(document).ready(function(){jQuery('%(uid)s').rating('%(callback)s',{increment:%(incr)s, maxvalue:5, curvalue:%(rating)s});});\" % dict(uid='#star'+str(id), callback=callback,incr=incr, rating=rating)))", "def plot_diagram_value(\n df_bugs: pd.DataFrame,\n column_to_inspect: str,\n exclude_values: List[str] = [],\n mapping_latex: Dict[str, str] = None,\n latex_format: bool = False\n ):\n df = expand_columns(df_bugs, column_to_inspect)\n records = []\n for col_value in list(df[column_to_inspect].unique()):\n count = len(df[df[column_to_inspect] == col_value])\n if latex_format and mapping_latex is not None:\n col_value = mapping_latex[col_value]\n records.append(\n {\"code\": col_value, \"count\": count}\n )\n df_agg = pd.DataFrame.from_records(records)\n df_agg = df_agg.groupby(\"code\").sum().reset_index()\n print(\"-\" * 80)\n for i, row in df_agg.iterrows():\n code = str(row['code'])\n count = str(row['count'])\n if code not in exclude_values:\n if 
latex_format:\n print(\n \"\\\\node[above right=-.5em and -1.5em of \" + code +\n \"] {\" + count + \"};\"\n )\n else:\n print(f\"'{code}' was annotated {count} time(s).\")\n print(\"-\" * 80)", "def _format_variables(df: EDAFrame, cfg: Config, data: Dict[str, Any]) -> Dict[str, Any]:\n res: Dict[str, Any] = {}\n # variables\n if not cfg.variables.enable:\n res[\"has_variables\"] = False\n return res\n\n res[\"variables\"] = {}\n res[\"has_variables\"] = True\n for col in df.columns:\n try:\n stats: Any = None # needed for pylint\n dtp = df.get_eda_dtype(col)\n tab_names: List[str] = []\n if isinstance(dtp, Continuous):\n itmdt = Intermediate(col=col, data=data[col], visual_type=\"numerical_column\")\n stats = format_num_stats(data[col])\n tab_names = [\"Stats\", \"Histogram\", \"KDE Plot\", \"Normal Q-Q Plot\"]\n elif type(dtp) in [Nominal, SmallCardNum, GeoGraphy, GeoPoint]:\n itmdt = Intermediate(col=col, data=data[col], visual_type=\"categorical_column\")\n stats = format_cat_stats(\n data[col][\"stats\"], data[col][\"len_stats\"], data[col][\"letter_stats\"]\n )\n tab_names = [\"Stats\", \"Word Length\", \"Pie Chart\", \"Word Cloud\", \"Word Frequency\"]\n elif isinstance(dtp, DateTime):\n itmdt = Intermediate(\n col=col,\n data=data[col][\"stats\"],\n line=data[col][\"line\"],\n visual_type=\"datetime_column\",\n )\n stats = stats_viz_dt(data[col][\"stats\"])\n else:\n raise RuntimeError(f\"the type of column {col} is unknown: {type(dtp)}\")\n\n rndrd = render(itmdt, cfg)\n layout = rndrd[\"layout\"]\n figs_var: List[Figure] = []\n for tab in layout:\n try:\n fig = tab.children[0]\n except AttributeError:\n fig = tab\n # fig.title = Title(text=tab.title, align=\"center\")\n figs_var.append(fig)\n comp = components(figs_var)\n\n res[\"variables\"][col] = {\n \"tabledata\": stats,\n \"col_type\": itmdt.visual_type.replace(\"_column\", \"\"),\n \"tab_names\": tab_names,\n \"plots\": comp,\n }\n\n except:\n print(f\"error happended in column:{col}\", file=sys.stderr)\n raise\n\n return res", "def plot_missing_values(self, variable, **kwargs):\n return self.visualizer.plot_missing_values(variable, **kwargs)", "def drawPairPlot(df):\n plt.style.use('dark_background')\n warnings.filterwarnings(\"ignore\")\n types = getSpectralTypes()\n colors = getColors()\n sns.set_palette(sns.color_palette(colors))\n g = sns.pairplot(df, hue=\"spectral_type\", hue_order=types, dropna=True,\n vars=[\"stellar_age\", \"stellar_temperature\", \n \"stellar_luminosity\", \"stellar_mass\", \n \"stellar_radius\", \"stellar_surface_gravity\", \n \"optical_magnitude\", \"stellar_metallicity\"])\n plt.show()", "def plotting_helper_method(x_axis, y_axis, df):\n genre_dict = {\n 'g':'Rock',\n 'b':'Hip-Hop',\n 'r':'Pop'\n }\n for color, genre in genre_dict.items():\n filtered_df = df[df['genre'] == genre]\n plt.scatter(filtered_df[x_axis], filtered_df[y_axis], c=color, label=genre)", "def plot(var):\n # MISSCHIEN KUNNEN WE HIER NOG IETS MEE\n # total_dead = len(train_data[\"Survived\"] == 0)\n # total_survived = len(train_data[\"Survived\"] == 1)\n # died = train_data[train_data[\"Survived\"] == 0][var].value_counts() / total_dead\n # survived = train_data[train_data[\"Survived\"] == 1][var].value_counts() / total_survived\n sns.set()\n sns.set_color_codes(\"pastel\")\n\n # order bars for family size variable\n if var == \"FamSize\":\n sns.barplot(x=var, y=\"Survived\", data=train_data, color=\"b\",\\\n capsize=.1, errwidth=.7, order=[\"alone\", 1, 2, 3, \"4 or more\"]).\\\n tick_params(labelsize=18)\n else:\n 
sns.barplot(x=var, y=\"Survived\", data=train_data, color=\"b\",\\\n capsize=.1, errwidth=1.1).tick_params(labelsize=18)\n\n # plot style properties\n ax = plt.gca()\n\n for ax in plt.gcf().axes:\n x = ax.get_xlabel()\n y = ax.get_ylabel()\n ax.set_xlabel(x, fontsize=20)\n ax.set_ylabel(y, fontsize=20)\n\n plt.title(\"Ratio of survivors for variable \" + str(var), fontsize=22)\n t = ax.title\n t.set_position([.5, 1.05])\n plt.ylim([0, 1])\n plt.subplots_adjust(bottom=.15, left=.15)\n plt.savefig(\"results/survived_\" + str(var) + \".png\", bbox_inches=\"tight\")\n\n plt.show()", "def plot_posteriors(self, variants=[]):\n if variants != []:\n for var in variants:\n if var not in self.posteriors.keys():\n raise ValueError(('Variants must only be a value in '\n 'bucket_col_name'))\n self._plot_posteriors(variants)", "def plot_variables(labels, plot, data):\n # Create individual figures\n fig = subplots.make_subplots(rows=1, cols=1)\n for var in labels:\n if plot == 0:\n counts = data[var].value_counts()\n fig.append_trace(go.Bar(x=counts, y=counts.index, orientation='h'), 1, 1)\n elif plot == 1:\n fig.append_trace(ff.create_distplot([list(data[var])], ['distplot'])['data'][0], 1, 1)\n fig.append_trace(ff.create_distplot([list(data[var])], ['distplot'])['data'][1], 1, 1)\n elif plot == 2:\n fig.add_trace(go.Box(x=list(data[data[\"Score\"] == \"good\"][var]), name=\"Good\", hoverinfo=\"x\", marker_color='mediumturquoise'))\n fig.add_trace(go.Box(x=list(data[data[\"Score\"] == \"bad\"][var]), name=\"Bad\", hoverinfo=\"x\", marker_color='darkorange'))\n else:\n raise ValueError(\"plot number must be 0, 1, or 2\")\n # Create buttons for drop down menu\n buttons = []\n for i, label in enumerate(labels):\n if plot == 0:\n visibility = [i == j for j in range(len(labels))]\n else:\n visibility = [j//2 == i for j in range(2*len(labels))]\n button = dict(\n label=label,\n method='update',\n args=[{'visible': visibility},\n {'title': label}])\n buttons.append(button)\n updatemenus = list([\n dict(active=-1,\n x=1.06, y=1.27,\n buttons=buttons\n )\n ])\n # Setup layout\n if plot == 0:\n fig['layout']['title'] = \"Distribution of categorical and discrete variables:\"\n fig.update_traces(marker_color='rgb(158,202,225)', marker_line_color='rgb(8,48,107)',\n marker_line_width=1.5, opacity=0.7)\n elif plot == 1:\n fig['layout']['title'] = \"Distribution of continuous variables:\"\n fig.update_traces(marker_color='rgb(112, 125, 188)', opacity=0.8)\n elif plot == 2:\n fig['layout']['title'] = \"Boxplot of continuous variables by score:\"\n fig['layout']['showlegend'] = False\n fig['layout']['updatemenus'] = updatemenus\n iplot(fig, config={\"displayModeBar\": False})", "def dyn_flareplots(df, folderpath, dyn_list, itype, flare_template = False):\n os.makedirs(folderpath, exist_ok = True)\n colors_auld = ['#800000', '#860000', '#8c0000', '#930000', '#990000', '#9f0000', '#a60000', '#ac0000', '#b20000', '#b90000', '#bf0000', '#c50000', '#cc0000', '#d20000', '#d80000', '#df0000', '#e50000', '#eb0000', '#f20000', '#f80000', '#ff0000', '#ff0700', '#ff0e00', '#ff1500', '#ff1c00', '#ff2300', '#ff2a00', '#ff3100', '#ff3800', '#ff3f00', '#ff4600', '#ff4d00', '#ff5400', '#ff5b00', '#ff6200', '#ff6900', '#ff7000', '#ff7700', '#ff7e00', '#ff8500', '#ff8c00', '#ff9100', '#ff9700', '#ff9d00', '#ffa300', '#ffa800', '#ffae00', '#ffb400', '#ffba00', '#ffbf00', '#ffc500', '#ffcb00', '#ffd100', '#ffd600', '#ffdc00', '#ffe200', '#ffe800', '#ffed00', '#fff300', '#fff900', '#ffff00', '#f2ff00', '#e5ff00', '#d8ff00', '#ccff00', 
'#bfff00', '#b2ff00', '#a5ff00', '#99ff00', '#8cff00', '#7fff00', '#72ff00', '#66ff00', '#59ff00', '#4cff00', '#3fff00', '#33ff00', '#26ff00', '#19ff00', '#0cff00', '#00ff00', '#0afc0a', '#15fa15', '#1ff81f', '#2af62a', '#34f434', '#3ff13f', '#49ef49', '#54ed54', '#5eeb5e', '#69e969', '#74e674', '#7ee47e', '#89e289', '#93e093', '#9ede9e', '#a8dba8', '#b3d9b3', '#bdd7bd', '#c8d5c8', '#d3d3d3']\n colors_ylorrd = ['#800026', '#850026', '#8a0026', '#8f0026', '#940026', '#990026', '#9e0026', '#a30026', '#a80026', '#ad0026', '#b20026', '#b70026', '#bd0026', '#c00225', '#c30424', '#c60623', '#c90822', '#cc0a21', '#d00d21', '#d30f20', '#d6111f', '#d9131e', '#dc151d', '#df171c', '#e31a1c', '#e51e1d', '#e7221e', '#e9271f', '#eb2b20', '#ed2f21', '#ef3423', '#f13824', '#f33c25', '#f54126', '#f74527', '#f94928', '#fc4e2a', '#fc532b', '#fc582d', '#fc5d2e', '#fc6330', '#fc6831', '#fc6d33', '#fc7234', '#fc7836', '#fc7d37', '#fc8239', '#fc873a', '#fd8d3c', '#fd903d', '#fd933e', '#fd9640', '#fd9941', '#fd9c42', '#fd9f44', '#fda245', '#fda546', '#fda848', '#fdab49', '#fdae4a', '#feb24c', '#feb54f', '#feb853', '#febb56', '#febf5a', '#fec25d', '#fec561', '#fec864', '#fecc68', '#fecf6b', '#fed26f', '#fed572', '#fed976', '#feda79', '#fedc7d', '#fede80', '#fedf84', '#fee187', '#fee38b', '#fee48e', '#fee692', '#fee895', '#fee999', '#feeb9c', '#ffeda0', '#ffeea3', '#fff0a7', '#fff1ab', '#fff3ae', '#fff4b2', '#fff6b6', '#fff7b9', '#fff9bd', '#fffac1', '#fffcc4', '#fffdc8', '#ffffcc']\n colors_inferno = ['#000003', '#000004', '#000006', '#010007', '#010109', '#01010B', '#02010E', '#020210', '#030212', '#040314', '#040316', '#050418', '#06041B', '#07051D', '#08061F', '#090621', '#0A0723', '#0B0726', '#0D0828', '#0E082A', '#0F092D', '#10092F', '#120A32', '#130A34', '#140B36', '#160B39', '#170B3B', '#190B3E', '#1A0B40', '#1C0C43', '#1D0C45', '#1F0C47', '#200C4A', '#220B4C', '#240B4E', '#260B50', '#270B52', '#290B54', '#2B0A56', '#2D0A58', '#2E0A5A', '#300A5C', '#32095D', '#34095F', '#350960', '#370961', '#390962', '#3B0964', '#3C0965', '#3E0966', '#400966', '#410967', '#430A68', '#450A69', '#460A69', '#480B6A', '#4A0B6A', '#4B0C6B', '#4D0C6B', '#4F0D6C', '#500D6C', '#520E6C', '#530E6D', '#550F6D', '#570F6D', '#58106D', '#5A116D', '#5B116E', '#5D126E', '#5F126E', '#60136E', '#62146E', '#63146E', '#65156E', '#66156E', '#68166E', '#6A176E', '#6B176E', '#6D186E', '#6E186E', '#70196E', '#72196D', '#731A6D', '#751B6D', '#761B6D', '#781C6D', '#7A1C6D', '#7B1D6C', '#7D1D6C', '#7E1E6C', '#801F6B', '#811F6B', '#83206B', '#85206A', '#86216A', '#88216A', '#892269', '#8B2269', '#8D2369', '#8E2468', '#902468', '#912567', '#932567', '#952666', '#962666', '#982765', '#992864', '#9B2864', '#9C2963', '#9E2963', '#A02A62', '#A12B61', '#A32B61', '#A42C60', '#A62C5F', '#A72D5F', '#A92E5E', '#AB2E5D', '#AC2F5C', '#AE305B', '#AF315B', '#B1315A', '#B23259', '#B43358', '#B53357', '#B73456', '#B83556', '#BA3655', '#BB3754', '#BD3753', '#BE3852', '#BF3951', '#C13A50', '#C23B4F', '#C43C4E', '#C53D4D', '#C73E4C', '#C83E4B', '#C93F4A', '#CB4049', '#CC4148', '#CD4247', '#CF4446', '#D04544', '#D14643', '#D24742', '#D44841', '#D54940', '#D64A3F', '#D74B3E', '#D94D3D', '#DA4E3B', '#DB4F3A', '#DC5039', '#DD5238', '#DE5337', '#DF5436', '#E05634', '#E25733', '#E35832', '#E45A31', '#E55B30', '#E65C2E', '#E65E2D', '#E75F2C', '#E8612B', '#E9622A', '#EA6428', '#EB6527', '#EC6726', '#ED6825', '#ED6A23', '#EE6C22', '#EF6D21', '#F06F1F', '#F0701E', '#F1721D', '#F2741C', '#F2751A', '#F37719', '#F37918', '#F47A16', '#F57C15', '#F57E14', '#F68012', '#F68111', 
'#F78310', '#F7850E', '#F8870D', '#F8880C', '#F88A0B', '#F98C09', '#F98E08', '#F99008', '#FA9107', '#FA9306', '#FA9506', '#FA9706', '#FB9906', '#FB9B06', '#FB9D06', '#FB9E07', '#FBA007', '#FBA208', '#FBA40A', '#FBA60B', '#FBA80D', '#FBAA0E', '#FBAC10', '#FBAE12', '#FBB014', '#FBB116', '#FBB318', '#FBB51A', '#FBB71C', '#FBB91E', '#FABB21', '#FABD23', '#FABF25', '#FAC128', '#F9C32A', '#F9C52C', '#F9C72F', '#F8C931', '#F8CB34', '#F8CD37', '#F7CF3A', '#F7D13C', '#F6D33F', '#F6D542', '#F5D745', '#F5D948', '#F4DB4B', '#F4DC4F', '#F3DE52', '#F3E056', '#F3E259', '#F2E45D', '#F2E660', '#F1E864', '#F1E968', '#F1EB6C', '#F1ED70', '#F1EE74', '#F1F079', '#F1F27D', '#F2F381', '#F2F485', '#F3F689', '#F4F78D', '#F5F891', '#F6FA95', '#F7FB99', '#F9FC9D', '#FAFDA0', '#FCFEA4']\n colors_magma = ['#000003', '#000004', '#000006', '#010007', '#010109', '#01010B', '#02020D', '#02020F', '#030311', '#040313', '#040415', '#050417', '#060519', '#07051B', '#08061D', '#09071F', '#0A0722', '#0B0824', '#0C0926', '#0D0A28', '#0E0A2A', '#0F0B2C', '#100C2F', '#110C31', '#120D33', '#140D35', '#150E38', '#160E3A', '#170F3C', '#180F3F', '#1A1041', '#1B1044', '#1C1046', '#1E1049', '#1F114B', '#20114D', '#221150', '#231152', '#251155', '#261157', '#281159', '#2A115C', '#2B115E', '#2D1060', '#2F1062', '#301065', '#321067', '#341068', '#350F6A', '#370F6C', '#390F6E', '#3B0F6F', '#3C0F71', '#3E0F72', '#400F73', '#420F74', '#430F75', '#450F76', '#470F77', '#481078', '#4A1079', '#4B1079', '#4D117A', '#4F117B', '#50127B', '#52127C', '#53137C', '#55137D', '#57147D', '#58157E', '#5A157E', '#5B167E', '#5D177E', '#5E177F', '#60187F', '#61187F', '#63197F', '#651A80', '#661A80', '#681B80', '#691C80', '#6B1C80', '#6C1D80', '#6E1E81', '#6F1E81', '#711F81', '#731F81', '#742081', '#762181', '#772181', '#792281', '#7A2281', '#7C2381', '#7E2481', '#7F2481', '#812581', '#822581', '#842681', '#852681', '#872781', '#892881', '#8A2881', '#8C2980', '#8D2980', '#8F2A80', '#912A80', '#922B80', '#942B80', '#952C80', '#972C7F', '#992D7F', '#9A2D7F', '#9C2E7F', '#9E2E7E', '#9F2F7E', '#A12F7E', '#A3307E', '#A4307D', '#A6317D', '#A7317D', '#A9327C', '#AB337C', '#AC337B', '#AE347B', '#B0347B', '#B1357A', '#B3357A', '#B53679', '#B63679', '#B83778', '#B93778', '#BB3877', '#BD3977', '#BE3976', '#C03A75', '#C23A75', '#C33B74', '#C53C74', '#C63C73', '#C83D72', '#CA3E72', '#CB3E71', '#CD3F70', '#CE4070', '#D0416F', '#D1426E', '#D3426D', '#D4436D', '#D6446C', '#D7456B', '#D9466A', '#DA4769', '#DC4869', '#DD4968', '#DE4A67', '#E04B66', '#E14C66', '#E24D65', '#E44E64', '#E55063', '#E65162', '#E75262', '#E85461', '#EA5560', '#EB5660', '#EC585F', '#ED595F', '#EE5B5E', '#EE5D5D', '#EF5E5D', '#F0605D', '#F1615C', '#F2635C', '#F3655C', '#F3675B', '#F4685B', '#F56A5B', '#F56C5B', '#F66E5B', '#F6705B', '#F7715B', '#F7735C', '#F8755C', '#F8775C', '#F9795C', '#F97B5D', '#F97D5D', '#FA7F5E', '#FA805E', '#FA825F', '#FB8460', '#FB8660', '#FB8861', '#FB8A62', '#FC8C63', '#FC8E63', '#FC9064', '#FC9265', '#FC9366', '#FD9567', '#FD9768', '#FD9969', '#FD9B6A', '#FD9D6B', '#FD9F6C', '#FDA16E', '#FDA26F', '#FDA470', '#FEA671', '#FEA873', '#FEAA74', '#FEAC75', '#FEAE76', '#FEAF78', '#FEB179', '#FEB37B', '#FEB57C', '#FEB77D', '#FEB97F', '#FEBB80', '#FEBC82', '#FEBE83', '#FEC085', '#FEC286', '#FEC488', '#FEC689', '#FEC78B', '#FEC98D', '#FECB8E', '#FDCD90', '#FDCF92', '#FDD193', '#FDD295', '#FDD497', '#FDD698', '#FDD89A', '#FDDA9C', '#FDDC9D', '#FDDD9F', '#FDDFA1', '#FDE1A3', '#FCE3A5', '#FCE5A6', '#FCE6A8', '#FCE8AA', '#FCEAAC', '#FCECAE', '#FCEEB0', '#FCF0B1', '#FCF1B3', '#FCF3B5', 
'#FCF5B7', '#FBF7B9', '#FBF9BB', '#FBFABD', '#FBFCBF']\n colors_ylgnbl = ['#081d58', '#0a1e5d', '#0c2062', '#0f2267', '#11246c', '#142671', '#162876', '#182a7b', '#1b2c80', '#1d2e85', '#20308a', '#22328f', '#253494', '#243795', '#243b97', '#243e99', '#24429a', '#23459c', '#23499e', '#234c9f', '#2350a1', '#2253a3', '#2257a4', '#225aa6', '#225ea8', '#2162aa', '#2166ac', '#206aae', '#206fb0', '#1f73b2', '#1f77b4', '#1f7bb6', '#1e80b8', '#1e84ba', '#1d88bc', '#1d8cbe', '#1d91c0', '#2094c0', '#2397c0', '#269ac1', '#299dc1', '#2ca0c1', '#2fa3c2', '#32a6c2', '#35a9c2', '#38acc3', '#3bafc3', '#3eb2c3', '#41b6c4', '#46b7c3', '#4bb9c2', '#50bbc1', '#55bdc1', '#5abfc0', '#60c1bf', '#65c3be', '#6ac5be', '#6fc7bd', '#74c9bc', '#79cbbb', '#7fcdbb', '#85cfba', '#8bd1b9', '#91d4b9', '#97d6b8', '#9dd8b8', '#a3dbb7', '#a9ddb6', '#afdfb6', '#b5e2b5', '#bbe4b5', '#c1e6b4', '#c7e9b4', '#caeab3', '#cdebb3', '#d0ecb3', '#d3eeb3', '#d6efb2', '#daf0b2', '#ddf1b2', '#e0f3b2', '#e3f4b1', '#e6f5b1', '#e9f6b1', '#edf8b1', '#eef8b4', '#f0f9b7', '#f1f9bb', '#f3fabe', '#f4fac1', '#f6fbc5', '#f7fcc8', '#f9fccb', '#fafdcf', '#fcfdd2', '#fdfed5', '#ffffd9']\n colors_grorrd = ['#800026', '#850026', '#8a0026', '#8f0026', '#940026', '#990026', '#9e0026', '#a30026', '#a80026', '#ad0026', '#b20026', '#b70026', '#bd0026', '#c00225', '#c30424', '#c60623', '#c90822', '#cc0a21', '#d00d21', '#d30f20', '#d6111f', '#d9131e', '#dc151d', '#df171c', '#e31a1c', '#e51e1d', '#e7221e', '#e9271f', '#eb2b20', '#ed2f21', '#ef3423', '#f13824', '#f33c25', '#f54126', '#f74527', '#f94928', '#fc4e2a', '#fc532b', '#fc582d', '#fc5d2e', '#fc6330', '#fc6831', '#fc6d33', '#fc7234', '#fc7836', '#fc7d37', '#fc8239', '#fc873a', '#fd8d3c', '#fd903d', '#fd933e', '#fd9640', '#fd9941', '#fd9c42', '#fd9f44', '#fda245', '#fda546', '#fda848', '#fdab49', '#fdae4a', '#feb24c', '#feb54f', '#feb853', '#febb56', '#febf5a', '#fec25d', '#fec561', '#fec864', '#fecc68', '#fecf6b', '#fed26f', '#fed572', '#fed976', '#feda79', '#fedc7d', '#fede80', '#fedf84', '#fee187', '#fee38b', '#fee48e', '#fee692', '#fee895', '#fee999', '#feeb9c', '#ffeda0', '#fbeaa4', '#f7e8a8', '#f4e6ac', '#f0e4b1', '#ece2b5', '#e9e0b9', '#e5ddbd', '#e1dbc2', '#ded9c6', '#dad7ca', '#d6d5ce', '#d3d3d3']\n colors = colors_grorrd\n for dyn in dyn_list:\n\n # Select top interactions based on its mean frequency. 
Also assign color based on mean value\n    color_len = len(colors) -1\n    df_clust = df.filter(items = [dyn, 'APosition1', 'APosition2', 'BPosition1', 'BPosition2','CPosition1', 'CPosition2','FPosition1', 'FPosition2',])\n    df_clust['color'] = df_clust[dyn].apply(lambda x: colors[color_len-round(x*color_len/100)]) #Map the mean frequency onto the available color list\n\n    #Filter top 20 in df_clust\n    df_clust = df_clust.nlargest(20, dyn)\n\n    # 'Edge' entry for json file\n    df_dict = pd.DataFrame(columns = [\"name1\", \"name2\", \"frames\"])\n    df_dict['name1'] = df_clust['APosition1'] \n    df_dict['name2'] = df_clust['APosition2']\n    df_dict['frames'] = [[1]]*len(df_dict)\n    df_dict['color'] = df_clust['color']\n    df_dict['value'] = df_clust[dyn]\n    edges = df_dict.to_dict(orient=\"records\")\n\n    # Appending edges to flare plot template, if one was submitted\n    if flare_template:\n        flare_template['edges'] = edges\n        jsondict = flare_template\n    else:\n        jsondict = { 'edges' : edges }\n\n    #'Edge' multi-entries, based on the 4 GPCR nomenclatures\n    for letter in ['A', 'B', 'C', 'F']:\n        df_dict = pd.DataFrame(columns = [\"name1\", \"name2\", \"frames\"])\n        df_dict['name1'] = df_clust[letter+'Position1'] \n        df_dict['name2'] = df_clust[letter+'Position2']\n        df_dict['frames'] = [[1]]*len(df_dict)\n        df_dict['color'] = df_clust['color']\n        df_dict['value'] = df_clust[dyn]\n        letter_edges = df_dict.to_dict(orient=\"records\")\n\n        #Appending edges (accumulate in jsondict instead of overwriting it on each pass)\n        if flare_template:\n            flare_template[letter+'edges'] = letter_edges\n            jsondict = flare_template\n        else:\n            jsondict[letter+'edges'] = letter_edges\n\n    #Writing json\n    jsonpath = folderpath + dyn + \"_top.json\"\n    with open(jsonpath, 'w') as jsonfile:\n        dump(jsondict, jsonfile, ensure_ascii=False, indent = 4)",
"def prep_for_plotting(dataframe, value_var):\n    reshaped_df = dataframe.pivot(index='weekday',\n                                  columns='hour',\n                                  values=value_var)\n    reshaped_df['day_name'] = reshaped_df.index.values\n    reshaped_df['numeric_day'] = reshaped_df.day_name.apply(\n        assign_numeric_day)\n    reshaped_df = reshaped_df.sort_values('numeric_day')\n    del reshaped_df['numeric_day']\n    del reshaped_df['day_name']\n\n    return reshaped_df",
"def Mstar_function(**kwargs):\n\n    p = copy.copy(params)\n    for key,val in kwargs.items():\n        setattr(p,key,val)\n\n    if not p.xlim:\n        p.xlim = np.array([1e10,1e13])\n\n    df_all = pd.read_pickle(p.d_data + 'galaxy_selection/all_z0_galaxies')\n    Mstar = df_all['M_star_caesar'].values\n\n    logM_star = np.log10(Mstar)\n    dM = 0.25\n    N_gal = len(np.where((Mstar > Mstar.min()) & (Mstar < (Mstar.min() + dM)))[0])\n    logM_star_bin = np.arange(logM_star.min(), logM_star.max(), dM)\n    logM_star_bin_c = logM_star_bin[0:-1] + (logM_star_bin[1]-logM_star_bin[0])/2\n\n    N_gal_array = np.zeros(len(logM_star_bin)-1)\n\n    # Number of galaxies in each stellar mass bin\n    for i in range(len(logM_star_bin)-1):\n        N_gal_array[i] = len(np.where((logM_star > logM_star_bin[i]) & (logM_star < (logM_star_bin[i+1])))[0])\n\n    # Corresponding volume density of galaxies\n    n_gal_array = N_gal_array / (p.box_size)**3 # number of galaxies per Mpc^3\n\n    fig, ax = plt.subplots()\n    hb = ax.plot(logM_star_bin_c, np.log10(n_gal_array))\n    ax.set_ylabel('$\\log\\Phi$ [Mpc$^{-3}$]')\n    ax.set_xlabel('log Stellar Mass [M$_{\\odot}$]')\n    ax.set_ylim([-7,0.2])\n    plt.tight_layout()\n    plt.show()",
"def seaborn_formatting_mag(df, settings):\n    df[\"salt\"] = df[\"dataset_saltfit_2classes\"] != -1\n    df = du.tag_type(df, settings, type_column=settings.sntype_var)\n    # because it doesn't like my normal df\n    df_skimmed = pd.DataFrame()\n    for f in [\"g\", 
\"r\", \"i\", \"z\"]:\n var = \"SIM_PEAKMAG_\" + f\n df_skimmed[var] = np.array([k for k in df[var].values])\n df_skimmed[\"salt\"] = np.array([k for k in df[\"salt\"].values])\n df_skimmed[\"target\"] = np.array([k for k in df[\"target_2classes\"].values])\n df_skimmed[\"SIM_REDSHIFT_CMB\"] = np.array(\n [k for k in df[\"SIM_REDSHIFT_CMB\"].values]\n )\n df_skimmed[settings.sntype_var] = np.array(\n [k for k in df[settings.sntype_var].values]\n )\n # skimm\n for f in [\"g\", \"r\", \"i\", \"z\"]:\n var = \"SIM_PEAKMAG_\" + f\n df_skimmed = df_skimmed[(df_skimmed[var] > 20) & (df_skimmed[var] < 28)]\n\n return df_skimmed", "def split_dataframe_datatypes(df, target_var):\n\tdf_num = df.select_dtypes(include=np.number)\n\tdf_cat = df.select_dtypes(include=object)\n\n\tif target_var in df_num.columns:\n\t\tdf_tar = df_num.copy() \n\t\tdf_tar = df_tar[[target_var]]\n\t\tdf_num.drop(columns=[target_var], axis=1, inplace=True) \n\telif target_var in df_cat.columns:\n\t\tdf_tar = df_cat.copy()\n\t\tdf_tar = df_tar[[target_var]]\n\t\tdf_cat.drop(columns=[target_var], axis=1, inplace=True) \n\n\treturn df_num,df_cat,df_tar", "def select_column(variable):\n return relevant_raw_data_df[variable].to_frame()", "def plot_variables(labels, plot, data):\n # Create individual figures\n fig = subplots.make_subplots(rows=1, cols=1)\n for var in labels:\n if plot == 0:\n counts = data[var].value_counts()\n fig.append_trace(go.Bar(x=counts, y=counts.index, orientation='h'), 1, 1)\n elif plot == 1:\n fig.append_trace(ff.create_distplot([list(data[var])], ['distplot'])['data'][0], 1, 1)\n fig.append_trace(ff.create_distplot([list(data[var])], ['distplot'])['data'][1], 1, 1)\n else:\n raise ValueError(\"plot number must be 0, 1\")\n # Create buttons for drop down menu\n buttons = []\n for i, label in enumerate(labels):\n if plot == 0:\n visibility = [i == j for j in range(len(labels))]\n else:\n visibility = [j//2 == i for j in range(2*len(labels))]\n button = dict(\n label=label,\n method='update',\n args=[{'visible': visibility},\n {'title': label}])\n buttons.append(button)\n updatemenus = list([\n dict(active=-1,\n x=1.06, y=1.27,\n buttons=buttons\n )\n ])\n # Setup layout\n if plot == 0:\n fig['layout']['title'] = \"Distribution of categorical and discrete variables:\"\n fig.update_traces(marker_color='rgb(158,202,225)', marker_line_color='rgb(8,48,107)',\n marker_line_width=1.5, opacity=0.7)\n elif plot == 1:\n fig['layout']['title'] = \"Distribution of continuous variables:\"\n fig.update_traces(marker_color='rgb(112, 125, 188)', opacity=0.8)\n elif plot == 2:\n fig['layout']['title'] = \"Boxplot of continuous variables by score:\"\n fig['layout']['showlegend'] = False\n fig['layout']['updatemenus'] = updatemenus\n iplot(fig, config={\"displayModeBar\": False})", "def analysis_of_dataframe(self, dataframe):\n\t\ttypes = self.data.type.unique()\n\t\tratings = self.data.rating.unique()\n\n\t\tprint \"\"\n\n\t\t# First analysis section\n\t\tfor rating in ratings:\n\t\t\tpercentage = format(self.data.rating.value_counts()[rating] / len(self.data.index), '.6f')\n\n\t\t\t# Print probability data\n\t\t\tprint \"Prob(rating={}) = {}\".format(rating, percentage)\n\n\t\tprint \"\"\n\n\t\t# Second analysis section\n\t\tfor rating in ratings:\n\t\t\tfor type in types:\n\n\t\t\t\t# Get sub-set dataframe\n\t\t\t\ttemp_dataframe = self.data[self.data['rating'] == rating]\n\n\t\t\t\t# Get conditional probability\n\t\t\t\ttry:\n\t\t\t\t\tpercentage = format(temp_dataframe.type.value_counts()[type] / 
len(temp_dataframe.index), '.6f')\n\n\t\t\t\t# Current type not found in temp_dataframe\n\t\t\t\texcept KeyError:\n\t\t\t\t\tpercentage = format(0, '.6f')\n\n\t\t\t\t# Print probability data\n\t\t\t\tfinally:\n\t\t\t\t\tprint \"Prob(type={}|rating={}) = {}\".format(type, rating, percentage)", "def variable_extractor(variable: str, df, frequency: str):\n\t#Extract list of unique symbols from dataset\n\tsymbols = list(df['symbol'].unique())\n\tdf_master=pd.DataFrame()\n\tfor symbol in symbols:\n\t\tprint(symbol)\n\t\tsubset = df.loc[df['symbol'] == symbol]\n\t\t#Drop random duplicates in data\n\t\tsubset.drop_duplicates(inplace=True)\n\t\t#Set dataframe index and frequency\n\t\tsubset = subset.set_index('datetime')\n\t\tsubset = subset.asfreq('T')\n\t\t#Resample data\n\t\tresampled_subset = resample(subset, frequency)\n\t\tcol_of_interest = pd.DataFrame(resampled_subset[variable])\n\t\tcol_of_interest = col_of_interest.rename(columns = {variable: str(symbol)})\n\t\tdf_master = pd.concat([df_master, col_of_interest], axis = 1)\n\treturn df_master", "def visit_Var(self, node: Var) -> VarSymbol:\n\n var_name = node.value\n var_symbol = self.symbol_table.get_token(var_name)\n var_symbol.type = DoubleType()\n\n return var_symbol", "def find_constant_stars(xmatch, phot_data, log):\n\n # Identify the overall primary reference dataset:\n ref_dset_idx = np.where(xmatch.datasets['primary_ref'] == 1)[0]\n ref_datacode = xmatch.datasets['dataset_code'][ref_dset_idx]\n\n # Fetch the indices of the images from this dataset in the photometry table\n image_index = np.where(xmatch.images['dataset_code'] == ref_datacode)[0]\n\n # Extract the timeseries photometry for this dataset:\n (mag_col, merr_col) = field_photometry.get_field_photometry_columns('corrected')\n ref_phot = np.zeros((phot_data.shape[0],len(image_index),2))\n ref_phot[:,:,0] = phot_data[:,image_index,mag_col]\n ref_phot[:,:,1] = phot_data[:,image_index,merr_col]\n\n # Evaluate the photometric scatter of all stars, and select those with\n # the lowest scatter for the brightest quartile of stars.\n (mean_mag, mean_magerr) = calc_weighted_mean_no_qc(ref_phot)\n rms = calc_weighted_rms_no_qc(ref_phot, mean_mag)\n\n # Function identifies constant stars as those with an RMS in the lowest\n # 1 - 25% of the set. 
This excludes both stars with high scatter and those\n # with artificially low scatter due to having few measurements.\n rms_range = rms.max() - rms.min()\n min_cut = rms.min()\n max_cut = rms.min() + (rms_range)*0.25\n\n constant_stars = np.where((rms >= min_cut) & (rms <= max_cut))[0]\n\n log.info('Identified '+str(len(constant_stars))\n +' stars with RMS between '+str(round(min_cut,3))+' and '+str(round(max_cut,3))\n +'mag to use for the normalization')\n\n return constant_stars", "def plot_counts(data, mtl_dissemination_areas, var, purpose=True, cmap='Reds', return_df=False, plot=True):\n if purpose:\n assert var in data['purpose'].unique(), \"var: \\'%s\\' is not in the purpose column of the dataframe\" % (var)\n joined_data = gpd.sjoin(data.loc[data['purpose'] == var], mtl_dissemination_areas)\n else:\n assert var in data['mode'].unique(), \"var: \\'%s\\' is not in the mode column of the dataframe\" % (var)\n joined_data = gpd.sjoin(data.loc[data['mode'] == var], mtl_dissemination_areas) \n \n mtl_dissemination_areas['freq_dissem_counts'] = mtl_dissemination_areas.apply(lambda row: joined_data.DAUID.value_counts()[row.DAUID] if row.DAUID in joined_data.DAUID.unique() else 0, axis=1)\n \n if plot:\n fig, ax = plt.subplots(1, figsize=(12,10))\n mtl_dissemination_areas.plot('freq_dissem_counts', linewidth=.1, edgecolor='0.5', legend=True, cmap=cmap, ax=ax)\n plt.title(var)\n \n if return_df:\n return mtl_dissemination_areas", "def plot_distribution(kind_, df, *col):\n \n if kind_ == 'box':\n \n if len(col) == 1:\n boxplot = df.boxplot(column = col[0], rot = 90)\n plt.show()\n \n elif len(col) > 1:\n for c in col[1:]:\n boxplot = df.boxplot(column = col[0], by = c, rot = 90)\n plt.show()\n \n else:\n if len(col) == 0:\n df.plot(kind = kind_)\n plt.show()\n \n elif len(col) == 1:\n df[col[0]].plot(kind = kind_)\n plt.xlabel(col[0])\n plt.show()\n \n elif len(col) == 2:\n df.plot(x = col[0], y = col[1], kind = kind_)\n plt.xlabel(col[0])\n plt.ylabel(col[1])\n plt.show()\n \n else:\n print(\"Unable to plot a chart with given parameters.\")", "def drawCatplot(df, xColumn):\n plt.style.use('default')\n plt.style.use('dark_background')\n types = getSpectralTypes()\n colors = getColors()\n sns.set_palette(sns.color_palette(colors))\n \n sns.catplot(x=xColumn, y=\"spectral_type\", data=df, order=types, height=3, \n aspect=4);\n plt.show()", "def getframe(var,model,data):\n from PyAnUtils.plotstyles import njStyle\n pstyle = njStyle()\n pstyle.cd()\n\n frame = var.frame()\n data.plotOn(frame)\n model.plotOn(frame)\n\n return frame", "def request_var(kid, var):\n code = \"import pandas as pd\\nimport numpy as np\\nif type(\" + var + \") \" \\\n \"is pd.DataFrame or type(\" + var + \") is np.ndarray or type(\" + var + \") is list:\\n\"\n code = code + \"\\tprint(\" + var + \".to_json(orient='split', index = False))\\n\"\n return exec_code(kid, var, code)", "def _create_velocity_figure(dataframe, color_key, title, color_mapper,\n legend_loc='top_right', plot_width=None, plot_height=None):\n \n # these markers are nearly indistinguishble\n markers = [marker for marker in MarkerType if marker not in ['circle_cross', 'circle_x']]\n fig = figure(title=title)\n _set_plot_wh(fig, plot_width, plot_height)\n\n for i, (marker, (path, df)) in enumerate(zip(markers, dataframe.iterrows())):\n ds = dict(df)\n source = ColumnDataSource(ds)\n fig.scatter('dpt', 'expr', source=source, color={'field': color_key, 'transform': color_mapper},\n marker=marker, size=10, legend=f'{path}', muted_alpha=0)\n\n 
fig.xaxis.axis_label = 'dpt'\n fig.yaxis.axis_label = 'expression'\n if legend_loc is not None:\n fig.legend.location = legend_loc\n\n if ds.get('x_test') is not None:\n if ds.get('x_mean') is not None:\n fig.line('x_test', 'x_mean', source=source, muted_alpha=0, legend=path)\n if all(map(lambda val: val is not None, ds.get('x_cov', [None]))):\n x_mean = ds['x_mean']\n x_cov = ds['x_cov']\n band_x = np.append(ds['x_test'][::-1], ds['x_test'])\n # black magic, known only to the most illustrious of wizards\n band_y = np.append((x_mean - np.sqrt(np.diag(x_cov)))[::-1], (x_mean + np.sqrt(np.diag(x_cov))))\n fig.patch(band_x, band_y, alpha=0.1, line_color='black', fill_color='black',\n legend=path, line_dash='dotdash', muted_alpha=0)\n\n if ds.get('x_grad') is not None:\n fig.line('x_test', 'x_grad', source=source, muted_alpha=0)\n\n\n fig.legend.click_policy = 'mute'\n\n return fig", "def stars(self, magnitude=20):\n # Get the stars that are visible within this chart.\n thestars = []\n for s in self.hip_stars:\n if not s: continue\n hip_id, mag, ra, dec, bv = s\n if mag>magnitude: continue\n if dec<min(self.inner_dec, self.outer_dec): continue\n if dec>max(self.inner_dec, self.outer_dec): continue\n thestars.append(s)\n # This should sort them by increasing magnitude (brightest first).\n thestars.sort(key=lambda a:a[1])\n if not thestars: return\n # Set the least bright magnitude.\n self.dimmest_mag = math.floor(thestars[-1][1])\n # Create the star group.\n star_g = self.make_element(self.centered, 'g', (\n 'stroke', 'none'), ('fill', 'black'), (\n 'clip-path', 'url(#innerClipPath)'))\n for hip_id, mag, ra, dec, bv in thestars:\n x, y = self.radec2xy(ra, dec)\n self.make_element(star_g, 'circle', (\n 'cx', x), ('cy', y), ('r', self.starsize(hip_id)))", "def is_variable(pattern):\n return (type(pattern) is str\n and pattern[0] == '?'\n and len(pattern) > 1\n and pattern[1] != '*'\n and pattern[1] in string.ascii_letters\n and ' ' not in pattern)", "def get_reviews(df, col, stars):\n log.info('Number of reviews to extract: {}'.format(stars))\n log.info(\n 'Number of available reviews: {}'.format(df[col].value_counts()))\n if [x for x in df[col].value_counts() if x < min(stars.values())]:\n raise Exception(\"To many review chosen from dataset\")\n idxs = []\n for star, n_rev in stars.iteritems():\n idxs += random.sample(df[df[col] == star].index, n_rev)\n return idxs", "def get_stat_type(df, stat_type):\n if stat_type == 'Age':\n if df.get('Age') is not None:\n age_stats = df['Age'].describe().to_dict()\n print_stats('Age', age_stats)\n elif df.get(stat_type) is not None:\n stat_value_counts = (df[stat_type].value_counts())\n print_stats(stat_type, stat_value_counts.to_dict())", "def plot_star_classes(obj_catalog):\n\n fig = plt.figure(num=None,figsize=(8,8), dpi=100)\n ax = fig.add_subplot(1,1,1)\n\n phot_class = obj_catalog.phot_star_class\n sclass = obj_catalog.star_class\n phot_class_num = np.zeros(obj_catalog.shape[0])\n sclass_num = np.zeros(obj_catalog.shape[0])\n\n star_classes = ['WD',\\\n 'O','O8','O9','OB','B0','B1','B2','B3','B5','B6','B7','B8','B9',\\\n 'A0','A1','A2','A3','A4','A5','A6','A8','A9',\\\n 'F0','F2','F3','F5','F6','F8','F9',\\\n 'G0','G1','G2','G3','G4','G5','G8','G9',\\\n 'K0','K1','K2','K3','K4','K5','K7',\\\n 'M0','M1','M2','M3','M4','M5','M6','M7','M8','M9', \\\n 'L0','L1','L2','L3','L4','L5','L9','Ldwarf', \\\n 'T','other','C']\n print len(star_classes)\n\n star_dict = dict(zip(star_classes,np.arange(len(star_classes))))\n\n # print phot_class.value_counts()\n\n for 
i in range(len(phot_class)):\n print phot_class[i], star_dict[phot_class[i]], sclass[i],star_dict[sclass[i]]\n phot_class_num[i] = star_dict[phot_class[i]]\n sclass_num[i] = star_dict[sclass[i]]\n\n #ax.plot(sclass_num,phot_class_num,'.')\n\n cmap = plt.cm.Blues\n cmap.set_bad('0.85',1.0)\n\n cax = plt.hist2d(sclass_num,phot_class_num, bins=65,range = [[0,65], [0,65]], norm = LogNorm(), cmap=cmap, zorder=0)\n cbar = plt.colorbar(ticks=[1,5,10,15,20,25,30,40])\n cbar.ax.set_yticklabels([1,5,10,15,20,25,30,40],fontsize=12)\n\n ax.plot(np.arange(65),np.arange(65),'r')\n\n plt.xticks(np.arange(len(star_classes)),star_classes,fontsize=8,rotation='vertical')\n plt.yticks(np.arange(len(star_classes)),star_classes,fontsize=8)\n\n plt.grid(True)\n return plt", "def VJ_Gene_Plot(clone_df, png = None, title = \"\", vgene_col = \"VGene\", jgene_col = \"JGene\", count_col = \"Clustered\",\r\n\t\t\t\t vgene_colors = vgene_colors, vfamily_colors = vfamily_colors, jgene_colors = jgene_colors,\r\n\t\t\t\t vj_gap = 0.008, vgene_gap = 0.0, line_width = 0.4, figsize = (800, 800), hover_tooltip = True):\r\n\r\n\tfigure_params = {\r\n\t\t\"plot_width\": figsize[0],\r\n\t\t\"plot_height\": figsize[1],\r\n\t\t#\"sizing_mode\": \"scale_both\",\r\n\t\t\"x_range\": Range1d(-0.5, 1.5, bounds = (-1.5, 2.5)),\r\n\t\t\"y_range\": Range1d(-0.5, 1.5, bounds = (-1.5, 2.5)),\r\n\t\t#\"outline_line_alpha\": 0.0,\r\n\t\t\"title\": title,\r\n\t\t\"tools\": \"pan, wheel_zoom, box_zoom, tap, save, reset, help\",\r\n\t\t\"active_scroll\": \"wheel_zoom\",\r\n\t\t\"toolbar_location\": \"right\"\r\n\t}\r\n\r\n\tplot = figure(**figure_params)\r\n\tplot.grid.visible = False\r\n\tplot.axis.visible = False\r\n\r\n\tif hover_tooltip:\r\n\t\thover_tool = HoverTool(tooltips = [(\"Gene\", \"@legend\"), (\"Percent\", \"@percent{(0.00%)}\")],\r\n\t\t\t\t\t\t\t\t\t point_policy = \"snap_to_data\")\r\n\t\tplot.add_tools(hover_tool)\r\n\r\n\tgene_df = clone_df[[vgene_col, jgene_col, count_col]].groupby([vgene_col, jgene_col]).agg({count_col: sum})\r\n\t#Sort by V gene ascending, then J gene ascending\r\n\tgene_df = gene_df.sort_index()\r\n\tgene_df = gene_df.reset_index()\r\n\r\n\ttotal_vgenes = len(gene_df[vgene_col].drop_duplicates())\r\n\ttotal_gapsize = total_vgenes * vgene_gap\r\n\tremaining_size = 360.0 - float(total_gapsize)\r\n\tgap_size = float(vgene_gap)\r\n\r\n\ttotal_counts = gene_df[count_col].sum()\r\n\tgene_df[\"Arc_Length\"] = gene_df[count_col] / total_counts * remaining_size\r\n\t#Starting at 90 degrees (top center of the circle) plus half the gap size\r\n\t#cur_v_start = -90.0 + (gap_size / 2.0)\r\n\tcur_v_start = 90.0 + (gap_size / 2.0)\r\n\r\n\tv_start_angles = []\r\n\tv_end_angles = []\r\n\tvgene_facecolors = []\r\n\tvgene_hover_colors = []\r\n\tvfamily_facecolors = []\r\n\tvfamily_hover_colors = []\r\n\tv_legend_text = []\r\n\tv_legend_percent = []\r\n\r\n\tj_start_angles = []\r\n\tj_end_angles = []\r\n\tjgene_facecolors = []\r\n\tjgene_hover_colors = []\r\n\tj_legend_text = []\r\n\tj_legend_percent = []\r\n\r\n\tfor vgene in gene_df[vgene_col].drop_duplicates():\r\n\t\tcur_vgene_df = gene_df[gene_df[vgene_col] == vgene]\r\n\t\tvfamily = vgene.split(\"-\")[0]\r\n\r\n\t\tvgene_color = vgene_colors[vgene]\r\n\t\tvgene_hover_color = vgene_color.darken(0.05)\r\n\t\tvfamily_color = vfamily_colors[vfamily]\r\n\t\tvfamily_hover_color = vfamily_color.darken(0.05)\r\n\r\n\t\tv_arc_length = cur_vgene_df[\"Arc_Length\"].sum()\r\n\t\tcur_v_end = cur_v_start + 
v_arc_length\r\n\r\n\t\tv_start_angles.append(cur_v_start)\r\n\t\tv_end_angles.append(cur_v_end)\r\n\r\n\t\tvgene_facecolors.append(vgene_color)\r\n\t\tvgene_hover_colors.append(vgene_hover_color)\r\n\t\tvfamily_facecolors.append(vfamily_color)\r\n\t\tvfamily_hover_colors.append(vfamily_hover_color)\r\n\r\n\t\tv_legend_text.append(vgene)\r\n\t\tcur_vgene_counts = cur_vgene_df[\"Clustered\"].sum()\r\n\t\tv_legend_percent.append(cur_vgene_counts / total_counts)\r\n\r\n\t\tcur_j_start = cur_v_start\r\n\t\tfor jgene, jgene_arc_length in zip(cur_vgene_df[jgene_col], cur_vgene_df[\"Arc_Length\"]):\r\n\t\t\tcur_j_end = cur_j_start + jgene_arc_length\r\n\r\n\t\t\tjgene_color = jgene_colors[jgene]\r\n\t\t\tjgene_hover_color = jgene_color.darken(0.05)\r\n\r\n\t\t\tj_start_angles.append(cur_j_start)\r\n\t\t\tj_end_angles.append(cur_j_end)\r\n\r\n\t\t\tjgene_facecolors.append(jgene_color)\r\n\t\t\tjgene_hover_colors.append(jgene_hover_color)\r\n\r\n\t\t\tcur_j_start = cur_j_end\r\n\r\n\t\t\tj_legend_text.append(jgene)\r\n\t\t\tcur_jgene_counts = cur_vgene_df[cur_vgene_df[jgene_col] == jgene][\"Clustered\"].sum()\r\n\t\t\tj_legend_percent.append(cur_jgene_counts / cur_vgene_counts)\r\n\r\n\t\tcur_v_start = cur_v_end + gap_size\r\n\r\n\tv_wedge_data = {\r\n\t\t\"start_angle\": v_start_angles,\r\n\t\t\"end_angle\": v_end_angles,\r\n\t\t\"fill_color\": vgene_facecolors,\r\n\t\t\"legend\": v_legend_text,\r\n\t\t\"percent\": v_legend_percent,\r\n\t\t\"vgene_facecolors\": vgene_facecolors,\r\n\t\t\"vfamily_facecolors\": vfamily_facecolors,\r\n\t\t\"hover_fill_color\": vgene_hover_colors,\r\n\t\t\"vgene_hover_colors\": vgene_hover_colors,\r\n\t\t\"vfamily_hover_colors\": vfamily_hover_colors\r\n\t}\r\n\tv_source = ColumnDataSource(v_wedge_data)\r\n\r\n\tv_inner_rad = 0.4\r\n\tv_outer_rad = 0.692\r\n\r\n\tplot.annular_wedge(x = 0.5, y = 0.5, start_angle = \"start_angle\", end_angle = \"end_angle\",\r\n\t\t\t\t\t fill_color = \"fill_color\", selection_fill_color = \"fill_color\",\r\n\t\t\t\t\t nonselection_fill_color = \"fill_color\", selection_fill_alpha = 1.0,\r\n\t\t\t\t\t nonselection_fill_alpha = 0.2, hover_fill_color = \"hover_fill_color\", inner_radius = v_inner_rad,\r\n\t\t\t\t\t outer_radius = v_outer_rad, line_color = \"black\", line_width = line_width, source = v_source,\r\n\t\t\t\t\t legend = \"legend\", start_angle_units = \"deg\", end_angle_units = \"deg\")\r\n\r\n\tj_wedge_data = {\r\n\t\t\"start_angle\": j_start_angles,\r\n\t\t\"end_angle\": j_end_angles,\r\n\t\t\"fill_color\": jgene_facecolors,\r\n\t\t\"legend\": j_legend_text,\r\n\t\t\"percent\": j_legend_percent,\r\n\t\t\"hover_fill_color\": jgene_hover_colors\r\n\t}\r\n\r\n\tj_source = ColumnDataSource(j_wedge_data)\r\n\r\n\tj_inner_rad = v_outer_rad + vj_gap\r\n\tj_outer_rad = j_inner_rad + 0.15\r\n\r\n\tplot.annular_wedge(x = 0.5, y = 0.5, start_angle = \"start_angle\", end_angle = \"end_angle\",\r\n\t\t\t\t\t fill_color = \"fill_color\", selection_fill_color = \"fill_color\",\r\n\t\t\t\t\t nonselection_fill_color = \"fill_color\", selection_fill_alpha = 1.0,\r\n\t\t\t\t\t nonselection_fill_alpha = 0.2, hover_fill_color = \"hover_fill_color\", inner_radius = j_inner_rad,\r\n\t\t\t\t\t outer_radius = j_outer_rad, line_color = \"black\", line_width = line_width, source = j_source,\r\n\t\t\t\t\t legend = \"legend\", start_angle_units = \"deg\", end_angle_units = \"deg\")\r\n\r\n\tif png is not None:\r\n\t\texport_png(plot, png)\r\n\r\n\tchange_v_color = CustomJS(args = {\"source\": v_source}, code = \"\"\"\r\n\t\tvar selection = 
cb_obj.value;\r\n\t\tvar new_color_array;\r\n\t\tvar new_hover_array;\r\n\t\tif(selection.toLowerCase().indexOf(\"gene\") !== -1) {\r\n\t\t\tnew_color_array = source.data[\"vgene_facecolors\"];\r\n\t\t\tnew_hover_array = source.data[\"vgene_hover_colors\"];\r\n\t\t} else {\r\n\t\t\tnew_color_array = source.data[\"vfamily_facecolors\"];\r\n\t\t\tnew_hover_array = source.data[\"vfamily_hover_colors\"];\r\n\t\t}\r\n\t\tvar fill_color = source.data[\"fill_color\"];\r\n\t\tvar hover_fill_color = source.data[\"hover_fill_color\"];\r\n\t\tfor(idx = 0; idx < fill_color.length; idx++) {\r\n\t\t\tfill_color[idx] = new_color_array[idx];\r\n\t\t\thover_fill_color[idx] = new_hover_array[idx];\r\n\t\t}\r\n\t\tsource.change.emit();\r\n\t\"\"\")\r\n\r\n\tv_data_color_by = Select(title = \"Color by:\", options = [\"V Gene\", \"V Family\"],\r\n\t\t\t\t\t\t\t value = \"V Gene\", callback = change_v_color)\r\n\r\n\tplot_layout = column(v_data_color_by, plot)\r\n\treturn plot_layout", "def plot_features(tpx_feature, plot_type=\"scatter\", plot_style=\"pygal\"):\n\n\tmd_table = pd.DataFrame.from_csv(os.path.join(wdir, md_csv), header=0)\n\tht_table = pd.DataFrame.from_csv(os.path.join(wdir, \"tpx-corpus-counts.csv\"), header=0)\n\tworking_table = ht_table.join(md_table)\n\n\t# get data points and sort\n\tdata = copy.copy(working_table[tpx_feature])\n\tdata_sorted = data.sort_values(ascending=False)\n\n\t# get ids of historical novels\n\tidnos_hist = md_table[md_table[\"subgenre_hist\"] == \"historical\"].index.tolist()\n\t# get ids of non-historical novels\n\tidnos_not_hist = md_table[md_table[\"subgenre_hist\"] == \"not_historical\"].index.tolist()\n\n\t# split data into subgroups\n\tdata_hist = data[idnos_hist]\n\tdata_not_hist = data[idnos_not_hist]\n\t\t\n\n\t# get ranks\n\tranks = {}\n\tfor idx, val in enumerate(data_sorted.index):\n\t\tranks[val] = idx\n\t\n\tif plot_style == \"matplotlib\":\n\t\t\n\t\tif plot_type == \"scatter\":\n\n\t\t\t# visualize as scatterplot\n\t\t\tplt.figure(figsize=(20,6))\n\n\t\t\t\t\t\t# rank as x values, alternative: range(len(data_hist))\n\t\t\tplt.scatter([ranks[idno] for idno in idnos_hist],\n\t\t\t\t\t\t# counts as y values\n\t\t\t\t\t\tdata_hist,\n\t\t\t\t\t\tmarker = \"D\",\n\t\t\t\t\t\tcolor = \"#3366CC\",\n\t\t\t\t\t\talpha = 1,\n\t\t\t\t\t\ts = 50,\n\t\t\t\t\t\tlabel = tpx_feature + \", historical novel\"\n\t\t\t)\n\n\t\t\tplt.scatter([ranks[idno] for idno in idnos_not_hist],\n\t\t\t\t\t\t# counts as y values\n\t\t\t\t\t\tdata_not_hist,\n\t\t\t\t\t\tmarker = \"o\",\n\t\t\t\t\t\tcolor = \"#DC3912\",\n\t\t\t\t\t\talpha = 1,\n\t\t\t\t\t\ts = 50,\n\t\t\t\t\t\tlabel = tpx_feature + \", non-historical novel\"\n\t\t\t)\n\t\t\tplt.title(\"Novels and number of temporal expressions (TPX)\")\n\t\t\tplt.ylabel(\"Number of TPX\")\n\t\t\tplt.xlabel(\"Novel rank\")\n\t\t\tplt.xlim(-5,len(data) + 5)\n\n\t\t\tplt.legend(loc='upper right')\n\t\t\tplt.tight_layout()\n\n\t\t\tfigurename = \"scatter-\"+ tpx_feature +\".png\"\n\t\t\tplt.savefig(os.path.join(dir_visuals, figurename), dpi=300)\n\t\t\tplt.close()\n\t\t\n\t\telif plot_type == \"bar\":\n\t\t\t# visualize as barplot\n\t\t\tplt.figure(figsize=(20,6))\n\n\t\t\t\t\t\t# rank as x values, alternative: range(len(data_hist))\n\t\t\tplt.bar([ranks[idno] for idno in idnos_hist],\n\t\t\t\t\t\t# counts as y values\n\t\t\t\t\t\tdata_hist,\n\t\t\t\t\t\talign = \"center\",\n\t\t\t\t\t\tcolor = \"#3366CC\",\n\t\t\t\t\t\talpha = 1,\n\t\t\t\t\t\tedgecolor = \"#3366CC\",\n\t\t\t\t\t\tlabel = tpx_feature + \", historical 
novel\"\n\t\t\t)\n\n\t\t\tplt.bar([ranks[idno] for idno in idnos_not_hist],\n\t\t\t\t\t\t# counts as y values\n\t\t\t\t\t\tdata_not_hist,\n\t\t\t\t\t\talign = \"center\",\n\t\t\t\t\t\tcolor = \"#DC3912\",\n\t\t\t\t\t\talpha = 1,\n\t\t\t\t\t\tedgecolor = \"#DC3912\",\n\t\t\t\t\t\tlabel = tpx_feature + \", non-historical novel\"\n\t\t\t)\n\t\t\tplt.title(\"Novels and number of temporal expressions (tpx)\", fontsize=40)\n\t\t\tplt.ylabel(\"Number of tpx\", fontsize=30)\n\t\t\tplt.xlabel(\"Novel rank\", fontsize=30)\n\t\t\tplt.xlim(-2,len(data) + 2)\n\t\t\tplt.xticks(fontsize=28)\n\t\t\tplt.yticks(fontsize=28)\n\n\t\t\t\n\t\t\tplt.legend(loc='upper right', prop={'size':30})\n\t\t\tplt.tight_layout()\n\n\t\t\tfigurename = \"bar-\"+ tpx_feature +\".png\"\n\t\t\tplt.savefig(os.path.join(dir_visuals, figurename), dpi=300)\n\t\t\tplt.close()\n\t\n\t# style: pygal\n\telse:\n\t\t# or XY chart type, but then bars are not possible\n\t\t\n\t\tbar = pygal.Bar(style=pygal_style, width=2000, height=1000, legend_at_bottom=True)\n\t\tbar.title = 'Novels and number of temporal expressions (' + tpx_feature + ')'\n\t\tbar.x_title = \"Novel rank\"\n\t\tbar.y_title = \"Number of tpx\"\n\t\t#bar.x_labels = (0,20,40,60,80,100,120,140)\n\t\t\n\t\tsorted_ranks = sorted(ranks.items(), key=operator.itemgetter(1))\n\t\t\n\t\tvals_hist = []\n\t\tvals_not_hist = []\n\t\t\n\t\tfor key,val in sorted_ranks:\n\t\t\tif key in idnos_hist:\n\t\t\t\t# (val,data[key]) with XY\n\t\t\t\tval = {\"value\" : data[key], \"color\" : \"#3366CC\", \"label\" : key}\n\t\t\t\tvals_hist.append(val)\n\t\t\telse:\n\t\t\t\tval = {\"value\" : data[key], \"color\" : \"#DC3912\", \"label\" : key}\n\t\t\t\tvals_not_hist.append(val)\n\t\t\n\t\tbar.add(\"historical\", vals_hist)\n\t\tbar.add(\"non-historical\", vals_not_hist)\n\t\t\n\t\tfigurename = \"bar-\"+ tpx_feature +\".svg\"\n\t\tbar.render_to_file(os.path.join(dir_visuals, figurename))\n\t\t\n\tprint(\"Plotted \" + figurename)", "def visualize_outliers(df, var):\n import pandas as pd\n import numpy as np\n import matplotlib.pyplot as plt\n \n num_var = df.groupby(var)[var].count() \n total = np.float(len(df))\n \n var_perc = num_var / total \n \n var_perc.plot.bar()\n plt.ylabel('Percentage of observations per label')\n plt.title(var)\n \n return plt.show()", "def preprocess_var(bd, var):\n filepath_sv = f\"team67-ptp/data/{var}.csv\"\n filepath = bd\n data = feather.read_dataframe(filepath)\n df = data.copy()\n df2 = df[var]\n df2 = df2.to_frame()\n if df2[var].dtype is \"category\":\n df2[var] = df2[var].astype(\"category\").cat.codes\n filename = filepath_sv\n df2.to_csv(filename, index=False)\n print(\"Succesfully exported to csv\")\n else:\n filename = filepath_sv\n df2.to_csv(filename, index=False)\n print(\"Succesfully exported to csv\")", "def Diagnostic_plot1(self, v=False):\n\n # sort modes by frequency (radial order)\n ds.mode_id.sort_values(['f0'], axis=0, ascending=True, inplace=True)\n\n # SNR values after smoothing/interpolating at radial mode freqs\n u = np.full(len(ds.mode_id), -99) # unsmoothed\n s1 = np.full(len(ds.mode_id), -99) # after Gaussian smoothing\n s2 = np.full(len(ds.mode_id), -99) # after uniform smoothing\n s3 = np.full(len(ds.mode_id), -99) # after linear interpolation\n\n for idx, f in ds.mode_id.iterrows():\n width = abs(f['w0']) # width to convolve/interpolate over\n\n # smooth by convolving with Guassian\n smoo = star.Conv(self.snr, width)\n\n # smooth with uniform filter\n smoo2 = ndim.filters.uniform_filter1d(self.snr, size=int(np.around(width)))\n\n # 
smooth by interpolating\n bins = np.arange(0., self.ds.freq[-1], width) # rebin data to get highest SNR\n smoo3 = np.interp(bins, self.ds.freq, self.snr) # SNR values at these freqs\n\n index = np.abs(self.ds.freq-f['f0']).argmin() # use the frequency closest to mode\n if v:\n print(self.ds.freq[index], self.snr[index])\n print('before smoo', self.snr[index])\n print('smoo1', smoo[index])\n print('smoo2', smoo2[index])\n print('smoo3', smoo3[np.abs(bins-f['f0']).argmin()], '\\n')\n\n u[idx] = self.snr[index]\n s1[idx] = smoo[index]\n s2[idx] = smoo2[index]\n s3[idx] = smoo3[np.abs(bins-f['f0']).argmin()]\n\n fig = plt.figure(figsize=(12, 18))\n plt.rc('font', size=26)\n plt.plot(self.ds.mode_id['f0'], u, label=r'unsmoothed')\n plt.plot(self.ds.mode_id['f0'], s1, label=r'Smoothed with 1D Gaussian')\n plt.plot(self.ds.mode_id['f0'], s2, label=r'Smoothed with uniform filter')\n plt.plot(self.ds.mode_id['f0'], s3, label=r'Smoothed by interpolating')\n plt.xlabel(r'$\\nu / \\mu$Hz')\n plt.ylabel(r'SNR')\n plt.legend(loc='upper right')\n plt.show()\n fig.savefig(os.getcwd() + os.sep + 'DetTest1_plots' + os.sep +'DetTest_Diagnostic_plot1_' + self.ds.epic + '.pdf')\n #sys.exit()", "def parse_plot_cmd(self, line):\n line, any_vars = self.find_vars_in_str(line)\n words = line.split()\n words = self.fix_words(words)\n\n # Parse line\n has_out_var = False\n if len(words) == 6:\n has_out_var = True\n _, plot_type, _, in_data, _, out_data = words\n else: _, plot_type, _, in_data = words\n\n in_data = getattr(self, in_data)\n plot_fnc = f_dicts.plot_fncs[plot_type]\n\n if has_out_var:\n OutVar = plot_fnc(in_data)\n else: plot_fnc(in_data)\n\n if has_out_var:\n self.set_var(out_data,\n {plot_type: OutVar}, {})", "def visualizeData(df):\n for column in df:\n df[column].value_counts().plot(kind = 'bar', rot = 'vertical', use_index = False)", "def plot_boxplots(\n df: pd.DataFrame, drop_cols: list = None, sub_col=3, figsize: tuple = (18, 26)\n):\n\n # drop unnecessary columns\n if drop_cols:\n df = df.drop(drop_cols, axis=1)\n # keep only quantitative features\n df = create_quanti_df(df)\n print(f\"Number of quantitaive columns: {df.shape[1]}\")\n # create figure and axes based on the number of columns of the dataframe\n fig, axes = plt.subplots(ceil(len(df.columns) / sub_col), sub_col, figsize=figsize)\n y = 0 # set counter\n\n # plot boxplot for each column of data\n for col in df.columns:\n i, j = divmod(y, sub_col)\n sns.boxplot(x=df[col], ax=axes[i, j]).set_title(col, fontsize=20)\n y += 1\n\n plt.tight_layout()\n plt.show()\n return", "def fast_plot(df, x, y, plot_type):\n\n # ASSERT TESTS\n\n # check that df is pd.DataFrame\n assert isinstance(df, pd.DataFrame), \"Data must be in pandas Data Frame!\"\n\n # check that x and y are strings, and are valid columns\n assert isinstance(x, str), \"x column name must be a string!\"\n assert isinstance(y, str), \"y column name must be a string!\"\n assert x in df.columns, \\\n \"x column name is not a column in data frame entered!\"\n assert y in df.columns, \\\n \"y column name is not a column in data frame entered!\"\n\n # check that plot_type is one of the three allowed\n assert plot_type.lower() in {\"scatter\", \"line\",\n \"bar\"}, \\\n 'plot_type must be either: \"scatter\", \"line\", or \"bar\"'\n\n # check that column is not all nulls\n assert df[x].isnull().sum() != len(df[x]), \"x Column must not be all Null!\"\n assert df[y].isnull().sum() != len(df[y]), \"y Column must not be all Null!\"\n\n # get types of each column\n x_type = df[x].dtype\n 
y_type = df[y].dtype\n\n    # set x to be ordinal if x column is integer or date\n    if x_type == 'int64' or all(df[x].map(type) == pd.Timestamp):\n        x_arg = x + \":O\"\n    else:\n        x_arg = x\n\n    if plot_type.lower() == \"scatter\":\n        # don't allow y to be date\n        if all(df[y].map(type) == pd.Timestamp):\n            raise Exception(\"Y column cannot be a date type!\")\n        chart = alt.Chart(df).mark_point().encode(\n            x=alt.X(x_arg),\n            y=alt.Y(y))\n\n    elif plot_type.lower() == \"line\":\n        # don't allow y to be date\n        if all(df[y].map(type) == pd.Timestamp):\n            raise Exception(\"Y column cannot be a date type!\")\n\n        if y_type not in [\"float64\", \"int64\"]:\n            if x_type not in [\"float64\", \"int64\"]:\n                x_arg = x\n                y_arg = y\n            else:\n                x_arg = x + \":N\"\n                y_arg = \"sum(\" + y + \")\"\n        else:\n            # y is numeric: plot it as-is (y_arg was never set on this path, which raised a NameError)\n            y_arg = y\n\n        chart = alt.Chart(df).mark_line().encode(\n            x=alt.X(x_arg),\n            y=alt.Y(y_arg))\n\n    # bar chart takes sum of y column,\n    # unless y column is non-numeric (then takes sum of x column)\n    else:\n        # check if column is non numeric\n        if y_type not in [\"float64\", \"int64\"]:\n            # raise error if both columns are non numeric\n            if x_type == y_type or (x_type == \"O\" and all(\n                    df[y].map(type) == pd.Timestamp)) or (\n                    y_type == \"O\" and all(df[x].map(type) == pd.Timestamp)):\n                raise Exception(\n                    \"Bar charts should have a numeric column, \"\n                    \"and both X and Y are non numeric!\")\n\n            if all(df[y].map(type) == pd.Timestamp):\n                x_arg = \"sum(\" + x + \")\"\n                y_arg = y + \":O\"\n            else:\n                x_arg = \"sum(\" + x + \")\"\n                y_arg = y\n        else:\n            x_arg = x + \":N\"\n            y_arg = \"sum(\" + y + \")\"\n\n        chart = alt.Chart(df).mark_bar().encode(\n            x=alt.X(x_arg),\n            y=alt.Y(y_arg))\n\n    return chart.properties(width=900, height=600)",
"def listofstars():\n    a = []\n    for star in Star.select():\n        a.append(star.name)\n    return a",
"def datasets_plots(df, settings):\n\n    # Reformat into seaborn friendly format\n    df = seaborn_formatting_mag(df, settings)\n    multiplot_violin_paper(df, \"test\", settings)",
"def prepare_like_problem(df):\n    like_threshold = 3.0\n    filtered_df = df.loc[df.rating > like_threshold, :]\n    filtered_df = filtered_df.reset_index(drop=True)\n    filtered_df['like'] = 1\n    return filtered_df[['userId', 'movieId', 'like', 'timestamp']]",
"def graph(df):\n    df.plot()\n    plt.show()",
"def format(var, as_var):\r\n    if not hasattr(var, 'dtype'):\r\n        return var\r\n    rval = var\r\n    if rval.type.dtype != as_var.type.dtype:\r\n        rval = rval.astype(as_var.type.dtype)\r\n    if rval.ndim == as_var.ndim:\r\n        rval = as_var.type.filter_variable(rval)\r\n    else:\r\n        tmp = as_var.type.__class__(\r\n                broadcastable=tuple(var.broadcastable[:1])+\\\r\n                tuple(as_var.broadcastable),\r\n                dtype=as_var.dtype)\r\n        rval = tmp.filter_variable(rval)\r\n    return rval",
"def filter_date_df(date_time, df, var=\"date\"):\n    filters = [True if date in date_time else False for date in df[var]]\n    df_filter = df[filters]\n    df_filter = df_filter.drop(columns=[\"spread\"], errors=\"ignore\")\n    df_filter = df_filter.dropna().drop_duplicates()\n    df_filter = df_filter.sort_values(by=[\"date\", \"hour\"])\n    df_filter = df_filter.reset_index(drop=True)\n    df_filter = format_hour(df_filter)\n    return df_filter",
"def null_handler(df):\n    \n    # ok marks the rows where hue is not null\n    ok = np.array([~pd.isnull(df['hue'])])[0]\n    \n    # filter arrays\n    r = df['red'][ok]\n    g = df['green'][ok]\n    b = df['blue'][ok]\n    h = df['hue'][ok]\n    s = df['saturation'][ok]\n    v = df['value'][ok]\n    t = df['timestamp'][ok]\n    dBT = df['dBT'][ok]\n    dt = df['datetime'][ok]    \n    \n    return (r,g,b,h,s,v,t,dBT,dt)",
"def plot_nsynth_vals(self, df, ax, 
**kwargs):\n val_idx = np.where(self.nsynth_vals > 0)\n\n # Background regions\n ax.scatter(df['theta'].loc[~df.index.isin(val_idx)],\n df['r'].loc[~df.index.isin(val_idx)],\n color=[0.7, 0.7, 0.7],\n alpha=0.25,\n **kwargs)\n\n # Foreground -- regions with FDR z-score > 0\n ax.scatter(df['theta'].iloc[val_idx],\n df['r'].iloc[val_idx],\n vmin=0,\n c=self.nsynth_vals[val_idx],\n **kwargs)\n\n return ax", "def explore_col(s, e):\n \n fig = plt.figure(figsize=(10, 8))\n\n\n sub1 = fig.add_subplot(221) \n sub1.set_title(s +' histogram') \n sub1.hist(df_tr_lbl[s])\n\n sub2 = fig.add_subplot(222)\n sub2.set_title(s +' boxplot')\n sub2.boxplot(df_tr_lbl[s])\n \n #np.random.seed(12345)\n \n if e > 100 or e <= 0:\n select_engines = list(pd.unique(df_tr_lbl.id))\n else:\n select_engines = np.random.choice(range(1,101), e, replace=False)\n \n sub3 = fig.add_subplot(223)\n sub3.set_title('time series: ' + s +' / cycle')\n sub3.set_xlabel('cycle')\n for i in select_engines:\n df = df_tr_lbl[['cycle', s]][df_tr_lbl.id == i]\n sub3.plot(df['cycle'],df[s])\n \n sub4 = fig.add_subplot(224)\n sub4.set_title(\"scatter: \"+ s + \" / ttf (regr label)\")\n sub4.set_xlabel('ttf')\n sub4.scatter(df_tr_lbl['ttf'],df_tr_lbl[s])\n\n\n plt.tight_layout()\n plt.show()", "def plot_wanted_cols(self, df, cols_wanted_array):\n\t\tcols_to_plot = {}\n\t\tfor col_wanted in cols_wanted_array:\n\t\t\tfor col in df.columns:\n\t\t\t\tif col_wanted in col:\n\t\t\t\t\tcols_to_plot[col] = df[col]\n\t\tfor k,v in cols_to_plot.items():\n\t\t\tplt.plot(df.index, v, label=k)\n\t\tplt.title(\"Simple Plot\")\n\t\tplt.legend()\n\t\tplt.show()", "def aperture_photometry(self, x_stars, y_stars, aperture, background):\n print '--------------------------------------------------------------------- aperture_photometry'\n\n #--- CONSTANTS ---#\n gain = 0.73 # Gain of camera: electrons pr ADU (ADU = counts from object)- ajust to camera!\n ron = 3.3 # Read out noise - ajust to camera!\n con = 25 # Magnitude constant\n\n #--- PHOTOMETRY ---#\n # Find fluxes:\n N = len(x_stars) # Number of stellar objects \n flux_star = zeros((self.n,N))\n SNR_i = zeros((self.n,N))\n for i in range(self.n): # Loop over all images: if timeseries is available\n for j in range(N): # Loop over all stars and find flux: using same aperture size\n flux_sky, n_star_pix, flux_star[i][j] = self.aperture(self.LF_i[i], x_stars[j], y_stars[j],\\\n aperture, background)\n SNR_i[i][j] = self.SNR(flux_sky, n_star_pix, flux_star[i][j], gain, ron)\n\n #--- FINAL CORRECTIONS ---#\n print flux_star, flux_sky, SNR_i\n return flux_star, SNR_i", "def set_stars():\n prod_id = int(request.vars.prod_id)\n logger.info(\"changing stars on prod_id {%s}\" %prod_id)\n rating = int(request.vars.rating)\n logger.info(\"auth.user from api: %s\"%auth.user.email )\n db.stars.update_or_insert(\n (db.stars.prod_id == prod_id) & (db.stars.user_email == auth.user.email),\n prod_id = prod_id,\n user_email = auth.user.email,\n rating = rating\n )\n new_avg = calc_avg_rating(prod_id)\n return response.json(dict(new_avg=new_avg))", "def plot_raw_data(ratings, pl = True):\n # do statistics.\n num_items_per_user = np.array((ratings != 0).sum(axis=0)).flatten()\n num_users_per_item = np.array((ratings != 0).sum(axis=1).T).flatten()\n sorted_num_movies_per_user = np.sort(num_items_per_user)[::-1]\n sorted_num_users_per_movie = np.sort(num_users_per_item)[::-1]\n\n if pl:\n # plot\n fig = plt.figure()\n ax1 = fig.add_subplot(1, 2, 1)\n ax1.plot(sorted_num_movies_per_user, color='blue')\n 
ax1.set_xlabel(\"users\")\n ax1.set_ylabel(\"number of ratings (sorted)\")\n ax1.grid()\n\n ax2 = fig.add_subplot(1, 2, 2)\n ax2.plot(sorted_num_users_per_movie)\n ax2.set_xlabel(\"items\")\n ax2.set_ylabel(\"number of ratings (sorted)\")\n #ax2.set_xticks(np.arange(0, 2000, 300))\n ax2.grid()\n\n plt.tight_layout()\n plt.savefig(\"../results/stat_ratings\")\n plt.show()\n # plt.close()\n return num_items_per_user, num_users_per_item", "def _parse_stars_according_to_image(self, starClipSigma=40.0):\n if issubclass(self.imageType, ReducedScience):\n # Check if all the images were corrected to Airmass 0.0\n if np.sum([img.airmass for img in self.imageList]) > 0:\n raise ValueError('All images in the imageList must be corrected to airmass=0.0 before combining')\n\n # Compute the star masks for this image stack.\n starMask = self._construct_star_mask()\n\n else:\n starMask = False\n starClipSigma = 0\n\n return starMask, starClipSigma", "def plot_uniqueness(self, variable, **kwargs):\n return self.visualizer.plot_uniqueness(variable, **kwargs)", "def is_type_var(annotation) -> bool:\n\n return isinstance(annotation, typing.TypeVar) # type:ignore", "def get_type_lists(frame, rejects=['Id', 'ID','id'],frame_type='spark'):\n\n #Handle spark type data frames\n if frame_type == 'spark':\n nums, cats = [], []\n for key, val in frame.dtypes:\n if key not in rejects:\n if val == 'string' or val == 'boolean':\n cats.append(key)\n else: # ['int','double']\n nums.append(key)\n print('Numeric =', nums)\n print()\n print('Categorical =', cats)\n return nums, cats\n else:\n nums, cats = [], []\n for key, val in frame.types.items():\n if key not in rejects:\n if val == 'enum':\n cats.append(key)\n else:\n nums.append(key)\n\n print('Numeric =', nums)\n print()\n print('Categorical =', cats)\n\n return nums, cats", "def filter_geom(geom, _type):\n return list(filter(lambda x: isinstance(x, _type), geom))", "def getSolRatioData( self, var, type = \"all\" ):\n\n indx = self.getSolRatioVarIndx( var )\n if indx == -1:\n raise AcuDbAssistError, \"Invalid variable name.\"\n\n typeIndx= self.ratioType[type.lower()]\n pars\t= ( _EVENT_SOL_RAT, indx, typeIndx, 1 )\n\n return self.adbGetEvent( pars )", "def graph_multiple_choice(question_type, question, data_frame, path):\n if (question_type in [\"radio\", \"dropdown\"]):\n # Get this question's data\n column_data = data_frame[question[\"column-name\"]].astype('str')\n\n new_labels = []\n new_counts = []\n for option in question[\"choices\"]:\n new_labels.append(option)\n new_counts.append(column_data.str.count(\"^\" + re.escape(option) + \"$\").sum())\n\n num_items = np.arange(0, len(new_counts))\n\n plt.bar(num_items, new_counts)\n plt.xticks(num_items, new_labels, rotation=\"vertical\")\n plt.tight_layout()\n\n plt.savefig(path + question[\"column-name\"] + '.svg')\n plt.clf()\n plt.close()\n\n if question_type == \"boolean\":\n column_data = data_frame[question[\"column-name\"]].astype('str')\n new_counts = []\n choices = [\"Yes\", \"No\"]\n for option in choices:\n new_counts.append(column_data.str.count(option).sum())\n\n num_items = np.arange(0, len(new_counts))\n\n plt.bar(num_items, new_counts)\n plt.xticks(num_items, choices)\n plt.tight_layout()\n\n plt.savefig(path + question[\"column-name\"] + '.svg')\n plt.clf()\n plt.close()", "def filter_data(self):\n self.df = self.df[HeatStrokeDataFiller.important_features]", "def rep_dtypes(df):\n return \"(\" + re.sub(\", dtype.*\", \"\", re.sub(r\" +\", \": \", str(df.dtypes)).replace(\"\\n\", \", \")) + \")\"", 
"def select_columns(variables):\n return relevant_raw_data_df[variables]", "def visualization(data):\n\t# preview top 5 row of data\n\tprint(\"\\n--------Data preview--------\\n{0}\"\n\t\t .format(data.head()))\n\tprint(\"\\nNull value status as follow:\\n{0}\".format(data.isnull().sum()))\n\tcols = [col for col in data.columns]\n\tprint(\"\\nNumber of original features: {0}\".format(len(cols)))\n\tprint(\"\\nFeatures types:\\n{0}\".format(data[cols].dtypes.value_counts()))\n\n\tcounts = [[], [], []]\n\tfor col in cols:\n\t\t# the data type of each feature\n\t\ttyp = data[col].dtype\n\t\t# the number of differents value in each feature\n\t\tuniq = len(np.unique(data[col]))\n\t\t# constant value feature\n\t\tif uniq == 1:\n\t\t\tcounts[0].append(col)\n\t\t# binary value feature\n\t\telif uniq == 2 and typ == np.int64:\n\t\t\tcounts[1].append(col)\n\t\t# multiple value feature\n\t\telse:\n\t\t\tcounts[2].append(col)\n\n\tprint('\\nConstant features: {}\\nBinary features: {} \\nCategorical features: {}\\n'.format(*[len(c) for c in counts]))\n\tprint('Constant features:', counts[0])\n\tprint('Binary features:', counts[1])\n\tprint('Categorical features:', counts[2])\n\n\tfig, axes = plt.subplots(2,2)\n\tfig.set_size_inches(12, 10)\n\tsn.boxplot(data=data,y=\"count\",orient=\"v\",ax=axes[0][0])\n\tsn.boxplot(data=data,y=\"count\",x=\"season\",orient=\"v\",ax=axes[0][1])\n\tsn.boxplot(data=data,y=\"count\",x=\"hour\",orient=\"v\",ax=axes[1][0])\n\tsn.boxplot(data=data,y=\"count\",x=\"workingday\",orient=\"v\",ax=axes[1][1])\n\n\taxes[0][0].set(ylabel='Count',title=\"Box Plot On Count\")\n\taxes[0][1].set(xlabel='Season', ylabel='Count',title=\"Box Plot On Count Across Season\")\n\taxes[1][0].set(xlabel='Hour Of The Day', ylabel='Count',title=\"Box Plot On Count Across Hour Of The Day\")\n\taxes[1][1].set(xlabel='Working Day', ylabel='Count',title=\"Box Plot On Count Across Working Day\")\n\tplt.show()\n\n\tfig,(ax1,ax2,ax3,ax4)= plt.subplots(nrows=4)\n\tfig.set_size_inches(12,20)\n\tsortOrder = [1,2,3,4,5,6,7,8,9,10,11,12]\n\thueOrder = [\"Sunday\",\"Monday\",\"Tuesday\",\"Wednesday\",\"Thursday\",\"Friday\",\"Saturday\"]\n\n\tmonthAggregated = pd.DataFrame(data.groupby(\"month\")[\"count\"].mean()).reset_index()\n\tmonthSorted = monthAggregated.sort_values(by=\"count\",ascending=False)\n\tsn.barplot(data=monthSorted,x=\"month\",y=\"count\",ax=ax1,order=sortOrder)\n\tax1.set(xlabel='Month', ylabel='Avearage Count',title=\"Average Count By Month\")\n\n\thourAggregated = pd.DataFrame(data.groupby([\"hour\",\"season\"],sort=True)[\"count\"].mean()).reset_index()\n\tsn.pointplot(x=hourAggregated[\"hour\"], y=hourAggregated[\"count\"],hue=hourAggregated[\"season\"],\n\t data=hourAggregated, join=True,ax=ax2)\n\tax2.set(xlabel='Hour Of The Day', ylabel='Users Count',\n\t title=\"Average Users Count By Hour Of The Day Across Season\",label='big')\n\n\thourAggregated = pd.DataFrame(data.groupby([\"hour\",\"weekday\"],sort=True)[\"count\"].mean()).reset_index()\n\tsn.pointplot(x=hourAggregated[\"hour\"], y=hourAggregated[\"count\"],hue=hourAggregated[\"weekday\"],hue_order=hueOrder,\n\t data=hourAggregated, join=True,ax=ax3)\n\tax3.set(xlabel='Hour Of The Day', ylabel='Users Count',\n\t title=\"Average Users Count By Hour Of The Day Across Weekdays\",label='big')\n\n\thourTransformed = pd.melt(data[[\"hour\",\"casual\",\"registered\"]], id_vars=['hour'], value_vars=['casual', 'registered'])\n\thourAggregated = 
pd.DataFrame(hourTransformed.groupby([\"hour\",\"variable\"],sort=True)[\"value\"].mean()).reset_index()\n\tsn.pointplot(x=hourAggregated[\"hour\"], y=hourAggregated[\"value\"],hue=hourAggregated[\"variable\"],\n\t hue_order=[\"casual\",\"registered\"], data=hourAggregated, join=True,ax=ax4)\n\tax4.set(xlabel='Hour Of The Day', ylabel='Users Count',\n\t title=\"Average Users Count By Hour Of The Day Across User Type\",label='big')\n\tplt.show()", "def plot_raw_data(dataset):\n num_attributes = list(dataset.drop(\"Sex\", axis=1))\n for att in num_attributes:\n if att != \"Rings\":\n dataset.plot(kind=\"scatter\", x=\"Rings\", y = att)\n plt.show()\n\n # dataset[\"Rings\"].hist()\n # plt.show()", "def get_basic_plot(df, log_pathway, log_type):\n if len(df) > 0:\n # Get the date column we will use for various counts\n column_for_grouping = '{}Date'.format(log_type)\n # Add a date index to df\n df.set_index(df[column_for_grouping].apply(pd.to_datetime), inplace=True, drop=False)\n # Add Month, week and weekday columns\n df['Month'] = df.index.month\n df['Week'] = df.index.week # Should we use week of year here?\n df['WeekDay'] = df.index.weekday_name\n # Create groups for plotting\n month = df.groupby('Month').size()\n # month.index = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']\n week = df.groupby('Week').size()\n weekday = df.groupby('WeekDay').size()\n\n # Month\n data_viz_pathway = os.path.dirname(log_pathway).replace('logs', 'data_visualization')\n month_plot = month.plot(kind='bar')\n month_fig = month_plot.get_figure()\n month_figure_pathway = os.path.join(data_viz_pathway, '{}output_month.png'.format(log_type))\n month_fig.savefig(month_figure_pathway)\n print('Basic {} log by month chart saved to {}'.format(log_type, month_figure_pathway))\n\n # Week\n week_plot = week.plot(kind='bar')\n week_fig = week_plot.get_figure()\n week_figure_pathway = os.path.join(data_viz_pathway, '{}output_week.png'.format(log_type))\n week_fig.savefig(week_figure_pathway)\n print('Basic {} log by month chart saved to {}'.format(log_type, week_figure_pathway))\n\n # Weekday\n weekday_plot = weekday.plot(kind='bar')\n weekday_fig = weekday_plot.get_figure()\n weekday_figure_pathway = os.path.join(data_viz_pathway, '{}output_weekday.png'.format(log_type))\n weekday_fig.savefig(weekday_figure_pathway)\n print('Basic {} log by month chart saved to {}'.format(log_type, weekday_figure_pathway))", "def plot_var_time_series_dt0_multiquant(TRT_ID_sel, df_nonnan, cfg_tds):\r\n \r\n date_of_cell = datetime.datetime.strptime(TRT_ID_sel[\"TRT_ID\"][:12], \"%Y%m%d%H%M\")\r\n \r\n ## Find cells where the there are loads of similar TRT Ranks:\r\n DTI_sel = [dti for dti in df_nonnan.index.values if dti[13:] in TRT_ID_sel[\"TRT_ID\"]]\r\n cell_sel = df_nonnan.loc[DTI_sel]\r\n cell_sel.set_index(pd.to_datetime([datetime.datetime.strptime(date[:12],\"%Y%m%d%H%M\") for date in cell_sel.index]),\r\n drop=True,append=False,inplace=True)\r\n \r\n fig, axes = plt.subplots(2,2)\r\n fig.set_size_inches(10,8) \r\n cmap_3_quant = truncate_cmap(plt.get_cmap('afmhot'), 0.2, 0.6)\r\n legend_entries = []\r\n cell_sel[[\"IR_108_stat|0|MIN\",\"IR_108_stat|0|PERC05\",\"IR_108_stat|0|PERC25\"]].plot(ax=axes[0,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[0,0].set_title(r\"Brightness Temperatures T$_B$\")\r\n axes[0,0].set_ylabel(r\"IR 10.8$\\mu$m [K]\")\r\n legend_entries.append([\"Min\",\"5%\", \"25%\"])\r\n\r\n 
cell_sel[[\"CG3_stat|0|PERC99\",\"CG3_stat|0|PERC95\",\"CG3_stat|0|PERC75\"]].plot(ax=axes[0,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[0,1].set_title(\"Glaciation indicator (GI)\")\r\n axes[0,1].set_ylabel(r\"IR 12.0$\\mu$m - IR 10.8$\\mu$m [K]\")\r\n legend_entries.append([\"99%\",\"95%\", \"75%\"])\r\n\r\n cell_sel[[\"CD5_stat|0|MAX\",\"CD5_stat|0|PERC95\",\"CD5_stat|0|PERC75\"]].plot(ax=axes[1,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[1,0].set_title(\"Cloud optical depth indicator (COD)\")\r\n axes[1,0].set_ylabel(r\"WV 6.2$\\mu$m - IR 10.8$\\mu$m [K]\")\r\n legend_entries.append([\"Max\",\"95%\", \"75%\"])\r\n\r\n cell_sel[[\"IR_108_stat|-15|PERC25\",\"IR_108_stat|-15|PERC50\",\"IR_108_stat|-15|PERC75\"]].plot(ax=axes[1,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[1,1].set_title(r\"Updraft strength indicator ($w_{T}$)\")\r\n axes[1,1].set_ylabel(r\"IR 10.8$\\mu$m (t$_0$) - IR 10.8$\\mu$m (t$_{-15}$) [K]\")\r\n legend_entries.append([\"25%\",\"50%\", \"75%\"])\r\n for ax, leg_ent in zip(axes.flat,legend_entries):\r\n ax.grid()\r\n ax.legend(leg_ent, fontsize=\"small\", loc=\"upper right\") #, title_fontsize=\"small\", title =\"Quantiles\"\r\n plt.tight_layout()\r\n plt.savefig(os.path.join(cfg_tds[\"fig_output_path\"],\"SEVIRI_series_%s.pdf\" % (TRT_ID_sel[\"TRT_ID\"])))\r\n plt.close()\r\n\r\n fig, axes = plt.subplots(3,2)\r\n fig.set_size_inches(10,8) \r\n legend_entries = []\r\n cell_sel[[\"RZC_stat_nonmin|0|PERC50\",\"RZC_stat_nonmin|0|PERC75\",\"RZC_stat_nonmin|0|MAX\"]].plot(ax=axes[0,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"RZC_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[0,0],color=\"black\",linewidth=0.5,style='--',alpha=0.8, secondary_y=True)\r\n axes[0,0].set_title(r\"Rain Rate (RR)\")\r\n axes[0,0].set_ylabel(r\"Rain Rate [mm h$^{-1}$]\")\r\n ax_pixc.set_ylabel(\"Covered areal fraction [%]\")\r\n legend_entries.append([\"50%\",\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"LZC_stat_nonmin|0|PERC50\",\"LZC_stat_nonmin|0|PERC75\",\"LZC_stat_nonmin|0|MAX\"]].plot(ax=axes[0,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"LZC_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[0,1],color=\"black\",linewidth=0.5,style='--',alpha=0.8, secondary_y=True)\r\n axes[0,1].set_title(\"Vertically Integrated Liquid (VIL)\")\r\n axes[0,1].set_ylabel(r\"VIL [kg m$^{-2}$]\")\r\n ax_pixc.set_ylabel(\"Covered areal fraction [%]\")\r\n legend_entries.append([\"50%\",\"95%\", \"MAX\"])\r\n\r\n cell_sel[[\"MZC_stat_nonmin|0|PERC50\",\"MZC_stat_nonmin|0|PERC75\",\"MZC_stat_nonmin|0|MAX\"]].plot(ax=axes[1,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"MZC_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[1,0],color=\"black\",linewidth=0.5,style='--',alpha=0.8, secondary_y=True)\r\n axes[1,0].set_title(\"Maximum Expected Severe Hail Size (MESHS)\")\r\n axes[1,0].set_ylabel(\"MESHS [cm]\")\r\n ax_pixc.set_ylabel(\"Covered areal fraction [%]\")\r\n legend_entries.append([\"25%\",\"50%\", \"75%\"])\r\n\r\n cell_sel[[\"BZC_stat_nonmin|0|PERC50\",\"BZC_stat_nonmin|0|PERC75\",\"BZC_stat_nonmin|0|MAX\"]].plot(ax=axes[1,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"BZC_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[1,1],color=\"black\",linewidth=0.5,style='--',alpha=0.8, secondary_y=True)\r\n axes[1,1].set_title(\"Probability of Hail (POH)\")\r\n axes[1,1].set_ylabel(r\"POH [%]\")\r\n ax_pixc.set_ylabel(\"Covered areal fraction [%]\")\r\n 
legend_entries.append([\"50%\",\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"EZC15_stat_nonmin|0|PERC75\",\"EZC15_stat_nonmin|0|MAX\",\"EZC45_stat_nonmin|0|PERC75\",\"EZC45_stat_nonmin|0|MAX\"]].plot(ax=axes[2,0],color=[\"#fdbf6f\",\"#ff7f00\",\"#fb9a99\",\"#e31a1c\"],linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"EZC45_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[2,0],color=\"black\",linewidth=0.5,style='--',alpha=0.8, secondary_y=True)\r\n axes[2,0].set_title(\"Echo Top (ET)\")\r\n axes[2,0].set_ylabel(\"Altitude a.s.l. [km]\")\r\n ax_pixc.set_ylabel(\"Pixel count\")\r\n legend_entries.append([\"75% (15dBZ)\",\"Max (15dBZ)\", \"75% (45dBZ)\", \"Max (45dBZ)\"])\r\n\r\n cell_sel[[\"THX_dens_stat|0|MEAN\",\"THX_densIC_stat|0|MEAN\",\"THX_densCG_stat|0|MEAN\"]].plot(ax=axes[2,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[2,1].set_title(\"Mean lightning Density (THX)\")\r\n axes[2,1].set_ylabel(\"Lightning density [km$^{-2}$]\")\r\n ax_pixc.set_ylabel(\"Pixel count\")\r\n legend_entries.append([\"Total\",\"IC\", \"CG\"])\r\n for ax, leg_ent in zip(axes.flat,legend_entries):\r\n ax.grid()\r\n ax.legend(leg_ent, fontsize=\"small\", loc=\"upper left\") #) #, title_fontsize=\"small\", title =\"Quantiles\"\r\n plt.tight_layout()\r\n plt.savefig(os.path.join(cfg_tds[\"fig_output_path\"],\"RADAR_series_%s.pdf\" % (TRT_ID_sel[\"TRT_ID\"])))\r\n plt.close()\r\n\r\n fig, axes = plt.subplots(2,2)\r\n fig.set_size_inches(10,8) \r\n legend_entries = []\r\n cell_sel[[\"CAPE_ML_stat|0|PERC50\",\"CAPE_ML_stat|0|MAX\"]].plot(ax=axes[0,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[0,0].set_title(r\"CAPE (mean surface layer parcel)\")\r\n axes[0,0].set_ylabel(r\"CAPE [J kg$^{-1}$]\")\r\n legend_entries.append([\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"CIN_ML_stat|0|PERC50\",\"CIN_ML_stat|0|MAX\"]].plot(ax=axes[0,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[0,1].set_title(r\"CIN (mean surface layer parcel)\")\r\n axes[0,1].set_ylabel(r\"CIN [J kg$^{-1}$]\")\r\n legend_entries.append([\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"WSHEAR_0-3km_stat|0|PERC50\",\"WSHEAR_0-3km_stat|0|MAX\"]].plot(ax=axes[1,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[1,0].set_title(r\"Wind shear (0km - 3km)\")\r\n axes[1,0].set_ylabel(r\"Wind shear [m s$^{-1}$]\")\r\n legend_entries.append([\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"POT_VORTIC_30000_stat|0|PERC50\",\"POT_VORTIC_30000_stat|0|MAX\"]].plot(ax=axes[1,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[1,1].set_title(r\"Potential vorticity (300hPa)\")\r\n axes[1,1].set_ylabel(r\"PV [K m$^{2}$ kg$^{-1}$ s$^{-1}$]\")\r\n legend_entries.append([\"75%\", \"MAX\"])\r\n\r\n for ax, leg_ent in zip(axes.flat,legend_entries):\r\n ax.grid()\r\n ax.legend(leg_ent, fontsize=\"small\", loc=\"upper left\") #) #, title_fontsize=\"small\", title =\"Quantiles\"\r\n plt.tight_layout()\r\n plt.savefig(os.path.join(cfg_tds[\"fig_output_path\"],\"COSMO_THX_series_%s.pdf\" % (TRT_ID_sel[\"TRT_ID\"])))\r\n plt.close()", "def plot_stats(x_axis, y_axis, df, highlight=[]):\n a, b = df[x_axis], df[y_axis]\n\n X_train, X_test, y_train, y_test = train_test_split(a, b, test_size=0.33, random_state=42)\n\n X_train = np.array(X_train).reshape(-1, 1)\n X_test = np.array(X_test).reshape(-1, 1)\n y_train = np.array(y_train).reshape(-1, 1)\n y_test = np.array(y_test).reshape(-1, 1)\n\n regr = linear_model.LinearRegression()\n\n regr.fit(X_train, y_train)\n\n df[y_axis + \" STD\"] = df[y_axis].apply(lambda a: 
round((a-df[y_axis].mean())/df[y_axis].std()))\n df[y_axis + \" rank\"] = df[y_axis].rank(ascending=False)\n df[x_axis + \" rank\"] = df[x_axis].rank(ascending=False)\n \n mapper = linear_cmap(field_name=y_axis + \" STD\", palette=brewer[\"RdBu\"][len(df[y_axis + \" STD\"].unique())], \n low=min(df[y_axis + \" STD\"].unique()), high=max(df[y_axis + \" STD\"].unique()))\n \n source = ColumnDataSource(df)\n source2 = ColumnDataSource(df[df[\"Player\"].isin(highlight)])\n \n p = figure(x_range=(df[x_axis].min() - df[x_axis].std(), df[x_axis].max() + df[x_axis].std()), \n y_range=(df[y_axis].min() - df[y_axis].std(), df[y_axis].max() + df[y_axis].std()))\n \n r1 = p.circle(x=x_axis, y=y_axis,\n source=source, size=10, color=mapper, line_color=\"black\", legend_group= y_axis + \" STD\")\n\n p.title.text = y_axis + \" vs. \" + x_axis\n p.title.align = \"center\"\n p.xaxis.axis_label = x_axis\n p.yaxis.axis_label = y_axis\n p.legend.location = 'top_left'\n p.legend.title = \"St. Dev's from Avg \" + y_axis\n p.background_fill_color = \"#dddddd\"\n p.background_fill_alpha = 0.1\n \n line_x = [df[x_axis].min().item() - df[x_axis].std().item(), df[x_axis].max().item() + df[x_axis].std().item()]\n line_y = [(line_x[0]*regr.coef_.item()) + regr.intercept_.item(), (line_x[1]*regr.coef_.item()) + regr.intercept_.item()]\n r2 = p.line(line_x, line_y, line_width=2, color=\"black\")\n\n p.add_tools(HoverTool(renderers=[r1], tooltips=[\n (\"Player\", \"@Player\"),\n (y_axis, \"@{\" + y_axis +\"}{0.000}\"),\n (y_axis + \" Rank\", \"#@{\" + y_axis + \" rank}\"),\n (x_axis, \"@{\" + x_axis +\"}{0}\"),\n (x_axis + \" Rank\", \"#@{\" + x_axis + \" rank}\")]))\n\n \n p.add_tools(HoverTool(renderers=[r2], \n tooltips=[(x_axis, \"$x{0000}\"),\n (\"Predicted \" + y_axis, \"$y\")]))\n \n labels = LabelSet(x=x_axis, \n y=y_axis, text=\"Player\", y_offset=8,\n text_font_size=\"11px\", text_color=\"#555555\",\n source=source2, text_align='center')\n \n p.add_layout(labels)\n\n st.bokeh_chart(p)", "def plot_data(self):", "def plot_psychometric(df, color='black', ax=None, **kwargs):\n\n if len(df['signedContrast'].unique()) > 4:\n df2 = df.groupby(['signedContrast']).agg(\n {'choice': 'count', 'choice2': 'mean'}).reset_index()\n df2.rename(columns={\"choice2\": \"fraction\", \"choice\": \"ntrials\"}, inplace=True)\n\n pars, L = psy.mle_fit_psycho(df2.transpose().values, # extract the data from the df\n P_model='erf_psycho_2gammas',\n parstart=np.array([df2['signedContrast'].mean(), 20., 0.05,\n 0.05]),\n parmin=np.array([df2['signedContrast'].min(), 0., 0., 0.]),\n parmax=np.array([df2['signedContrast'].max(), 100., 1, 1]))\n sns.lineplot(np.arange(-100, 100), psy.erf_psycho_2gammas(pars, np.arange(-100, 100)),\n color=color, ax=ax)\n\n if 100 in df.signedContrast.values and not 50 in df.signedContrast.values:\n df['signedContrast'] = df.signedContrast.replace(-100, -35)\n df['signedContrast'] = df.signedContrast.replace(100, 35)\n\n brokenXaxis = True\n else:\n brokenXaxis = False\n\n # plot datapoints on top\n sns.lineplot(x='signedContrast', y='choice2', err_style=\"bars\", linewidth=0, linestyle='None',\n mew=0.5,\n marker='.', ci=68, data=df, color=color, ax=ax)\n\n if not brokenXaxis:\n # Reduce the clutter\n ax.set_xticks([-100, -50, 0, 50, 100])\n ax.set_xticklabels(['-100', '-50', '0', '50', '100'])\n ax.set_xlim([-110, 110])\n else:\n ax.set_xticks([-35, -25, -12.5, -6, 0, 6, 12.5, 25, 35])\n ax.set_xticklabels(['-100', '-25', '-12.5', '-6.25', '0', '6.25', '12.5', '25', '100'],\n size='x-small', rotation=-90)\n 
ax.set_xlim([-40, 40])\n\n ax.set_yticks([0, .5, 1])\n ax.set_ylim([-0.03, 1.03])\n ax.set_xlabel('Contrast (%)')\n\n return ax", "def plot_columns(dataframe, title):\n sns.boxplot(x=dataframe['category_id'], y=dataframe['price'])\n plt.title(title)\n plt.xlabel('Category ID')\n plt.ylabel('Price')\n plt.show()", "def violin_or_box_plot(df, y, path, y_name, settings, title=None, plot=\"violin\", log=False):\n comp = Plot(\n path=f\"{path}NanoComp_{y.replace(' ', '_')}_{plot}.html\",\n title=f\"Comparing {y_name.lower()}\",\n )\n palette = (\n settings[\"colors\"] if settings[\"colors\"] else cycle(plotly.colors.DEFAULT_PLOTLY_COLORS)\n )\n\n if plot == \"violin\":\n logging.info(f\"NanoComp: Creating violin plot for {y}.\")\n\n fig = go.Figure()\n\n for dataset, color in zip(df[\"dataset\"].unique(), palette):\n fig.add_trace(\n go.Violin(\n x=df[\"dataset\"][df[\"dataset\"] == dataset],\n y=df[y][df[\"dataset\"] == dataset],\n marker_color=color,\n points=False,\n name=dataset,\n )\n )\n\n process_violin_and_box(\n fig,\n log=log,\n plot_obj=comp,\n title=title,\n y_name=y_name,\n ymax=np.amax(df[y]),\n settings=settings,\n )\n\n elif plot == \"box\":\n logging.info(f\"NanoComp: Creating box plot for {y}.\")\n\n fig = go.Figure()\n\n for dataset, color in zip(df[\"dataset\"].unique(), palette):\n fig.add_trace(\n go.Box(\n x=df[\"dataset\"][df[\"dataset\"] == dataset],\n y=df[y][df[\"dataset\"] == dataset],\n marker_color=color,\n name=dataset,\n )\n )\n\n process_violin_and_box(\n fig,\n log=log,\n plot_obj=comp,\n title=title,\n y_name=y_name,\n ymax=np.amax(df[y]),\n settings=settings,\n )\n\n elif plot == \"ridge\":\n logging.info(f\"NanoComp: Creating ridges plot for {y}.\")\n\n fig = go.Figure()\n\n for d, color in zip(df[\"dataset\"].unique(), palette):\n fig.add_trace(go.Violin(x=df[y][df[\"dataset\"] == d], name=d, marker_color=color))\n\n fig.update_traces(orientation=\"h\", side=\"positive\", width=3, points=False)\n fig.update_layout(title=title or comp.title, title_x=0.5)\n\n comp.fig = fig\n comp.html = comp.fig.to_html(full_html=False, include_plotlyjs=\"cdn\")\n comp.save(settings)\n\n else:\n logging.error(f\"Unknown comp plot type {plot}\")\n sys.exit(f\"Unknown comp plot type {plot}\")\n\n return [comp]", "def profile_mass(df,variable_xaxis, sign, peak, edge_left, edge_right, pdf_key):\n\n if sign == 1:\n keyword = 'signal'\n if sign == 0:\n keyword = 'background'\n\n df = df[(df[variable_xaxis] < edge_right) & (df[variable_xaxis] > edge_left)]\n\n for var in df.columns:\n if var != variable_xaxis:\n\n fig, axs = plt.subplots(figsize=(20, 15))\n\n bin_means, bin_edges, binnumber = b_s(df[variable_xaxis],df[var], statistic='mean', bins=25)\n bin_std, bin_edges, binnumber = b_s(df[variable_xaxis],df[var], statistic='std', bins=25)\n bin_count, bin_edges, binnumber = b_s(df[variable_xaxis],df[var], statistic='count',bins= 25)\n bin_width = (bin_edges[1] - bin_edges[0])\n bin_centers = bin_edges[1:] - bin_width/2\n\n nan_ind = np.where(np.isnan(bin_means))\n bin_centers = np.delete(bin_centers, nan_ind)\n bin_means = np.delete(bin_means, nan_ind)\n bin_count = np.delete(bin_count, nan_ind)\n bin_std = np.delete(bin_std , nan_ind)\n\n\n plt.errorbar(x=bin_centers, y=bin_means, yerr=(bin_std/np.sqrt(bin_count)), linestyle='none', marker='.',mfc='red', ms=10)\n\n\n\n plt.title('Mean of ' +var+ ' plotted versus bin centers of '+variable_xaxis+ \\\n '('+keyword+')', fontsize=25)\n plt.xlabel('Mass', fontsize=25)\n plt.ylabel(\"Mean of each bin with the SEM ($\\dfrac{bin\\ 
std}{\\sqrt{bin\\ count}}$) of bin\", fontsize=25)\n\n\n plt.vlines(x=peak,ymin=bin_means.min(),ymax=bin_means.max(), color='r', linestyle='-')\n\n\n fig.tight_layout()\n plt.savefig(pdf_key,format='pdf')\n\n pdf_key.close()", "def get_variables_of_type(self, variable_type):\n if isinstance(variable_type,str):\n variable_key = variable_type\n else:\n #it is a class\n variable_key = variable_type.__name__\n return self._var_kinds[variable_key]", "def plot_variant_positions(strains):\n if strains.lower() == 'all':\n strains = None\n strains = get_required_strains(strains)\n gd_data = []\n with database.make_connection() as connection:\n for strain in strains:\n hits = r.table(TABLE).filter(lambda row: row['StrainID'].match(\n strain)).pluck('Position', 'Class').run(connection)\n feat = []\n for hit in hits:\n cur = hit['Position']\n feat.append(misc.create_feature(cur, cur, hit['Class'], strand=None))\n gd_data.append(feat)\n imaging.plot_SNPs(gd_data, strains)", "def _variance_symbol(self, variable):\n return Symbol(\"e_{0}\".format(variable))", "def _sig_stars(val):\n star = \"\"\n if 0 <= val < 0.001:\n star = \"***\"\n elif 0.001 <= val < 0.01:\n star = \"**\"\n elif 0.01 <= val < 0.05:\n star = \"*\"\n elif 0.05 <= val < 0.1:\n star = \".\"\n return star", "def plotVariableScatter(inputTable, logX=False, xLim=None, xLabel='', yLabel='', sampletypeColor=False, hLines=None, hLineStyle='-', hBox=None, vLines=None, vLineStyle=':', vBox=None, savePath=None, figureFormat='png', dpi=72, figureSize=(11 ,7)):\n\n ## Checks\n if xLim is not None:\n if not isinstance(xLim, tuple):\n raise TypeError('xLim must be \\'None\\' or tuple(float, float)')\n if not isinstance(xLabel, str):\n raise TypeError('xLabel must be a str')\n if not isinstance(yLabel, str):\n raise TypeError('yLabel must be a str')\n if hLines is not None:\n if not isinstance(hLines, list):\n raise TypeError('hLines must be \\'None\\' or list')\n if vLines is not None:\n if not isinstance(vLines, list):\n raise TypeError('vLines must be \\'None\\' or list')\n if hLineStyle not in ['-', '--', '-.', ':']:\n raise ValueError('hLines must be one of the matplotlib axhline linestyle (\\'-\\', \\'--\\', \\'-.\\', \\':\\')')\n if vLineStyle not in ['-', '--', '-.', ':']:\n raise ValueError('vLines must be one of the matplotlib axvline linestyle (\\'-\\', \\'--\\', \\'-.\\', \\':\\')')\n if hBox is not None:\n if not isinstance(hBox, list):\n raise TypeError('hBox must be \\'None\\' or list')\n if not isinstance(hBox[0], tuple):\n raise TypeError('hBox must be a list of tuple')\n if vBox is not None:\n if not isinstance(vBox, list):\n raise TypeError('vBox must be \\'None\\' or list')\n if not isinstance(vBox[0], tuple):\n raise TypeError('vBox must be a list of tuple')\n\n ## Init\n sns.set_style(\"ticks\", {'axes.linewidth': 0.75})\n fig = plt.figure(figsize=figureSize, dpi=dpi)\n ax = plt.subplot(1, 1, 1)\n current_palette = sns.color_palette()\n\n data = copy.deepcopy(inputTable)\n data.reset_index(drop=True, inplace=True)\n\n # reorder if needed and get a y position\n minY = 1\n maxY = data.shape[0]\n data['yPos'] = list(reversed(range(minY, maxY+ 1)))\n\n # Register +/-numpy.inf, all values for min/max\n infNegY = []\n infPosY = []\n allVal = []\n\n # color iterator and color scheme\n colorIdx = 1\n if sampletypeColor:\n ## Try loading toolbox wide color scheme\n # value just in case\n sTypeColourDict = {SampleType.StudySample: 'b', SampleType.StudyPool: 'g', SampleType.ExternalReference: 'r',\n SampleType.MethodReference: 'm', 
SampleType.ProceduralBlank: 'c', 'Other': 'grey'}\n # load from the SOP as we do not have access to object\n try:\n from .._toolboxPath import toolboxPath\n import json\n import os\n\n with open(os.path.join(toolboxPath(), 'StudyDesigns', 'SOP', 'Generic.json')) as data_file:\n attributes = json.load(data_file)\n # convert key names to SampleType enum\n if 'sampleTypeColours' in attributes.keys():\n sTypeColourDict = copy.deepcopy(attributes['sampleTypeColours'])\n for stype in SampleType:\n if stype.name in sTypeColourDict.keys():\n sTypeColourDict[stype] = sTypeColourDict.pop(stype.name)\n except:\n pass\n # add cases specific to this plot\n sTypeColourDict['All Samples'] = 'black'\n sTypeColourDict['nan'] = 'grey'\n\n\n # Plot each columns of data\n # columns of interest\n if sampletypeColor:\n # match the subset of columns while imposing the sampleType ordering (can't be done with a set)\n expectedCols = pandas.DataFrame({'sType': [SampleType.StudySample, SampleType.ExternalReference, 'Other', 'All Samples', SampleType.StudyPool]})\n workingCols = expectedCols['sType'][expectedCols.sType.isin(inputTable.columns.tolist())].tolist()\n else:\n workingCols = list(set(inputTable.columns.tolist()) - set(['yName']))\n\n # plot each column, store +/-inf for later plot\n for wcol in workingCols:\n # change plot alpha and linewidth\n if sampletypeColor:\n if wcol == SampleType.StudyPool:\n lwPlot = 1\n alphaPlot = 1\n else:\n lwPlot = 1\n alphaPlot = 0.5\n currentColor = sTypeColourDict[wcol]\n else:\n lwPlot = 1\n alphaPlot = 1\n currentColor = current_palette[colorIdx]\n\n # Get name\n if wcol == SampleType.StudySample:\n name = 'Study Sample'\n elif wcol == SampleType.StudyPool:\n name = 'Study Reference'\n elif wcol == SampleType.ExternalReference:\n name = 'Long-Term Reference'\n else:\n name = wcol\n\n # only plot values, no inf or nan\n valueMask = numpy.isfinite(data[wcol].tolist()).tolist()\n tmpX = pandas.DataFrame({'x': data.loc[valueMask, wcol].tolist()})\n tmpY = pandas.DataFrame({'y': data.loc[valueMask, 'yPos'].tolist()})\n pt = ax.scatter(x=tmpX['x'], y=tmpY['y'], alpha=alphaPlot, lw=lwPlot, c=currentColor, label=name)\n colorIdx += 1\n\n # store position of inf\n infNegY.extend(data.loc[(data[wcol] == -numpy.inf).tolist(), 'yPos'])\n infPosY.extend(data.loc[(data[wcol] == numpy.inf).tolist(), 'yPos'])\n allVal.extend(data.loc[valueMask, wcol])\n\n # Plot a marker for -inf/+inf\n minX = min(allVal)\n maxX = max(allVal)\n infNegX = [minX] * len(infNegY)\n infPosX = [maxX] * len(infPosY)\n infX = infNegX + infPosX\n infY = infNegY + infPosY\n ax.scatter(x=infX, y=infY, marker='X', c='white', linewidth=1, edgecolor='black')\n\n # Vertical lines\n if vLines is not None:\n for vlY in vLines:\n vline = ax.axvline(x=vlY, ymin=0, ymax=1, linestyle=vLineStyle, color='grey')\n vline.set_zorder(0)\n\n # Horizontal lines\n if hLines is not None:\n for hlY in hLines:\n hline = ax.axhline(y=hlY, xmin=0, xmax=1, linestyle=hLineStyle, color='grey', lw=0.5, alpha=0.5)\n hline.set_zorder(0)\n\n # Horizontal Box\n if hBox is not None:\n for hB in hBox:\n p_rect = ax.add_patch(mpatches.Rectangle((minX, hB[0]), maxX, (hB[1] - hB[0]), facecolor='grey', alpha=0.15)) # Rectangle((x,y), width, height)\n p_rect.set_zorder(0)\n\n # Vertical Box\n if vBox is not None:\n for vB in vBox:\n p_rect = ax.add_patch(mpatches.Rectangle((vB[0], minY-1), (vB[1] - vB[0]), maxY+1, facecolor='grey', alpha=0.15)) # Rectangle((x,y), width, height)\n p_rect.set_zorder(0)\n\n\n # Set limits\n xpadding = (maxX - minX) / 
100.0\n ypadding = (maxY - minY) / 100.0\n ypadding = numpy.floor(ypadding)\n\n if xLim:\n ax.set_xlim(xLim)\n else:\n ax.set_xlim((minX - xpadding, maxX + xpadding))\n\n ax.set_ylim((minY - 1 - ypadding, maxY + 1 + ypadding))\n\n # Log scale axis\n if logX:\n ax.set_xscale('symlog')\n for axis in [ax.xaxis, ax.yaxis]:\n axis.set_major_formatter(ScalarFormatter())\n\n # Axis and legend\n ax.set_xlabel(xLabel)\n ax.set_ylabel(yLabel)\n if 'yName' in inputTable.columns:\n plt.yticks(data['yPos'], data['yName'])\n plt.legend(loc='upper left', bbox_to_anchor=(1, 1))\n\n # Save or output\n if savePath:\n plt.savefig(savePath, bbox_inches='tight', format=figureFormat, dpi=dpi)\n plt.close()\n else:\n plt.show()", "def proportions_visualiser(\n df: pd.core.frame.DataFrame,\n colum_name: str = \"Sensor Glucose (mg/dL)\",\n limits: Dict[str, int] = {\"low\": 70, \"high\": 180},\n windows: Dict[str, int] = {\"weekly\": 7, \"monthly\": 30},\n kind: str = \"TIR\",\n) -> NoReturn:\n\n valid_kinds = [\"TIR\", \"TBR\", \"TAR\"]\n\n if \"low\" not in limits.keys() or \"high\" not in limits.keys():\n raise Exception(f\"limits.keys() should be ['low', 'high'] not {limits.keys()}\")\n\n titles = {\n \"TIR\": f\"Time In Range [{limits['low']},{limits['high']})\",\n \"TAR\": f\"Time Above Range >= {limits['high']}\",\n \"TBR\": f\"Time Below Range < {limits['low']}\",\n }\n\n kind = kind.upper()\n if kind not in valid_kinds:\n raise Exception(\n f\"Invalid kind `{kind}`, select one from {valid_kinds} or refer to help({self.__name__})\"\n )\n\n TIR = (\n lambda y: 100\n * y[(y >= limits[\"low\"]) & (y < limits[\"high\"])].count()\n / y.count()\n )\n TBR = lambda y: 100 * y[(y < limits[\"low\"])].count() / y.count()\n TAR = lambda y: 100 * y[(y >= limits[\"high\"])].count() / y.count()\n\n _proportions = df[colum_name].groupby(df.index.date).apply(eval(f\"{kind}\"))\n\n _proportions.plot(**{\"label\": \"daily\"})\n\n for key, value in windows.items():\n _ax = _proportions.rolling(value).mean().plot(**{\"label\": key})\n\n _mean_proportion = _proportions.mean()\n plt.ylabel(\"Percentage\")\n plt.axhline(\n _mean_proportion,\n **{\"label\": f\"mean = {round(_mean_proportion,1)}\", \"c\": \"blue\"},\n )\n plt.legend()\n plt.title(titles[kind])", "def plot(dsname, wdir = './', width = 1000.0, dt = 5.0*yt.units.Myr, fields = all_fields,\n thickness = 20.0, outdir = './enrichment_plots_kpc'):\n\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n\n gal = Galaxy(dsname, wdir = wdir)\n data = gal.df\n\n @derived_field(name=\"logNO\", units=\"\")\n def _logNO(field, data):\n return np.log10(data['N_Abundance'] / data['O_Abundance'])\n gal.ds.add_field((\"gas\", \"logNO\"), function=_logNO, units=\"\")\n\n make_filtered_field(gal.ds, 'logNO', ['O_Fraction','N_Fraction'])\n make_filtered_field(gal.ds, 'O_over_H', ['O_Fraction'])\n make_filtered_field(gal.ds, 'N_over_O', ['O_Fraction','N_Fraction'])\n# def _logNO_filtered(field,data):\n# x = data[('gas','logNO')]\n#\n# f1 = data[('gas','O_Fraction')]\n# f2 = data[('gas','N_Fraction')]\n#\n# x[ (f1 < tol) + (f2 < tol)] = np.nan\n#\n# return x\n# gal.ds.add_field(('gas','logNO_filtered'), function = _logNO_filtered, units = \"\")\n\n M = data['birth_mass']\n t_o = data['creation_time'].convert_to_units('Myr')\n MS_lifetime = data[('io','particle_model_lifetime')].to('Myr')\n MS_death = t_o + MS_lifetime\n px = (data['particle_position_x'] - gal.ds.domain_center[0]).to('pc')\n py = (data['particle_position_y'] - gal.ds.domain_center[1]).to('pc')\n pz = 
(data['particle_position_z'] - gal.ds.domain_center[2]).to('pc')\n\n recent_death = (MS_death > gal.ds.current_time - dt) * (MS_death <= gal.ds.current_time + 0.001*yt.units.Myr)\n alive = MS_death > gal.ds.current_time + 0.001*yt.units.Myr\n\n AGB = M < 8.0\n massive_star = (M > 8.0) * (M < 25.0)\n\n boxdim = np.array([width*1.25,width*1.25,thickness])*yt.units.pc\n region = gal.ds.box(gal.ds.domain_center - boxdim*0.5, gal.ds.domain_center + boxdim*0.5)\n\n proj = yt.ProjectionPlot(gal.ds, 'z', fields,\n weight_field = 'number_density', data_source = region, width = (width,'pc'))\n\n if 'number_density' in fields:\n proj.set_unit('number_density','cm**(-3)')\n proj.set_cmap('number_density','viridis')\n proj.set_zlim('number_density',1.0E-4,200.0)\n\n if 'O_over_H_filtered' in fields:\n proj.set_cmap('O_over_H_filtered','cubehelix')\n proj.set_log('O_over_H_filtered', False)\n proj.set_zlim('O_over_H_filtered', -5, 1)\n proj.set_colorbar_label('O_over_H_filtered', r'[O/H]')\n\n if 'N_over_O_filtered' in fields:\n proj.set_cmap('N_over_O_filtered','PRGn')\n proj.set_log('N_over_O_filtered',False)\n proj.set_zlim('N_over_O_filtered',-2,2)\n proj.set_colorbar_label('N_over_O_filtered', r'[N/O]')\n\n if 'logNO' in fields:\n proj.set_cmap('logNO','PRGn')\n proj.set_log('logNO',False)\n proj.set_zlim('logNO',-2,0.5)\n proj.set_colorbar_label('logNO', r'log( N / O )')\n\n if 'logNO_filtered' in fields:\n proj.set_cmap('logNO_filtered','PRGn')\n proj.set_log('logNO_filtered',False)\n proj.set_zlim('logNO_filtered',-2,0.5)\n proj.set_colorbar_label('logNO_filtered', r'log( N / O )')\n\n if 'Temperature' in fields:\n proj.set_cmap('Temperature', 'RdYlBu_r')\n proj.set_log('Temperature',True)\n proj.set_zlim('Temperature',10.0, 1.0E7)\n proj.set_colorbar_label('Temperature', r'Temperature (K)')\n\n if 'G_o' in fields:\n proj.set_cmap('G_o', 'cubehelix')\n proj.set_log('G_o', True)\n proj.set_zlim('G_o',0.05, 100.0)\n proj.set_colorbar_label('G_o', r'ISRF (G$_{\\rm o}$)')\n\n if 'Q0_flux':\n proj.set_cmap('Q0_flux', 'magma')\n proj.set_log('Q0_flux',True)\n proj.set_zlim('Q0_flux',1.0E-6, 1.0E-1)\n proj.set_colorbar_label('Q0_flux', r'HI Ionizing Radiation (s$^{-1}$)')\n\n Mstar = np.sum(gal.df['particle_mass'][ gal.df['particle_type'] == 11]).to('Msun')\n time = gal.ds.current_time.to('Myr')\n# proj.annotate_title(r\"Time = %1.1f Myr M$_{*}$ = %2.2E M$_{\\odot}$\"%(time.value,Mstar.value))\n proj.set_font( {'size' : 32} )\n proj.save(outdir + '/') # necessary\n\n\n dt = 5.0 * yt.units.Myr\n # buffer around image. otherwise points plotted near edge of image my run a little outside\n # viewing area, causing weird shifts in plotting. 
Not sure how to control this otherwise\n buffer = 15.0 # in pc\n in_image = (np.abs(pz) <= boxdim[2]*0.5) *\\\n (np.abs(px) <= (width*0.5 - buffer)) *\\\n (np.abs(py) <= (width*0.5 - buffer))\n\n pp = {}\n pp['massive_star_winds'] = in_image * alive * massive_star\n pp['AGB_winds'] = in_image * recent_death * AGB\n pp['SN'] = in_image * recent_death * massive_star\n #pp['other_stars'] = in_image * alive * (np.logical_not(pp['massive_star_winds']))\n\n for k in list(proj.plots.keys()):\n image = proj.plots[k]\n\n #\n # Now select and annotate the points we want\n #\n for s in list(pp.keys()):\n if np.size(px[pp[s]].value) > 0:\n print(np.size(px[pp[s]]), 'Particles in ', s, px[pp[s]], py[pp[s]])\n image.axes.scatter(px[pp[s]].value,py[pp[s]].value, s = ps[s], marker = markers[s], color = colors[s])\n else:\n print('No particles in ', s)\n\n# proj.refresh()\n# proj.hide_axes()\n proj.save(outdir + '/') # necessary\n\n if 'N_over_O' in fields:\n vmin,vmax = -2,2\n x = proj.plots['N_over_O']\n x.image.set_norm( MidpointNormalize(midpoint= 0.5*(vmin+vmax), vmin=vmin,vmax=vmax))\n x.cb.set_norm(MidpointNormalize(midpoint=0.5*(vmin+vmax),vmin=vmin,vmax=vmax))\n x.cb.update_normal(x.image)\n x.save(outdir + '/' + str(gal.ds) + '_Projection_z_N_over_O_number_density.png')\n\n if 'logNO' in fields:\n vmin, vmax = -2, 0.25\n x = proj.plots['logNO']\n x.image.set_norm( MidpointNormalize(midpoint= 0.0, vmin=vmin,vmax=vmax))\n x.cb.set_norm(MidpointNormalize(midpoint=0.0, vmin=vmin,vmax=vmax))\n x.cb.update_normal(x.image)\n x.save(outdir + '/' + str(gal.ds) + '_Projection_z_logNO_number_density.png')\n\n del(proj)\n del(gal)\n\n return", "def variable_type(self, variable): # pragma: no cover\n raise NotImplementedError('Implemented in child class')", "def data_visualization(df):\r\n\r\n # Visualizing the target variable\r\n plt.figure(figsize=(14, 10))\r\n plt.title(\"Count of bike sharing according to dates\")\r\n plt.plot(df['dteday'], df['cnt'])\r\n #plt.show()\r\n plt.savefig(\"Raw data visualization.png\")\r\n\r\n # box plot for visualizing outliers\r\n fig=px.box(df, y=\"cnt\", notched=True,title='Box plot of the count variable')\r\n #fig.show()\r\n plt.savefig(\"Box Plot.png\")\r\n\r\n # point plot for hourly utilization\r\n for column in ['season', 'yr', 'mnth', 'holiday', 'weekday', 'workingday', 'weathersit']:\r\n hist = px.histogram(df, x=column, y='cnt')\r\n hist.show()\r\n plt.savefig(\"Histogram plots for each column.png\")\r\n sns.pointplot(x=df['hr'], y='cnt', data=df);\r\n plt.title(\"Hourly Utilization\")\r\n plt.ylabel(\"Bike Shares\", fontsize=12)\r\n plt.xlabel(\"Hour\", fontsize=12)\r\n plt.savefig(\"Hourly Utilization point plot.png\", dpi=300, bbox_inches='tight')\r\n\r\n # line plot for hourly utilization\r\n for c in ['holiday','season','workingday']:\r\n sns.lineplot(data=df,x='hr',y='cnt',hue=c)\r\n plt.title('Hourly plot vs count')\r\n plt.savefig(\"Hour vs count plot_main features.png\",dpi=300, bbox_inches='tight')\r\n\r\n # point plots for humidity vs count\r\n sns.pointplot(x='hum', y='cnt', data=df)\r\n plt.title(\"Amount of bike shares vs humidity\", fontsize=25)\r\n plt.xlabel(\"Humidity (%)\", fontsize=20)\r\n plt.ylabel('count of bike shares', fontsize=20)\r\n plt.locator_params(axis='x', nbins=10)\r\n plt.savefig(\"Pointplot of humidity vs count.png\",dpi=300, bbox_inches='tight')\r\n\r\n # box plots of whole df\r\n bx=px.box(df, y=\"cnt\")\r\n bx.show()\r\n\r\n # feature correlation plot\r\n corrs = abs(df.corr())\r\n sns.heatmap(corrs, annot=True)\r\n 
plt.title(\"Feature Correlation\")\r\n plt.savefig(\"Feature_correlation.png\", dpi=300, bbox_inches='tight')\r\n return plt", "def get_var(df=songs_df):\n n_years = len(years)\n n_songs = len(songs_df['page'])\n variances = np.zeros((n_songs, n_songs))\n annual_diffs = np.zeros((n_songs, n_songs, n_years))\n\n # Figure out how to just get upper/lower triangle rather than populating w dups\n for s1 in range(n_songs):\n for s2 in range(n_songs):\n s1_ranks = songs_df['ranks'][s1]\n s2_ranks = songs_df['ranks'][s2]\n\n # Set up an offset/normalizer so that we're just looking at\n # functional form, not call totals. Maybe do this as a frac instead.\n offset = s1_ranks[0] - s2_ranks[0]\n\n annual_difference = [s1_ranks[year] - s2_ranks[year] - offset for year in range(n_years)]\n variance = sum( (annual_difference - np.mean(annual_difference))**2)/float(n_years)\n\n variances[s1][s2] = variance\n annual_diffs[s1][s2] = annual_difference\n\n\n mask = np.zeros_like(variances)\n mask[np.triu_indices_from(mask)] = True\n corr_matrix=variances.corr()\n\n sns.heatmap(variances, mask=mask) #, vmin=510, vmax=530)\n plt.show()\n return variances", "def flareplot_template(df, jsonpath):\n #'track' entry for json file: each track is a node (position) in the flareplot\n helix_colors = {'1':\"#78C5D5\",'12':\"#5FB0BF\",'2':\"#459BA8\",'23':\"#5FAF88\",'3':\"#79C268\",'34':\"#9FCD58\",'4':\"#C5D747\",'45':\"#DDD742\",'5':\"#F5D63D\",'56':\"#F3B138\",'6':\"#F18C32\",'67':\"#ED7A6A\",'7':\"#E868A1\",'78':\"#D466A4\",'8':\"#BF63A6\",'Ligand--1':'#FF5050', 'Ligand': '#FF5050'} \n allpos = set(df['Position1']).union(set(df['Position2']))\n tracks = [{\n 'trackLabel': 'Degree centrality',\n \"trackProperties\": []\n }]\n trees = [{\n 'treeLabel': 'Helices',\n 'treePaths': []\n }]\n \n #Add ligand\n tracks[0]['trackProperties'].append({\n 'color' : \"#FF5050\",\n 'size' : 1.0,\n 'nodeName': 'Ligand'\n })\n trees[0]['treePaths'].append([1, 'Ligand'])\n \n setpos = set()\n for multipos in allpos:\n if multipos.startswith('Ligand'):\n continue\n \n split_pos = multipos.split('x')\n for pos in split_pos:\n if split_pos.index(pos) == 0:\n helix = pos\n color = helix_colors[helix]\n else: \n real_pos = helix+'x'+pos\n if real_pos not in setpos:\n trackprop = {\n 'color' : color,\n 'size' : 1.0,\n 'nodeName': real_pos\n }\n if len(helix) == 2:\n newhelix = int(helix[0]) + int(helix[1])\n trees[0]['treePaths'].append([newhelix, real_pos])\n else:\n newhelix = int(helix)*2\n trees[0]['treePaths'].append([newhelix, real_pos])\n\n tracks[0]['trackProperties'].append(trackprop)\n setpos.add(real_pos)\n\n #Sort trees\n treePaths_sorted = sorted(list(trees[0]['treePaths']), key=lambda l: (l[0],l[1]))\n treePaths_sorted = [ str(x[0])+\".\"+x[1] for x in treePaths_sorted ]\n trees[0]['treePaths'] = treePaths_sorted\n \n #Output jsondict to store\n jsondict = { 'trees' : trees, 'tracks' : tracks }\n \n # Store json file\n jsonpath = basepath + \"template.json\" \n with open(jsonpath, 'w') as jsonfile:\n dump(jsondict, jsonfile, ensure_ascii=False, indent = 4)", "def filter_plot(mode, country, continent, start_date, end_date, options):\n # Default is World mode\n chart_data = world_daywise_df\n map_data = countries_daywise_df\n print(country, continent)\n if mode == SelectionMode.Continents.value:\n #Continents mode\n if not isinstance(continent, list):\n continent = [continent]\n\n chart_data = continents_daywise_df[continents_daywise_df['WHO Region'].isin(continent)]\n map_data = map_data[map_data['WHO Region'].isin(continent)]\n 
elif mode == SelectionMode.Countries.value:\n # Countries mode\n if not isinstance(country, list):\n country = [country]\n\n chart_data = countries_daywise_df[countries_daywise_df['Country/Region'].isin(country)]\n map_data = chart_data\n\n chart_data = chart_data.query('Date >= @start_date & Date <= @end_date')\n map_data = map_data.query('Date >= @start_date & Date <= @end_date')\n\n # fix error when groupby geometry or put it in the aggregate column\n temp = map_data.drop(['geometry', 'country_code', 'Date'], axis=1).groupby(['Country/Region']).agg(metrics).reset_index()\n map_data = join_country_code_data(temp, country_code_data)\n\n if is_perCapita(options):\n for metric in ['Confirmed', 'Deaths', 'Recovered']:\n chart_data[metric + '_per_capita'] = chart_data[metric] / chart_data['Population']\n map_data[metric + '_per_capita'] = map_data[metric] / map_data['Population']\n \n if is_perCapita(options):\n return plot(chart_data, 'Confirmed_per_capita', 'Confirmed Cases Per Capita'), \\\n plot(chart_data, 'Deaths_per_capita', 'Confirmed Deaths Per Capita'), \\\n plot(chart_data, 'Recovered_per_capita', 'Confirmed Recoveries Per Capita'), \\\n generate_map(map_data)\n\n return plot(chart_data, 'Confirmed', 'Confirmed Cases'), \\\n plot(chart_data, 'Deaths', 'Confirmed Deaths'), \\\n plot(chart_data, 'Recovered', 'Confirmed Recoveries'), \\\n generate_map(map_data)", "def column_stats(self, df, prefix='', ignore=None):\n if ignore is None:\n ignore = {}\n\n ignore = ignore.union({'uuid', 'kf_id', 'created_at', 'modified_at',\n 'external_id'})\n TABLES = {}\n FIGURES = {}\n\n for col in set(df.columns) - ignore:\n counts = (df.groupby(col)[col]\n .count()\n .sort_values(ascending=False))\n if len(counts)> 0:\n f = plt.figure()\n counts.plot(kind='bar')\n plt.title(col)\n plt.tight_layout()\n f.savefig(self.output+'figures/{}.png'.format(col))\n plt.close(f)\n FIGURES[col] = self.output+'figures/{}.png'.format(col)\n\n TABLES[col] = pd.DataFrame(counts.values, counts.index, columns=['count'])\n TABLES[col].to_csv(self.output+'tables/{}{}.csv'\n .format(prefix+'_' if prefix else '', col))\n TABLES[col] = TABLES[col].reset_index().to_html(index=False)\n\n return FIGURES, TABLES", "def plot_selected(df, columns, start_index, end_index):\r\n plot_data(df.ix[start_index:end_index, columns], title=\"Selected data\")" ]
[ "0.7149481", "0.53572685", "0.49779034", "0.49217516", "0.4668608", "0.46611875", "0.46530828", "0.46240893", "0.46187612", "0.45908424", "0.4562476", "0.45395714", "0.4536326", "0.45301196", "0.4524316", "0.4492281", "0.4465708", "0.44525927", "0.4432995", "0.44032437", "0.43878183", "0.437705", "0.4350447", "0.43497247", "0.43416074", "0.43400154", "0.43206483", "0.4305875", "0.4297608", "0.4296757", "0.42831317", "0.42750993", "0.4274775", "0.42663658", "0.42655116", "0.42580113", "0.42558667", "0.42522883", "0.42502326", "0.42485982", "0.42272472", "0.42165133", "0.41946664", "0.41898575", "0.41897875", "0.41868046", "0.416939", "0.41662136", "0.41538942", "0.41458824", "0.41439185", "0.4141585", "0.41406158", "0.41405478", "0.41404843", "0.41384658", "0.41366157", "0.41218838", "0.411256", "0.4092457", "0.40922382", "0.40873748", "0.40873164", "0.4084389", "0.40839496", "0.40839067", "0.40772134", "0.40665254", "0.40633634", "0.4061186", "0.40550205", "0.40540493", "0.40499875", "0.40453848", "0.40396824", "0.40390542", "0.40262267", "0.4021232", "0.4019578", "0.40190682", "0.400743", "0.40041703", "0.40040377", "0.40040085", "0.40004122", "0.39937085", "0.3991295", "0.39885578", "0.39825442", "0.398102", "0.39787272", "0.39782196", "0.39780682", "0.39754477", "0.39721364", "0.39685303", "0.39623702", "0.395867", "0.39459798", "0.39438722" ]
0.72028685
0
added a Variability flag check in data_process, where only CONFIRMED variables are now kept; drop misclassified objects, like SR stars on the Main Sequence, optical binaries, etc.
def remove_misclassified_objects(data_frame):
    misclassified_objects = ['7720-1455-1', '7911-499-1',  # Mira
                             '8990-3504-1', '899-471-1',  # SRs with very poor light curves
                             '6430-88-1',  # SV For, obsolete measurement is p=91d, newer is p=16h
                             '9017-396-1', '7676-2953-1', '8296-3303-1',  # SR
                             '2365-2764-1', '4109-638-1', '2058-56-1',  # Cepheids
                             '3642-2459-1', '3999-1391-1', '2607-1448-1',  # Cepheids
                             '3655-469-1', '1476-148-1', '1233-531-1',  # RR Lyrae
                             '3029-738-1', '6863-1255-1', '6954-1236-1', '9380-420-1',  # RR Lyrae
                             '6192-461-1',  # DSCT
                             '2550-686-1', '4992-357-1', '8562-728-1', '6567-2007-1', '6040-2003-1',  # RR
                             '2553-1108-1', '4851-2441-1', '8962-577-1', '3135-132-1', '8976-3674-1',
                             '3136-437-1', '1506-618-1', '7046-1715-1', '3140-3046-1', '2000-162-1',
                             '6210-755-1', '3547-1807-1', '8836-935-1', '3033-273-1', '7606-437-1',
                             '3049-180-1', '9198-1862-1', '8192-626-1', '7703-1577-1', '8594-433-1',
                             '6833-280-1', '9270-1803-1', '8153-376-1', '8169-431-1', '3086-1770-1',
                             '6472-1634-1', '9321-795-1', '2664-351-1',
                             '2426-162-1'  # roAp star that has no reference
                             ]
    dsct = data_frame[(data_frame.Type == 'DSCT') & (data_frame.B_V > 0.4) & (data_frame.M_V > 2.5)].tycho2_id.tolist()
    dsct2 = data_frame[(data_frame.Type == 'DSCT') & (data_frame.B_V > 0.25) & (data_frame.M_V > 3.)].tycho2_id.tolist()
    print "Dropping objects DSCT: %s %s" % (dsct, dsct2)
    data_frame = data_frame.drop(data_frame[data_frame.tycho2_id.isin(dsct)].index)
    data_frame = data_frame.drop(data_frame[data_frame.tycho2_id.isin(dsct2)].index)
    print "Dropping objects: %s" % misclassified_objects
    data_frame = data_frame.drop(data_frame[data_frame.tycho2_id.isin(misclassified_objects)].index)
    print "..Done\n----------"
    return data_frame
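The document above only shows the misclassified-object drop; the "Variability flag check" named in the query is not shown here. A minimal sketch of such a check, assuming a VSX-style flag column `V` where 0 marks a CONFIRMED variable (the column name, the encoding, and the function name are illustrative assumptions, not the author's code):

def keep_confirmed_variables(df, flag_col='V', confirmed=0):
    # Assumption: flag_col follows the VSX convention where 0 = confirmed variable.
    n_before = len(df)
    df = df[df[flag_col] == confirmed]
    print "Variability check: kept %s of %s objects (%s == %s)" % (len(df), n_before, flag_col, confirmed)
    return df

A call such as df_toprocess = keep_confirmed_variables(df_toprocess) placed in data_process before the misclassified-object drop would realize the check the commit message describes.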
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
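The metadata declares a triplet objective over (query, document, negatives). A minimal sketch of how one such record could feed a triplet-style ranking loss; `sim` is a placeholder similarity function and the margin value is an assumption:

def triplet_hinge_loss(sim, record, margin=0.2):
    # sim(a, b) -> float similarity score; placeholder for any text-encoder scorer.
    q, pos = record["query"], record["document"]
    # Penalize each negative that scores within `margin` of the positive document.
    losses = [max(0.0, margin - sim(q, pos) + sim(q, neg)) for neg in record["negatives"]]
    return sum(losses) / len(losses)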
[ "def control_variation(df, outDir, features_to_analyse, \n variables_to_analyse=[\"date_yyyymmdd\"], \n remove_outliers=True, \n p_value_threshold=0.05, \n PCs_to_keep=10):\n \n # Record non-data columns before dropping feature columns \n other_colnames = [col for col in df.columns if col not in features_to_analyse]\n \n # Drop columns that contain only zeros\n colnames_before = list(df.columns)\n AllZeroFeats = df[features_to_analyse].columns[(df[features_to_analyse] == 0).all()]\n df = df.drop(columns=AllZeroFeats)\n colnames_after = list(df.columns)\n zero_cols = [col for col in colnames_before if col not in colnames_after]\n if len(zero_cols) > 0:\n print(\"Dropped %d features with all-zero summaries:\\n%s\" % (len(zero_cols), zero_cols))\n \n # Record feature column names after dropping zero data\n features_to_analyse = [feat for feat in df.columns if feat not in other_colnames]\n \n # Remove outliers from the dataset \n if remove_outliers:\n df, indsOutliers = removeOutliersMahalanobis(df, features_to_analyse)\n remove_outliers = False \n # NB: Ensure Mahalanobis operation to remove outliers is performed only once!\n\n # Check for normality in features to analyse in order decide which \n # statistical test to use: one-way ANOVA (parametric) or Kruskal-Wallis \n # (non-parametric) test\n TEST = check_normality(df, features_to_analyse, p_value_threshold)\n\n # Record name of statistical test used (kruskal/f_oneway)\n test_name = str(TEST).split(' ')[1].split('.')[-1].split('(')[0].split('\\'')[0]\n\n # CONTROL VARIATION: STATS (ANOVAs)\n # - Does N2 worm behaviour on control vary across experiment days? \n # (worms are larger? Shorter L1 diapuase? Camera focus/FOV adjusted? Skewed by non-worm tracked objects?\n # Did not record time when worms were refed! Could be this. If so, worms will be bigger across all foods on that day) \n # - Perform ANOVA to see if features vary across imaging days for control\n # - Perform Tukey HSD post-hoc analyses for pairwise differences between imaging days\n # - Highlight outlier imaging days and investigate reasons why\n # - Save list of top significant features for outlier days - are they size-related features?\n for grouping_variable in variables_to_analyse:\n print(\"\\nTESTING: %s\\n\" % grouping_variable)\n \n if not len(df[grouping_variable].unique()) > 1:\n print(\"Need at least two groups for stats to investigate %s\" % grouping_variable)\n else:\n print(\"Performing %s tests for '%s'\" % (test_name, grouping_variable)) \n \n test_results_df, sigfeats_out = \\\n topfeats_ANOVA_by_group(df, \n grouping_variable, \n features_to_analyse,\n TEST,\n p_value_threshold)\n \n # Ensure directory exists to save results\n Path(outDir).mkdir(exist_ok=True, parents=True)\n \n # Define outpaths\n froot = 'control_variation_in_' + grouping_variable + '_' + test_name\n stats_outpath = outDir / (froot + \"_results.csv\")\n sigfeats_outpath = outDir / (froot + \"_significant_features.csv\")\n \n # Save test statistics + significant features list to file\n test_results_df.to_csv(stats_outpath)\n sigfeats_out.to_csv(sigfeats_outpath, header=False)\n\n # Box plots\n plotDir = outDir / \"Plots\"\n topfeats_boxplots_by_group(df, \n test_results_df, \n grouping_variable,\n plot_save_dir=plotDir, #save to plotDir\n p_value_threshold=p_value_threshold)\n \n # PCA (coloured by grouping variable, eg. 
experiment date)\n df = doPCA(df, \n grouping_variable, \n features_to_analyse,\n plot_save_dir = plotDir,\n PCs_to_keep = PCs_to_keep)", "def data_process(df_toprocess=None, cutoff=0.2, bv_cutoff=0.15, catalog=None):\n\n print \"Selecting objects..\"\n df_toprocess['sigma_pi/pi'] = df_toprocess.loc[:, 'parallax_error'].astype(float) / df_toprocess.loc[:, 'parallax']\\\n .astype(float)\n print \"..Done\\nCutoff at relative parallax error of %s\\n----------\" % cutoff\n\n # only take objects with relative parallax error < cutoff\n df_toprocess = df_toprocess.loc[df_toprocess.loc[:, 'parallax'] /\n df_toprocess.loc[:, 'parallax_error'] > 1. / cutoff]\n\n print catalog\n if catalog is None:\n print \"Replacing whitespace with nan\"\n df_toprocess = df_toprocess.replace(' ', np.nan) # some cells are ' ' instead of nan\n\n print \"Converting BTmag and VTmag to floats..\"\n df_toprocess.BTmag = df_toprocess.BTmag.astype(float)\n df_toprocess.VTmag = df_toprocess.VTmag.astype(float)\n # Some values are NaN:\n print \"Removing objects with missing BT or VT measurements..\"\n df_toprocess = df_toprocess[df_toprocess.BTmag.notnull()]\n df_toprocess = df_toprocess[df_toprocess.VTmag.notnull()]\n\n print \"Computing B-V and M_V..\"\n df_toprocess['B_V'] = df_toprocess.BTmag - df_toprocess.VTmag\n df_toprocess['M_V'] = df_toprocess.VTmag - 5. * (np.log10(1000. / df_toprocess.parallax) - 1.)\n\n print \"Converting sigma BT and sigma VT to float..\"\n df_toprocess.e_BTmag = df_toprocess.e_BTmag.astype(float)\n df_toprocess.e_VTmag = df_toprocess.e_VTmag.astype(float)\n\n print \"Computing sigma B-V..\"\n df_toprocess['e_B_V'] = np.sqrt(df_toprocess.e_BTmag.pow(2)+df_toprocess.e_VTmag.pow(2))\n\n print \"Applying selection on sigma BT-VT < %s..\" % bv_cutoff\n df_toprocess = df_toprocess[df_toprocess.e_B_V < bv_cutoff]\n\n if catalog == 'xmatch_TGAS_Simbad.csv':\n df_toprocess = df_toprocess.loc[(df_toprocess['J'] < 11.) & (df_toprocess['K'] < 11.)]\n print \"min in J: %s\" % np.max(df_toprocess['J'])\n print \"max in J: %s\" % np.min(df_toprocess['J'])\n df_toprocess.insert(10, 'B_V', df_toprocess.loc[:, 'B'] - df_toprocess.loc[:, 'V'])\n\n df_toprocess.insert(10, 'J_K', df_toprocess.loc[:, 'J'] - df_toprocess.loc[:, 'K'])\n df_toprocess.insert(10, 'M_G', df_toprocess.loc[:, 'phot_g_mean_mag'] - 5. *\n (np.log10(1000. / df_toprocess.loc[:, 'parallax']) - 1.))\n df_toprocess.insert(10, 'M_J', df_toprocess.loc[:, 'J'] - 5. *\n (np.log10(1000. / df_toprocess.loc[:, 'parallax']) - 1.))\n df_toprocess.insert(10, 'M_K', df_toprocess.loc[:, 'K'] - 5. *\n (np.log10(1000. 
/ df_toprocess.loc[:, 'parallax']) - 1.))\n\n if catalog == 'xmatch_TGAS_VSX.csv':\n df_toprocess = df_toprocess[df_toprocess.V == 0]\n print \"%s objects selected\" % len(df_toprocess)\n print \"..Done\\n----------\"\n return df_toprocess", "def mutate_fix_var_filter(item_counts):\n assert isinstance(item_counts, Counter)\n for i in list(item_counts.keys()):\n if isinstance(i, Literal):\n i_n3 = i.n3()\n if len(i_n3) > config.MAX_LITERAL_SIZE:\n logger.debug(\n 'excluding very long literal %d > %d from mutate_fix_var:\\n'\n '%s...',\n len(i_n3), config.MAX_LITERAL_SIZE, i_n3[:128]\n )\n del item_counts[i]\n elif i.datatype in (XSD['float'], XSD['double']) \\\n and six.text_type(i).lower() in ('nan', 'inf'):\n logger.debug('excluding %s due to Virtuoso Bug', i_n3)\n del item_counts[i]\n elif isinstance(i, URIRef):\n # noinspection PyBroadException\n try:\n i.n3()\n except Exception: # sadly RDFLib doesn't raise a more specific one\n # it seems some SPARQL endpoints (Virtuoso) are quite liberal\n # during their import process, so it can happen that we're\n # served broken URIs, which break when re-inserted into SPARQL\n # later by calling URIRef.n3()\n logger.warning(\n 'removed invalid URI from mutate_fix_var:\\n%r',\n i\n )\n del item_counts[i]\n elif isinstance(i, BNode):\n # make sure that BNodes stay variables\n logger.info('removed BNode from mutate_fix_var')\n del item_counts[i]\n else:\n logger.warning(\n 'exlcuding unknown result type from mutate_fix_var:\\n%r',\n i\n )\n del item_counts[i]", "def check_X_recessive(variant, family, strict=False):\n path = os.path.abspath(os.getcwd())\n output_log = os.path.join(path, \"variants_inheritance_patterns.csv\")\n\n if os.path.isfile(output_log):\n output = open(output_log, \"a\")\n else:\n output = open(output_log, \"w\")\n\n affected = False\n genotyped = False\n female = False\n \n for individual in family.individuals:\n # Get the genotype for this variant for this individual\n individual_genotype = variant['genotypes'][individual]\n \n if strict:\n if not individual_genotype.genotyped:\n return False\n # The case where the individual is healthy\n if family.individuals[individual].healthy:\n # If individual is healthy and homozygote alternative \n # the variant can not be deleterious:\n if individual_genotype.genotyped:\n if individual_genotype.homo_alt:\n return False\n # If individual is male it can not have the variant at all\n if family.individuals[individual].sex == 1:\n if individual_genotype.has_variant:\n return False\n \n # The case when the individual is sick\n elif family.individuals[individual].affected:\n affected = True\n # If the individual is sick and homozygote ref it can not be x-recessive\n if individual_genotype.genotyped:\n genotyped = True\n if individual_genotype.homo_ref:\n return False\n # Women have to be hom alt to be sick (almost allways carriers)\n elif family.individuals[individual].sex == 2:\n female = True\n if not individual_genotype.homo_alt:\n return False\n if affected and genotyped and female:\n output.write(\"{},Parents are not genotyped or is female het or is male without variant and offspring is female homozygote alternative\\n\".format(variant.get('variant_id', None)))\n elif affected and genotyped:\n output.write(\"{},Parents are not genotyped or is female het or is male without variant and offspring is male with heterozygous variant\\n\".format(variant.get('variant_id', None)))\n elif affected:\n output.write(\"{},Parents are not genotyped or is female het or is male without variant and 
offspring is not genotyped\\n\".format(variant.get('variant_id', None)))\n else:\n output.write(\"{},Parents without variant and no affected individual\\n\".format(variant.get('variant_id', None)))\n output.close()\n return True", "def test_feature_is_filtered(self):\n\n # Duplicate 1st row in var and assigned to 2nd\n self.validator.adata.var[\"feature_is_filtered\"][0] = True\n for i in range(self.validator.adata.X.shape[0]):\n self.validator.adata.X[i, 0] = 0\n self.validator.adata.X[0, 0] = 1\n\n self.validator.validate_adata()\n self.assertEqual(\n self.validator.errors,\n [\n \"ERROR: Some features are 'True' in 'feature_is_filtered' of dataframe 'var', \"\n \"but there are 1 non-zero values in the corresponding columns of the matrix 'X'. \"\n \"All values for these features must be 0.\"\n ],\n )", "def test_columns_not_in_raw_var(self):\n\n self.validator.adata.raw = self.validator.adata\n self.validator.adata.uns[\"X_normalization\"] = \"CPM\"\n self.validator.validate_adata()\n self.assertEqual(\n self.validator.errors,\n [\"ERROR: Column 'feature_is_filtered' must not be present in 'raw.var'.\"],\n )", "def finalize_variable(self):\n # variables for which there has been a constraint\n constrained_values = []\n for constraint_type in self.constraint:\n for constraint in self.constraint[constraint_type]:\n if constraint_type in ['threshold', 'count', 'only_one']:\n constraint_value = constraint[-1]\n constrained_values.append(constraint_value)\n elif constraint_type == 'time':\n constraint_values = constraint[-2:]\n constrained_values += constraint_values\n # compare constrained values to all populated values\n unconstrained_values = [value for value in self.value if value not in constrained_values]\n\n # TODO: make sure constraint interpreter knows 1,0,0 is a special case of just making sure a matching value is seen\n for value in unconstrained_values:\n if 'count' in self.constraint.keys():\n self.constraint['count'].append([[1, 0, 0], value])\n else:\n self.constraint['count'] = [[1, 0, 0], value]\n # default is a single variable count if not otherswise stated\n for value in unconstrained_values:\n self.constraint\n\n ##TODO: if variable is seen in multiple constraints, link those constraints to create a special super constraint of some sort", "def qc_NoiseRank(spread):\n\n variance = spread*spread # for a Gaussian\n \n if (variance <= 0.2):\n qc_label = 'good'\n elif (0.2 < variance < 0.25):\n qc_label = 'ok'\n else:\n qc_label = 'bad'\n \n return qc_label", "def test_low_variance(self):\n # Cycle through various initializations\n initializations = ['random', 'pca']\n allowed = 1e-3\n\n for init in initializations:\n tsne = TSNE(initialization=init, perplexity=2)\n embedding = tsne.prepare_initial(self.x)\n np.testing.assert_array_less(np.var(embedding, axis=0), allowed,\n 'using the `%s` initialization' % init)", "def primers_are_useless(self):\r\n #TODO: send a message telling these primers can be taken out.\r\n for feature in self.gt_seq_region:\r\n if feature.attributes.active:\r\n feature.attributes.disable_feature(\"has no interesting sequence variation\")\r\n for feature in self.pcr_product:\r\n if feature.attributes.active:\r\n feature.attributes.disable_feature(\"has no interesting sequence variation\")\r\n for feature in self.forward_primer:\r\n if feature.attributes.active:\r\n feature.attributes.disable_feature(\"has no interesting sequence variation\")\r\n for feature in self.reverse_primer:\r\n if feature.attributes.active:\r\n 
feature.attributes.disable_feature(\"has no interesting sequence variation\")", "def _var_check(self):\n missing = set()\n for v in self.variables:\n if getattr(self, v) is None:\n missing.add(v)\n self.missing = missing", "def test_final_strongly_recommended(self):\n\n # move raw to X amd: i.e. there is no final\n self.validator.adata.X = self.validator.adata.raw.X\n del self.validator.adata.raw\n self.validator.adata.uns[\"X_normalization\"] = \"none\"\n self.validator.validate_adata()\n self.assertEqual(\n self.validator.warnings,\n [\n \"WARNING: Only raw data was found, i.e. there is no 'raw.X' and 'uns['X_normalization']' is 'none'. \"\n \"It is STRONGLY RECOMMENDED that 'final' (normalized) data is provided.\"\n ],\n )", "def _check_inference(self, inference):\n if inference == 'GP2KronSum':\n assert self.n_randEffs == 2, 'VarianceDecomposition: for fast inference number of random effect terms must be == 2'\n assert not sp.isnan(self.Y).any(\n ), 'VarianceDecomposition: fast inference available only for complete phenotype designs'\n # TODO: add GP3KronSumLR, GP2KronSumLR", "def get_prob_l_can_see_x_strict(self, obj_type):\n ret_probs = zeros(len(self.obj_names)) + 1e-12\n for i, name in enumerate(self.obj_names):\n vtags, itags_t = self.obj_to_visibility[i]\n vtags = set(vtags)\n itags = set([t for t in itags_t if not t in vtags])\n\n if obj_type in vtags:\n ret_probs[i] = 1.0\n return ret_probs", "def filter_variant(self, x):\n return True", "def clean(self):\n\n if (self.clean_level == 'dusty') | (self.clean_level == 'clean'):\n idx, = np.where(self['B_flag'] == 0)\n self.data = self[idx, :]\n\n return", "def _flag(self, test=False):\n\n if test:\n flag = self.test_flag\n else:\n flag = self._get_flag()\n\n # For each of the temperature add the threshold mask\n for var in TEMPERATURE_VARIABLES:\n self.add_mask(\n var, flag, 'discrepancy threshold exceeded',\n ('The discrepancy between the deiced and non-deiced temperature '\n f'sensors is greater than {TEMPERATURE_THRESHOLD} K.')\n )", "def independent(self):\n type_var_dist = type(self.variational_strategy.variational_distribution)\n return not (type_var_dist is CholMeanVaDi\n or type_var_dist is DeltaVaDi\n or type_var_dist is CholSamVaDi)", "def _analyseVariables(self):\n self.unused_vars = []\n ffis_limited = False\n\n highest_rank = -1\n best_var = None\n count = 0\n\n # Need to get highest ranked variable (most dimensions) so that we can work out FFI\n for var in self.vars:\n msg = f\"Analysing: {var.name}\"\n self.output_message.append(msg)\n count = count + 1\n\n # get rank\n rank = len(var.shape)\n\n # Deal with singleton variables\n if rank == 0: \n self.rank_zero_vars.append(var)\n self.rank_zero_var_ids.append(var.name)\n continue\n\n # Update highest if highest found or if equals highest with bigger size\n try:\n var.size = var.size()\n best_var.size = best_var.size()\n except:\n pass\n\n if rank > highest_rank or (rank == highest_rank and var.size > best_var.size):\n highest_rank = rank\n best_var = var\n best_var_index = count - 1\n\n # If all are zero ranked variables or no vars identified/found then we cannot write any to NASA Ames and return ([], [])\n if len(self.rank_zero_vars) == len(self.vars) or best_var is None: \n return ([], [])\n\n # Now start to sort the variables into main and auxiliary \n vars_for_na = [best_var]\n aux_vars_for_na = []\n shape = best_var.shape\n number_of_dims = len(shape)\n self.na_dict[\"NIV\"] = number_of_dims\n\n # If 2D then do a quick test to see if 2310 is feasible (i.e. 
uniformly spaced 2nd axis)\n if number_of_dims == 2:\n\n ffis_limited = [2010, 2110]\n axis = xarray_utils.get_coord_by_index(best_var, 1)\n\n if xarray_utils.isUniformlySpaced(axis):\n ffis_limited.append(2310)\n\n # Get the axes for the main variable being used\n best_var_axes = xarray_utils.getAxisList(best_var)\n \n # Get other variables into a list and analyse them\n rest_of_the_vars = self.vars[:best_var_index] + self.vars[(best_var_index + 1):]\n\n for var in rest_of_the_vars:\n\n if var.name in self.rank_zero_var_ids: continue\n\n # What to do with variables that have different number of dimensions or different shape\n if len(var.shape) != number_of_dims or var.shape != shape: \n # Could it be an auxiliary variable?\n if len(var.shape) != 1: \n self.unused_vars.append(var)\n continue\n\n first_axis = xarray_utils.get_coord_by_index(var, 0)\n # Check if axis is identical to first axis of main best variable, if so, can be auxiliary var\n if not xarray_utils.areAxesIdentical(best_var_axes[0], first_axis):\n\n # If not identical, then it might still qualify as an auxiliary every n time points - valid for 1020\n if len(var.shape) == 1:\n nvpm = xarray_utils.isAxisRegularlySpacedSubsetOf(first_axis, best_var_axes[0])\n\n # NVPM is the number of implied values which is equal to (len(ax2)/len(ax1))\n if nvpm:\n ffis_limited = [1020]\n self.na_dict[\"NVPM\"] = nvpm\n else: # if returned False, i.e. not regular subset axis\n self.unused_vars.append(var)\n\n else:\n self.unused_vars.append(var)\n continue\n\n else:\n # This could be used as a standard auxiliary variable\n if ffis_limited in ([1020],):\n # Already fixed on 1020 and cannot collect incompatible FFI vars so do not use\n self.unused_vars.append(var)\n else:\n aux_vars_for_na.append(var) \n\n else:\n this_var_axes = xarray_utils.getAxisList(var)\n\n # Loop through dimensions\n for i in range(number_of_dims): \n\n if not xarray_utils.areAxesIdentical(best_var_axes[i], this_var_axes[i]):\n self.unused_vars.append(var)\n break\n else:\n # OK, I think the current variable is compatible to write with the best variable along with a NASA Ames file \n vars_for_na.append(var)\n\n # Send vars_for_na AND aux_vars_for_na to a method to check if they have previously been mapped \n # from NASA Ames. In which case we'll write them back in the order they were initially read from the input file.\n (vars_for_na, aux_vars_for_na) = \\\n self._reorderVarsIfPreviouslyNA(vars_for_na, aux_vars_for_na)\n\n # Get the FFI\n self.na_dict[\"FFI\"] = \\\n self._decideFileFormatIndex(number_of_dims, aux_vars_for_na, ffis_limited)\n\n return vars_for_na, aux_vars_for_na", "def variability(sv):\r\n unchanging(sv) # remove change clause for constants \r\n make_volatiles(sv) # create list of volatile objects and make their clause Always\r", "def warnings(self, d):\n\n if d['filter_nu'] == 220e9:\n if d['beam_shape'] == 'gaussian':\n warnings.warn('The nu dependency of the gausian beam FWHM '\n 'is not a good approximation in the 220 GHz band.')\n elif d['beam_shape'] == 'fitted_beam':\n warnings.warn('Beam and solid angle frequency dependence implementation '\n 'in the 220 GHz band for the fitted beam does not correctly describe '\n 'the true behavior')", "def validate_dataset(self):\n if np.all(self.L_bpe == self.bpe_l):\n pass\n\n super(StandardDataset, self).validate_dataset()", "def _clean(self, dataset):\n # Replace missing values with numpy's NaN. The missing value is\n # usually 1e+20, but values can be like 1.0000002e+20, which is\n # different. 
Ergo the inequality.\n for var in dataset.data_vars.itervalues():\n if 'missing_value' in var.attrs:\n missing_data_value = var.missing_value\n try:\n var.values[var.values >= missing_data_value] = np.NaN\n except ValueError:\n print \"Encountered ValueError in {0}. Ignoring\".format(var.name)", "def extract_valid_gt_data(all_data, remove_ofv=False):\n distractor_classes = [2, 7, 8, 12]\n valid_classes = [1]\n original = all_data.shape[0]\n\n # remove classes in other classes, pedestrain and distractors\n # left for furthur usages\n selected = np.array([\n i for i in range(all_data.shape[0])\n if all_data[i, 7] in valid_classes + distractor_classes])\n all_data = all_data[selected, :]\n\n # remove boxes whose centers is out of view\n # Cause this tool is not only set for MOT, thus resolution is not assumed\n # provided. In MOT, the maximum width andd height should be taken into\n # consirderation\n\n # PS: As stated by author of MOT benchmark, it would be better the tracker\n # could figure out the out of view pedestrain like human does. Thus no\n # filtering\n if remove_ofv: # remove out of view for ground truth\n selected = np.array([i for i in range(all_data.shape[0])\n if (all_data[i, 2] + all_data[i, 4]) / 2 >= 0 and\n (all_data[i, 3] + all_data[i, 5]) / 2 >= 0])\n\n # not consider right and bottom out of range here. Anyway ofv is not\n # removed in MOT2016\n # selected = np.array([i for i in xrange(all_data.shape[0])\n # if (all_data[i, 2] + all_data[i, 4]) / 2 != 0\n # ])\n all_data = all_data[selected, :]\n\n # remove non-human classes from ground truth,\n # and return distractor identities\n cond = np.array(\n [i in valid_classes + distractor_classes for i in all_data[:, 7]])\n selected = np.where(cond == True)[0]\n all_data = all_data[selected, :] # not necessary?\n\n print('[GT PREPROCESSING]: Removing non-people classes, remaining '\n '{}/{} boxes'.format(all_data.shape[0], original))\n cond = np.array([i in distractor_classes for i in all_data[:, 7]])\n selected = np.where(cond == True)[0]\n\n all_dsitractor_ids = all_data[selected, 1]\n unique_distractor_ids = np.unique(all_dsitractor_ids)\n return all_data, unique_distractor_ids", "def non_maxima_suppression(boxes, probs, classes_num, thr=0.2):\n for i, box in enumerate(boxes):\n if probs[i] == 0:\n continue\n for j in range(i+1, len(boxes)):\n if classes_num[i] == classes_num[j] and iou(box, boxes[j]) > thr:\n probs[j] = 0.0\n\n return probs", "def filter_common_variation(self):\n # Filter common variation\n unknown_freq_df = self.variant_df.query('gnomAD_exome_ALL == \".\"')\n other_freq_df = self.variant_df.query('gnomAD_exome_ALL != \".\"')\n self.unknown_maf_count = unknown_freq_df.shape[0]\n \n # Filter common variants\n other_freq_df = other_freq_df[other_freq_df['gnomAD_exome_ALL'].astype(float) <= self.filter_common_maf]\n self.variant_df = pd.concat([other_freq_df, unknown_freq_df], axis=0)\n self.filter_common_var_count = self.variant_df.shape[0]", "def check_variable_norec(new_r):\r\n for reason, r, old_graph_str, new_graph_str in reasons[new_r]:\r\n new_r_val = r_vals[new_r]\r\n r_val = r_vals[r]\r\n\r\n if (r.type != new_r.type) or (not r.type.values_eq_approx(\r\n r_val, new_r_val)):\r\n raise BadOptimization(old_r=r,\r\n new_r=new_r,\r\n old_r_val=r_val,\r\n new_r_val=new_r_val,\r\n reason=reason,\r\n old_graph=old_graph_str,\r\n new_graph=new_graph_str)", "def test_X_normalization_not_raw(self):\n\n # Assign a real value to X while X_normalization is 'none'\n del self.validator.adata.raw\n 
self.validator.adata.uns[\"X_normalization\"] = \"none\"\n self.validator.validate_adata()\n print(\"FOO\", self.validator.warnings)\n self.assertEqual(\n self.validator.warnings,\n [\n \"WARNING: uns['X_normalization'] is 'none', there is no 'raw.X' and 'X' doesn't appear \"\n \"to have raw counts (integers)\"\n ],\n )", "def unusedFromKDOTDataPreparation():", "def filter_data(data: AnnData) -> None:\n\n assert \"passed_qc\" in data.obs\n data._inplace_subset_obs(data.obs[\"passed_qc\"].values)\n data._inplace_subset_var((data.var[\"n_cells\"] > 0).values)\n logger.info(\n \"After filteration, {nc} cells and {ng} genes are kept. Among {ng} genes, {nrb} genes are robust.\".format(\n nc=data.shape[0], ng=data.shape[1], nrb=data.var[\"robust\"].sum()\n )\n )", "def test_demand_variability(self):\n demand_variability = self._uncertain_demand.demand_variability\n avg_order = sum([int(item) for item in self._data_set.values()]) //len(self._data_set)\n variance = [(item - avg_order) for item in self._data_set.values()]\n stdev = pow(sum([pow(j, 2) for j in variance]) / len(self._data_set), 0.5)\n cal_variability = lambda x, y: x / y\n test_variability = cal_variability(stdev, avg_order)\n self.assertEqual(demand_variability, test_variability)", "def compute_variant_variability(event_log: Log) -> int:\n return len(event_log.trace_list)", "def reset_to_initial_condition(self,check=True):\n\t\t\tself.dict_csr_rand_and_targetted_dropout_matrices={} #These will start as copies of dict_csr_rand_dropout_matrices, but then certain features will be deleted in a targeted manner\n\t\t\tself.train_labels___0_unlab__neg1_exclud=self.train_orig_labels.copy()\n\t\t\tself.set_labels=set(self.train_orig_labels)-{0}\n\n\t\t\tself.bool_train_labelled=self.bool_train_orig_labelled.copy()\n\t\t\tself.bool_train_unlabelled=(self.train_labels___0_unlab__neg1_exclud==0)\n\t\t\tself.bool_train_excluded=(self.train_labels___0_unlab__neg1_exclud<0)\n\t\t\t\n\t\t\tself.num_train=self.csr_train_feats.shape[0]\n\t\t\tself.num_train_labelled=sum(self.bool_train_labelled)\n\t\t\tself.num_train_unlabelled=sum(self.bool_train_unlabelled)\n\t\t\tself.num_train_excluded=sum(self.bool_train_excluded)\n\t\t\tself.num_train_labelled_initially=sum(self.bool_train_orig_labelled)\n\n\t\t\tself.bool_feat_included=(np.ones(self.csr_train_feats.shape[1])>0) #Should be all True now\n\t\t\tself.bool_feat_excluded=~self.bool_feat_included #Should be all False now\n\t\t\tself.feat_time_left=np.ones(self.csr_train_feats.shape[1], int)*-1 #Time left until removed\n\n\t\t\tif check:\n\t\t\t\tself.check_init()", "def test_predict_transient_smoke_old(self):\n self.check_predict_transient_smoke()", "def is_new_variable_product(self):\n # ====================================================================#\n # Check if Inputs Contains Attributes\n if \"attributes\" not in self._in.keys() or not isinstance(self._in[\"attributes\"], dict):\n return False\n return True", "def filterVarForWizard(self, v):\n return v.isMeasurement()", "def _set_var_ignore(self):\n self._var_ignore = [k for k in self.__dict__.keys() if k[0] != '_']", "def remove_unwanted_features(self):\n\n bad_feats = []\n for f in self.features:\n\n # Exclude features with no data\n if self.valuecounts[f] == 0:\n self.messages.append(\"\"\"[INFO] Model \"%s\": Feature %s excluded because there are no datapoints for selected languages.\"\"\" % (self.name, f))\n bad_feats.append(f)\n continue\n\n # Exclude features with lots of missing data\n missing_ratio = self.missing_ratios[f]\n if 
int(100*(1.0-missing_ratio)) < self.minimum_data:\n self.messages.append(\"\"\"[INFO] Model \"%s\": Feature %s excluded because of excessive missing data (%d%%).\"\"\" % (self.name, f, int(missing_ratio*100)))\n bad_feats.append(f)\n continue\n\n # Exclude constant features\n if self.valuecounts[f] == 1:\n if self.remove_constant_features:\n self.constant_feature_removed = True\n self.messages.append(\"\"\"[INFO] Model \"%s\": Feature %s excluded because its value is constant across selected languages. Set \"remove_constant_features=False\" in config to stop this.\"\"\" % (self.name, f))\n bad_feats.append(f)\n continue\n else:\n self.constant_feature = True\n\n for bad in bad_feats:\n self.features.remove(bad)\n for lang in self.languages:\n if bad in self.data[lang]:\n self.data[lang].pop(bad)\n\n # Make sure there's something left\n if not self.features:\n raise ValueError(\"No features specified for model %s!\" % self.name)\n self.features.sort()\n self.messages.append(\"\"\"[INFO] Model \"%s\": Using %d features from data source %s\"\"\" % (self.name, len(self.features), self.data_filename))\n if self.constant_feature and self.rate_variation:\n self.messages.append(\"\"\"[WARNING] Model \"%s\": Rate variation enabled with constant features retained in data. This *may* skew rate estimates for non-constant features.\"\"\" % self.name)", "def check_for_derived_vars(e3sm_vars: Dict[Any, Any]):\n vars_used: List[Any] = []\n vars_in_user_file = set(list_of_vars_in_user_file())\n for var in e3sm_vars:\n if var in derived_variables:\n # Ex: {('PRECC', 'PRECL'): func, ('pr',): func1, ...}.\n vars_to_func_dict = derived_variables[var]\n # Ex: [('pr',), ('PRECC', 'PRECL')].\n possible_vars = vars_to_func_dict.keys() # type: ignore\n\n var_added = False\n for list_of_vars in possible_vars:\n if not var_added and vars_in_user_file.issuperset(list_of_vars):\n # All of the variables (list_of_vars) are in the input file.\n # These are needed.\n vars_used.extend(list_of_vars)\n var_added = True\n # If none of the original vars are in the file, just keep this var.\n # This means that it isn't a derived variable in E3SM.\n if not var_added:\n vars_used.append(var)\n\n else:\n # This var is not a derived variable, it's okay.\n vars_used.append(var)\n\n return list(set(vars_used))", "def check_normality(df, features_to_analyse, p_value_threshold=0.05):\n \n is_normal_threshold = 1 - p_value_threshold\n\n normality_results = pd.DataFrame(data=None, index=['stat','pval'], columns=features_to_analyse)\n for f, feature in enumerate(features_to_analyse):\n try:\n stat, pval = shapiro(df[feature])\n # NB: UserWarning: Input data for shapiro has range zero \n # Some features contain all zeros - shapiro(np.zeros(5))\n normality_results.loc['stat',feature] = stat\n normality_results.loc['pval',feature] = pval\n except Exception as EE:\n print(\"WARNING: %s\" % EE)\n \n prop_normal = (normality_results.loc['pval'] < p_value_threshold).sum()/len(features_to_analyse) \n if prop_normal > is_normal_threshold:\n print(\"\"\"More than %d%% of control features (%.1f%%) were found to obey a \n normal (Gaussian) distribution, so parametric analyses will be \n preferred.\"\"\" % (is_normal_threshold*100, prop_normal*100))\n TEST = f_oneway\n else:\n print(\"\"\"Less than %d%% of control features (%.1f%%) were found to obey a \n normal (Gaussian) distribution, so non-parametric analyses will be \n preferred.\"\"\" % (is_normal_threshold*100, prop_normal*100))\n TEST = kruskal\n return TEST", "def var_transform(self, 
do_data=False):\n\n empty_vars = ['leadJetEn', 'leadJetPt', 'leadJetPhi', 'leadJetEta', 'leadJetQGL',\n 'subleadJetEn', 'subleadJetPt', 'subleadJetPhi', 'subleadJetEta', 'subleadJetQGL',\n 'subsubleadJetEn', 'subsubleadJetPt', 'subsubleadJetPhi', 'subsubleadJetEta', 'subsubleadJetQGL',\n 'dijetMinDRJetEle', 'dijetDieleAbsDEta','dijetDieleAbsDPhiTrunc', 'dijetCentrality', 'dijetMass', \n 'dijetAbsDEta', 'dijetDPhi'] \n\n replacement_value = -10\n\n for empty_var in empty_vars:\n self.data_obj.mc_df_sig[empty_var] = self.data_obj.mc_df_sig[empty_var].replace(-999., replacement_value)\n self.data_obj.mc_df_bkg[empty_var] = self.data_obj.mc_df_bkg[empty_var].replace(-999., replacement_value)\n if do_data: self.data_obj.data_df[empty_var] = self.data_obj.data_df[empty_var].replace(-999., replacement_value)\n\n #print self.data_obj.mc_df_sig[empty_vars]\n #print np.isnan(self.data_obj.mc_df_sig[empty_vars]).any()\n\n for var in gev_vars:\n if var in (self.low_level_vars_flat+self.high_level_vars):\n self.data_obj.mc_df_sig[var] = self.data_obj.mc_df_sig.apply(self.var_transform_helper, axis=1, args=[var, replacement_value])\n self.data_obj.mc_df_bkg[var] = self.data_obj.mc_df_bkg.apply(self.var_transform_helper, axis=1, args=[var, replacement_value])\n if do_data: self.data_obj.data_df[var] = self.data_obj.data_df.apply(self.var_transform_helper, axis=1, args=[var, replacement_value])\n\n #print np.isnan(self.data_obj.mc_df_sig[empty_vars]).any()", "def _check_variables(datasets, necessary_short_names):\n dataset_name = datasets[0]['dataset']\n necessary_short_names = set(necessary_short_names)\n short_names = set(group_metadata(datasets, 'short_name').keys())\n if short_names != necessary_short_names:\n raise ValueError(\n f\"Expected variables {necessary_short_names} for dataset \"\n f\"'{dataset_name}', got {short_names}\")", "def _no_improve(self):\n improve = [p-f for (f,p),_ in self.population]\n return np.mean(improve) < 1.0", "def pre_exclude_rest_instances(seg_raw_df, class_df):\n import smdt.features.features as features\n\n print \"============start pre exclusion==================================\"\n \n\n seg_mag_df = seg_raw_df.copy(deep=True)\n temp = [seg_mag_df[name]**2 for name in s_info.raw_value_names]\n temp = np.sum(temp, axis=0)\n seg_mag_df['mag'] = np.sqrt(temp)\n\n grouped = seg_mag_df.groupby(s_info.segment_col)\n\n c1 = grouped['mag'].std()\n c2 = grouped['mag'].aggregate(features.f_slope).abs()\n c3 = grouped['mag'].aggregate(features.f_pppeakamplitude, paras={\"q\":10})\n\n # Used for visualization testing\n # import matplotlib.pyplot as pyplot\n # c1.hist()\n # pyplot.figure()\n # c2.hist(bins=100)\n # pyplot.figure()\n # c3.hist()\n\n # pyplot.show()\n # sys.exit(1)\n t1 = 0.13\n t2 = 0.0004\n t3 = 0.5\n print \"===================preexclusion criterions====================\"\n print \"std: <= %f, slope: <= %f, peak-peak amplitude: < %f\" % (t1, t2, t3) \n excluded = (c1 <= t1) & (c2 <= t2) & (c3 <= t3)\n class_df[s_info.classname_col][excluded] = 'rest'\n class_df[s_info.classnum_col][excluded] = -1\n\n c_rest = len(class_df[excluded])\n c_keep = len(class_df[~excluded])\n c_total = len(class_df)\n print \"Exclusion result: excluded/keep/total: %.1f, %.1f, %.1f exclusion rate: %.2f\" % (c_rest, c_keep, c_total, c_rest/float(c_total))\n return class_df", "def validate(self):\n variables = ['waterThickness', 'waterPressure']\n compare_variables(test_case=self, variables=variables,\n filename1='full_run/output.nc',\n filename2='restart_run/output.nc')", "def 
variance_selection(X):\n try:\n selector = VarianceThreshold()\n X = selector.fit_transform(X)\n return X\n\n except ValueError:\n return 0", "def __post_init__(self): \n c_model = self.concentration_model\n # Check if the diameter is vectorised.\n if (isinstance(c_model.infected, InfectedPopulation) and not np.isscalar(c_model.infected.expiration.diameter)\n # Check if the diameter-independent elements of the infectious_virus_removal_rate method are vectorised.\n and not (\n all(np.isscalar(c_model.virus.decay_constant(c_model.room.humidity, c_model.room.inside_temp.value(time)) + \n c_model.ventilation.air_exchange(c_model.room, time)) for time in c_model.state_change_times()))):\n raise ValueError(\"If the diameter is an array, none of the ventilation parameters \"\n \"or virus decay constant can be arrays at the same time.\")", "def passes_cutoff(self, filter_code):\r\n try:\r\n filterset_dict = {\"all_positions\":[True],\r\n \"all_variants\":[self.is_variant == True],\r\n \"actionable_variants\":[self.is_variant == True, \r\n self.in_blacklist == \"WHITE\", \r\n \"exon\" in self.loc, # and \"exonic_nc\" not in self.loc, \r\n \"syn\" not in self.func, \r\n \"ref\" not in self.func, \r\n self.ir_version == \"14\" or int(self.FAO)>50,\r\n int(self.FRO)+int(self.FAO)>500, \r\n self.FR == \".\"],\r\n \r\n \r\n \"indels\":[self.is_variant == True, self.type == \"del\" or self.type == \"in\" , \"exon\" in self.loc]\r\n }\r\n return all(filterset_dict[filter_code])\r\n \r\n except:\r\n return False", "def clean_data(df):\n \n # Put in code here to execute all main cleaning steps:\n # convert missing value codes into NaNs, ...\n count_miss = df.isnull().sum(axis=0).values #find number of nans for each column\n count_miss = [val for val in count_miss]\n \n drop_cols = []\n\n for ind, val in enumerate(count_miss):\n if val > 200000:\n drop_cols.append(ind)\n \n df_drop_cols = list(df.columns[drop_cols]) #use df, not the global azdias\n df = df.drop(df_drop_cols, axis=1)\n \n for col in range(df.shape[1]): #loop through columns\n column_name = df.columns[col] #get column name\n missing_list = feat_info.iloc[col,3] #get missing_or_unknown column from feature info\n missing_list = missing_list.replace('[','') #remove left bracket from string\n missing_list = missing_list.replace(']','') #remove right bracket from string\n missing_list = missing_list.split(',') #split into individual strings\n \n #find data that is naturally missing and continue loop to omit\n if missing_list == ['']:\n continue\n \n else:\n for dat_type in missing_list: \n if df[column_name].dtype == 'object': #find values that contain x\n df.loc[df[column_name] == dat_type, column_name] = np.nan #replace x with nan\n \n else:\n dat_type = int(dat_type) #if no x, convert to integer and replace with nan\n df.loc[df[column_name] == dat_type, column_name] = np.nan\n \n # select, re-encode, and engineer column values.\n \n # encode OST_WEST_KZ\n df.loc[df['OST_WEST_KZ'] == 'W','OST_WEST_KZ'] = 0\n df.loc[df['OST_WEST_KZ'] == 'O','OST_WEST_KZ'] = 1\n \n # Re-encode categorical variable(s) to be kept in the analysis.\n \n \n #get list of attributes with type categorical\n feat_info[feat_info['type'] == 'categorical']\n \n cat_new_cols = [] #initialize\n for i in feat_info[feat_info['type'] == 'categorical']['attribute']:\n cat_new_cols.append(i)\n \n for cols in df.columns:\n if cols in cat_new_cols:\n if df[cols].nunique(dropna=True) > 2: #if the number of unique values is greater than 2 \n df = df.drop(cols, axis=1) #drop from the analysis\n print(\"more than 2 
categories: {}\".format(cols))\n \n else:\n if not df[cols].unique()[0] > 0:\n #if not df[cols].unique()[0] > 0:\n dummies = pd.get_dummies(df[cols], prefix=cols)\n df = df.drop(cols, axis=1) #create dummy variable\n df = df.join(dummies)\n print(\"transformed to dummy variable: {}\".format(cols))\n \n # create variable: MOVEMENT\n df.loc[df['PRAEGENDE_JUGENDJAHRE'].isin([1,3,5,8,10,12,14]),'MOVEMENT'] = 1\n df.loc[df['PRAEGENDE_JUGENDJAHRE'].isin([2,4,6,7,9,11,13,15]),'MOVEMENT'] = 2\n \n #Capture Decade\n df.loc[df['PRAEGENDE_JUGENDJAHRE'].isin([1,2]), 'DECADE'] = 40\n df.loc[df['PRAEGENDE_JUGENDJAHRE'].isin([3,4]), 'DECADE'] = 50\n df.loc[df['PRAEGENDE_JUGENDJAHRE'].isin([5,6,7]), 'DECADE'] = 60\n df.loc[df['PRAEGENDE_JUGENDJAHRE'].isin([8,9]), 'DECADE'] = 70\n df.loc[df['PRAEGENDE_JUGENDJAHRE'].isin([10,11,12,13]), 'DECADE'] = 80\n df.loc[df['PRAEGENDE_JUGENDJAHRE'].isin([14,15]), 'DECADE'] = 90\n \n df['CAMEO_INTL_2015'] = df['CAMEO_INTL_2015'].astype(float)\n\n # create new variable: WEALTH\n df.loc[df['CAMEO_INTL_2015'].isin([51,52,53,54,55]), 'WEALTH'] = 1\n df.loc[df['CAMEO_INTL_2015'].isin([41,42,43,44,45]), 'WEALTH'] = 2\n df.loc[df['CAMEO_INTL_2015'].isin([31,32,33,34,35]), 'WEALTH'] = 3\n df.loc[df['CAMEO_INTL_2015'].isin([21,22,23,24,25]), 'WEALTH'] = 4\n df.loc[df['CAMEO_INTL_2015'].isin([11,12,13,14,15]), 'WEALTH'] = 5\n \n # create new variable: LIFE_STAGE\n df.loc[df['CAMEO_INTL_2015'].isin([11,21,31,41,51]),'LIFE_STAGE'] = 1\n df.loc[df['CAMEO_INTL_2015'].isin([12,22,32,42,52]),'LIFE_STAGE'] = 2\n df.loc[df['CAMEO_INTL_2015'].isin([13,23,33,43,53]),'LIFE_STAGE'] = 3\n df.loc[df['CAMEO_INTL_2015'].isin([14,24,34,44,54]),'LIFE_STAGE'] = 4\n df.loc[df['CAMEO_INTL_2015'].isin([15,25,35,45,55]),'LIFE_STAGE'] = 5\n \n # remove selected columns and rows, ...\n df = df.drop('PRAEGENDE_JUGENDJAHRE', axis=1)\n df = df.drop('CAMEO_INTL_2015',axis=1)\n \n # Return the cleaned dataframe.\n return df", "def test_no_reproducible_for_varinat_analysis(self):\n self.testcases[0].job_type = 'some_type1'\n self.testcases[0].project_name = 'project1'\n self.testcases[0].crash_state = 'abcde'\n self.testcases[0].one_time_crasher_flag = False\n self.testcases[0].crash_type = 'crash_type1'\n self.testcases[0].security_flag = True\n self.testcases[1].job_type = 'some_type2'\n self.testcases[1].project_name = 'project1'\n self.testcases[1].crash_state = 'vwxyz'\n self.testcases[1].crash_type = 'crash_type2'\n self.testcases[1].one_time_crasher_flag = True\n self.testcases[1].security_flag = True\n\n for t in self.testcases:\n t.put()\n\n # testcase2's varinat will be evaluated against testcase1\n self.testcase_variants[0].job_type = 'fake_engine_asan_project1'\n self.testcase_variants[0].testcase_id = self.testcases[0].key.id()\n self.testcase_variants[0].security_flag = True\n self.testcase_variants[1].job_type = 'some_type1'\n self.testcase_variants[1].crash_state = 'abcde'\n self.testcase_variants[1].crash_type = 'crash_type1'\n self.testcase_variants[1].testcase_id = self.testcases[1].key.id()\n self.testcase_variants[1].security_flag = True\n\n for v in self.testcase_variants:\n v.put()\n\n grouper.group_testcases()\n\n for index, t in enumerate(self.testcases):\n self.testcases[index] = data_handler.get_testcase_by_id(t.key.id())\n self.assertEqual(self.testcases[index].group_id, 0)\n self.assertTrue(self.testcases[index].is_leader)", "def testConditionReasons(self):\n \n state = State.from_problem(self.prob)\n\n relevantVars = []\n drive = self.dom.get_action(\"drive\")\n with 
drive.instantiate([\"agent\", \"tru1\", \"apt1\"], self.prob):\n self.assert_(state.is_satisfied(drive.precondition, relevantVars))\n\n relevantVars = set(relevantVars)\n \n s1 = StateVariable(self.prob.functions[\"city-of\"][0], [self.prob[\"pos1\"]])\n s2 = StateVariable(self.prob.functions[\"city-of\"][0], [self.prob[\"apt1\"]])\n s3 = StateVariable(self.prob.functions[\"location-of\"][0], [self.prob[\"tru1\"]])\n \n self.assertEqual(len(relevantVars), 3)\n self.assert_(s1 in relevantVars)\n self.assert_(s2 in relevantVars)\n self.assert_(s3 in relevantVars)", "def preprocess(self, df):\n print(\"Started Processing....\")\n # binary conversion\n df.replace(to_replace=\"yes\", value=1, inplace=True)\n df.replace(to_replace=\"no\", value=0, inplace=True)\n\n # replace unknowns with nan\n df = df.replace(to_replace=\"unknown\", value=np.nan)\n # getting the list of columns with nan\n ml = df.columns[df.isna().any()].tolist()\n\n for item in ml:\n # getting the ratio of the index labels\n val = pd.DataFrame(df[item].value_counts(normalize=True))\n\n # index labels in a list\n valr = val.index.tolist()\n # drc.index = valr\n # columns values in a list\n valc = val[item].tolist()\n # replacing the nan values with ratio\n df[item] = df[item].fillna(pd.Series(np.random.choice(valr, p=valc, size=len(df))))\n\n # dependent variable\n dfy = df.iloc[:, -1]\n # independent variable\n dfx = df.iloc[:, :-1]\n\n # converting categorical data to numerical\n dfx = pd.get_dummies(dfx)\n\n # normalizing\n dfx = (dfx - dfx.min()) / (dfx.max() - dfx.min())\n\n dxdy = pd.concat([dfx, dfy], axis=1)\n\n # class balancing\n sm = RandomOverSampler(random_state=42)\n dfx, dfy = sm.fit_sample(dxdy.iloc[:, :-1], dxdy.iloc[:, -1])\n\n # converting to dataframe\n dfx = pd.DataFrame(dfx, columns=dxdy.iloc[:, :-1].columns.values)\n\n # dimensionality reduction\n pca = PCA(n_components=33)\n dfx = pca.fit_transform((dfx))\n\n print(\"Processing Done\")\n\n return dfx, dfy", "def test_bayes_update_nondiscriminating(self):\r\n # deletion of non-discriminating evidence should not affect result\r\n for obs, exp in zip(bayes_updates(self.deleted), self.result):\r\n self.assertFloatEqualAbs(obs, exp, 1e-11)\r\n # additional non-discriminating evidence should not affect result\r\n for obs, exp in zip(bayes_updates(self.extra), self.result):\r\n self.assertFloatEqualAbs(obs, exp, 1e-11)", "def allowed_class_vars(self):\n\n\n self.allowed_vars = [\n 'hfMode',\n 'lqCN',\n 'lqCF',\n 'lqPN',\n 'lqPF',\n 'lqCNmode',\n 'lqCFmode',\n 'lqPNmode',\n 'lqPFmode',\n 'S',\n 'SMode',\n 'fracCN',\n 'fracCF',\n 'fracPN',\n 'fracPF',\n 'fracUI',\n 'fracUO',\n 'fracLI',\n 'fracLO',\n 'Pinj',\n 'coreRadFrac',\n 'qBG',\n 'fG',\n 'qFilePath',\n 'qFileTag',\n ]\n return", "def clean(self):\n # TODO: unit test me\n small_vars = SmallVariant.objects.filter(\n case_id=self.case.pk,\n release=self.release,\n chromosome=self.chromosome,\n start=self.start,\n reference=self.reference,\n alternative=self.alternative,\n )\n if not small_vars.exists():\n raise ValidationError(\"No corresponding variant in case\")", "def clean(self):\n # TODO: unit test me\n small_vars = SmallVariant.objects.filter(\n case_id=self.case.pk,\n release=self.release,\n chromosome=self.chromosome,\n start=self.start,\n reference=self.reference,\n alternative=self.alternative,\n )\n if not small_vars.exists():\n raise ValidationError(\"No corresponding variant in case\")", "def repair(self):\n # self.add_cons_vars([x.constraint for x in self._cons_dict.values()])\n # 
self.add_cons_vars([x.variable for x in self._var_dict.values()])\n self._push_queue()\n Model.repair(self)\n self.regenerate_constraints()\n self.regenerate_variables()", "def condition_domain_reduction(csp, var) :\n return True", "def condition_domain_reduction(csp, var) :\n return True", "def compute_constrained_species(model,var_dec_map,variants,flux_samples,fva_rxn_set=\"all_reacts\",\n fva=False,fva_frac_opt=0.1,action_n=6,add_na_bound=True,processes=cpu_count()):\n ### Create action set for each player and mapping to model changes.\n actions = create_action_set(number_of_actions=action_n, add_no_change=add_na_bound)\n variant_rxn_action_dict = rxn_to_constraints_samples(variants, actions, flux_samples) ### IMPORTANT- THIS IS OLD! v2 available\n\n ### Get list of reactions used in popFVA. Won't be used if fva = False, since single objective is solved.\n popfva_reacts_set = []\n if fva_rxn_set==\"all_reacts\":\n popfva_reacts_set=list([x.id for x in model.base_cobra_model.reactions])\n elif fva_rxn_set==\"var_reacts\":\n for x in variants:\n popfva_reacts_set.extend(x.cobra_reactions.keys())\n popfva_reacts_set = list(set(popfva_reacts_set))\n \n ### initalize variables\n variant_indices = range(len(variants))\n species_pheno_trajectory = {}\n \n pheno = None\n all_pheno_list = []\n\n ALLELE_REACT_DF = pd.DataFrame()\n for allele in variants[:]:\n for rxn_id in allele.cobra_reactions.keys():\n ALLELE_REACT_DF[allele.id+\"__\"+rxn_id] = model.strain_allele_matrix[allele.id].replace(1.0, rxn_id)\n ALLELE_REACT_DF.replace(0.0, np.nan, inplace=True)\n ALLELE_REACT_DF[\"RXN_DUPS\"] = ALLELE_REACT_DF.apply(lambda x: x.value_counts()[x.value_counts()>1].index.tolist(),axis=1)\n \n ### --- Sample decision from variants's mixed action at start of round\n variant_decision_dict = {variants[x]: var_dec_map[variants[x].id] for x in variant_indices}\n\n for allele_play, constrnt in variant_decision_dict.items():\n ### if lb constraint is picked, use base model ub for ub constraint\n if constrnt.split(\"_\")[0] == \"lb\":\n for react, constrnt_actions in variant_rxn_action_dict[allele_play].items():\n base_bound = model.base_cobra_model.reactions.get_by_id(react).upper_bound\n for strain_react in allele_play.cobra_reactions[react]:\n strain_react.upper_bound = base_bound\n strain_react.lower_bound = constrnt_actions[constrnt]\n ### if ub constraint is picked, use base model lb for lb constraint\n elif constrnt.split(\"_\")[0] == \"ub\":\n for react, constrnt_actions in variant_rxn_action_dict[allele_play].items():\n base_bound = model.base_cobra_model.reactions.get_by_id(react).lower_bound\n for strain_react in allele_play.cobra_reactions[react]:\n strain_react.upper_bound = constrnt_actions[constrnt]\n strain_react.lower_bound = base_bound\n \n elif constrnt == 'no_change':\n for react, constrnt_actions in variant_rxn_action_dict[allele_play].items():\n base_lower_bound = model.base_cobra_model.reactions.get_by_id(react).lower_bound\n base_upper_bound = model.base_cobra_model.reactions.get_by_id(react).upper_bound\n for strain_react in allele_play.cobra_reactions[react]:\n strain_react.upper_bound = base_upper_bound\n strain_react.lower_bound = base_lower_bound\n\n ### Take care of duplicate allelic effects for shared reactions\n for strn, allele_row in ALLELE_REACT_DF.iterrows():\n rxn_dups=allele_row[\"RXN_DUPS\"]\n if len(rxn_dups)>0:\n str_obj = model.strains.get_by_id(strn)\n for rxn in rxn_dups:\n bnd_dict = {\"lb\":[], \"no\":[], \"ub\":[]}\n 
alleles={model.alleles.get_by_id(x.split(\"__\")[0]): var_dec_map[x.split(\"__\")[0]] for x in allele_row[allele_row==rxn].index.tolist()}\n constraints=[bnd_dict[y.split(\"_\")[0]].append(variant_rxn_action_dict[x][rxn][y]) for x,y in alleles.items()]\n if len(bnd_dict[\"lb\"])>0:\n str_obj.cobra_model.reactions.get_by_id(rxn).lower_bound=np.mean(bnd_dict[\"lb\"])\n else:\n str_obj.cobra_model.reactions.get_by_id(rxn).lower_bound=model.base_cobra_model.reactions.get_by_id(rxn).lower_bound\n \n if len(bnd_dict[\"ub\"])>0:\n str_obj.cobra_model.reactions.get_by_id(rxn).upper_bound=np.mean(bnd_dict[\"ub\"])\n else:\n str_obj.cobra_model.reactions.get_by_id(rxn).upper_bound=model.base_cobra_model.reactions.get_by_id(rxn).upper_bound\n\n with ProcessPool(processes, initializer=species_init_Objective, initargs=(model,popfva_reacts_set,fva_frac_opt)) as pool:\n try:\n if fva==True:\n future = pool.map(models_optimize_fva, [x.id for x in model.strains], timeout=300)\n future_iterable = future.result()\n pheno = list(future_iterable)\n else:\n future = pool.map(models_optimize_objective, [x.id for x in model.strains], timeout=40)\n future_iterable = future.result()\n pheno = list(future_iterable)\n\n except TimeoutError as error:\n print(\"function took longer than %d seconds\" % error.args[1])\n except Exception as error:\n print(\"function raised %s\" % error)\n \n pool.close()\n pool.stop()\n pool.join()\n return pheno", "def reject_test(self):\n self.__genes_test = None\n self.__fitness_test = None", "def test_predict_transient_smoke_new(self):\n self.check_predict_transient_smoke()", "def verifications(sv):\r\n verif_no_special(sv) # check no special character remains (fatal error) \r\n verif_functions(sv) # check if all functions are defined (fatal error)\r\n verif_similar_names(sv) # check similar names for typing errors (warning)\r\n verif_stochastic(sv) # make sure stochastic objects are not used in expressions\r\n verif_unused(sv) # check physical and logical configurations match \r", "def violated(self) -> bool:\n ...", "def _is_legal(self, h5_main, variables=None):\n if variables is None:\n variables = ['DC_Offset']\n\n file_data_type = get_attr(h5_main.file, 'data_type')\n meas_grp_name = h5_main.name.split('/')\n h5_meas_grp = h5_main.file[meas_grp_name[1]]\n meas_data_type = get_attr(h5_meas_grp, 'data_type')\n\n if h5_main.dtype != sho32:\n warn('Provided dataset is not a SHO results dataset.')\n return False\n\n # This check is clunky but should account for case differences. If Python2 support is dropped, simplify with\n # single check using casefold.\n if not (meas_data_type.lower() == file_data_type.lower() or meas_data_type.upper() == file_data_type.upper()):\n warn('Mismatch between file and Measurement group data types for the chosen dataset.')\n print('File data type is {}. 
The data type for Measurement group {} is {}'.format(file_data_type,\n h5_meas_grp.name,\n meas_data_type))\n return False\n\n if file_data_type == 'BEPSData':\n if get_attr(h5_meas_grp, 'VS_mode') not in ['DC modulation mode', 'current mode']:\n warn('Provided dataset is not a DC modulation or current mode BEPS dataset')\n return False\n elif get_attr(h5_meas_grp, 'VS_cycle_fraction') != 'full':\n warn('Provided dataset does not have full cycles')\n return False\n\n elif file_data_type == 'cKPFMData':\n if get_attr(h5_meas_grp, 'VS_mode') != 'cKPFM':\n warn('Provided dataset has an unsupported VS_mode.')\n return False\n\n return super(BELoopFitter, self)._is_legal(h5_main, variables)", "def validate_variables (manifest_vars, algorithm_code_PublicVars, algorithm_code_ProtectedVars):\n\n \n problems = []\n \n \n print(\"\\nValidating all variables:\\n\")\n\n #check if all model variables in the manifest file are declared in the Algorithm code file\n allAlgorithm_code_vars = {**algorithm_code_PublicVars, **algorithm_code_ProtectedVars}\n problemsSize = len(problems)\n for key in manifest_vars.keys():\n if key not in allAlgorithm_code_vars.keys():\n # the variable (key) exists in the manifest file but it is not declared in the Algorithm code file\n problems.append(' There is no declaration for the %s model variable in the Algorithm Code, although it exists under the ModelVariables in the manifest file' % key)\n\n #check if all variables which are declared in the Algorithm code file exist in the manifest file \n for key in algorithm_code_PublicVars.keys():\n if key not in manifest_vars.keys():\n # the variable (key) is declared in the Algorithm code file but it does not exist in the manifest file\n problems.append(' The variable %s is declared in the Algorithm Code but it does not exist under the ModelVariables in the manifest file' % key)\n \n if len(problems) == problemsSize:\n print(\" All model variables in the manifest file are declared in the Algorithm Code file and vice versa\")\n else:\n return problems\n\n #check if the type/causality of the model variables (in manifest) match the variables type/causality declared in the Algorithm code file\n problemsSize = len(problems)\n for key in manifest_vars.keys():\n manifest_type_caus = manifest_vars[key]\n algorithm_type_caus = allAlgorithm_code_vars[key]\n #checking if the types match\n if manifest_type_caus.type != algorithm_type_caus.type:\n problems.append(' The %s variable in the manifest is of type %s and the same variable is of type %s in the algorithm code: variable types must match' % (key, manifest_type_caus.type, algorithm_type_caus.type))\n\n manifest_caus = manifest_type_caus.causality\n algorithm_caus = algorithm_type_caus.causality\n\n if manifest_caus == 'tunableParameter' or manifest_caus == 'dependentParameter':\n if algorithm_caus != 'parameter':\n problems.append(' The blockCausality of the %s variable is %s and the causality of the same variable in the algorithm code is %s in the algorithm code: causalities must match (line %s in the Algorithm code)' % (key, manifest_caus, algorithm_caus, algorithm_type_caus.line))\n else:\n if manifest_caus != algorithm_caus:\n problems.append(' The blockCausality of the %s variable is %s and the causality of the same variable in the algorithm code is %s: causalities must match (line %s in the Algorithm code)' % (key, manifest_caus, algorithm_caus, algorithm_type_caus.line))\n \n if len(problems) == problemsSize:\n print(\" All model variable types and blockCausalities in the manifest 
file match the types and causalities in the Algorithm Code file\")\n\n \n return problems", "def _finalize(self):\n if self.vcount > 1:\n # skewness = g1 = sqrt(n) M3/(M2^(3/2)) # zero \n # kurtosis = g2 = n M4/M2^2 - 3 # zero for normal\n # sk = (M3/nf)/(sigma**3)\n # ku = (M4/nf)/sigma**4 - 3\n n = self.vcount\n nf = float(n)\n mu2 = self.vm2/nf\n self.vvar = self.vm2/(nf-1)\n try:\n self.vskewness = self.vm3/nf/(mu2**1.5)\n self.vkurtosis = self.vm4/nf/(mu2**2)\n except:\n self.vskewness = 0\n self.vkurtosis = 0\n elif self.vcount == 1:\n self.vvar = 0\n self.vskewness = 0\n self.vkurtosis = 0\n self.dirty = False", "def test_Bayesian_selection_priors(self):\n Bayesian_priors = [CS, SS, ET]\n df_Bayesian_priors_atleast_one_notNaN = self.df.dropna(\n subset=Bayesian_priors, thresh=1, axis=0, inplace=False)\n\n indices = [\n i for i in self.df.index if i not in df_Bayesian_priors_atleast_one_notNaN.index]\n\n if (df_Bayesian_priors_atleast_one_notNaN.shape != self.df.shape):\n print('\\n\\nBayesian discrepancies: ',\n self.df.loc[indices, ['Reference', 'Reported Semiology']])\n assert (df_Bayesian_priors_atleast_one_notNaN.shape == self.df.shape)", "def get_tumor_normal_evidence(variant_info_field: dict) -> dict:\n # Set default values\n tumor_has_contig, normal_has_contig = False, False\n allele_frequency_tumor, allele_frequency_normal = 0, 0\n\n # Extract allele frequency and contig-status for variant in T and N\n if \"TUMOR_PASS_SAMPLE\" in variant_info_field:\n pass_sample: List[str] = variant_info_field[\"TUMOR_PASS_SAMPLE\"]\n pass_info: List[str] = variant_info_field[\"TUMOR_PASS_INFO\"]\n allele_frequency_tumor, max_af_index = get_max_allele_frequency(\n variant_info_sample_field=pass_sample\n )\n if allele_frequency_tumor == 0:\n get_any_contig = True\n else:\n get_any_contig = False\n tumor_has_contig: bool = look_for_contigs(\n variant_info_pass_field=pass_info,\n max_allele_frequency_variant_index=max_af_index,\n look_in_all_variants=get_any_contig,\n )\n\n if \"NORMAL_PASS_SAMPLE\" in variant_info_field:\n pass_sample: List[str] = variant_info_field[\"NORMAL_PASS_SAMPLE\"]\n pass_info: List[str] = variant_info_field[\"NORMAL_PASS_INFO\"]\n allele_frequency_normal, max_af_index = get_max_allele_frequency(\n variant_info_sample_field=pass_sample\n )\n if allele_frequency_normal == 0:\n get_any_contig: bool = True\n else:\n get_any_contig: bool = False\n normal_has_contig: bool = look_for_contigs(\n variant_info_pass_field=pass_info,\n max_allele_frequency_variant_index=max_af_index,\n look_in_all_variants=get_any_contig,\n )\n\n evidence_dict: dict = {\n \"tumor_max_af\": allele_frequency_tumor,\n \"normal_max_af\": allele_frequency_normal,\n \"tumor_has_contig\": tumor_has_contig,\n \"normal_has_contig\": normal_has_contig,\n }\n return evidence_dict", "def check_if_reduce_needed(vars_to_modify):\n for var in vars_to_modify:\n if len(var.dimensions) > 2 and var[0,0,:].mask.all() and \\\n var[-1,1,:,:].mask.all():\n return True\n return False", "def FE_remove_variables_using_SULOV_method(df, numvars, modeltype, target,\r\n corr_limit = 0.70,verbose=0):\r\n df = copy.deepcopy(df)\r\n ### for some reason, doing a mass fillna of vars doesn't work! 
Hence doing it individually!\r\n null_vars = np.array(numvars)[df[numvars].isnull().sum()>0]\r\n for each_num in null_vars:\r\n df[each_num].fillna(0,inplace=True)\r\n target = copy.deepcopy(target)\r\n print('Searching for highly correlated variables from %d variables using SULOV method' %len(numvars))\r\n print('##### SULOV : Searching for Uncorrelated List Of Variables (takes time...) ############')\r\n correlation_dataframe = df[numvars].corr().abs().astype(np.float16)\r\n ######### This is how you create a dictionary of which var is highly correlated to a list of vars ####\r\n corr_values = correlation_dataframe.values\r\n col_index = correlation_dataframe.columns.tolist()\r\n index_triupper = list(zip(np.triu_indices_from(corr_values,k=1)[0],np.triu_indices_from(\r\n corr_values,k=1)[1]))\r\n high_corr_index_list = [x for x in np.argwhere(abs(corr_values[np.triu_indices(len(corr_values), k = 1)])>=corr_limit)]\r\n low_corr_index_list = [x for x in np.argwhere(abs(corr_values[np.triu_indices(len(corr_values), k = 1)])<corr_limit)]\r\n tuple_list = [y for y in [index_triupper[x[0]] for x in high_corr_index_list]]\r\n correlated_pair = [(col_index[tuple[0]],col_index[tuple[1]]) for tuple in tuple_list]\r\n corr_pair_dict = dict(return_dictionary_list(correlated_pair))\r\n keys_in_dict = list(corr_pair_dict.keys())\r\n reverse_correlated_pair = [(y,x) for (x,y) in correlated_pair]\r\n reverse_corr_pair_dict = dict(return_dictionary_list(reverse_correlated_pair))\r\n for key, val in reverse_corr_pair_dict.items():\r\n if key in keys_in_dict:\r\n if len(key) > 1:\r\n corr_pair_dict[key] += val\r\n else:\r\n corr_pair_dict[key] = val\r\n #### corr_pair_dict is used later to make the network diagram to see which vars are correlated to which\r\n # Selecting upper triangle of correlation matrix ## this is a fast way to find highly correlated vars\r\n upper_tri = correlation_dataframe.where(np.triu(np.ones(correlation_dataframe.shape),\r\n k=1).astype(np.bool))\r\n empty_df = upper_tri[abs(upper_tri)>corr_limit]\r\n ### if none of the variables are highly correlated, you can skip this whole drawing\r\n if empty_df.isnull().all().all():\r\n print(' No highly correlated variables in data set to remove. All selected...')\r\n return numvars\r\n #### It's important to find the highly correlated features first #############\r\n lower_tri = correlation_dataframe.where(np.tril(np.ones(correlation_dataframe.shape),\r\n k=-1).astype(np.bool))\r\n lower_df = lower_tri[abs(lower_tri)>corr_limit]\r\n corr_list = empty_df.columns[[not(empty_df[x].isnull().all()) for x in list(empty_df)]].tolist(\r\n )+lower_df.columns[[not(lower_df[x].isnull().all()) for x in list(lower_df)]].tolist()\r\n corr_list = find_remove_duplicates(corr_list)\r\n ###### This is for ordering the variables in the highest to lowest importance to target ###\r\n if len(corr_list) == 0:\r\n final_list = list(correlation_dataframe)\r\n print('Selecting all (%d) variables since none of them are highly correlated...' 
%len(numvars))\r\n return numvars\r\n else:\r\n if isinstance(target, list):\r\n target = target[0]\r\n max_feats = len(corr_list)\r\n if modeltype == 'Regression':\r\n sel_function = mutual_info_regression\r\n fs = SelectKBest(score_func=sel_function, k=max_feats)\r\n else:\r\n sel_function = mutual_info_classif\r\n fs = SelectKBest(score_func=sel_function, k=max_feats)\r\n ##### you must ensure there are no null values in corr_list df ##\r\n try:\r\n fs.fit(df[corr_list].astype(np.float16), df[target])\r\n except:\r\n fs.fit(df[corr_list].astype(np.float32), df[target])\r\n try:\r\n mutual_info = dict(zip(corr_list,fs.scores_))\r\n #### The first variable in list has the highest correlation to the target variable ###\r\n sorted_by_mutual_info =[key for (key,val) in sorted(mutual_info.items(), key=lambda kv: kv[1],reverse=True)]\r\n ##### Now we select the final list of correlated variables ###########\r\n selected_corr_list = []\r\n #### You have to make multiple copies of this sorted list since it is iterated many times ####\r\n orig_sorted = copy.deepcopy(sorted_by_mutual_info)\r\n copy_sorted = copy.deepcopy(sorted_by_mutual_info)\r\n copy_pair = copy.deepcopy(corr_pair_dict)\r\n #### select each variable by the highest mutual info and see what vars are correlated to it\r\n for each_corr_name in copy_sorted:\r\n ### add the selected var to the selected_corr_list\r\n selected_corr_list.append(each_corr_name)\r\n for each_remove in copy_pair[each_corr_name]:\r\n #### Now remove each variable that is highly correlated to the selected variable\r\n if each_remove in copy_sorted:\r\n copy_sorted.remove(each_remove)\r\n ##### Now we combine the uncorrelated list to the selected correlated list above\r\n rem_col_list = left_subtract(list(correlation_dataframe),corr_list)\r\n final_list = rem_col_list + selected_corr_list\r\n removed_cols = left_subtract(numvars, final_list)\r\n except:\r\n print(' SULOV Method crashing due to memory error, trying alternative simpler method...')\r\n #### Dropping highly correlated Features fast using simple linear correlation ###\r\n removed_cols = remove_highly_correlated_vars_fast(df[numvars],corr_limit) #use df, not the undefined name train\r\n final_list = left_subtract(numvars, removed_cols)\r\n if len(removed_cols) > 0:\r\n print(' Removing (%d) highly correlated variables:' %(len(removed_cols)))\r\n if len(removed_cols) <= 30:\r\n print(' %s' %removed_cols)\r\n if len(final_list) <= 30:\r\n print(' Following (%d) vars selected: %s' %(len(final_list),final_list))\r\n ############## D R A W C O R R E L A T I O N N E T W O R K ##################\r\n selected = copy.deepcopy(final_list)\r\n try:\r\n import networkx as nx\r\n except:\r\n print(' Python networkx library not installed. 
Install it for feature selection visualization.')\r\n return\r\n #### Now start building the graph ###################\r\n gf = nx.Graph()\r\n ### the mutual info score gives the size of the bubble ###\r\n multiplier = 2100\r\n for each in orig_sorted:\r\n gf.add_node(each, size=int(max(1,mutual_info[each]*multiplier)))\r\n ######### This is where you calculate the size of each node to draw\r\n sizes = [mutual_info[x]*multiplier for x in list(gf.nodes())]\r\n #### The sizes of the bubbles for each node is determined by its mutual information score value\r\n corr = df[corr_list].corr()\r\n high_corr = corr[abs(corr)>corr_limit]\r\n ## high_corr is the dataframe of a few variables that are highly correlated to each other\r\n combos = combinations(corr_list,2)\r\n ### this gives the strength of correlation between 2 nodes ##\r\n multiplier = 20\r\n for (var1, var2) in combos:\r\n if np.isnan(high_corr.loc[var1,var2]):\r\n pass\r\n else:\r\n gf.add_edge(var1, var2,weight=multiplier*high_corr.loc[var1,var2])\r\n ######## Now start building the networkx graph ##########################\r\n widths = nx.get_edge_attributes(gf, 'weight')\r\n nodelist = gf.nodes()\r\n cols = 5\r\n height_size = 5\r\n width_size = 15\r\n rows = int(len(corr_list)/cols)\r\n if rows < 1:\r\n rows = 1\r\n plt.figure(figsize=(width_size,min(20,height_size*rows)))\r\n pos = nx.shell_layout(gf)\r\n nx.draw_networkx_nodes(gf,pos,\r\n nodelist=nodelist,\r\n node_size=sizes,\r\n node_color='blue',\r\n alpha=0.5)\r\n nx.draw_networkx_edges(gf,pos,\r\n edgelist = widths.keys(),\r\n width=list(widths.values()),\r\n edge_color='lightblue',\r\n alpha=0.6)\r\n pos_higher = {}\r\n x_off = 0.04 # offset on the x axis\r\n y_off = 0.04 # offset on the y axis\r\n for k, v in pos.items():\r\n pos_higher[k] = (v[0]+x_off, v[1]+y_off)\r\n if len(selected) == 0:\r\n nx.draw_networkx_labels(gf, pos=pos_higher,\r\n labels=dict(zip(nodelist,nodelist)),\r\n font_color='black')\r\n else:\r\n nx.draw_networkx_labels(gf, pos=pos_higher,\r\n labels = dict(zip(nodelist,[x+' (selected)' if x in selected else x+' (removed)' for x in nodelist])),\r\n font_color='black')\r\n plt.box(True)\r\n plt.title(\"\"\"In SULOV, we repeatedly remove features with lower mutual info scores among highly correlated pairs (see figure),\r\n SULOV selects the feature with higher mutual info score related to target when choosing between a pair. 
\"\"\", fontsize=10)\r\n plt.suptitle('How SULOV Method Works by Removing Highly Correlated Features', fontsize=20,y=1.03)\r\n red_patch = mpatches.Patch(color='blue', label='Bigger circle denotes higher mutual info score with target')\r\n blue_patch = mpatches.Patch(color='lightblue', label='Thicker line denotes higher correlation between two variables')\r\n plt.legend(handles=[red_patch, blue_patch],loc='best')\r\n plt.show();\r\n ##### N E T W O R K D I A G R A M C O M P L E T E #################\r\n return final_list", "def check_if_stopping_criterion_is_met(original_training_data_values):\n if len(original_training_data_values)<23:\n return True\n else:\n target_column = original_training_data_values[:, -1]\n recipe_type, cupcake_muffin_count = np.unique(target_column, return_counts=True)\n cupcake_ratio = cupcake_muffin_count[0] / (cupcake_muffin_count.sum())\n muffin_ratio = cupcake_muffin_count[1] / (cupcake_muffin_count.sum())\n\n if cupcake_ratio >= 0.9 or muffin_ratio >= 0.9:\n return True\n else:\n return False", "def _infer_variable_types_from_data(raw_data):\n raise NotImplementedError()", "def check_variables(self, model):\n for rhs_var in model.rhs.keys():\n if rhs_var.name in model.variables.keys():\n var = model.variables[rhs_var.name]\n\n different_shapes = not np.array_equal(\n model.rhs[rhs_var].shape, var.shape\n )\n\n not_concatenation = not isinstance(var, pybamm.Concatenation)\n\n not_mult_by_one_vec = not (\n isinstance(\n var, (pybamm.Multiplication, pybamm.MatrixMultiplication)\n )\n and (\n pybamm.is_matrix_one(var.left)\n or pybamm.is_matrix_one(var.right)\n )\n )\n\n if different_shapes and not_concatenation and not_mult_by_one_vec:\n raise pybamm.ModelError(\n \"variable and its eqn must have the same shape after \"\n \"discretisation but variable.shape = \"\n \"{} and rhs.shape = {} for variable '{}'. 
\".format(\n var.shape, model.rhs[rhs_var].shape, var\n )\n )", "def main(var_file, rd_file, segment_file, patient_names, vaf_threshold=0.05, filterSegments = False):\n patient = patient_names\n\n patient_varcount = pd.read_csv(var_file, low_memory=False, delimiter=\"\\t\")\n patient_readdepth = pd.read_csv(rd_file, low_memory=False, delimiter=\"\\t\")\n\n # Sanity check to see if the columns are identical\n unmatch = patient_varcount.loc[patient_varcount.loc[:, 'Chromosome'] != patient_readdepth.loc[:, 'Chromosome']]\n if (unmatch.empty != True):\n print(\"Something wrong with sample, columns order do not match!\")\n\n tumor_sample = patient_varcount.columns[4:]\n info_col = patient_varcount.columns[:4]\n\n # Can use this to remove indels\n # patient_readdepth = patient_readdepth.loc[patient_readdepth['Change'].str.contains('-')!=True]\n # patient_varcount = patient_varcount.loc[patient_varcount['Change'].str.contains('-')!=True]\n\n # Make sure there's no zero read depth position for any sector, as Pyclone\n # will assume that the mutatation has identical VAF at that sector\n tmp = (patient_readdepth.loc[:, tumor_sample] == 0)\n patient_readdepth = patient_readdepth.loc[tmp.any(axis=1)==False]\n patient_varcount = patient_varcount.loc[tmp.any(axis=1)==False]\n\n # Transform RD to ref count which is just the difference between RD and varcount\n patient_readdepth.iloc[:, 4:] = patient_readdepth.iloc[:, 4:] - patient_varcount.iloc[:, 4:]\n # Get VAF and filter out those with < 0.05 VAF called in any sector.\n patient_VAF = patient_varcount.iloc[:, 4:] / patient_readdepth.iloc[:, 4:]\n patient_VAF = (patient_VAF < vaf_threshold)\n\n # Remove the mutations where the condition is true for ALL segments, i.e. it has to be below\n # 0.05 for all sectors. If it's above 0.05 in any sector, keep the mutations. This will keep most\n # of the private mutations.\n filter_VAF_index = (patient_VAF.all(axis=1) == False)\n num_filtered = filter_VAF_index.loc[filter_VAF_index == False, ]\n print(\"Patient {} has {} mutations with average VAF < {} removed\".format(patient, num_filtered.shape[0], vaf_threshold))\n # Filter out the variants\n patient_readdepth = patient_readdepth.loc[filter_VAF_index, ]\n patient_varcount = patient_varcount.loc[filter_VAF_index, ]\n\n all_segments = pd.read_csv(segment_file, low_memory=False, delimiter='\\t')\n\n if not os.path.exists(\"{}_mutations_withCN\".format(patient)):\n os.makedirs(\"{}_mutations_withCN\".format(patient))\n if not os.path.exists(\"{}_pyclone_input\".format(patient)):\n os.makedirs(\"{}_pyclone_input\".format(patient))\n\n for sample in tumor_sample:\n # The treeomics input has this weird problem of not accepting dash\n # in the name, so the output from my script in preparing treeomics\n # input has underscore instead. 
Change it back here.\n samplename = re.sub(r'_', r'-', sample)\n print(samplename)\n col_to_get = list(info_col)\n col_to_get.extend([sample])\n var_pat = patient_varcount.loc[:, col_to_get]\n var_pat.rename(columns={sample:\"var_counts\"}, inplace=True)\n ref_pat = patient_readdepth.loc[:, col_to_get]\n ref_pat.rename(columns={sample:\"ref_counts\"}, inplace=True)\n merge_sample_mut = var_pat.merge(ref_pat, how=\"left\")\n merge_sample_mut.loc[:, 'normal_cn'] = 2\n merge_sample_mut.loc[:, 'mutation_id'] = merge_sample_mut.loc[:, 'Gene'].map(str) + \"_\" + merge_sample_mut.loc[:, 'Chromosome'].map(str) + \":\" + merge_sample_mut.loc[:, 'Position'].map(str)\n sample_segments = all_segments[all_segments['Tumor_Sample_Barcode'] == samplename]\n\n seg_dict = segments_to_dict(sample_segments)\n\n overlap_seg = pd.DataFrame()\n filtered_seg = pd.DataFrame()\n for _, mut_row in merge_sample_mut.iterrows():\n # Skip X and Y chromosome\n if (mut_row['Chromosome'] == \"X\" or mut_row['Chromosome'] == \"Y\"):\n continue\n\n # Search for the segment\n buf = search_overlap(mut_row, seg_dict)\n # Skip if no overlapping segments\n if (buf.empty):\n continue\n # Filter segments with unreliable calls. This is according to Canopy's guideline. However, I set CNt to 8 instead of 6 since\n # LUAD tends to have higher ploidy than the other cancer types.\n elif filterSegments:\n print(\"--filterSegments specified. Will filter segments of low quality.\")\n if (buf.iloc[0]['numMarker'] < 100) or (buf.iloc[0]['end.pos'] - buf.iloc[0]['start.pos'] < 5000000) or (buf.iloc[0]['CNt'] >= 8):\n if (filtered_seg.empty):\n filtered_seg = buf.iloc[0].to_frame()\n else:\n filtered_seg = pd.concat([filtered_seg, buf.iloc[0]], axis=1)\n else:\n # Get copy number for mutations\n assigned_row = mut_row.copy(deep=True)\n assigned_row['CNt'] = buf.iloc[0]['CNt']\n assigned_row['major_cn'] = buf.iloc[0]['A']\n assigned_row['minor_cn'] = buf.iloc[0]['B']\n # Initialize dataframe for merging.\n if (overlap_seg.empty):\n overlap_seg = assigned_row.to_frame()\n else:\n overlap_seg = pd.concat([overlap_seg, assigned_row], axis=1)\n\n overlap_seg = overlap_seg.transpose()\n overlap_seg.to_csv(\"./{}_mutations_withCN/{}_SNV_withCN.maf\".format(patient, samplename),sep=\"\\t\", index=False)\n\n filtered_seg = filtered_seg.transpose()\n print(\"Sample {} has {} segments with marker<100 or smaller than 5 Mb or >= 8 copy number (Canopy guideline)\".format(sample, filtered_seg.shape[0]))\n filtered_seg.to_csv(\"./{}_mutations_withCN/{}_filtered_seg.maf\".format(patient, samplename),sep=\"\\t\", index=False)\n\n towrite = overlap_seg.loc[:, ['mutation_id', 'ref_counts', 'var_counts', 'normal_cn', 'minor_cn', 'major_cn']]\n # Remove those with major CN = 0. Most likely false positive. 
Note this will however remove the mutations across all\n # sectors when Pyclone runs the analysis\n weird_mut = towrite.loc[towrite.loc[:, 'major_cn'] == 0]\n print(\"{} mutations for sample {} are located in regions with major_cn 0!\".format(weird_mut.shape[0], samplename))\n towrite = towrite.loc[towrite.loc[:, 'major_cn'] != 0]\n towrite['ref_counts'] = towrite['ref_counts'].map(int)\n towrite['var_counts'] = towrite['var_counts'].map(int)\n\n towrite.to_csv(\"./{}_pyclone_input/{}.tsv\".format(patient, samplename), sep='\\t', index=False)", "def verif_unused(sv):\r\n if Unused in sv.Object and sv.Object[Unused].value: # check presence and integrity of unused list\r\n unusedlist=[applied (x, Unused) for x in sv.Object[Unused].value]\r\n for nam in unusedlist: # check each unused declaration\r\n nod=sv.Object[nam]\r\n if sv.Namedpinlist.get(nam)==[nod.effects]: continue # pin is just named\r\n elif applied(nam, Output):\r\n if len(nod.effects)==1: # only effect is output list\r\n if len(nod.causes)<=2: continue\r\n if len(nod.causes)<=4 and Faux in nod.causes and Ewent in nod.causes: continue # allow 'take event'\r\n elif nod.causes or nod.effects: # object should have no cause and no effect\r\n print(Err_unused_obj) \r\n print(str(nam))\r\n sv.Current_clause=None, None, None\r\n raise ReferenceError", "def test_variational():\n # iris\n #pres = \"Test for the Iris data set (easy, classical)\"\n #test_from_func_variational(pres, 15, 10, 3, True, Iris)\n\n # breast cancer\n pres = \"Test for the Breast Cancer data set (easy, classical)\"\n test_from_func_variational(pres, 15, 10, 3, True, Breast_cancer)\n\n # digits\n # pres = \"Test for the Digits data set (hard, classical)\"\n # test_from_func(pres, 10, 10, 10, True, Digits, quantum_instance)\n\n # wine\n # pres = \"Test for the Wine data set (medium, classical)\"\n # test_from_func(pres, 15, 10, 5, True, Wine, quantum_instance)\n\n # gaussian\n pres = \"Test for Gaussian data (medium, classical)\"\n for _ in range(1):\n print(\"\\n\")\n print(\"New iteration\")\n test_from_func_variational(pres, 25, 10, 2, True, Gaussian)\n print(\"\\n\")\n\n # small DNA strings\n pres = \"Test for short DNA sequences (hard, classical)\"\n test_from_func_variational(pres, 10, 15, 14, True, Sequence)\n\n #Quantum data\n pres = \"Test for data generated by a quantum computer (easy, quantum)\"\n print(pres)\n _, samp_train, samp_test, labels = ad_hoc_data(15, 10, 2, 0.3, True)\n sample_m, sample_p = stock_get(20, 0.3)\n\n labels_me = [-1, 1]\n samp_train_me = {-1: np.array(sample_m[:15]), 1: np.array(sample_p[:15])}\n samp_test_me = {-1: np.array(sample_m[15:]), 1: np.array(sample_p[15:])}\n print(samp_train)\n print(samp_train_me)\n print(samp_test)\n print(samp_test_me)\n\n my_impl_variational(samp_train, samp_test, labels)\n print(\"For other quantum data\")\n my_impl_variational(samp_train_me, samp_test_me, labels_me)", "def classify():\n yes_dataset = df[df[\"_class\"] == 1] # 470588\n no_dataset = df[df[\"_class\"] == 0] # 1971\n\n parameter_analysis = list()\n for criterion in np.arange(0.05, 0.91, 0.05):\n print(\"doing experiment at criterion = %s ...\" % criterion)\n rate_list = list()\n for i in range(10):\n # shuffle yes_dataset and no_dataset, so we can randomly choose 90% yes_dataset\n # + 90% no_dataset as train dataset, 10% yes_dataset + 10% no_dataset as test dataset\n yes_index = yes_dataset.index.tolist()\n random.shuffle(yes_index)\n no_index = no_dataset.index.tolist()\n 
random.shuffle(no_index)\n \n # concatenate 90%yes + 90%no, 10%yes + 10%no\n train = pd.concat([\n yes_dataset.loc[yes_index[:1774], :],\n no_dataset.loc[no_index[:423530], :]\n ])\n test = pd.concat([\n yes_dataset.loc[yes_index[1774:], :],\n no_dataset.loc[no_index[423530:], :]\n ]) \n \n # split data and label\n train_data, train_label = (train[[\"Revenue.Code\", \n \"Service.Code\", \n \"Procedure.Code\", \n \"Diagnosis.Code\", \n \"Subscriber.Index\"]], \n train[\"_class\"])\n test_data, test_label = (test[[\"Revenue.Code\", \n \"Service.Code\", \n \"Procedure.Code\", \n \"Diagnosis.Code\", \n \"Subscriber.Index\"]], \n test[\"_class\"])\n \n # apply classifier\n clf = GaussianNB()\n clf.fit(train_data, train_label)\n probability = clf.predict_proba(test_data).T[1]\n \n result = pd.DataFrame()\n result[\"_class\"] = test_label\n result[\"_predict\"] = probability >= criterion\n \n result_yes = result[result[\"_class\"] == 1]\n yes_yes_rate = sum(result_yes[\"_class\"] == result_yes[\"_predict\"])/len(result_yes[\"_predict\"])\n \n result_no = result[result[\"_class\"] == 0]\n no_no_rate = sum(result_no[\"_class\"] == result_no[\"_predict\"])/len(result_no[\"_predict\"])\n \n rate_list.append((yes_yes_rate, no_no_rate))\n\n rate_list = pd.DataFrame(rate_list)\n yes_yes_rate, no_no_rate = rate_list.mean()[0], rate_list.mean()[1]\n parameter_analysis.append((criterion, yes_yes_rate, no_no_rate))\n \n # save data to excel spreadsheet\n parameter_analysis = pd.DataFrame(parameter_analysis, columns=[\"criterion\", \"yes_yes_rate\", \"no_no_rate\"])\n writer = pd.ExcelWriter(\"parameter_analysis.xlsx\")\n parameter_analysis.to_excel(writer, \"parameter_analysis\", index=False)\n writer.save()", "def pre_processing_(data_df , serialized_objects):\n max_recency_acc_dig = serialized_objects['max_recency_acc_dig'] # These values are taken from trained model values\n max_recency_dig_2yr = serialized_objects['max_recency_dig_2yr'] # These values are taken from trained model values\n max_acc_recency_mf = serialized_objects['max_acc_recency_mf'] #These are values imported in training dataset. 
Same values needs to be used to impute missing values in unseen data\n\n data_df = data_df.na.fill({\n 'recency_acc_dig' : max_recency_acc_dig, # Filling missing values\n 'recency_dig_2yr' : max_recency_dig_2yr,\n 'acc_recency_mf' : max_acc_recency_mf\n })\n\n freq_acc_upg_2yrs_split = [-float('inf'), 0, 1, 2, float('inf')]\n bucketizer_freq_acc_upg_2yrs = Bucketizer(splits=freq_acc_upg_2yrs_split, inputCol='freq_acc_upg_acc_2yrs', outputCol='freq_acc_upg_acc_2yrs_bkt')\n data_df = bucketizer_freq_acc_upg_2yrs.setHandleInvalid('keep').transform(data_df) # Binning the freq_acc_upg_acc_2yrs column\n\n tot_purchase_split = [-float('inf'), 0, 1, 2, 3, float('inf')]\n bucketizer_tot_purchase = Bucketizer(splits=tot_purchase_split, inputCol='tot_accsry_purchse', outputCol='tot_accsry_purchse_bkt')\n data_df = bucketizer_tot_purchase.setHandleInvalid('keep').transform(data_df) # Binning the tot_accsry_purchse column\n\n del_cols_new = ['freq_acc_upg_acc_2yrs', 'tot_accsry_purchse']\n data_df = data_df.drop(*del_cols_new) # Dropping the older continuous columns\n return data_df", "def correct_detector_efficiency(sansdata,sensitivity):\n result=sansdata.data/sensitivity #Could be done more elegantly by defining a division method on SansData\n res=SansData()\n res.data=result\n res.metadata=deepcopy(sansdata.metadata)\n res.qx=copy(sansdata.qx)\n res.qy=copy(sansdata.qy)\n res.theta=copy(sansdata.theta)\n return res", "def unusedVars(self):\n fullcode = self.code_cfg\n variables = set([x[1:] for x in codeconfig_getvars(fullcode)])\n exceptions = set(['complexity', 'code_cfg'])\n clsvars = set(vars(self).keys())\n nones = set(filter(lambda x: self.__dict__[x] is None, clsvars))\n nones = nones.union(set(filter(lambda x: str(self.__dict__[x]) == \"\", clsvars)))\n unused = clsvars - variables - exceptions - nones\n return unused", "def read_variant_effect_predictor(file, gene_filter=None):\n vars = []\n\n def get_type(ref, alt):\n \"\"\"\n returns the variant type\n \"\"\"\n if len(ref) == 1 and len(alt) == 1:\n return VariationType.SNP\n if len(ref) > 0 and len(alt) == 0:\n if len(ref) % 3 == 0:\n return VariationType.DEL\n else:\n return VariationType.FSDEL\n if len(ref) == 0 and len(alt) > 0:\n if len(alt) % 3 == 0:\n return VariationType.INS\n else:\n return VariationType.FSINS\n return VariationType.UNKNOWN\n\n coding_types = set(\n [\n \"3_prime_UTR_variant\",\n \"5_prime_UTR_variant\",\n \"start_lost\",\n \"stop_gained\",\n \"frameshift_variant\",\n \"start_lost\",\n \"inframe_insertion\",\n \"inframe_deletion\",\n \"missense_variant\",\n \"protein_altering_variant\",\n \"splice_region_variant\",\n \"incomplete_terminal_codon_variant\",\n \"stop_retained_variant\",\n \"synonymous_variant\",\n \"coding_sequence_variant\",\n ]\n )\n\n with open(file, \"r\") as f:\n for i, l in enumerate(f):\n # skip comments\n if l.startswith(\"#\") or l.strip() == \"\":\n continue\n\n chrom, gene_pos, var_id, ref, alt, _, filter_flag, info = l.strip().split(\"\\t\")[:8]\n coding = {}\n isSynonymous = False\n\n for co in info.split(\",\"):\n # 
Allele|Consequence|IMPACT|SYMBOL|Gene|Feature_type|Feature|BIOTYPE|EXON|INTRON|HGVSc|HGVSp|cDNA_position|CDS_position|Protein_position|Amino_acids|Codons|Existing_variation|DISTANCE|STRAND|FLAGS|SYMBOL_SOURCE|HGNC_ID|TSL|APPRIS|SIFT|PolyPhen|AF|AFR_AF|AMR_AF|EAS_AF|EUR_AF|SAS_AF|AA_AF|EA_AF|gnomAD_AF|gnomAD_AFR_AF|gnomAD_AMR_AF|gnomAD_ASJ_AF|gnomAD_EAS_AF|gnomAD_FIN_AF|gnomAD_NFE_AF|gnomAD_OTH_AF|gnomAD_SAS_AF|CLIN_SIG|SOMATIC|PHENO|PUBMED|MOTIF_NAME|MOTIF_POS|HIGH_INF_POS|MOTIF_SCORE_CHANGE\">\n (\n _,\n var_type,\n _,\n gene,\n _,\n transcript_type,\n transcript_id,\n _,\n _,\n _,\n _,\n _,\n transcript_pos,\n _,\n prot_pos,\n aa_mutation,\n ) = co.strip().split(\"|\")[:16]\n HGNC_ID = co.strip().split(\"|\")[22]\n\n # pass every other feature type except Transcript (RegulatoryFeature, MotifFeature.)\n # pass genes that are uninterresting for us\n if transcript_type != \"Transcript\" or (HGNC_ID not in gene_filter and gene_filter):\n continue\n\n # pass all intronic and other mutations that do not directly influence the protein sequence\n if any(t in coding_types for t in var_type.split(\"&\")):\n # generate mutation syntax\n\n # positioning in Fred2 is 0-based!!!\n if transcript_pos != \"\":\n coding[transcript_id] = MutationSyntax(\n transcript_id,\n int(transcript_pos.split(\"/\")[0]) - 1,\n -1 if prot_pos == \"\" else int(prot_pos) - 1,\n co,\n \"\",\n geneID=HGNC_ID,\n )\n\n # is variant synonymous?\n isSynonymous = any(t == \"synonymous_variant\" for t in var_type.split(\"&\"))\n if coding:\n vars.append(\n Variant(\n var_id,\n get_type(ref, alt),\n chrom,\n int(gene_pos),\n ref.upper(),\n alt.upper(),\n coding,\n False,\n isSynonymous,\n )\n )\n return vars", "def set_food_dependent_flags(self):\n # check if self.food <= self.population\n # change self.hungry\n pass", "def clean_data(data,variables,ignore_na=True,fill_mode=\"mode\"):\n\tnum_rows,num_cols = data.shape\n\tif ignore_na:\n\t\tdata = data.dropna(axis=0,how=\"any\",subset=variables)\n\t\tnum_rows,num_cols = data.shape\n\telse:\n\t\tfor variable in variables:\n\t\t\tif data[variable].isna().sum()>0:\n\t\t\t\tif fill_mode==\"mode\":\n\t\t\t\t\tdata[variable] = data[variable].fillna(data[variable].mode()[0])\n\t\t\t\telse:\n\t\t\t\t\tif variable==\"Age\":\n\t\t\t\t\t\tnan_idx = np.where(data[\"Age\"].isna())[0]\n\t\t\t\t\t\tnorm = [1.,1.]#[8.,3.]\n\t\t\t\t\t\tnew_age = np.zeros((num_rows))\n\t\t\t\t\t\tfor idx in nan_idx:\n\t\t\t\t\t\t\terr = (data[[\"SibSp\",\"Pclass\"]]-data[[\"SibSp\",\"Pclass\"]].iloc[idx])\n\t\t\t\t\t\t\terr /= norm\n\t\t\t\t\t\t\ttotal_err = np.sqrt(np.sum(err**2,axis=1))\n\t\t\t\t\t\t\tif len(data[\"Age\"][total_err==0].mode())>0:\n\t\t\t\t\t\t\t\tnew_age[idx] = data[\"Age\"][total_err==0].mode()[0]\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tnew_age[idx] = data[\"Age\"][total_err<4].mode()[0]\n\t\t\t\t\t\tdata[\"Age\"].fillna(pd.Series(new_age),inplace=True)\n\n\t\t\t\t\tif variable==\"Embarked\":\n\t\t\t\t\t\t# data[variable] = data[variable].fillna(data[variable].mode()[0])\n\t\t\t\t\t\tdata = data.dropna(axis=0,how=\"any\",subset=[\"Embarked\"])\n\t\t\t\t\t\tnum_rows,num_cols = data.shape\n\n\t\t\t\t\tif variable==\"Fare\":\n\t\t\t\t\t\tnan_idx = np.where(data[\"Fare\"].isna())[0]\n\t\t\t\t\t\tnew_fare = np.zeros((num_rows))\n\t\t\t\t\t\tfor idx in nan_idx:\n\t\t\t\t\t\t\ttemp_age = data[\"Age\"].iloc[idx]\n\t\t\t\t\t\t\ttemp_embarked = data[\"Embarked\"].iloc[idx]\n\t\t\t\t\t\t\ttemp_pclass = data[\"Pclass\"].iloc[idx]\n\n\t\t\t\t\t\t\tsimilar_cases = data.loc[(data[\"Embarked\"]==temp_embarked) 
&\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t (data[\"Pclass\"]==temp_pclass) &\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t (data[\"Age\"]>(temp_age-15)) &\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t (data[\"Age\"]<(temp_age+15)) &\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t (data[\"Fare\"].isna()==False), \"Fare\"]\n\t\t\t\t\t\t\tnew_fare[idx] = similar_cases.median()\n\t\t\t\t\t\tdata[\"Fare\"].fillna(pd.Series(new_fare),inplace=True)\n\n\n\t## format ticket values\n\tduplicates = []\n\tticket_no = np.zeros((num_rows))\n\tfor i,uniq in enumerate(data['Ticket'].unique()):\n\t\tticket_no[data['Ticket']==uniq] = i\n\t## take into account four cases where 'Ticket' is given as LINE\n\tticket_no[data['Ticket']==\"LINE\"] = i + np.arange(np.sum(data['Ticket']==\"LINE\"))\n\tdata[\"ticket_no\"] = pd.Series(ticket_no,dtype=int)\n\n\t## format cabin values, set NaN cabin values to zero\n\tduplicates = []\n\tcabin_no = np.zeros((num_rows))\n\tfor i,uniq in enumerate(data['Cabin'].unique()):\n\t\tif uniq==\"nan\":\n\t\t\tcabin_no[data['Cabin']==uniq] = 0\n\t\tcabin_no[data['Cabin']==uniq] = i + 1\n\tdata[\"cabin_no\"] = pd.Series(cabin_no,dtype=int)", "def _abnormality(self):\n weights = [f\"{self.cwd}/step2/inceptionv3_level1.pth\",\n f\"{self.cwd}/step2/inceptionv3_apthae_ulcer_bleed_angio_lymph.pth\",\n f\"{self.cwd}/step2/inceptionv3_poly_cyst_stenoses_voedemas.pth\",\n f\"{self.cwd}/step2/inceptionv3_apthae_ulcer.pth\",\n f\"{self.cwd}/step2/inceptionv3_bleed_angio_lymph.pth\",\n f\"{self.cwd}/step2/inceptionv3_poly_cyst_stenoses.pth\",\n f\"{self.cwd}/step2/inceptionv3_angio_lymph.pth\",\n f\"{self.cwd}/step2/inceptionv3_poly_cyst_test.pth\"\n ]\n _t = _test(self.root+\"/step_1/Abnormal\", self.root+\"/step_2\", weights, step=2)\n _t._predict()\n\n print(\"Classification of abnormal to 9 different classes --> DONE\")", "def check_additional_input():\r\n\r\n # Check if the cluster center input is correct\r\n RM.check_if_matrix(clust_cent, 'The cluster centers')\r\n RM.warn_if_bigger(clust_cent.shape[1], meta_model.get_in_par_means().shape[1],\r\n 'The number of input parameters in the cluster centers',\r\n 'the number of input parameters - 1')\r\n RM.check_if_bigger(clust_cent.shape[1], meta_model.get_in_par_means().shape[1] - 1,\r\n 'The number of input parameters',\r\n 'the number of input parameters in the cluster centers')\r\n\r\n bounds = meta_model.get_in_par_intervals()\r\n\r\n for j in range(clust_cent.shape[0]):\r\n for i in range(bounds.shape[0]):\r\n RM.check_if_in_interval(bounds[i], clust_cent[j, i], i, ' In cluster center %x, the value')\r\n\r\n def check_PLSR_input():\r\n \"\"\" Checks model data of PLSR\r\n\r\n :return: Checks model data of PLSR\r\n \"\"\"\r\n\r\n RM.check_if_ndim_array(model_data, 3, 'Model data')\r\n RM.check_if_bigger(model_data.shape[1], meta_model.get_in_par_means().shape[1],\r\n 'The number of input parameters in the solution matrix',\r\n 'the number of input parameters')\r\n RM.warn_if_bigger(model_data.shape[1], meta_model.get_in_par_means().shape[1] + 1,\r\n 'The number of input parameters',\r\n 'the number of input parameters in the solution matrix')\r\n RM.check_if_bigger(model_data.shape[2], meta_model.get_out_par_means().shape[1] - 1,\r\n 'The number of output parameters in the solution matrix',\r\n 'the number of output parameters')\r\n RM.warn_if_bigger(model_data.shape[2], meta_model.get_out_par_means().shape[1],\r\n 'The number of output parameters',\r\n 'the number of output parameters in the solution matrix')\r\n\r\n # Check if the additional data is correct\r\n\r\n if meta_model.get_type() == 
'PLSR': # Additional check-up for PLSR\r\n check_PLSR_input()\r\n\r\n elif meta_model.get_type() == 'DLU': # Additional check-up for DLU\r\n raise TypeError('This part is not implemented yet')\r\n\r\n # if not isinstance(model_data, np.ndarray):\r\n # raise TypeError('The cluster input and output data is not stored in a multidimensional array')\r\n #\r\n # for clust_data in model_data:\r\n #\r\n # if not isinstance(clust_data[0], np.matrix) or not isinstance(clust_data[1], np.matrix):\r\n # raise TypeError('One of the input or output databases is not a matrix')\r\n #\r\n # if clust_data[0].shape[1] > meta_model.get_in_par_means().shape[1]:\r\n # warnings.warn('The number of input parameters for the input database of the clusters is bigger '\r\n # 'than the actual number of input parameters')\r\n #\r\n # elif clust_data[0].shape[1] < meta_model.get_in_par_means().shape[1]:\r\n # raise TypeError('The number of input parameters for the input database of the clusters is '\r\n # 'smaller than the actual numbers of input parameters')\r\n #\r\n # if clust_data[1].shape[1] > meta_model.get_out_par_means().shape[1]:\r\n # raise TypeError('The number of output parameters for the output database of the clusters is '\r\n # 'bigger than the actual number of output parameters')\r\n #\r\n # elif clust_data[1].shape[1] < meta_model.get_out_par_means().shape[1]:\r\n # raise TypeError('The number of output parameters for the output database of the clusters is '\r\n # 'smaller than the actual numbers of output parameters')\r\n #\r\n # if clust_data[0].shape[0] != clust_data[1].shape[0]:\r\n # raise TypeError('The number rows in the input and output database differ from each other')\r\n\r\n else: # No check-up is done when the meta-model is an unknown version\r\n warnings.warn('The additional cluster data can not be checked, for this kind of meta-model')\r\n\r\n RM.check_if_same_size(clust_cent.shape[0], model_data.shape[0],\r\n 'The number of clusters according to the cluster centers',\r\n 'The number of clusters according to the model_data')", "def postfit_covariance(self) -> NONEARRAY:\n pass", "def regularize_if_necessary(self) -> None:\n # As described in [wang2014], all entities and relations are used to compute the regularization term\n # which enforces the defined soft constraints.\n super().regularize_if_necessary(\n self.entity_embeddings.weight,\n self.normal_vector_embeddings.weight,\n self.relation_embeddings.weight,\n )", "def var_qc_mask(self, v, flagtype='orig'):\n data = numpy.ma.array(numpy.zeros(self.n_levels()), mask=False, dtype=bool)\n prof = self.var_profile_qc(v)\n if prof is not None and prof > 0:\n data[:] = True\n else:\n zqc = self.z_level_qc(flagtype)\n data[(zqc.mask == False) & (zqc > 0)] = True\n lqc = self.var_level_qc(v, flagtype)\n data[(lqc.mask == False) & (lqc > 0)] = True\n return data", "def test_minvar_basic(self):\n\n # Synthetic data of zeros\n data = np.zeros([2, 3])\n vrot, v, w = minvar(data)\n self.assertTrue(np.sum(vrot - data) < self.tol)\n self.assertTrue(np.sum(v - np.diag(np.ones(3))) < self.tol)\n self.assertTrue(np.sum(w - np.zeros(3)) < self.tol)", "def _compactness_pruning(self):\n feature_phrases = [phrase for phrase in self.frequent_features if self._is_compact(phrase)]\n self.features_phrases = feature_phrases", "def _varfilter(self, vrs, response, predictor) -> List[str]:\n if not response and not predictor:\n return vrs\n if response:\n vrs = _list_union(vrs, self.response_vars)\n if predictor:\n vrs = _list_union(vrs, self.predictor_vars)\n return 
vrs", "def test_sparsity(config):\n total_zeros = 0\n total_nonzeros = 0\n\n print ('<===sparsity type is {}'.format(config.sparsity_type))\n print ('<===layers to be pruned are {}'.format(config._prune_ratios))\n if config.masked_progressive and (config.sparsity_type == 'filter' or config.sparsity_type =='column'or config.sparsity_type == \"bn_filter\" ):\n ### test both column and row sparsity\n print (\"***********checking column sparsity*************\")\n for name,W in config.model.named_parameters():\n if name not in config.prune_ratios:\n continue\n W = W.cpu().detach().numpy()\n shape = W.shape\n W2d = W.reshape(shape[0],-1)\n column_l2_norm = LA.norm(W2d,2,axis=0)\n zero_column = np.sum(column_l2_norm == 0)\n nonzero_column = np.sum(column_l2_norm !=0)\n\n print (\"column sparsity of layer {} is {}\".format(name,zero_column/(zero_column+nonzero_column)))\n print (\"***********checking filter sparsity*************\") \n for name,W in config.model.named_parameters():\n if name not in config.prune_ratios:\n continue\n W = W.cpu().detach().numpy()\n shape = W.shape\n W2d = W.reshape(shape[0],-1)\n row_l2_norm = LA.norm(W2d,2,axis=1)\n zero_row = np.sum(row_l2_norm == 0)\n nonzero_row = np.sum(row_l2_norm !=0)\n print (\"filter sparsity of layer {} is {}\".format(name,zero_row/(zero_row+nonzero_row)))\n print (\"************checking overall sparsity in conv layers*************\")\n for name,W in config.model.named_parameters():\n if name not in config.prune_ratios:\n continue\n W = W.cpu().detach().numpy() \n total_zeros +=np.sum(W==0)\n total_nonzeros +=np.sum(W!=0)\n print ('only consider conv layers, compression rate is {}'.format((total_zeros+total_nonzeros)/total_nonzeros))\n return\n \n if config.sparsity_type == \"irregular\":\n for name,W in config.model.named_parameters():\n if 'bias' in name:\n continue\n W = W.cpu().detach().numpy()\n zeros = np.sum(W==0)\n total_zeros+=zeros\n nonzeros = np.sum(W!=0)\n total_nonzeros+=nonzeros\n print (\"sparsity at layer {} is {}\".format(name,zeros/(zeros+nonzeros)))\n total_weight_number = total_zeros+total_nonzeros\n print ('overal compression rate is {}'.format(total_weight_number/total_nonzeros))\n elif config.sparsity_type == \"column\":\n for name,W in config.model.named_parameters():\n if name not in config.prune_ratios:\n continue\n W = W.cpu().detach().numpy()\n shape = W.shape\n W2d = W.reshape(shape[0],-1)\n column_l2_norm = LA.norm(W2d,2,axis=0)\n zero_column = np.sum(column_l2_norm == 0)\n nonzero_column = np.sum(column_l2_norm !=0)\n total_zeros +=np.sum(W==0)\n total_nonzeros +=np.sum(W!=0)\n print (\"column sparsity of layer {} is {}\".format(name,zero_column/(zero_column+nonzero_column)))\n print ('only consider conv layers, compression rate is {}'.format((total_zeros+total_nonzeros)/total_nonzeros)) \n elif config.sparsity_type == \"filter\":\n print ('inside if')\n print (config.prune_ratios)\n for name,W in config.model.named_parameters():\n if name not in config.prune_ratios:\n continue\n W = W.cpu().detach().numpy()\n shape = W.shape\n W2d = W.reshape(shape[0],-1)\n row_l2_norm = LA.norm(W2d,2,axis=1)\n zero_row = np.sum(row_l2_norm == 0)\n nonzero_row = np.sum(row_l2_norm !=0)\n total_zeros +=np.sum(W==0)\n total_nonzeros +=np.sum(W!=0)\n print (\"filter sparsity of layer {} is {}\".format(name,zero_row/(zero_row+nonzero_row)))\n print ('only consider conv layers, compression rate is {}'.format((total_zeros+total_nonzeros)/total_nonzeros))\n elif config.sparsity_type == \"bn_filter\":\n print ('inside bn_filter')\n 
print (config.prune_ratios)\n for i,(name,W) in enumerate(config.model.named_parameters()):\n if name not in config.prune_ratios:\n continue\n W = W.cpu().detach().numpy()\n zeros = np.sum(W==0)\n nonzeros = np.sum(W!=0)\n print (\"sparsity at layer {} is {}\".format(name,zeros/(zeros+nonzeros)))", "def test_ignored_crash_type_for_varinat_analysis(self):\n self.testcases[0].job_type = 'some_type1'\n self.testcases[0].project_name = 'project1'\n self.testcases[0].crash_state = 'abcde'\n self.testcases[0].one_time_crasher_flag = False\n self.testcases[0].crash_type = 'crash_type1'\n self.testcases[0].security_flag = True\n self.testcases[1].job_type = 'some_type2'\n self.testcases[1].project_name = 'project1'\n self.testcases[1].crash_state = 'vwxyz'\n self.testcases[1].crash_type = 'Data race'\n self.testcases[1].one_time_crasher_flag = False\n self.testcases[1].security_flag = True\n\n for t in self.testcases:\n t.put()\n\n # testcase2's varinat will be evaluated against testcase1\n self.testcase_variants[0].job_type = 'fake_engine_asan_project1'\n self.testcase_variants[0].testcase_id = self.testcases[0].key.id()\n self.testcase_variants[0].security_flag = True\n self.testcase_variants[1].job_type = 'some_type1'\n self.testcase_variants[1].crash_state = 'abcde'\n self.testcase_variants[1].crash_type = 'crash_type1'\n self.testcase_variants[1].testcase_id = self.testcases[1].key.id()\n self.testcase_variants[1].security_flag = True\n\n for v in self.testcase_variants:\n v.put()\n\n grouper.group_testcases()\n\n for index, t in enumerate(self.testcases):\n self.testcases[index] = data_handler.get_testcase_by_id(t.key.id())\n self.assertEqual(self.testcases[index].group_id, 0)\n self.assertTrue(self.testcases[index].is_leader)", "def filter_dont_care(gt: NDArrayObject, class_name: str) -> bool:\n if gt == \"ignore\":\n return True\n\n if gt == class_name:\n return True\n\n else:\n return False", "def analyse ( self ) :\n odin = self.get( self.RootInTES + 'DAQ/ODIN' )\n \n ## Check for PVs\n PVs = self.get( self.RootInTES + self.InputPrimaryVertices )\n if not PVs or PVs.size() == 0:\n self.setFilterPassed( False )\n return SUCCESS\n\n ## get recontructed B+ mesons\n Bs = self.select ( 'B' , eval( self._cut % self._selection ) )\n \n if not Bs or Bs.size() == 0:\n self.setFilterPassed( False )\n return SUCCESS \n\n ## Select random candidate\n r = self.random( odin )\n n = Bs.size()\n for i in xrange( n ):\n if r <= ( float( i ) / float( n ) ): break\n B = Bs[ i ]\n \n tisTos = self.tisTosSignal( B, \"Hlt1Track(AllL0|Muon)Decision\" )\n if tisTos.tos():\n ## This has to be a clone, otherwise it doesn't work...\n self.markParticle( B.clone() )\n self.setFilterPassed( True )\n else:\n self.setFilterPassed( False )\n\n return SUCCESS", "def _check_for_noise(self) -> None:\n safety_stop = 5\n while self._has_noise() and safety_stop > 0:\n self.filter(size=3)\n safety_stop -= 1", "def preprocess(num_frames, gt_data, pr_data, iou_threshold, ignore_categories,\n vis_threshold):\n ignore_categories = ignore_categories or []\n # Remove all classes that are neither in keep or ignore sets.\n keep_or_ignore_categories = np.unique(\n [POSITIVE_CATEGORY] + list(ignore_categories))\n gt_mask = np.isin(gt_data[:, CATEGORY_COLUMN], keep_or_ignore_categories)\n logging.info('remove irrelevant categories: annotations %d -> %d',\n len(gt_mask), gt_mask.sum())\n gt_data = gt_data[gt_mask, :]\n # Remove ignore classes and non-visible boxes.\n gt_data, pr_data = remove_ignored(num_frames, gt_data, pr_data,\n 
iou_threshold=iou_threshold,\n ignore_categories=ignore_categories,\n vis_threshold=vis_threshold)\n assert np.all(gt_data[:, CATEGORY_COLUMN] == POSITIVE_CATEGORY), (\n 'expect only categories to keep')\n assert np.all(gt_data[:, CONFIDENCE_COLUMN] == 1), (\n 'expect all remaining annotations have confidence one')\n return gt_data, pr_data", "def discretize_not(self,var=[]):\n\n for i in var:\n un=np.unique(self.data[:,i]).tolist()\n for j in un:\n inds=np.where(self.data[:,i]==j)[0]\n self.data[inds,i]=un.index(j)", "def _check_variables_in_raw_data(self):\n # make sure that all of the necessary variables are present\n # or mapped via the variable dict\n for key in [key for key in self._map_cols.keys()\n if key not in ['block', 'choice_accuracy', 'ID']]:\n assert self._map_cols[key] in self._raw_data.columns,\\\n 'missing {} from raw data df columns'.format(\n self._map_cols[key])\n\n condition_codes = self._raw_data[self._map_cols['condition']].unique()\n for cond in ['go', 'stop']:\n assert self._map_codes[cond] in condition_codes,\\\n ('missing {} from column: '.format(self._map_codes[cond]),\n self._map_cols[\"condition\"])\n\n # check that all unique non-nan values in the accuracy column \n # can be mapped onto either correct or incorrect,\n # as defined by the values in the var_dict.\n if self._map_cols['choice_accuracy'] in self._raw_data.columns:\n raw_acc_codes = self._raw_data[\n self._map_cols['choice_accuracy']].unique()\n raw_acc_codes = [i for i in raw_acc_codes if i==i]\n map_acc_codes = [self._map_codes['correct'],\n self._map_codes['incorrect']]\n for acc_code in raw_acc_codes:\n assert acc_code in map_acc_codes,\\\n '{} present in {} column.'. format(\n acc_code, self._cols[\"choice_accuracy\"]\n )\n\n return True", "def is_var_item_valid(item_list, index, ext, config):\n\n full_ext = f\"_VAR{index}_{ext}\"\n msg = []\n sed_cmds = []\n if 'BOTH' in item_list and ('FCST' in item_list or 'OBS' in item_list):\n\n msg.append(f\"Cannot set FCST{full_ext} or OBS{full_ext} if BOTH{full_ext} is set.\")\n elif ext == 'THRESH':\n # allow thresholds unless BOTH and (FCST or OBS) are set\n pass\n\n elif 'FCST' in item_list and 'OBS' not in item_list:\n # if FCST level has 1 item and OBS name is a python embedding script,\n # don't report error\n level_list = getlist(config.getraw('config',\n f'FCST_VAR{index}_LEVELS',\n ''))\n other_name = config.getraw('config', f'OBS_VAR{index}_NAME', '')\n skip_error_for_py_embed = ext == 'LEVELS' and is_python_script(other_name) and len(level_list) == 1\n # do not report error for OPTIONS since it isn't required to be the same length\n if ext not in ['OPTIONS'] and not skip_error_for_py_embed:\n msg.append(f\"If FCST{full_ext} is set, you must either set OBS{full_ext} or \"\n f\"change FCST{full_ext} to BOTH{full_ext}\")\n\n config_files = config.getstr('config', 'CONFIG_INPUT', '').split(',')\n for config_file in config_files:\n sed_cmds.append(f\"sed -i 's|^FCST{full_ext}|BOTH{full_ext}|g' {config_file}\")\n sed_cmds.append(f\"sed -i 's|{{FCST{full_ext}}}|{{BOTH{full_ext}}}|g' {config_file}\")\n\n elif 'OBS' in item_list and 'FCST' not in item_list:\n # if OBS level has 1 item and FCST name is a python embedding script,\n # don't report error\n level_list = getlist(config.getraw('config',\n f'OBS_VAR{index}_LEVELS',\n ''))\n other_name = config.getraw('config', f'FCST_VAR{index}_NAME', '')\n skip_error_for_py_embed = ext == 'LEVELS' and is_python_script(other_name) and len(level_list) == 1\n\n if ext not in ['OPTIONS'] and not 
skip_error_for_py_embed:\n msg.append(f\"If OBS{full_ext} is set, you must either set FCST{full_ext} or \"\n f\"change OBS{full_ext} to BOTH{full_ext}\")\n\n config_files = config.getstr('config', 'CONFIG_INPUT', '').split(',')\n for config_file in config_files:\n sed_cmds.append(f\"sed -i 's|^OBS{full_ext}|BOTH{full_ext}|g' {config_file}\")\n sed_cmds.append(f\"sed -i 's|{{OBS{full_ext}}}|{{BOTH{full_ext}}}|g' {config_file}\")\n\n return not bool(msg), msg, sed_cmds" ]
[ "0.5850183", "0.56285363", "0.562465", "0.55599266", "0.5522764", "0.5506512", "0.5470186", "0.5469287", "0.54548407", "0.5453748", "0.5453382", "0.54242575", "0.5410281", "0.53945994", "0.53779036", "0.5369179", "0.5358951", "0.5341739", "0.53268266", "0.531534", "0.5304359", "0.5297842", "0.5285028", "0.52813745", "0.52736586", "0.52717835", "0.5247317", "0.52316684", "0.5220113", "0.5212645", "0.52076995", "0.5207336", "0.5184591", "0.5183529", "0.518296", "0.51589686", "0.5155951", "0.5128538", "0.5115799", "0.5112998", "0.5096812", "0.50921065", "0.50897086", "0.5079991", "0.50703853", "0.5066124", "0.50574625", "0.5050702", "0.50450146", "0.50427186", "0.5037344", "0.50287044", "0.5027258", "0.5025939", "0.50196105", "0.50196105", "0.50113934", "0.5008405", "0.5008405", "0.5005313", "0.5000048", "0.49984172", "0.4978865", "0.49781165", "0.49641797", "0.49566597", "0.4945952", "0.49440292", "0.49405047", "0.49367177", "0.49334097", "0.49312165", "0.49297518", "0.49274707", "0.49270964", "0.4924945", "0.49220306", "0.49211204", "0.49176237", "0.4917054", "0.49168164", "0.4912714", "0.49017686", "0.4898701", "0.48964715", "0.4887572", "0.48856187", "0.488232", "0.4881679", "0.4879068", "0.48786098", "0.4874977", "0.486737", "0.4866434", "0.48656678", "0.48640674", "0.48619622", "0.48605037", "0.48601386", "0.4855349", "0.4854045" ]
0.0
-1
compute the dereddened values of B-V and M_V for the six Cepheids in our sample (parallax cutoff = 0.25 and B-V error < 0.1).
def deredden_cepheids(df_variables):
    extinction_coefficients = {'2365-2764-1': np.array([0.2622, 0.844]),
                               '4109-638-1': np.array([0.0524, 0.1576]),
                               '2058-56-1': np.array([0.0751, 0.248]),
                               '3642-2459-1': np.array([0.1907, 0.608]),
                               '3999-1391-1': np.array([0.3911, 1.2480]),
                               '2607-1448-1': np.array([0.0430, 0.1310])}
    print "Dereddening Cepheids:"
    for tyc in extinction_coefficients.keys():
        print "%s.." % tyc
        b_minus_v = df_variables[df_variables.tycho2_id == tyc].B_V
        m_v = df_variables[df_variables.tycho2_id == tyc].M_V
        extinc = extinction_coefficients[tyc]
        df_variables.set_value(df_variables.tycho2_id == tyc, 'B_V', b_minus_v - extinc[0])
        df_variables.set_value(df_variables.tycho2_id == tyc, 'M_V', m_v - extinc[1])
    print "..Done\n----------"
    return df_variables
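A minimal modern-Python usage sketch for the document above (an editorial illustration, not part of the dataset row). The original function is Python 2 (bare print statements) and calls DataFrame.set_value, which was deprecated in pandas 0.21 and removed in 1.0, so the sketch performs the same per-star subtraction with .loc. Only the Tycho-2 IDs and the extinction coefficients are taken from the document; the B_V and M_V values in the toy DataFrame are invented for illustration.

import numpy as np
import pandas as pd

# Coefficients copied from the document above (two of the six stars shown).
# The first entry is subtracted from B_V and the second from M_V, consistent
# with [E(B-V), A_V] pairs (their ratio is ~3.1, the standard R_V).
extinction_coefficients = {'2365-2764-1': np.array([0.2622, 0.844]),
                           '4109-638-1': np.array([0.0524, 0.1576])}

# Toy input frame; the column names match what deredden_cepheids expects,
# but the magnitudes here are hypothetical.
df = pd.DataFrame({'tycho2_id': ['2365-2764-1', '4109-638-1'],
                   'B_V': [0.95, 0.80],
                   'M_V': [-3.2, -2.9]})

for tyc, (e_bv, a_v) in extinction_coefficients.items():
    mask = df.tycho2_id == tyc
    df.loc[mask, 'B_V'] -= e_bv   # dereddened colour: (B-V)_0 = (B-V) - E(B-V)
    df.loc[mask, 'M_V'] -= a_v    # extinction-corrected magnitude: M_V,0 = M_V - A_V

print(df)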
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def deredden(EBV,filt):\n conversion_data = ascii.read(datapath+\"/stellar_param_data/sf11.txt\")\n assert filt in conversion_data[\"filter\"], (filt, conversion_data[\"filter\"])\n return EBV * float(conversion_data[\"AB_EBV\"][np.where(conversion_data[\"filter\"]==filt)[0]])", "def get_debiet_inlaatduiker(verval, m_constant=1, schuifhoogte=1.78, breedte_duiker=1.75):\n return m_constant * schuifhoogte * breedte_duiker * np.sqrt(2 * 9.81 * verval)", "def _vce(self):\n sum = 0.0\n for sail in self.sails:\n cl2 = sail.cl(self.awa)**2\n cd2 = sail.cd(self.awa)**2\n sum += sail.area * sail.vce * sail.bk * np.sqrt(cl2+cd2)\n self._area()\n deltaCH = 0 if self.sails[1].up!=True else (1-self.ftj)*0.05*self.sails[1].IG\n Zce = sum/(self.area*np.sqrt(self.cl**2+self.cd**2)) - deltaCH\n return (Zce*(1-0.203*(1-self.flat)-0.451*(1-self.flat)*(1-self.fractionality)))", "def calculate_KDE(self, states, bw=np.logspace(-1, 1, 20), cv=5):\n params = {'bandwidth': bw}\n grid = GridSearchCV(KernelDensity(), params, cv=cv)\n grid.fit(states)\n kde = grid.best_estimator_\n return kde", "def dvdt(self, args: List[float]) -> float:\n v, n_kvhh, ca = args\n return ((-10.0*self.params.area \n * (self.kvhh.i(v, n=n_kvhh) \n + self.cav.i(v) \n + self.kca.i(v, ca=ca) \n + self.nap.i(v) \n + self.leak.i(v))) \n / (10.0*self.params.cm*self.params.area))", "def rk4_damp_constvBC(xy, v, NL, KL, BM, Mm, beta, h, BND):\n dx1 = h * v\n dv1 = h * fdspring(xy, v, NL, KL, BM, Mm, beta)\n dx1[BND] = h * v[BND]\n dv1[BND] = 0.\n dx2 = h * (v + dv1 / 2.)\n dv2 = h * fdspring(xy + dx1 / 2., v + dv1 / 2., NL, KL, BM, Mm, beta) # xy,NL,KL,BM, Mm, beta, NP,nn\n dv2[BND] = h * v[BND]\n dx3 = h * (v + dv2 / 2.)\n dv3 = h * fdspring(xy + dx2 / 2., v + dv2 / 2., NL, KL, BM, Mm, beta)\n dv3[BND] = h * v[BND]\n dx4 = h * (v + dv3)\n dv4 = h * fdspring(xy + dx3, v + dv3, NL, KL, BM, Mm, beta)\n dv4[BND] = h * v[BND]\n xout = xy + (dx1 + 2. * dx2 + 2. * dx3 + dx4) / 6.\n vout = v + (dv1 + 2. * dv2 + 2. 
* dv3 + dv4) / 6.\n\n # print 'rk BM = ', BM\n return dx1, dv1, dx2, dv2, dx3, dv3, dx4, dv4, xout, vout", "def fit_evd(self):\n\n # EVD only work on square matrices as we need to compute the eigenvalues and eigenvectors\n # For this we compute the covariance matrix K\n # K should be n x n matrix (pixels x pixels)\n\n # The covariance matrix is nxn\n self.cov_matrix = np.zeros(shape=[self.n_features, self.n_features], dtype='uint8')\n\n self.cov_matrix = np.cov(self.norm_matrix, rowvar=False)\n # C is a symmetric matrix and so it can be diagonalized:\n eig_val, eig_vec = linalg.eig(self.cov_matrix)\n\n # Sorting the eigenvectors by decreasing eigenvalues\n # [Start : stop : stepcount] stepcount is reversed\n idx = eig_val.argsort()[::-1]\n eig_val, eig_vec = eig_val[idx], eig_vec[:, idx]\n\n # Explained_variance tell us how much of the variance in the data each eigen value explains\n explained_variance = eig_val / (self.n_samples - 1)\n # total_var is the total variance in the data\n total_var = explained_variance.sum()\n explained_variance_ratio = explained_variance / total_var\n # The cumulative sum of all ratios\n ratio_cumsum = np.cumsum(explained_variance_ratio)\n\n # We search in the cumsum for the index of the value which, when added, corresponds to the quality_percent\n # The index of the cumsum gives us the components we need to add to explain X quality percent of our data\n n_components = np.searchsorted(ratio_cumsum, self.quality_percent, side='right') + 1\n\n self.components = eig_vec[:n_components]\n print(\"The principal components have been calculated using eigendecomposition\", self.components.shape)\n\n return self.components", "def get_hben_voor_debiet(debiet, h_bov, m_constant=1, schuifhoogte=1.78, breedte_duiker=1.75):\n return h_bov - np.square(Q / (m_constant * schuifhoogte * breedte_duiker)) / (2 * 9.81)", "def drude(ep, ew, eb, epc, e0, beta, nn, tnm):\n\n logging.info('plasmon energy (eV): %g', ep)\n logging.info('plasmon width (eV) : %g', ew)\n logging.info('binding energy (eV): %g', eb)\n logging.info('ev/channel : %g', epc)\n logging.info('E0(kev) : %g', e0)\n logging.info('beta(mrad) : %g', beta)\n logging.info('number of data points : %g', nn)\n logging.info('thickness(nm) : %g', tnm)\n\n b = beta / 1000. # rad\n T = 1000. * e0 * (1. + e0 / 1022.12) / (1. + e0 / 511.06)**2 # eV\n tgt = 1000. * e0 * (1022.12 + e0) / (511.06 + e0) # eV\n rk0 = 2590. * (1. + e0 / 511.06) * np.sqrt(2. * T / 511060)\n fideps = open('Drude.eps', 'w')\n fidssd = open('Drude.ssd', 'w')\n\n iw = np.arange(2, nn+2)\n e = epc * (iw - 1.0)\n eps = 1.0 - ep**2 / (e**2 - eb**2 + e * ew * 1.0j)\n eps1 = np.real(eps)\n eps2 = np.imag(eps)\n # eps1 = 1.0 - ep**2 / (e**2 + ew**2)\n # eps2 = ew * ep**2 / e / (e**2 + ew**2)\n elf = ep**2 * e * ew / ((e**2 - ep**2)**2 + (e * ew)**2)\n rereps = eps1 / (eps1 * eps1 + eps2 * eps2)\n the = e / tgt # varies with energy loss!\n # srfelf = 4..*eps2./((1+eps1).^2+eps2.^2) - elf %equivalent\n srfelf = np.imag(-4.0 / (1.0 + eps)) - elf # for 2 surfaces\n angdep = np.arctan(b / the) / the - b / (b * b + the * the)\n srfint = angdep * srfelf / (3.1416 * 0.0529 * rk0 * T) # probability per eV\n anglog = np.log(1.0 + b * b / the / the)\n volint = tnm / 3.1416 / 0.0529 / T / 2. 
* elf * anglog # probability per eV\n ssd = volint + srfint\n\n # %fprintf(fidssd,['%0.15g %0.15g %0.15g %0.15g \\n'], [evolintsrfintssd])\n # fprintf(fidssd,['%0.15g %0.15g\\n'], [essd])\n # fprintf(fideps,['%0.15g %0.15g %0.15g \\n'], [eeps1eps2])\n # fclose(fidssd)\n # fclose(fideps)\n # %fprintf(1,'For Ep(eV) = %f, width(eV) = %f, Eb(eV) = %f, eV/ch = %f \\n',ep,ew,eb,epc)\n # %fprintf(1,'beta(mrad) = %f, E0(keV) = %f, t(nm) = %f, #chan = %f\\n',beta,e0,tnm,nn)\n\n # %Integrate over all energy loss\n Ps = np.trapz(srfint, e) # 2 surfaces but includes negative begrenzungs contribn.\n Pv = np.trapz(volint, e) # integrated volume probability\n lam = tnm / Pv # does NOT depend on free-electron approximation (no damping).\n lamfe = 4. * 0.05292 * T / ep / np.log(1.0 + (b * tgt / ep)**2) # Eq.(3.44) approximation\n logging.info('Ps(2surfaces+begrenzung terms)=%g,Pv=t/lambda(beta)= %g', Ps, Pv)\n logging.info('Volume-plasmon MFP(nm) = %g, Free-electron MFP(nm) = %f', lam, lamfe)\n logging.info('--------------------------------')\n\n return e, eps1, eps2, elf, srfelf, rereps, ssd, volint, srfint, Ps, Pv, lam, lamfe", "def delta_v_calc(mass_initial,\n mass_final,\n v_exhaust,\n ):\n\n return v_exhaust * math.log(mass_initial / mass_final)", "def run_test(d):\n\n ######### Problem Specification\n\n # Data generation parameters\n prior_mu_z = np.zeros(d, dtype=np.float32) # Prior mean\n prior_sigma_z = np.eye(d, dtype=np.float32) # Prior covariance matrix\n\n # True model parameters\n num_range = np.arange(-(d-1)/2, (d+1)/2, dtype=np.float32)\n\n t_delta = num_range / 5 \n\n if d == 1:\n t_sigma = np.ones(1)\n else: \n # Allow sigma to range from 0.1 to 1\n t_sigma = 36/(10*(d-1)**2) * num_range**2 + 0.1 \n\n ######### Variable Initialization\n\n # Initial model parameters - same across all methods\n init_delta = prior_mu_z.copy()\n init_log_sigma = 3 * np.ones(d)\n\n # Initial HVAE variational parameters\n init_T = 5.\n init_eps = 0.005 * np.ones(d)\n max_eps = params['max_eps'] * np.ones(d)\n init_logit_eps = np.log(init_eps/(max_eps - init_eps))\n init_log_T_0 = np.log(init_T - 1)\n\n # Initial NF variational parameters\n init_u_pre_reparam = scipy.stats.truncnorm.rvs(-2, 2, scale=0.1, size=d)\n init_w = scipy.stats.truncnorm.rvs(-2, 2, scale=0.1, size=d)\n init_b = 0.1\n\n # Initial VAE parameters\n init_mu_z = prior_mu_z.copy()\n init_log_sigma_z = np.ones(d)\n\n ######### Set up models\n\n HVAE_model_1 = HVAE(\n ['delta', 'log_sigma', 'logit_eps', 'log_T_0'],\n [init_delta, init_log_sigma, init_logit_eps, init_log_T_0], \n 'HVAE_1', d, params['HVAE_K_1'])\n HVAE_model_2 = HVAE(\n ['delta', 'log_sigma', 'logit_eps', 'log_T_0'],\n [init_delta, init_log_sigma, init_logit_eps, init_log_T_0], \n 'HVAE_2', d, params['HVAE_K_2'])\n\n HVAE_model_notemp_1 = HVAE(\n ['delta', 'log_sigma', 'logit_eps'],\n [init_delta, init_log_sigma, init_logit_eps], \n 'HVAE_notemp_1', d, params['HVAE_K_1'])\n HVAE_model_notemp_2 = HVAE(\n ['delta', 'log_sigma', 'logit_eps'], \n [init_delta, init_log_sigma, init_logit_eps],\n 'HVAE_notemp_2', d, params['HVAE_K_2'])\n\n NF_model_1 = NF(\n ['delta', 'log_sigma', 'u_pre_reparam', 'w', 'b'],\n [init_delta, init_log_sigma, init_u_pre_reparam, init_w, init_b],\n 'NF_1', d, params['NF_K_1'])\n NF_model_2 = NF(\n ['delta', 'log_sigma', 'u_pre_reparam', 'w', 'b'],\n [init_delta, init_log_sigma, init_u_pre_reparam, init_w, init_b],\n 'NF_2', d, params['NF_K_2'])\n\n VB_model = VB(['delta', 'log_sigma', 'mu_z', 'log_sigma_z'], \n [init_delta, init_log_sigma, init_mu_z, 
init_log_sigma_z], 'VB', d)\n\n model_list = [HVAE_model_1, HVAE_model_2, HVAE_model_notemp_1, \n HVAE_model_notemp_2, NF_model_1, NF_model_2, VB_model]\n \n ######### Generate Training Data & Save - One for each test\n\n train_data_list = []\n\n for i in range(params['n_tests']):\n z = np.random.multivariate_normal(prior_mu_z, prior_sigma_z)\n x = np.random.multivariate_normal(z + t_delta, np.diag(t_sigma**2), \n size=params['n_data'])\n train_data_list.append(x)\n\n # Folder should have already been created in the initializations\n data_path = os.path.join('save', str(d), 'train_data.p')\n pickle.dump(train_data_list, open(data_path, 'wb')) \n\n ######### Train models\n\n sess = tf.Session()\n sess.run(tf.global_variables_initializer())\n\n # Store the final parameter values for all test runs in this dictionary\n final_params = {}\n\n for m in model_list:\n\n final_values = []\n\n for i in range(params['n_tests']):\n (delta, sigma) = m.train(sess, train_data_list[i], i)\n final_values.append((delta, sigma))\n\n final_params[m.model_name] = final_values.copy()\n\n ######### Test models using difference between parameters\n\n param_diffs = {}\n\n for m in model_list:\n\n diffs = []\n\n for i in range(params['n_tests']):\n delta = final_params[m.model_name][i][0]\n sigma = final_params[m.model_name][i][1]\n\n delta_diff = np.sum((delta - t_delta)**2)\n sigma_diff = np.sum((sigma - t_sigma)**2)\n\n diffs.append((delta_diff, sigma_diff))\n\n param_diffs[m.model_name] = diffs.copy()\n\n # Save parameter differences in a pickle file\n diff_path = os.path.join('save', str(d), 'all_diffs.p')\n pickle.dump(param_diffs, open(diff_path, 'wb'))", "def df2dx5_func(self,X):\n result = (\n -self.rj*self.rm*self.k_spr*self.b_spr * (\n np.exp(self.b_spr*(self.rm*X[4] + self.rj*X[0]))\n * ((self.rm*X[4] + self.rj*X[0])>=0)\n ) / self.Ij\n )\n return(result)", "def E(self, dstrct):\n rep_votes = dstrct.rep_votes + self.properties['sen_red']\n dem_votes = dstrct.dem_votes + self.properties['sen_blue']\n\n thresh = threshold(rep_votes+dem_votes)\n rep_wasted = wasted_votes(rep_votes, thresh)\n dem_wasted = wasted_votes(dem_votes, thresh)\n gap = (rep_wasted - dem_wasted)/(rep_votes + dem_votes)\n score = 1-abs(gap)\n\n self.E_ = self.w_E * score\n return self.E_", "def ev2ve(eV): \n return cv*np.sqrt( eV*(eV+2.e0*mec2))/(eV+mec2)", "def kde_agent(beta, bid, i, m, bw = None, *args):\n if bw is None: bw = 'scott'\n\n i0 = D.index[0]\n if only_cond == {'d'}:\n s1 = D.ix[i0:i, 's1_rp']\n if len(s1) == 1:\n s1[i0+1] = sp.mean([s1, 10])\n else:\n s1est = args[0]\n ar = D.ix[i, 'out_bool']\n b1 = D.ix[i, 'bid']\n b1i = round2(b1)\n if len(s1est) == 1:\n s1est.append(sp.mean([s1est[0], 10])) \n else:\n gks = stats.gaussian_kde(s1est, bw_method = bw)\n if ar:\n aestint = gks.pdf(sp.arange(0, b1i, 0.1))\n aest = sp.dot(aestint, sp.arange(0, b1i, 0.1)) / b1i \n else:\n aestint = gks.pdf(sp.arange(b1i, 10, 0.1))\n aest = sp.dot(aestint, sp.arange(b1i, 10, 0.1)) / (10-b1i) \n s1est.append(aest) \n s1 = s1est\n gks = stats.gaussian_kde(s1, bw_method = bw)\n Ks1 = gks.pdf(B) \n Ks1 /= sum(Ks1)\n\n if m == 'SC':\n if only_cond == {'d'}:\n s2 = D.ix[i0:i, 's2_rp'].dropna()\n if len(s2) == 1:\n s2[i+1] = sp.mean([s2[i], 10]) \n else:\n s2est = args[1]\n if len(s2est) == 1:\n s2est.append(sp.mean([s2est[0], 10])) \n else:\n gks = stats.gaussian_kde(s2est, bw_method = bw)\n if ar:\n aestint = gks.pdf(sp.arange(0, b1i, 0.1))\n aest = sp.dot(aestint, sp.arange(0, b1i, 0.1)) / b1i \n else:\n aestint = gks.pdf(sp.arange(b1i, 10, 
0.1))\n aest = sp.dot(aestint, sp.arange(b1i, 10, 0.1)) / (10-b1i) \n s2est.append(aest) \n s2 = s2est\n gks = stats.gaussian_kde(s2, bw_method = bw)\n Ks2 = gks.pdf(B) \n Ks2 /= sum(Ks2)\n U = kdeag_uf(Ks1, Ks2, None)\n s2orb2 = s2\n elif m == 'NC':\n U = kdeag_uf(Ks1, None, None)\n s2orb2 = None\n elif m == 'BC':\n if only_cond == {'d'}:\n b2 = D.ix[i0:i, 'b2_bid'].dropna()\n if len(b2) == 1:\n b2[i+1] = sp.mean([b2[i], 10]) \n else:\n b2est = args[1]\n if len(b2est) == 1:\n b2est.append(sp.mean([b2est[0], 10])) \n else:\n gks = stats.gaussian_kde(b2est, bw_method = bw)\n if ar:\n aestint = gks.pdf(sp.arange(0, b1i, 0.1))\n aest = sp.dot(aestint, sp.arange(0, b1i, 0.1)) / b1i \n else:\n aestint = gks.pdf(sp.arange(b1i, 10, 0.1))\n aest = sp.dot(aestint, sp.arange(b1i, 10, 0.1)) / (10-b1i) \n b2est.append(aest) \n b2 = b2est\n gks = stats.gaussian_kde(b2, bw_method = bw)\n Kb2 = gks.pdf(B) \n Kb2 /= sum(Kb2)\n U = kdeag_uf(Ks1, None, Kb2)\n s2orb2 = b2\n \n opb = B[sp.argmax(U)]\n p = boltzmann_dist(beta, U, int(bid*((Bbins-1)/10)))#p = P[int(bid*((Bbins-1)/10))]\n return p, opb, s1, s2orb2, U", "def calcEVals(self):\n self.eVals,self.eVecs = np.linalg.eigh(self.rhoOp)", "def v_cmax(self, tl, ared):\n\t return ared*self.VCMAX0*exp(self.HAV/(R*self.TO)*(1. - self.TO/tl))/(1. + exp((self.SVC*tl - self.HDV)/(R*tl)))", "def fAVM(RHOB,Dw,Ds,Df,Dc1,PHIc1,Ck,Dk,PHIk,RSK):\n#\n# 5.1.1 Initialise Outputs & Check for missing values in inputs:\n# --------------------------------------------------------------\n\tPHIt=MissingValue\n\tPHIe=MissingValue\n\tCBW=MissingValue\n\tBVW=MissingValue\n\tHCPV=MissingValue\n\tVf=MissingValue\n\tVs=MissingValue\n\tSwt=MissingValue\n\tSwe=MissingValue\n\tVc1=MissingValue\n\tVc2=MissingValue\n\tVc3=MissingValue\n\tVk=MissingValue\n\tToc=MissingValue\n\tQc=MissingValue\n\tGDen=MissingValue\n\tif MissingValue in (RHOB,Dw,Ds,Df,Dc1,PHIc1,Ck,Dk,PHIk,RSK):\n\t\treturn PHIt,PHIe,CBW,BVW,HCPV,Vf,Vs,Swt,Swe,Vc1,Vc2,Vc3,Vk,Toc,Qc,GDen\n#\n# 5.1.2 Initialise parameters:\n# ----------------------------\n\tNIter=0\n\tNIterMax=100\n\tErrIter=10000\n\tTolErrIter=0.0001\n\tIterEnd=0\n\tVk=0.000 # Initially assumme no kerogen\n\tDh=Df\n#\n#\t5.1.3 Start interative loop:\n#\t-----------------------------\n\twhile IterEnd==0:\n#\n# 5.5.3.1 Organic and Inorganic Component Density Values:\n# -------------------------------------------------------\n\t\tDBI=(1-PHIc1)*Dc1+(PHIc1*Dw) # Bulk Density of Inorganic Component\n\t\tDBO=(1-PHIk)*Dk+(PHIk*Dh)# Bulk Density of Organic Component\n#\n# 5.1.3.2 Compute Volume of Organic and Inorganic Component:\n# ----------------------------------------------------------\n\t\tVOR=(DBI-RHOB)/(DBI-DBO)\n\t\tVOR=ImposeLimits(VOR,0,1)\n\t\tVIN=(1-VOR)\n#\n# 5.1.3.3 Compute Volumetrics, Total & Effective Porosity and Total & Effective Water Saturation:\n# ---------------------------------------\t-------------------------------------------------------\n\t\tVc1=VIN*(1-PHIc1)\n\t\tVc2=0.000\n\t\tVc3=0.000\n\t\tVk=VOR*(1-PHIk)\n\t\tPHIt=VIN*PHIc1+VOR*PHIk\n\t\tPHIe=VOR*PHIk\n\t\tSwt=1-((VOR*PHIk)/PHIt)\n\t\tSwt=ImposeLimits(Swt,0,1)\n\t\tSwe=0.000\n\t\tSxot=Swt\n\t\tSxoe=Swe\n#\n# 5.1.3.4 Compute Bulk Volume of Water, Hydrocarbon Pore Volume and Pore Space Fluid Properties:\n# ---------------------------------------\t------------------------------------------------------\n\t\tBVW=PHIe*Swe\n\t\tHCPV=PHIe*(1-Swe)\n\t\tVs=RSK*Vk # Estimate volume of adsorbed (sorbed) 
hydrocarbon\n\t\tVs=ImposeLimits(Vs,0,HCPV)\n\t\tVf=(HCPV-Vs)\n\t\tVf=ImposeLimits(Vf,0,(HCPV-Vs))\n#\n# 5.1.3.5 Recompute hydrocarbon properties in the pore space:\n# -----------------------------------------------------------\n\t\tSum=Vs+Vf\n\t\tif(Sum<=0.000):\n\t\t\tDh=Df\n\t\telse:\n\t\t\tDh=(Ds*Vs+Df*Vf)/(Vs+Vf)\n#\n# 5.1.4 Test for interative computations:\n# ---------------------------------------\n\t\tNIter=NIter+1\n\t\tif(NIter>=NIterMax):\n\t\t\tIterEnd=1\n\t\telse:\t\t\t\n\t\t\tif(NIter<=2):\n\t\t\t\tResultOld=[1,1,1,1,1,1,1,1,1] # Initial Setting\n\t\t\t\tResultNew=[Vc1,Vc2,Vc3,Vk,Vs,Vf,PHIe,Swt,Swe] # Current Results\n\t\t\t\tErrIter=ComputeMatrixDifference(ResultOld,ResultNew)\n\t\t\t\tResultOld=ResultNew\n\t\t\telse:\n\t\t\t\tResultNew=[Vc1,Vc2,Vc3,Vk,Vs,Vf,PHIe,Swt,Swe] # Current Results\n\t\t\t\tErrIter=ComputeMatrixDifference(ResultOld,ResultNew)\n\t\t\t\tResultOld=ResultNew\n\t\t\t\tif(ErrIter<=TolErrIter):\n\t\t\t\t\tIterEnd=1\n#\n# 5.1.6 Preoutput computations:\n# ------------------------------\n\tQc=MissingValue\n\tDc2=0.00\n\tDc3=0.00\n\tCBW=PHIt-PHIe # The assumption is that all microporosity can be considered to be clay bound water.\n\tToc=fToc_Wtf(Vc1,Vc2,Vc3,Vk,0,Ck,Dc1,Dc2,Dc3,Dk,Dw) # TOC-wt fraction. Note: Vrw=0 in fToc_Wtf(Vc1,Vc2,Vc3,Vk,Vrw,Ck,Dc1,Dc2,Dc3,Dk,Dw)\n\tGDen=fOrmGDen(Vc1,Vc2,Vc3,Vk,0,Dc1,Dc2,Dc3,Dk,Dw) # Grain Density. Note: Vrw=0 in fOrmGDen(Vc1,Vc2,Vc3,Vk,Vrw,Dc1,Dc2,Dc3,Dk,Dw)\n#\n# 5.5.7 Output Results:\n# \t-------------------\n\treturn PHIt,PHIe,CBW,BVW,HCPV,Vf,Vs,Swt,Swe,Vc1,Vc2,Vc3,Vk,Toc,Qc,GDen", "def get_dnde_error_mev_gtlike(spectrum,covariance_matrix,energies):\n from . models import gtlike_unscale_all_parameters\n spectrum = gtlike_unscale_all_parameters(spectrum)\n\n dnde_err = np.empty_like(energies)\n for i,energy in enumerate(energies):\n\n # method taken from pyLikelihood.FluxDensity\n srcpars = pyLikelihood.StringVector()\n spectrum.getParamNames(srcpars)\n arg = pyLikelihood.dArg(energy)\n partials = np.array([spectrum.derivByParam(arg, x) for x in srcpars])\n dnde_err[i] = np.sqrt(np.dot(partials, np.dot(covariance_matrix, partials)))\n return dnde_err", "def each_evidence(y_, f, fh, v, s, vh, N, D):\n epsilon = 1e-5\n alpha = 1.0\n beta = 1.0\n lam = alpha / beta\n tmp = (vh @ (f @ np.ascontiguousarray(y_)))\n for _ in range(11):\n # should converge after at most 10 steps\n # typically converge after two or three steps\n gamma = (s / (s + lam)).sum()\n # A = v @ np.diag(alpha + beta * s) @ v.transpose() # no need to compute A\n # A_inv = v @ np.diag(1.0 / (alpha + beta * s)) @ v.transpose() # no need to compute A_inv\n m = v @ (tmp * beta / (alpha + beta * s))\n alpha_de = (m * m).sum()\n alpha = gamma / (alpha_de + epsilon)\n beta_de = ((y_ - fh @ m) ** 2).sum()\n beta = (N - gamma) / (beta_de + epsilon)\n new_lam = alpha / beta\n if np.abs(new_lam - lam) / lam < 0.01:\n break\n lam = new_lam\n evidence = D / 2.0 * np.log(alpha) \\\n + N / 2.0 * np.log(beta) \\\n - 0.5 * np.sum(np.log(alpha + beta * s)) \\\n - beta / 2.0 * (beta_de + epsilon) \\\n - alpha / 2.0 * (alpha_de + epsilon) \\\n - N / 2.0 * np.log(2 * np.pi)\n return evidence / N, alpha, beta, m", "def build_rhs():\n\n def div(\n coeff_rho,\n momentum_x,\n momentum_y,\n momentum_z,\n ):\n \"\"\"Computes the divergence of the velocity field.\"\"\"\n # Compute the fourth order derivative of the pressure for the face\n # velocity correction.\n p_corr = (\n states['p']\n if self._params.enable_rhie_chow_correction else states['dp'])\n d4p_dx4 = 
self._kernel_op.apply_kernel_op_x(p_corr, 'k4d2x')\n d4p_dy4 = self._kernel_op.apply_kernel_op_y(p_corr, 'k4d2y')\n d4p_dz4 = self._kernel_op.apply_kernel_op_z(p_corr, 'k4d2z',\n 'k4d2zsh')\n\n # Compute velocity gradient based on interpolated values on cell faces.\n coeff_x = dt / (4. * coeff_rho * dx**2)\n du = self._kernel_op.apply_kernel_op_x(momentum_x, 'kDx')\n du_dx = [\n du_i / (2. * dx) + coeff_x * d4p_dx4_i\n for du_i, d4p_dx4_i in zip(du, d4p_dx4)\n ]\n\n coeff_y = dt / (4. * coeff_rho * dy**2)\n dv = self._kernel_op.apply_kernel_op_y(momentum_y, 'kDy')\n dv_dy = [\n dv_i / (2. * dy) + coeff_y * d4p_dy4_i\n for dv_i, d4p_dy4_i in zip(dv, d4p_dy4)\n ]\n\n coeff_z = dt / (4. * coeff_rho * dz**2)\n dw = self._kernel_op.apply_kernel_op_z(momentum_z, 'kDz', 'kDzsh')\n dw_dz = [\n dw_i / (2. * dz) + coeff_z * d4p_dz4_i\n for dw_i, d4p_dz4_i in zip(dw, d4p_dz4)\n ]\n\n return [\n du_dx_i + dv_dy_i + dw_dz_i\n for du_dx_i, dv_dy_i, dw_dz_i in zip(du_dx, dv_dy, dw_dz)\n ]\n\n def add_factor(\n v,\n factor,\n ):\n return [factor * v_i for v_i in v]\n\n b_terms = {\n _B_TERM_SOURCE_RHO: add_factor(src_rho, inv_dt),\n }\n if isinstance(rho_info, ConstantDensityInfo):\n b_terms.update({\n _B_TERM_DIV:\n add_factor(\n div(rho_info.rho, states['u'], states['v'], states['w']),\n inv_dt * rho_info.rho),\n _B_TERM_DRHO_DT: [\n tf.zeros_like(src_rho_i) for src_rho_i in src_rho\n ],\n })\n\n elif isinstance(rho_info, VariableDensityInfo):\n b_terms.update({\n _B_TERM_DIV:\n add_factor(\n div(1.0, states['rho_u'], states['rho_v'], states['rho_w']),\n inv_dt),\n _B_TERM_DRHO_DT:\n add_factor(rho_info.drho_dt, inv_dt),\n })\n\n else:\n raise ValueError('`rho_info` has to be either `ConstantDensityInfo` or '\n '`VariableDensityInfo`.')\n\n # pylint: disable=g-complex-comprehension\n return [(div_i + drho_dt_i - src_rho_i)\n for div_i, drho_dt_i, src_rho_i in zip(\n b_terms[_B_TERM_DIV],\n b_terms[_B_TERM_DRHO_DT],\n b_terms[_B_TERM_SOURCE_RHO],\n )], b_terms\n # pylint: enable=g-complex-comprehension", "def calibration(N,kb,T,Ekinv,V):\n lamb = np.sqrt((N-1)*3*kb*T/(Ekinv*2))\n \n if lamb < 0.9999:\n V = lamb*V\n elif lamb>1.0001:\n V = lamb*V\n \n return V", "def method_ERVKDE(data, xs, ys, DIMENSION = 2):\n mu, sigma = rvkde_sigmas(data, int(len(data)/10), DIMENSION)\n sig_avg = np.mean(np.std(data))\n diff = ((4*sig_avg**5/(len(data)*(DIMENSION+2)))**(1/(DIMENSION+4))) - np.median(sigma)\n elevated_sigma = np.array([s + diff for s in sigma])\n return get_density(xs, ys, mu, elevated_sigma, DIMENSION)", "def _epsilon(vds) -> np.ndarray:\n return vds[\"rhod_tot\"] / vds[\"rho\"]", "def ES_SVD(U, sigma, V, time, f_fault, f_side, PMItreshold, estimate_xi_func=get_SVDxi, estimate_xi_func_params=None):\n\n # Get the search region\n m = sigma.size\n f_fault = np.asanyarray(f_fault)\n f_side = np.asanyarray(f_side)\n dt = time[1] - time[0]\n Fs = 1.0/dt\n PMI = [] #PMI is here the envelope score\n W = []\n for i in range(0, f_fault.size):\n PMI.append(np.zeros(m))\n W.append(np.zeros(m))\n\n # Calculate PMI for each fault type\n for i in range(0, m):\n if estimate_xi_func_params is None:\n a_i = estimate_xi_func(U, sigma, V, i)\n else:\n a_i = estimate_xi_func(U, sigma, V, i, estimate_xi_func_params)\n a_i = envelope(a_i)\n Y, df = fft(a_i, Fs)\n # Calculate PMI for each fault type\n for k in range(0, f_fault.size):\n PMI[k][i] = diagnosefft(Y, df, f_fault[k], 1.0, f_side[k])\n\n # Calculate weights\n for k in range(0, f_fault.size):\n temp = 0.0\n for i in range(0, m):\n if PMI[k][i] > PMItreshold:\n temp 
+= PMI[k][i]\n for i in range(0, m):\n if PMI[k][i] > PMItreshold:\n W[k][i] = PMI[k][i]/temp\n\n # Return data\n return PMI, W", "def fD(self, vpd):\n\t if vpd < 0.1:\n\t return 1.\n\t else:\n\t return 3/13./sqrt(vpd/1000.)", "def test_parameter_derivatives(self):\n self.set_up()\n shape_gradient = np.zeros((self.vmecOptimization.nzeta+1,\n self.vmecOptimization.ntheta)) \n self.assertRaises(ValueError,\n self.vmecOptimization.vmec_shape_gradient,\n shape_gradient,self.vmecOptimization.vmecOutputObject)\n self.tear_down()", "def __init__(self, M, rat):\n self.M = M\n xc0, _ = np.polynomial.chebyshev.chebgauss(M-0)\n xc1, _ = np.polynomial.chebyshev.chebgauss(M-1)\n xc2, _ = np.polynomial.chebyshev.chebgauss(M-2)\n # vandermonde and inverse vandermonde matrices\n self.V0 = np.polynomial.chebyshev.chebvander(xc0, M-1)\n self.V1 = np.polynomial.chebyshev.chebvander(xc1, M-2)\n self.V2 = np.polynomial.chebyshev.chebvander(xc2, M-3)\n self.VI0 = np.linalg.inv(self.V0)\n self.VI1 = np.linalg.inv(self.V1)\n self.VI2 = np.linalg.inv(self.V2)\n # differentiation matrices\n DC01 = np.polynomial.chebyshev.chebder(np.eye(M-0)) / rat\n DC12 = np.polynomial.chebyshev.chebder(np.eye(M-1)) / rat\n DC00 = np.row_stack([DC01, np.zeros(M)])\n self.D00 = self.V0.dot(DC00.dot(self.VI0))\n self.D01 = self.V1.dot(DC01.dot(self.VI0))\n self.D12 = self.V2.dot(DC12.dot(self.VI1))\n # boundary condition operators\n self.ibc_dirichlet = np.polynomial.chebyshev.chebvander(1, M-1).dot(self.VI0)\n self.obc_dirichlet = np.polynomial.chebyshev.chebvander(-1, M-1).dot(self.VI0)\n self.ibc_neumann = self.ibc_dirichlet.dot(self.D00)\n self.obc_neumann = self.obc_dirichlet.dot(self.D00)\n # rank reduction operators\n temp = np.zeros([M-1, M-0], dtype=float)\n np.fill_diagonal(temp, 1.0)\n self.R01 = self.V1.dot(temp.dot(self.VI0))\n temp = np.zeros([M-2, M-1], dtype=float)\n np.fill_diagonal(temp, 1.0)\n self.R12 = self.V2.dot(temp.dot(self.VI1))\n self.R02 = self.R12.dot(self.R01)\n # get poof operator from M-1 --> M\n temp = np.zeros([M, M-1], dtype=float)\n np.fill_diagonal(temp, 1.0)\n self.P10 = self.V0.dot(temp.dot(self.VI1))", "def darcy_func(self):\n i, o = self.inl[0].to_flow(), self.outl[0].to_flow()\n\n if abs(i[0]) < 1e-4:\n return i[1] - o[1]\n\n visc_i = visc_mix_ph(i, T0=self.inl[0].T.val_SI)\n visc_o = visc_mix_ph(o, T0=self.outl[0].T.val_SI)\n v_i = v_mix_ph(i, T0=self.inl[0].T.val_SI)\n v_o = v_mix_ph(o, T0=self.outl[0].T.val_SI)\n\n re = 4 * abs(i[0]) / (np.pi * self.D.val * (visc_i + visc_o) / 2)\n\n return ((i[1] - o[1]) - 8 * abs(i[0]) * i[0] * (v_i + v_o) / 2 *\n self.L.val * lamb(re, self.ks.val, self.D.val) /\n (np.pi ** 2 * self.D.val ** 5))", "def _vtmaxEq(vT,R,diskdf):\n #Calculate a bunch of stuff that we need\n if diskdf._beta == 0.:\n E= vT**2./2.+sc.log(R)\n xE= sc.exp(E-.5)\n OE= xE**-1.\n LCE= xE\n dxEdvT= xE*vT\n else: #non-flat rotation curve\n E= vT**2./2.+1./2./diskdf._beta*R**(2.*diskdf._beta)\n xE= (2.*E/(1.+1./diskdf._beta))**(1./2./diskdf._beta)\n OE= xE**(diskdf._beta-1.)\n LCE= xE**(diskdf._beta+1.)\n dxEdvT= xE/2./diskdf._beta/E*vT\n L= R*vT\n sigma2xE= diskdf._surfaceSigmaProfile.sigma2(xE,log=False)\n return OE*R/sigma2xE+\\\n (diskdf._surfaceSigmaProfile.surfacemassDerivative(xE,log=True)\\\n -(1.+OE*(L-LCE)/sigma2xE)*diskdf._surfaceSigmaProfile.sigma2Derivative(xE,log=True)\\\n +(L-LCE)/sigma2xE*(diskdf._beta-1.)*xE**(diskdf._beta-2.)\\\n -OE*(diskdf._beta+1.)/sigma2xE*xE**diskdf._beta)\\\n *dxEdvT", "def get_fermi_velocities():\n\n vr = Vasprun('vasprun.xml')\n # eigenvalues 
= vr.eigenvalues\n bs = vr.get_band_structure()\n bands = bs.bands\n kpoints = bs.kpoints\n efermi = bs.efermi\n h_bar = 6.582e-16 # eV*s\n\n fermi_bands = []\n for spin in bands:\n for i in range(len(bands[spin])):\n if max(bands[spin][i]) > efermi > min(bands[spin][i]):\n fermi_bands.append(bands[spin][i])\n\n fermi_velocities = []\n for band in fermi_bands:\n for i in range(len(band)-1):\n if (band[i] < efermi < band[i+1]) or (band[i] > efermi > band[i+1]):\n dk = np.sqrt((kpoints[i+1].cart_coords[0]\n - kpoints[i].cart_coords[0])**2\n + (kpoints[i+1].cart_coords[1]\n - kpoints[i].cart_coords[1])**2)\n v_f = abs((band[i+1] - band[i]) / (h_bar * dk))\n fermi_velocities.append(v_f)\n\n return fermi_velocities # Values are in Angst./s", "def ftlan_E1c(hop, v0, T, m=50, Min_b=10e-10, Min_m=5, kB=1, norm = np.linalg.norm):\n# def Tri_diag(a1, b1):\n# mat = np.diag(b1, -1) + np.diag(a1, 0) + np.diag(b1, 1)\n# e, w = np.linalg.eigh(mat)\n# return e, w\n\n beta = 1./(T * kB)\n E = 0.\n a, b = [], []\n v0 = v0/norm(v0)\n Hv = hop(v0)\n a.append(v0.dot(Hv))\n v1 = Hv - a[0] * v0\n b.append(norm(v1))\n if b[0] < Min_b:\n return 0\n\n v1 = v1/b[0]\n Hv = hop(v1)\n a.append(v1.dot(Hv))\n\n for i in range(1, m - 1):\n v2 = Hv - b[i - 1] * v0 - a[i] * v1\n b.append(norm(v2))\n if abs(b[i]) < Min_b:\n b.pop()\n break\n\n v2 = v2/b[i]\n Hv = hop(v2)\n a.append(v2.dot(Hv))\n v0 = v1.copy()\n v1 = v2.copy()\n \n a = np.asarray(a)\n b = np.asarray(b)\n\n eps, phi = Tri_diag(a, b)\n l = len(eps)\n# Eo = eps[0]\n# eps = eps-Eo\n exp_eps = np.exp(-beta * eps)\n E = np.sum(exp_eps * eps * phi[0, :]**2.)\n Z = np.sum(exp_eps * phi[0, :]**2.)\n# for i in range(len(eps)):\n# E += exp_eps[i] * eps[i] * phi[0, i]**2\n\n# E = E + Eo\n# de = eps[:, np.newaxis] - eps\n# for i in range(l):\n# E += eps[i] * phi[0, i]**2./np.sum(np.exp(-beta*de[:l, i])*(phi[0, :l]**2.))\n return E, Z", "def vonMises(self):\n s = self.voigt\n return ((1 / 2) * ((s[0] - s[1])**2\n + (s[1] - s[2])**2\n + (s[2] - s[0])**2\n + 6 * (s[3]**2 + s[4]**2 + s[5]**2))) ** (1 / 2)", "def dvdt(self, args: List[float]) -> float:\n v, h_nav, n_kvhh, h_kva, m_kvsi, s_ampar, _, s_nmdar, s_gabar, ca = args\n return ((-10.0*self.params.area \n * (self.leak.i(v)\n + self.nav.i(v, h=h_nav) \n + self.kvhh.i(v, n=n_kvhh)\n + self.kva.i(v, h=h_kva)\n + self.kvsi.i(v, m=m_kvsi)\n + self.cav.i(v)\n + self.kca.i(v, ca=ca)\n + self.nap.i(v)\n + self.kir.i(v))\n - (self.ampar.i(v, s=s_ampar)\n + self.nmdar.i(v, s=s_nmdar)\n + self.gabar.i(v, s=s_gabar))) \n / (10.0*self.params.cm*self.params.area))", "def set_voltages(): \n #0) set parameters\n from project_parameters import trapFile,multipoleControls,reg,driveFrequency,ax,az,phi,coefs\n import pickle\n with open(trapFile,'rb') as f:\n trap = pickle.load(f)\n V,X,Y,Z=trap.instance.DC,trap.instance.X,trap.instance.Y,trap.instance.Z\n tc=trap.configuration\n C = tc.multipoleControl\n el = []\n #1) check if trap_knobs has been run yet, creating multipoleControl and multipoleKernel\n if tc.trap_knobs != True:\n return 'WARNING: You must run trap_knobs first!'\n #2a) determine electrode voltages directly\n elif multipoleControls: # note plurality to contrast from attribute\n el = np.dot(C,coefs.T) # these are the electrode voltages\n #2b) determine electrode volages indirectly\n else:\n charge = tc.charge\n mass = tc.mass\n V0 = mass*(2*np.pi*frequencyRF)**2/charge\n U2 = az*V0/8\n U1 = U2+ax*V0/4\n U3 = 2*U1*np.tan(2*np.pi*(phi+tc.thetaRF)/180)\n U1p= np.sqrt(U1**2+U3**2/2)\n U4 = U1p*tc.Qrf[4]/tc.Qrf[1]\n U5 = 
U1p*tc.Qrf[5]/tc.Qrf[1]\n inp = np.array([E[0], E[1], E[2], U1, U2, U3, U4, U5]).T\n mCf = tc.multipoleCoefficients[1:9,:]\n el = np.dot(mCf.T,inp) # these are the electrode voltages\n el = np.real(el)\n #3) regularize if set to do so\n reg = 0\n if reg: \n C = el\n Lambda = np.linalg.lstsq(tc.multipoleKernel,C)\n Lambda=Lambda[0]\n el = el-(np.dot(tc.multipoleKernel,Lambda))\n return el", "def dV(X):\n return -4 * a * np.power(X, 3) + 2 * b * X", "def doCalculation(self, E1, E2, muL, muR, T, pot, C, TCalc, Density, E0, L):\n NEcut = len(E1) #we determine the number of single-particle states that we use\n VG=np.diag(pot)\n E= int(0.5*np.size(VG))\n V = VG[0:E] #since the potential of both barriers is symmetric and we only tunnel through one barrier. Therefore we only use one half of the potential.\n dx= L/(np.size(pot))\n\n #Following prints are for debugging purposes:\n #print(\"---------------------------------------------------------------------\")\n #print(\"---------------------------------------------------------------------\")\n #print(\"Hier beginnt die Ausgabe von Rates:\")\n #print(\"---------------------------------------------------------------------\")\n #print(\"V:\", V)\n #print(\"E1:\", E1)\n #print(\"E2:\", E2)\n #print(\"C:\", C)\n\n kB=0.08629 #Boltzmann constant in meV/K\n \n \n def fermi(E,mu,T):\n \"\"\"This fermi-function tells us with which likelyhood a state with an E is occupied on the lead.\n E(float): energy difference between the initial and the final state that the tunneling electron has to carry.\n mu(float): chemical potential of either drain(muR) or source(muL).\n T(float): temperature.\n \"\"\"\n if (E-mu)/T > 600:\n f=0\n\t\t\t\t\n else:\n f=1/(math.exp((E-mu)/(kB*T) )+1)\n return(f)\n \n\n\t#This function is called by the Gamma_ij-equations and includes the transmission-coefficient for each tunnelling-event\n #and the density of state function of the source and drain. 
\n def Gamma(Ea,Eb,V):\n \"\"\":math:`\\\\Gamma` includes the transmission coefficient and DOS: :math:`\\Gamma = | t |^2 * DOS`\n\n Ea(float): energy of initial state\n Eb(float): energy of final state\n V(np.array): barrier potential\n \"\"\"\n #print(Ea)\n #print(V)\n return (np.absolute(TCalc.calculate_transmission(Ea,V,dx))**2*Density.calculate_DensityofStates(np.absolute(Ea-Eb)))\n \n #These next four functions are used to calculate the transition rates.Each function for a different kind of transition:\n #We distinguish between transitions, in which the number of electrons on the dot changes from one to two(Gamma_12) and reverse(Gamma_21).\n #And between transitions in which the number of electrons on the dot change from zero to one(Gamma_01) and reverse(Gamma_10).\n\n def Gamma_12(Ea,Eb,mu,T):\n \"\"\"Calculates the rate of a transition from a one body state to a two body state.\n\n Ea(float): energy of initial state\n Eb(float): energy of final state\n mu(float): chemical potential of either drain(muR) or source(muL)\n T(float): temperature\n \"\"\"\n summe=0\n j=0\n Cb=C[np.where(E2==Eb)[0][0]]\n while j< NEcut:\n summe=Cb[np.where(E1==Ea)[0][0]][j]+summe\n j=j+1\n return(Gamma(Ea,Eb,V)*(np.absolute(summe))**2*fermi((Eb-Ea),mu,T))\n\n\n def Gamma_01(Eb,mu,T):\n \"\"\"Calculates the transition rate from the vacuum state to a one-body state.\n\n Eb(float): energy of final state\n mu(float): chemical potential of either drain(muR) or source(muL)\n T(float): temperature\n \"\"\"\n return(Gamma(E0,Eb,V)*fermi((Eb-E0),mu,T))\n\n def Gamma_21(Ea,Eb,mu,T):\n \"\"\"Calculates the rate of a transition from a two body state to a one body state.\n\n Ea(float): energy of initial state\n Eb(float): energy of final state\n mu(float): chemical potential of either drain(muR) or source(muL)\n T(float): temperature\n \"\"\"\n summe=0\n nu=0\n Ca=C[np.where(E2==Ea)[0][0]]\n while nu < NEcut:\n summe=summe+Ca[np.where(E1==Eb)[0][0]][nu]\n nu=nu+1\n return(Gamma(Ea,Eb,V)*(np.absolute(summe))**2*(1-fermi((Ea-Eb),mu,T)))\n\n def Gamma_10(Ea,mu,T):\n \"\"\"Calculates the rate of a transition from a one body state to the vacuum state.\n\n Ea(float): energy of initial state \n mu(float): chemical potential of either drain(muR) or source(muL)\n T(float): temperature\n \"\"\"\n return(Gamma(Ea,E0,V)*(1-fermi((Ea-E0),mu,T)))\n\n #creating the output matrices that later contain all the transition rates through either\n #the left or the right barrier\n Gamma_R=np.zeros((1+np.size(E1)+np.size(E2),1+np.size(E1)+np.size(E2)))\n Gamma_L=np.zeros((1+np.size(E1)+np.size(E2),1+np.size(E1)+np.size(E2)))\n\n #using a loop to fill the output matrices with transition rates.\n i_=0\n for i in E1:\n j_=0\n for j in E2:\n Gamma_L[i_+1][j_+1+np.size(E1)]=Gamma_12(i,j,muL,T)\n Gamma_L[j_+1+np.size(E1)][i_+1]=Gamma_21(j,i,muL,T)\n Gamma_R[i_+1][j_+1+np.size(E1)]=Gamma_12(i,j,muR,T)\n Gamma_R[j_+1+np.size(E1)][i_+1]=Gamma_21(j,i,muR,T)\n j_=j_+1\n Gamma_L[0][i_+1]=Gamma_10(i,muL,T)\n Gamma_R[0][i_+1]=Gamma_10(i,muR,T)\n Gamma_L[i_+1][0]=Gamma_01(i,muL,T)\n Gamma_R[i_+1][0]=Gamma_01(i,muR,T)\n i_=1+i_\n\n #print(\"Gamma_L und Gamma_R:\")\n #print(Gamma_L,Gamma_R)\n #print(\"-----------------------------------------------------------------------\")\n #print(\"---------------------------------------------------------------------\")\n return(Gamma_L,Gamma_R)", "def FBEqs( a, v, nphi, mDM, sv):\n #-- Define Parameters --#\n \n rRAD = v[0] # Radiation energy density\n NDM = v[1] # DM number density\n Tp = v[2] # Temperature\n \n H = 
np.sqrt(25.13274122871834 * GCF * (rRAD * 10.**(-4*a))/3.) # Hubble parameter\n Del = 1. + Tp * dgstarSdT(Tp)/(3. * gstarS(Tp)) # Temperature parameter\n\n #-- Radiation + Temperature equations --#\n \n drRADda = 0.\n dTda = - Tp/Del\n\n #-- Calculate freeze-out of DM pion --#\n\n NDMeq = (10.**(3*a) * mDM*mDM * Tp * kn(2, mDM/Tp))/(9.8696044)/nphi\n \n dNDMda = -(NDM*NDM - NDMeq*NDMeq)*sv*nphi/(H*10.**(3.*a))\n \n dEqsda = [drRADda, dNDMda, dTda]\n\n dEqsda = [x * 2.3025 for x in dEqsda]\n \n return dEqsda", "def calculate_eigens(self):\n covariance_matrix = np.cov(self.predictor_vars_train.T)\n eigenvalues, eigenvectors = np.linalg.eig(covariance_matrix)\n idx = eigenvalues.argsort()[::-1]\n # Create \"All\" version\n self.eigenvalues_all = eigenvalues[idx]\n self.eigenvectors_all = eigenvectors[:, idx]\n # Create selected percentage version with cutoff\n eigenvalues_pct = self.eigenvalues_all / np.sum(self.eigenvalues_all)\n self.pct_var_exp_cumulative_all = np.cumsum(eigenvalues_pct)\n self.pct_var_exp_cumulative = self.pct_var_exp_cumulative_all[\n self.pct_var_exp_cumulative_all <= self.variance_explained_cutoff\n ]\n self.eigenvectors = self.eigenvectors_all[:, : len(self.pct_var_exp_cumulative)]\n self.eigenvalues = self.eigenvalues_all[: len(self.pct_var_exp_cumulative)]", "def max_evidence(self):\n self.A = np.linalg.inv(self.Sn)\n A_eigval = np.linalg.eigvals(self.A)\n gamma = 0\n for i in range(len(A_eigval)):\n gamma += A_eigval[i]/(self.alpha + A_eigval[i])\n new_alpha = gamma/(self.mn.T@self.mn)\n\n sum = 0\n for i in range(self.n):\n sum +=(self.t[i]-self.mn.T@self.design_matrix[i])**2\n new_beta = 1/((1/(self.n-gamma))*sum)\n\n return new_alpha, new_beta", "def innerProd(vcfResults):\n both = vcfResults.get(\"both\", 0)\n onlyX = vcfResults.get(\"onlyX\", 0)\n onlyY = vcfResults.get(\"onlyY\", 0)\n \n # return (both - onlyX - onlyY)/(both + onlyX + onlyY) \n return (both)/(both + onlyX + onlyY)\n # Distance heuristic = # correct variants / total \n # => % correct variants called", "def testDensityCenterVelocity(self):\n known_dcv = np.array([0.24275266063732542, 0.25474645145914782, 0.32455563530545328])\n np.testing.assert_allclose(nb.dc_vel, known_dcv)", "def test_cherenkov_instability( show=False ):\n # Dictionary to record the final value of E\n slope_Erms = {}\n\n for scheme in [ 'standard', 'galilean', 'pseudo-galilean']:\n\n # Choose the correct parameters for the scheme\n if scheme == 'standard':\n v_comoving = 0.\n use_galilean = False\n else:\n v_comoving = 0.9999*c\n if scheme == 'galilean':\n use_galilean = True\n else:\n use_galilean = False\n\n # Initialize the simulation object\n sim = Simulation( Nz, zmax, Nr, rmax, Nm, dt,\n p_zmin, p_zmax, p_rmin, p_rmax, p_nz, p_nr, p_nt, n_e,\n zmin=zmin, initialize_ions=True,\n v_comoving=v_comoving, use_galilean=use_galilean,\n boundaries={'z':'periodic', 'r':'reflective'}, use_cuda=use_cuda )\n\n # Give a relativistic velocity to the particle, with some noise\n sim.ptcl[0].uz[:] = uz_m\n sim.ptcl[0].inv_gamma[:] = 1./np.sqrt( 1 + sim.ptcl[0].uz**2 )\n sim.ptcl[1].uz[:] = uz_m\n sim.ptcl[1].inv_gamma[:] = 1./np.sqrt( 1 + sim.ptcl[1].uz**2 )\n\n # Perform the simulation;\n # record the rms electric field every 50 timestep\n Er_rms = np.zeros(int(N_step/30)+1)\n t = np.zeros(int(N_step/30+1))\n Er_rms[0] = get_Er_rms(sim)\n t[0] += sim.time\n for i in range(int(N_step/30)):\n sim.step( 30, show_progress=False )\n print('Checkpoint %d' %i)\n Er_rms[i+1] = get_Er_rms(sim)\n t[i+1] += sim.time\n print('Calculated RMS')\n\n 
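#Note: the 'slope' recorded below is a crude two-point estimate of the late-time exponential growth rate, log(Er_rms[-1]) - log(Er_rms[-2]) over the final recording interval.\n        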
# Check/plot the results\n if show:\n import matplotlib.pyplot as plt\n # Add a plot\n plt.semilogy( t, Er_rms, '-', label=scheme )\n plt.ylabel('RMS(Er)')\n plt.xlabel('Time')\n else:\n # Registed the final value of the slope of the electric field\n slope_Erms[scheme] = np.log( Er_rms[-1] ) - np.log(Er_rms[-2] )\n\n if show:\n # Show the plot\n plt.legend(loc=0)\n plt.show()\n else:\n # Check that, in the standard case, the electric field is\n # growing much faster, due to the Cherenkov instability\n assert slope_Erms['standard'] > 3.5*slope_Erms['galilean']\n assert slope_Erms['standard'] > 3.5*slope_Erms['pseudo-galilean']", "def herd_factor(self, premises):\n # This uses C code to replicate the herd size calculation for this\n # set of units. These are official values.\n # s=[1, 3, 13, 7, 10, 7, 5, 2, 10, 12, 2510, 100530, 285310, 567290, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000, 36000]\n # y=[0.0153846, 0.0769231, 0.292308, 0.153846, 0.215385, 0.153846, 0.107692, 0.0461538, 0.215385, 0.261538, 0.323077, 1.92308, 1.95385, 1.98462, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308, 1.12308]\n # return { a : b for (a,b) in zip(s,y)}\n # The relative error in the calculation below is under 10^-6, so perfect.\n\n special_factor=2 # Comes from NAADSM\n histogram=collections.defaultdict(int)\n for p in premises:\n histogram[p.size]+=1\n total=sum(histogram.values())\n running=0\n cumulants=list()\n for size, count in sorted(histogram.items()):\n running+=count\n cumulants.append([size, special_factor*running/total])\n factor_dict=dict()\n previous=0\n for size, val in cumulants:\n factor_dict[size]=0.5*(previous+val)\n previous=val\n logger.debug(\"Creating herd special factor {0}\".format(factor_dict))\n return factor_dict", "def test_vmec_objective_grad(self): \n self.set_up()\n self.assertRaises(ValueError,\n self.vmecOptimization.vmec_objective_grad,\n which_objective='volumee')\n \n boundary = np.copy(self.vmecOptimization.boundary_opt)\n boundary_new = np.hstack((boundary,boundary))\n self.assertRaises(ValueError,\n self.vmecOptimization.vmec_objective_grad,\n boundary=boundary_new)\n self.assertRaises(RuntimeError,\n self.vmecOptimization.vmec_objective_grad,\n which_objective='iota') \n self.assertRaises(RuntimeError,\n self.vmecOptimization.vmec_objective_grad,\n which_objective='iota_prime') \n self.assertRaises(RuntimeError,\n self.vmecOptimization.vmec_objective_grad,\n which_objective='iota_target') \n self.assertRaises(RuntimeError,\n self.vmecOptimization.vmec_objective_grad,\n which_objective='well_ratio') \n self.assertRaises(RuntimeError,\n self.vmecOptimization.vmec_objective_grad,\n which_objective='well') \n self.assertRaises(RuntimeError,\n self.vmecOptimization.vmec_objective_grad,\n which_objective='axis_ripple') \n # Jacobian\n dfdomega = 
self.vmecOptimization.vmec_objective_grad(\n boundary=boundary,which_objective='jacobian')\n boundary = self.vmecOptimization.boundary_opt\n dfdomega_fd = finite_difference_derivative(boundary, lambda boundary :\n self.vmecOptimization.vmec_objective(boundary=boundary,\n which_objective='jacobian'),epsilon=1e-4,\n method='centered')\n\n self.assertTrue(np.allclose(dfdomega,dfdomega_fd,atol=1e-5))\n # Radius\n dfdomega = self.vmecOptimization.vmec_objective_grad(\n boundary=boundary,which_objective='radius')\n boundary = self.vmecOptimization.boundary_opt\n dfdomega_fd = finite_difference_derivative(boundary, lambda boundary :\n self.vmecOptimization.vmec_objective(boundary=boundary,\n which_objective='radius'),epsilon=1e-4,\n method='centered')\n self.assertTrue(np.allclose(dfdomega,dfdomega_fd,atol=1e-5))\n # normalized_jacobian\n dfdomega = self.vmecOptimization.vmec_objective_grad(\n boundary=boundary,which_objective=\n 'normalized_jacobian')\n boundary = self.vmecOptimization.boundary_opt\n dfdomega_fd = finite_difference_derivative(boundary, lambda boundary :\n self.vmecOptimization.vmec_objective(boundary=boundary,\n which_objective='normalized_jacobian'),epsilon=1e-4,\n method='centered')\n self.assertTrue(np.allclose(dfdomega,dfdomega_fd,atol=1e-5))\n # iota\n inputObject = VmecInput('input.rotating_ellipse_highres')\n boundary = np.copy(self.vmecOptimization.boundary_opt)\n boundary[0] = 1.1*boundary[0]\n self.vmecOptimization.inputObject = inputObject\n self.vmecOptimization.vmecInputFilename = 'input.rotating_ellipse_highres'\n self.vmecOptimization.delta_curr = 10\n dfdomega = self.vmecOptimization.vmec_objective_grad(\n boundary=boundary,which_objective='iota',\n weight_function = axis_weight)\n dfdomega_fd = finite_difference_derivative(boundary, lambda boundary :\n self.vmecOptimization.vmec_objective(boundary=boundary,\n which_objective='iota', weight_function = axis_weight),\n epsilon=1e-3, method='centered')\n self.assertTrue(np.allclose(dfdomega,dfdomega_fd,atol=1e-2))\n \n # To do : finish FD testing\n self.tear_down()", "def dnde_gray(self, EMeV, t_sec, g11, m_neV, bfield = 'jansson12'):\n\n dna_dedt = self.AvgALPflux(EMeV, t_sec, g11) # alps / MeV / s \n pag = self.Pag(EMeV, g11, m_neV, bfield = bfield) # conversion prob\n dng_dedt = dna_dedt * pag # gamma rays / MeV / s \n flux = dng_dedt * self.fluxconstant # gamma rays / MeV / s / cm^2\n return flux", "def eval(self):\n\n T = 0. # the test statistic\n N = float(len(self.x))\n M = float(len(self.y))\n\n if N == 0 or M == 0:\n raise ValueError('cvm: empty vector')\n\n s1 = 0.\n for ex in self.x:\n s1 += (self.eval_ecdf(self.x, self.ecdf_x, ex) -\n self.eval_ecdf(self.y, self.ecdf_y, ex))**2\n \n s2 = 0.\n for ey in self.y:\n s2 += (self.eval_ecdf(self.x, self.ecdf_x, ey) -\n self.eval_ecdf(self.y, self.ecdf_y, ey))**2\n\n # the CVM test statistic\n T = N*M/(N + M)**2*(s1 + s2)\n\n # the expected value of T (under the null hypothesis)\n expT = 1./6. + 1./(6.*(M + N))\n\n # the variance of T\n varT = 1./45.*(M + N + 1.)/(M + N)**2*\\\n (4.*M*N*(M + N) - 3.*(M**2 + N**2) - 2.*M*N)/(4.*M*N)\n\n # adjust T so that its significance can be computed using the limiting\n # distribution\n limitT = (T - expT)/np.sqrt(45.*varT) + 1./6.\n\n\n # p-value for this test statistic\n if limitT > self._z[-1]:\n p = 0.\n else:\n p = 1. 
- self._interp_f(limitT)\n\n return T, limitT, p", "def test_ccsd_energy(parthole_drudge):\n\n dr = parthole_drudge\n p = dr.names\n\n a, b = p.V_dumms[:2]\n i, j = p.O_dumms[:2]\n u = dr.two_body\n t = IndexedBase('t')\n\n energy = dr.define_einst(\n Symbol('e'),\n u[i, j, a, b] * t[a, b, i, j] * Rational(1, 2)\n + u[i, j, a, b] * t[a, i] * t[b, j]\n )\n targets = [energy]\n\n searched_eval_seq = optimize(targets, substs={p.nv: p.no * 10})\n\n assert verify_eval_seq(searched_eval_seq, targets)\n assert len(searched_eval_seq) == 2\n searched_cost = get_flop_cost(searched_eval_seq)\n\n best_eval_seq = optimize(\n targets, substs={p.nv: p.no * 10},\n strategy=Strategy.BEST | Strategy.SUM | Strategy.COMMON\n )\n assert verify_eval_seq(best_eval_seq, targets)\n assert len(best_eval_seq) == 2\n best_cost = get_flop_cost(best_eval_seq)\n\n assert (best_cost - searched_cost).xreplace({p.no: 1, p.nv: 10}) > 0", "def Incentives(Md,X,EEnvPrime):\n u = X[iu]\n b = Md.b()\n\n\n\n CplusG = 1./X[iMU] * (1+SSGRatio*X[ieG])\n\n SD = SkillDeriv(Md,X)\n SkillRisk = beta * ( EEnvPrime[iEnv_EAlph] *(1-delta)*X[iEAlph]*SD[0]\n + EEnvPrime[iEnv_ElogAlpha]*(1-delta)*SD[1])\n\n dWdu = (log(b) + (1-b)/(1-u+u*b) - (1+chi)*X[iA]*X[ih]/CplusG/X[iSA]\n + (1+chi)*psi_1*X[iM]**psi_2/CplusG+gamma_0*X[ih]**(1+gamma)/(1+gamma)-psy + SkillRisk)\n dqdb_M = (1./kappa)*X[iq] * X[idVndb] / X[iVn]\n dudq_M = -X[iM]*(X[iulag]+upsilon*(1-X[iulag]))\n dudb_M = dudq_M * dqdb_M\n\n dWdh = ( (1+chi)*X[iA]/CplusG/X[iSA] - gamma_0 * X[ih]**gamma ) * (1-u)\n\n # dhdq = -zeta_2/(1+gamma)*X[ih]/(1-X[iu])*dudq_M\n # dhdubar = zeta_2/(1+gamma)*X[ih]/(1-Md.ubar)\n # dhdb = dhdq/kappa*X[iq]/X[iVn]*X[idVndb] + dhdubar * Md.dubardb\n\n dhdu = -zeta_2/(1+gamma)*X[ih]/(1-X[iu])\n\n\n XSS = Md.XSS\n dhdubar = zeta_2/(1+gamma)*X[ih]/(1-XSS[iu])\n dubardq_M = -XSS[iM]*(XSS[iulag]+upsilon*(1-XSS[iulag]))\n dqbardb_M = (1./kappa)*XSS[iq] * XSS[idVndb] / XSS[iVn]\n\n\n dhdb = dhdu*dudq_M*dqdb_M + dhdubar * dubardq_M*dqbardb_M\n\n\n return dWdu * dudb_M - (X[iulag]+upsilon*(1-X[iulag]))*X[iM]*X[iVn]*dqdb_M + dWdh * dhdb + beta*dudb_M*EEnvPrime[iEnv_ulag]", "def get_vdw_contrib(self, rcut=8.0, groupBy='m'):\n e2v = 0.0\n ias2v = []\n es2v = [];\n types2v_z = []\n types2v_m = []\n for i in range(self.na):\n for j in range(i+1,self.na):\n bond = self.m.GetBondBetweenAtoms(i, j)\n #print ' -- i,j,bond = ', i,j,bond\n if bond is None:\n neibs_i = set( [ ai.GetIdx() for ai in self.m.GetAtomWithIdx(i).GetNeighbors() ] )\n neibs_j = set( [ ai.GetIdx() for ai in self.m.GetAtomWithIdx(j).GetNeighbors() ] )\n if neibs_j.intersection(neibs_i) == set():\n eij = 0.0\n ias = [ i,j ]\n ias2v.append(ias)\n zs = [ self.zs[ia] for ia in ias]\n zs.sort()\n aps = [ self.atypes[ia] for ia in ias ]\n aps.sort()\n r0, D0 = rcr.GetUFFVdWParams(self.m, i,j)\n rij = rdMolTransforms.GetBondLength(self.m.GetConformer(), i, j)\n if rij <= rcut:\n ratio = r0/rij\n r6 = ratio**6\n r12 = r6*r6\n eij = D0*(r12 - 2.0*r6)\n #print 'i,j,rij, r0,D0, evdw = %2d %2d %5.2f %5.2f %8.4f %8.4f'%( i,j,rij, r0,D0, evdw )\n e2v += eij\n es2v.append(eij)\n types2v_z.append('-'.join([ '%d'%zi for zi in zs ]))\n types2v_m.append( '-'.join( aps ) )\n self.e2v = e2v\n self.es2v = es2v\n self.n2v = len(es2v)\n self.types2v = {'m':types2v_m, 'n':types2v_z}[groupBy]\n #print ' -- types2v = ', self.types2v\n #return e2v, n2v, types2v, es2v", "def energy_distributions_v_v_v(\n model: SingleRhNeutrinoModel,\n genv: Generation,\n nbins: int,\n):\n\n def msqrd(s, t):\n return msqrd_v_v_v(s, t, model, genv)\n\n tb 
= ThreeBody(model.mx, (0, 0, 0), msqrd=msqrd)\n return tb.energy_distributions(nbins=nbins)", "def abc_reject_analyse(obs):\n def closest(lst, K):\n lst = np.asarray(lst)\n idx = (np.abs(lst - K)).argmin()\n return idx\n failure_results = [1, 1]\n suffixes = ('', '_hm')\n w = np.ones(1000)\n if (os.path.exists('%s/abc_reject.pkl' % obs.results_dir) and\n os.path.exists('%s/abc_reject_hm.pkl' % obs.results_dir)):\n for test in range(2):\n with open('%s/abc_reject%s.pkl' % (obs.results_dir, suffixes[test]), 'rb') as pfile:\n results = pickle.load(pfile)\n params = pd.DataFrame([(r.scout_prob, r.survival_prob) for r in results],\n columns=('scout prob', 'survival prob'))\n X, Y, PDF = pyabc.visualization.kde.kde_2d(params, w, x=\"scout prob\", y=\"survival prob\")\n x_idx = closest(X[0], obs.parameters.scout_prob)\n y_idx = closest([y[0] for y in Y], obs.parameters.survival_prob)\n posterior = PDF[y_idx][x_idx]\n ratio = posterior / np.amax(PDF)\n if ratio > 0.5:\n failure_results[test] = 0\n return failure_results", "def dvdt(self, args: Dict) -> float:\n if self.channel_bool['leak']:\n i_leak: float = self.leak.i(args['v'])\n else:\n i_leak: float = 0.\n \n if self.channel_bool['nav']:\n i_nav: float = self.nav.i(args['v'], h=args['h_nav'])\n else:\n i_nav: float = 0.\n\n if self.channel_bool['kvhh']:\n i_kvhh: float = self.kvhh.i(args['v'], n=args['n_kvhh'])\n else:\n i_kvhh: float = 0.\n\n if self.channel_bool['kva']:\n i_kva: float = self.kva.i(args['v'], h=args['h_kva'])\n else:\n i_kva: float = 0.\n\n if self.channel_bool['kvsi']:\n i_kvsi: float = self.kvsi.i(args['v'], m=args['m_kvsi'])\n else:\n i_kvsi: float = 0.\n\n if self.channel_bool['cav']:\n i_cav: float = self.cav.i(args['v'])\n else:\n i_cav: float = 0.\n\n if self.channel_bool['kca']:\n i_kca: float = self.kca.i(args['v'], ca=args['ca'])\n else:\n i_kca: float = 0.\n \n if self.channel_bool['nap']:\n i_nap: float = self.nap.i(args['v'])\n else:\n i_nap: float = 0.\n\n if self.channel_bool['kir']:\n i_kir: float = self.kir.i(args['v'])\n else:\n i_kir: float = 0.\n\n if self.channel_bool['ampar']:\n i_ampar: float = self.ampar.i(args['v'], s=args['s_ampar'])\n else:\n i_ampar: float = 0.\n\n if self.channel_bool['nmdar']:\n i_nmdar: float = self.nmdar.i(args['v'], s=args['s_nmdar'])\n else:\n i_nmdar: float = 0.\n\n if self.channel_bool['gabar']:\n i_gabar: float = self.gabar.i(args['v'], s=args['s_gabar'])\n else:\n i_gabar: float = 0.\n\n return ((-10.0*self.params.area \n * (i_leak\n + i_nav \n + i_kvhh \n + i_kva \n + i_kvsi \n + i_cav \n + i_kca \n + i_nap \n + i_kir) \n - (i_ampar \n + i_nmdar \n + i_gabar))\n / (10.0*self.params.cm*self.params.area))", "def get_effective_mass():\n\n H_BAR = 6.582119514e-16 # eV*s\n M_0 = 9.10938356e-31 # kg\n N_KPTS = 6 # Number of k-points included in the parabola.\n\n spin_up = Spin(1)\n\n band_structure = Vasprun('vasprun.xml').get_band_structure()\n\n # Locations of CBM and VBM in band_structure.bands\n cbm_band_index = band_structure.get_cbm()['band_index'][spin_up][0]\n cbm_kpoint_index = band_structure.get_cbm()['kpoint_index'][0]\n\n vbm_band_index = band_structure.get_vbm()['band_index'][spin_up][0]\n vbm_kpoint_index = band_structure.get_vbm()['kpoint_index'][0]\n\n k = {'electron': {'left': [], 'right': []},\n 'hole': {'left': [], 'right': []}}\n E = {'electron': {'left': [], 'right': []},\n 'hole': {'left': [], 'right': []}}\n\n e_ref_coords = band_structure.kpoints[cbm_kpoint_index]._ccoords\n h_ref_coords = band_structure.kpoints[vbm_kpoint_index]._ccoords\n\n for n in 
range(-N_KPTS, 1):\n e_coords = band_structure.kpoints[cbm_kpoint_index + n]._ccoords\n h_coords = band_structure.kpoints[vbm_kpoint_index + n]._ccoords\n\n k['electron']['left'].append(\n ((e_coords[0] - e_ref_coords[0])**2 +\n (e_coords[1] - e_ref_coords[1])**2 +\n (e_coords[2] - e_ref_coords[2])**2)**0.5\n )\n k['hole']['left'].append(\n ((h_coords[0] - h_ref_coords[0])**2 +\n (h_coords[1] - h_ref_coords[1])**2 +\n (h_coords[2] - h_ref_coords[2])**2)**0.5\n )\n\n e_energy = band_structure.bands[\n spin_up][cbm_band_index][cbm_kpoint_index + n]\n h_energy = band_structure.bands[\n spin_up][vbm_band_index][vbm_kpoint_index + n]\n\n E['electron']['left'].append(e_energy)\n E['hole']['left'].append(h_energy)\n\n for n in range(1, 1 + N_KPTS):\n e_coords = band_structure.kpoints[cbm_kpoint_index + n]._ccoords\n h_coords = band_structure.kpoints[vbm_kpoint_index + n]._ccoords\n\n k['electron']['right'].append(\n ((e_coords[0] - e_ref_coords[0])**2 +\n (e_coords[1] - e_ref_coords[1])**2 +\n (e_coords[2] - e_ref_coords[2])**2)**0.5\n )\n k['hole']['right'].append(\n ((h_coords[0] - h_ref_coords[0])**2 +\n (h_coords[1] - h_ref_coords[1])**2 +\n (h_coords[2] - h_ref_coords[2])**2)**0.5\n )\n\n e_energy = band_structure.bands[\n spin_up][cbm_band_index][cbm_kpoint_index + n]\n h_energy = band_structure.bands[\n spin_up][vbm_band_index][vbm_kpoint_index + n]\n\n E['electron']['right'].append(e_energy)\n E['hole']['right'].append(h_energy)\n\n # 2nd order fits\n e_l_fit = np.poly1d(\n np.polyfit(k['electron']['left'], E['electron']['left'], 2))\n e_r_fit = np.poly1d(\n np.polyfit(k['electron']['right'], E['electron']['right'], 2))\n h_l_fit = np.poly1d(\n np.polyfit(k['hole']['left'], E['hole']['left'], 2))\n h_r_fit = np.poly1d(\n np.polyfit(k['hole']['right'], E['hole']['right'], 2))\n\n # Curvatures\n e_l_curvature = e_l_fit.deriv().deriv()[0]\n e_r_curvature = e_r_fit.deriv().deriv()[0]\n h_l_curvature = h_l_fit.deriv().deriv()[0]\n h_r_curvature = h_r_fit.deriv().deriv()[0]\n\n # Unit conversion\n e_m_eff_l = 10 * ((H_BAR ** 2) / e_l_curvature) / M_0\n e_m_eff_r = 10 * ((H_BAR ** 2) / e_r_curvature) / M_0\n h_m_eff_l = -10 * ((H_BAR ** 2) / h_l_curvature) / M_0\n h_m_eff_r = -10 * ((H_BAR ** 2) / h_r_curvature) / M_0\n\n return {'electron': {'left': e_m_eff_l, 'right': e_m_eff_r},\n 'hole': {'left': h_m_eff_l, 'right': h_m_eff_r}}", "def Dekel(mv,mv0,lmax0,vmax0,alpha0,z=0.):\n g_vmax,g_lmax = g_P10(mv/mv0,alpha0)\n lmax = lmax0 * g_lmax\n vmax = vmax0 * g_vmax\n s2 = 2.-alpha0\n s3 = 3.-alpha0\n A = (cfg.G * mv / lmax / vmax**2)**(0.5/s3) * (s2/s3)\n lv = lmax / s2**2 * A**2 / (1.-A)**2\n c = s2**2 * lv / lmax\n rhoc = co.rhoc(z,h=cfg.h,Om=cfg.Om,OL=cfg.OL)\n Delta = 3.*mv / (cfg.FourPi * lv**3 * rhoc)\n return c,Delta", "def muscovite():\n\n rho = 2834.\n\n C = np.zeros((6,6), dtype=float)\n C[0,0] = 181.; C[0,1] = 48.8; C[0,2] = 25.6; C[0,3] = 0.; C[0,4] = -14.2; C[0,5] = 0.\n C[1,0] = C[0,1]; C[1,1] = 178.4; C[1,2] = 21.2; C[1,3] = 0.; C[1,4] = 1.1; C[1,5] = 0.\n C[2,0] = C[0,2]; C[2,1] = C[1,2]; C[2,2] = 58.6; C[2,3] = 0.; C[2,4] = 1.; C[2,5] = 0.\n C[3,0] = C[0,3]; C[3,1] = C[1,3]; C[3,2] = C[2,3]; C[3,3] = 16.5; C[3,4] = 0.; C[3,5] = -5.2\n C[4,0] = C[0,4]; C[4,1] = C[1,4]; C[4,2] = C[2,4]; C[4,3] = C[3,4]; C[4,4] = 19.5; C[4,5] = 0.\n C[5,0] = C[0,5]; C[5,1] = C[1,5]; C[5,2] = C[2,5]; C[5,3] = C[3,5]; C[5,4] = C[4,5]; C[5,5] = 72.\n\n return C, rho", "def bse_fe(self):\n p = self.model.exog.shape[1]\n return np.sqrt(np.diag(self.cov_params())[0:p])", "def FigA7(case):\n \n #set the 
parameters and arrays\n    \n    n_array=np.array([1,2,3])\n\n    #set the result arrays\n    if case==0:\n        class_number=5\n    elif case==1:\n        class_number=6\n    fate=np.zeros([class_number])#number of evolutionary fate\n    fate_matrix=np.zeros([np.size(n_array),np.size(fate)])\n    \n    time=np.linspace(0,100000, 1000000)\n    loop=10**6\n    \"\"\"\n    0 Co and/or Ch cannot survive in mono-culture\n    1 Co cannot invade\n    2 Only equilibrium of exclusion is stable\n    3 Only equilibrium of coexistence is stable\n    4 Two equilibria are UNstable\n    5 Two equilibria are stable (which may occur only when sCo vs rCh)\n    \"\"\"\n    for tri in range(np.size(n_array)):\n        counter=0\n        n=n_array[tri]\n        print(str(\"Hill coefficient is %d\" %(n)))\n        fate=np.zeros([class_number])#number of evolutionary fate should be reset\n        if case==0 or case==1:\n            fname=str('parameter-sweep-MC-n%d-case%d' %(n, case))\n        else:\n            print(\"Error in case\")\n            return 1\n        \n        for i in range(loop):\n            if(i+1)%10000==0:\n                print(i+1)\n            Ks,cd,T0, alpha,=np.random.uniform(0,1,4)\n            Kr,cr=np.random.uniform([Ks,0],[1,1],2)#Kr>Ks and cr.cd\n            #check whether r is positive or not\n            if case==0:\n                r1=rmax*(1-cr-cd)#rCO\n                r2=rmax#sCH\n                W0Co=r1-dmax*T0**n/(T0**n+Kr**n)-alpha#initial growth of Cooperator\n                W0Ch=r2-dmax*T0**n/(T0**n+Ks**n)-alpha#initial growth of Cheater\n            elif case==1:\n                r1=rmax*(1-cd)#sCo\n                r2=rmax*(1-cr)#rCh\n                W0Co=r1-dmax*T0**n/(T0**n+Ks**n)-alpha\n                W0Ch=r2-dmax*T0**n/(T0**n+Kr**n)-alpha\n            stab_e=0#initialize the flags of stability\n            stab_c=0\n            if W0Co<0 or W0Ch<0:\n                fate[0]+=1\n                res=0\n            else:\n                #succeed in mono-culture\n                init=np.array([T0,10**(-6)])\n                if case==0:\n                    solCo=odeint(DyCoop, init, time, args=(T0, r1, Kr, alpha, n))\n                    Ts=solCo[-1,0]\n                    #x1s=solCo[-1,1]\n                    solCh=odeint(DyCheat, init, time, args=(T0, r2, Ks, alpha, n))\n                    x2s=solCh[-1,1]\n                else:\n                    solCo=odeint(DyCoop, init, time, args=(T0, r1, Ks, alpha, n))\n                    Ts=solCo[-1,0]\n                    #x1s=solCo[-1,1]\n                    solCh=odeint(DyCheat, init, time, args=(T0, r2, Kr, alpha, n))\n                    x2s=solCh[-1,1]\n                \n                #Evolutionary dynamics\n                if case==0:\n                    K=Kr\n                else:\n                    K=Ks\n                if r1*(1-x2s)-dmax*T0**n/(T0**n+K**n)<alpha:\n                    #Co cannot invade\n                    fate[1]+=1\n                    res=1\n                else:\n                    #Co can invade\n                    #calculate Tdagger Td and check whether coexist or exclude\n                    if case==0:\n                        #rCo vs sCh\n                        #in this case, at most one equilibrium is stable\n                        tau=Quad(case,alpha,cr+cd,0,Kr, Ks, n)\n                        Td=tau**(1/n)\n                        if Td<Ts:\n                            #Co excludes Ch\n                            fate[2]+=1\n                            res=2\n                        else:\n                            x1d=alpha*Kd*(T0-Td)/(fmax*Td-alpha*(T0-Td))\n                            x2d=1-x1d-(dmax*Td**n/(Td**n+K**n)+alpha)/r1\n                            #check the stability condition\n                            stab=Stab_cond(alpha, T0, Td,x1d,x2d, r1,r2,n, K)\n                            if stab==0:\n                                #stable coexistence\n                                fate[3]+=1\n                                res=3\n                            else:\n                                #neither coexistence nor exclusion is stable\n                                fate[4]+=1\n                                res=4\n                                print(Td, x1d, x2d)\n                    else:\n                        #sCo vs rCh\n                        # in this case two equilibria can be stable at the same time\n                        [tau_p,tau_m]=Quad(case,alpha,cd,cr,Ks, Kr, n)\n                        if tau_m>Ts**n or tau_p<Ts**n:\n                            # exclusion is stable\n                            stab_e=1\n                        # stability in coexistence\n                        if tau_p<0:\n                            stab_c=0\n                        else:\n                            Td=tau_p**(1/n)\n                            x1d=alpha*Kd*(T0-Td)/(fmax*Td-alpha*(T0-Td))\n                            x2d=1-x1d-(dmax*Td**n/(Td**n+K**n)+alpha)/r1\n                            #check the stability condition\n                            stab=Stab_cond(alpha, T0, Td,x1d,x2d, r1,r2,n, K)\n                            if stab==0:\n                                #stable coexistence\n                                stab_c=1\n                        #classify\n                        if stab_e==1 and stab_c==1:\n                            # two stable equilibria\n                            fate[5]+=1\n                            res=5\n                        elif stab_e==1 and stab_c==0:\n                            #only stable exclusion\n                            fate[2]+=1\n                            res=2\n                        elif stab_e==0 and stab_c==1:\n                            #stable coexistence\n                            fate[3]+=1\n                            res=3\n                        else:\n                            #both unstable\n                            fate[4]+=1\n                            res=4\n            \n            
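#at this point res holds the evolutionary fate class (0-5, as listed in the string at the top of this function) for this parameter draw\n            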
#save the results\n if counter==0:\n result=np.array([[Ks, Kr, cr, cd, alpha, T0,res]])\n #save the result with parameter values\n \n else:\n #add array of results\n R=np.array([[Ks, Kr, cr, cd, alpha, T0,res]])\n result=np.concatenate((result, R), axis=0)\n counter+=1\n \n #save csv file and graph\n np.savetxt(fname+'.csv',result, delimiter=',', header='Ks, Kr, cr, cd, alpha, T0, class', fmt='%.6f') \n print(fate)\n fate_matrix[tri,:]=fate \n if case==0: \n np.savetxt('parameter_sweep_MC_total_case0.csv',fate_matrix, delimiter=',', header='cl0,l1,cl2,cl3,cl4', fmt='%d')\n else:\n np.savetxt('parameter_sweep_MC_total_case1.csv',fate_matrix, delimiter=',', header='cl0,l1,cl2,cl3,cl4,cl5', fmt='%d')\n Plot(case)", "def test_compute_c_max_D():\n # build\n T = np.array([600, 500])\n E_ion = np.array([20, 10])\n E_atom = np.array([30, 40])\n angles_ion = np.array([60, 60])\n angles_atom = np.array([60, 60])\n ion_flux = np.array([1e21, 1e20])\n atom_flux = np.array([2e21, 2e20])\n\n # run\n c_max = divHretention.compute_c_max(\n T, E_ion, E_atom, angles_ion, angles_atom,\n ion_flux, atom_flux, full_export=False, isotope=\"T\")\n\n # test\n D_0_W = 1.9e-7\n E_D_W = 0.2\n k_B = 8.617e-5\n D = D_0_W*np.exp(-E_D_W/k_B/T)\n D *= 1/3**0.5\n\n # implantation ranges\n implantation_range_ions = [\n float(divHretention.implantation_range(energy, angle))\n for energy, angle in zip(E_ion, angles_ion)]\n implantation_range_atoms = [\n float(divHretention.implantation_range(energy, angle))\n for energy, angle in zip(E_atom, angles_atom)]\n\n # reflection coefficients\n reflection_coeff_ions = [\n float(divHretention.reflection_coeff(energy, angle))\n for energy, angle in zip(E_ion, angles_ion)]\n reflection_coeff_atoms = [\n float(divHretention.reflection_coeff(energy, angle))\n for energy, angle in zip(E_atom, angles_atom)]\n\n reflection_coeff_ions = np.array(reflection_coeff_ions)\n reflection_coeff_atoms = np.array(reflection_coeff_atoms)\n\n c_max_ions = (1 - reflection_coeff_ions) * \\\n ion_flux*implantation_range_ions/D\n c_max_atoms = (1 - reflection_coeff_atoms) * \\\n atom_flux*implantation_range_atoms/D\n c_max_expected = c_max_ions + c_max_atoms\n\n assert c_max.all() == c_max_expected.all()\n assert c_max.all() == c_max_expected.all()", "def calcCV(self):\n # Make sure Zm Area and Standard Error are already calculated\n if not hasattr(self,'ZmArea'):\n self.calcZmArea()\n if not hasattr(self,'SE'):\n self.calcSE()\n # Coefficient of Variation = Standard Error / Zm Area\n if self.ZmArea > 0:\n self.CV = self.SE / self.ZmArea\n else:\n self.CV = 0\n return self.CV", "def calcCV(self):\n # Make sure Zm Area and Standard Error are already calculated\n if not hasattr(self,'ZmArea'):\n self.calcZmArea()\n if not hasattr(self,'SE'):\n self.calcSE()\n # Coefficient of Variation = Standard Error / Zm Area\n if self.ZmArea > 0:\n self.CV = self.SE / self.ZmArea\n else:\n self.CV = 0\n return self.CV", "def derv(self, t, y):\n x = y[0];\n xc = y[1];\n n = y[2];\n\n Bhat = self.G * (1.0 - n) * self.alpha0(t) * (1 - 0.4 * x) * (1 - 0.4 * xc);\n\n dydt = np.zeros(3)\n\n dydt[0] = sp.pi / 12.0 * (xc + Bhat);\n dydt[1] = sp.pi / 12.0 * (self.mu * (xc - 4.0 / 3.0 * pow(xc, 3.0)) - x * (\n pow(24.0 / (0.99669 * self.taux), 2.0) + self.kparam * Bhat));\n dydt[2] = 60.0 * (self.alpha0(t) * (1.0 - n) - self.delta * n);\n\n return (dydt)", "def blueschist_felsic():\n\n rho = 2970.\n\n C = np.zeros((6,6), dtype=float)\n C[0,0] = 149.85; C[0,1] = 38.7; C[0,2] = 32.59; C[0,3] = -0.15; C[0,4] = -1.; C[0,5] = -0.19\n 
C[1,0] = C[0,1]; C[1,1] = 163.55; C[1,2] = 30.03; C[1,3] = 1.05; C[1,4] = -1.81; C[1,5] = -1.78\n C[2,0] = C[0,2]; C[2,1] = C[1,2]; C[2,2] = 121.62; C[2,3] = 0.22; C[2,4] = -0.95; C[2,5] = -0.13\n C[3,0] = C[0,3]; C[3,1] = C[1,3]; C[3,2] = C[2,3]; C[3,3] = 48.03; C[3,4] = -0.63; C[3,5] = -1.14\n C[4,0] = C[0,4]; C[4,1] = C[1,4]; C[4,2] = C[2,4]; C[4,3] = C[3,4]; C[4,4] = 48.62; C[4,5] = -0.01\n C[5,0] = C[0,5]; C[5,1] = C[1,5]; C[5,2] = C[2,5]; C[5,3] = C[3,5]; C[5,4] = C[4,5]; C[5,5] = 58.42\n\n return C, rho", "def EVBMF(Y, sigma2=None, H=None): \n L,M = Y.shape #has to be L<=M\n\n if H is None:\n H = L\n\n alpha = L/M\n tauubar = 2.5129*np.sqrt(alpha)\n \n #SVD of the input matrix, max rank of H\n _,s,_ = torch.svd(Y)\n s = s[:H]\n\n #Calculate residual\n residual = 0.\n if H<L:\n # residual = np.sum(np.sum(Y**2)-np.sum(s**2))\n residual = torch.sum(torch.sum(Y**2)-torch.sum(s**2))\n\n #Estimation of the variance when sigma2 is unspecified\n if sigma2 is None: \n xubar = (1+tauubar)*(1+alpha/tauubar)\n eH_ub = int(np.min([np.ceil(L/(1+alpha))-1, H]))-1\n\n upper_bound = (torch.sum(s**2)+residual)/(L*M)\n lower_bound = np.max([s[eH_ub+1]**2/(M*xubar), torch.mean(s[eH_ub+1:]**2)/M])\n\n scale = 1.#/lower_bound\n s = s*np.sqrt(scale)\n residual = residual*scale\n lower_bound = float(lower_bound)*scale\n upper_bound = float(upper_bound)*scale\n\n sigma2_opt = minimize_scalar(EVBsigma2, args=(L,M,s,residual,xubar), bounds=[lower_bound, upper_bound], method='Bounded')\n sigma2 = sigma2_opt.x\n\n #Threshold gamma term\n threshold = np.sqrt(M*sigma2*(1+tauubar)*(1+alpha/tauubar))\n\n pos = torch.sum(s>threshold)\n if pos == 0: return np.array([])\n #Formula (15) from [2]\n d = torch.mul(s[:pos]/2, 1-(L+M)*sigma2/s[:pos]**2 + torch.sqrt( (1-((L+M)*sigma2)/s[:pos]**2)**2 - (4*L*M*sigma2**2)/s[:pos]**4) )\n return torch.diag(d)", "def _finalize(self):\n if self.vcount > 1:\n # skewness = g1 = sqrt(n) M3/(M2^(3/2)) # zero \n # kurtosis = g2 = n M4/M2^2 - 3 # zero for normal\n # sk = (M3/nf)/(sigma**3)\n # ku = (M4/nf)/sigma**4 - 3\n n = self.vcount\n nf = float(n)\n mu2 = self.vm2/nf\n self.vvar = self.vm2/(nf-1)\n try:\n self.vskewness = self.vm3/nf/(mu2**1.5)\n self.vkurtosis = self.vm4/nf/(mu2**2)\n except:\n self.vskewness = 0\n self.vkurtosis = 0\n elif self.vcount == 1:\n self.vvar = 0\n self.vskewness = 0\n self.vkurtosis = 0\n self.dirty = False", "def cspline_params(self):\n b = np.zeros(self.n)\n c = np.zeros(self.n-1)\n d = np.zeros(self.n-1)\n B = np.zeros(self.n)\n Q = np.ones(self.n-1)\n D = 2 * np.ones(self.n)\n dx = np.zeros(self.n-1)\n p = np.zeros(self.n-1)\n\n # Calculate x-interval and slope\n for j in range(self.n-1):\n dx[j] = self.x[j+1] - self.x[j]\n p[j] = (self.y[j+1] - self.y[j]) / dx[j]\n\n # Fill B\n B[0] = 3 * p[0]\n for i in range(self.n-2):\n B[i+1] = 3 * (p[i] + p[i+1] * dx[i] / dx[i+1])\n B[-1] = 3 * p[-2]\n \n # Fill D\n for i in range(self.n-2):\n D[i+1] = 2 * dx[i] / dx[i+1] + 2\n\n # Fill Q\n for i in range(self.n-2):\n Q[i+1] = dx[i] / dx[i+1]\n\n # Gauss elimination\n for i in range(1, self.n):\n D[i] = D[i] - Q[i-1] / D[i-1]\n B[i] = B[i] - B[i-1] / D[i-1]\n\n # Back-substitution\n b[-1] = B[-1] / D[-1]\n list = range(self.n-1)\n for i in list[::-1]:\n b[i] = (B[i] - Q[i] * b[i+1]) / D[i]\n\n # Calculate c and d\n for i in range(self.n-1):\n c[i] = (3 * p[i] - 2 * b[i] - b[i+1]) / dx[i]\n d[i] = (b[i] + b[i+1] - 2 * p[i]) / dx[i]\n c[-1] = -3 * d[-1] * dx[-1]\n\n return b, c, d", "def eclogite_massive():\n\n rho = 3490.\n\n C = np.zeros((6,6), dtype=float)\n 
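#convention shared with the other mineral routines here (muscovite, epidote, blueschist_felsic): C is the symmetric 6x6 elastic stiffness matrix in Voigt notation, presumably in GPa; the assignments below fill the upper triangle and mirror it via C[j,i] = C[i,j]\n    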
C[0,0] = 238.85; C[0,1] = 82.01; C[0,2] = 81.44; C[0,3] = 0.3; C[0,4] = -0.02; C[0,5] = 0.5\n C[1,0] = C[0,1]; C[1,1] = 242.12; C[1,2] = 81.11; C[1,3] = -0.66; C[1,4] = 0.33; C[1,5] = 0.12\n C[2,0] = C[0,2]; C[2,1] = C[1,2]; C[2,2] = 235.57; C[2,3] = -0.28; C[2,4] = 0.22; C[2,5] = 0.31\n C[3,0] = C[0,3]; C[3,1] = C[1,3]; C[3,2] = C[2,3]; C[3,3] = 78.72; C[3,4] = 0.27; C[3,5] = 0.\n C[4,0] = C[0,4]; C[4,1] = C[1,4]; C[4,2] = C[2,4]; C[4,3] = C[3,4]; C[4,4] = 78.37; C[4,5] = 0.25\n C[5,0] = C[0,5]; C[5,1] = C[1,5]; C[5,2] = C[2,5]; C[5,3] = C[3,5]; C[5,4] = C[4,5]; C[5,5] = 77.91\n\n return C, rho", "def heavy_fixCM_eigvals(NP, b, c, params):\n l = params['l']\n k = params['k']\n I3 = params['I3']\n # Here, omega_3 is just the MAGNITUDE, not signed\n w3 = np.abs(params['w3'][0])\n gn = params['Mm'] * params['g']\n\n # Check output if small system\n print 'gn = ', gn\n print 'b = ', b\n print 'c = ', c\n\n if NP == 1:\n pass\n elif NP == 2:\n matrix = -np.array([[0., (-1) ** (1 + c) * l * gn / (I3 * w3), 0., 0.],\n [(-1) ** (1 + c) * (-l * gn + (-1) ** (1 + b) * l ** 2 * k) / (I3 * w3), 0.,\n (-1) ** (1 + b + c) * l ** 2 * k / (I3 * w3), 0.],\n [0., 0., 0., (-1) ** (1 + c) * l * gn / (I3 * w3)],\n [(-1) ** (1 + b + c) * l ** 2 * k / (I3 * w3), 0.,\n (-1) ** (1 + c) * (-l * gn + (-1) ** (1 + b) * l ** 2 * k) / (I3 * w3), 0.]\n ])\n print 'exact matrix = ', matrix\n eigvals = np.array([\n 1j * l * gn / (I3 * w3),\n -1j * l * gn / (I3 * w3),\n l * np.sqrt(gn) * np.sqrt(0j - 2. * l * k * (-1) ** (b) - gn) / (I3 * w3),\n -l * np.sqrt(gn) * np.sqrt(0j - 2. * l * k * (-1) ** (b) - gn) / (I3 * w3)\n ])\n print 'exact_eigvals are =', eigvals\n return eigvals\n elif NP == 3:\n matrix = -np.array([[0., (-1) ** (1 + c) * l * gn / (I3 * w3), 0., 0., 0., 0.],\n [(-1) ** (1 + c) * (-l * gn + (-1) ** (1 + b) * l ** 2 * k) / (I3 * w3), 0.,\n (-1) ** (1 + b + c) * l ** 2 * k / (I3 * w3), 0., 0., 0.],\n [0., 0., 0., (-1) ** (1 + c) * l * gn / (I3 * w3), 0., 0.],\n [(-1) ** (1 + b + c) * l ** 2 * k / (I3 * w3), 0.,\n (-1) ** (1 + c) * (-l * gn - 2. * (-1) ** (b) * l ** 2 * k) / (I3 * w3), 0., \\\n (-1) ** (1 + b + c) * l ** 2 * k / (I3 * w3), 0.],\n [0., 0., 0., 0., 0., (-1) ** (1 + c) * l * gn / (I3 * w3)],\n [0., 0., (-1) ** (1 + b + c) * l ** 2 * k / (I3 * w3), 0.,\n (-1) ** (1 + c) * (-l * gn + (-1) ** (1 + b) * l ** 2 * k) / (I3 * w3), 0.]\n ])\n print 'exact matrix = ', matrix\n\n eigvals = np.array([\n 1j * l * gn / (I3 * w3),\n # -1j*l*gn/(I3*w3),\n l * np.sqrt(gn) * np.sqrt(0j - 3. * l * k * (-1) ** (b) - gn) / (I3 * w3),\n # -l*np.sqrt(gn)*np.sqrt(0j-3.*l*k*(-1)**(b) - gn)/(I3*w3),\n l * np.sqrt(gn) * np.sqrt(0j - l * k * (-1) ** (b) - gn) / (I3 * w3),\n # -l*np.sqrt(gn)*np.sqrt(0j - l*k*(-1)**(b) - gn)/(I3*w3)\n ])\n return eigvals\n else:\n return np.array([])", "def calc_D(state):\n\t\tif t < thresh:\n\t\t\tstate.D_g[t] = 0.5\n\t\t\tstate.D_n[t] = 0.5\n\t\telse:\n\t\t\tif mod == \"constant\":\n\t\t\t\tstate.D_g[t] = D\n\t\t\t\tstate.D_n[t] = 1-D\n\t\t\tif mod == \"value\":\n\t\t\t\t# NOTE: if rmag and lmag is 1/0, can just use V\n\t\t\t\t# average of two actions\n\t\t\t\tV = np.mean(1/2*(state.QG[t,:] - state.QN[t,:])) # state average(?) \n\t\t\t\tV = 1/(1 + np.exp(-V*k)) # translate between 0 and 1\n\t\t\t\tstate.D_g[t] = V \n\t\t\t\tstate.D_n[t] = 1 - V\n\t\treturn state", "def build_dcel(self):\r\n\r\n # Step 1: vertex list creation\r\n for v in self.vl:\r\n self.vertices.append(Vertex(v[0], v[1]))\r\n\r\n # Step 2: hedge list creation. 
Assignment of twins and\r\n # vertices\r\n\r\n for e in self.el:\r\n if e[0] >= 0 and e[1] >= 0:\r\n h1 = Hedge(self.vertices[e[0]],\r\n self.vertices[e[1]])\r\n h2 = Hedge(self.vertices[e[1]], self.vertices[e[0]])\r\n h1.twin = h2\r\n h2.twin = h1\r\n self.vertices[e[1]].hedgelist.append(h1)\r\n self.vertices[e[0]].hedgelist.append(h2)\r\n self.hedges.append(h2)\r\n self.hedges.append(h1)\r\n else:\r\n print(\"oh shit boi wadup\")\r\n\r\n # Step 3: Identification of next and prev hedges\r\n for index, v in enumerate(self.vertices):\r\n v.sort_incident()\r\n l = len(v.hedgelist)\r\n if l < 2:\r\n raise DcelError(\"Badly formed dcel: less than two hedges in vertex:\" + str(index))\r\n else:\r\n for i in range(l - 1):\r\n v.hedgelist[i].nexthedge = v.hedgelist[i + 1].twin\r\n v.hedgelist[i + 1].prevhedge = v.hedgelist[i]\r\n v.hedgelist[l - 1].nexthedge = v.hedgelist[0].twin\r\n v.hedgelist[0].prevhedge = v.hedgelist[l - 1]\r\n\r\n # Step 4: Face assignment\r\n provlist = self.hedges[:]\r\n nf = 0\r\n nh = len(self.hedges)\r\n\r\n while nh > 0:\r\n h = provlist.pop()\r\n nh -= 1\r\n # We check if the hedge already points to a face\r\n if h.face == None:\r\n f = Face()\r\n nf += 1\r\n # We link the hedge to the new face\r\n f.wedge = h\r\n f.wedge.face = f\r\n # And we traverse the boundary of the new face\r\n while not h.nexthedge is f.wedge:\r\n h = h.nexthedge\r\n h.face = f\r\n self.faces.append(f)\r\n # And finally we have to determine the external face\r\n for f in self.faces:\r\n f.external = f.area() < 0", "def CvM(self, using, dx=0.0001):\n pits = np.array(self.PIT(using=using,dx=dx))\n cvm_result = skgof.cvm_test(pits, stats.uniform())\n return cvm_result.statistic, cvm_result.pvalue", "def test_compute_c_max_D():\n # build\n T = np.array([600, 500])\n E_ion = np.array([20, 10])\n E_atom = np.array([30, 40])\n angles_ion = np.array([60, 60])\n angles_atom = np.array([60, 60])\n ion_flux = np.array([1e21, 1e20])\n atom_flux = np.array([2e21, 2e20])\n\n # run\n c_max = divHretention.compute_c_max(\n T, E_ion, E_atom, angles_ion, angles_atom,\n ion_flux, atom_flux, full_export=False, isotope=\"D\")\n\n # test\n D_0_W = 1.9e-7\n E_D_W = 0.2\n k_B = 8.617e-5\n D = D_0_W*np.exp(-E_D_W/k_B/T)\n D *= 1/2**0.5\n\n # implantation ranges\n implantation_range_ions = [\n float(divHretention.implantation_range(energy, angle))\n for energy, angle in zip(E_ion, angles_ion)]\n implantation_range_atoms = [\n float(divHretention.implantation_range(energy, angle))\n for energy, angle in zip(E_atom, angles_atom)]\n\n # reflection coefficients\n reflection_coeff_ions = [\n float(divHretention.reflection_coeff(energy, angle))\n for energy, angle in zip(E_ion, angles_ion)]\n reflection_coeff_atoms = [\n float(divHretention.reflection_coeff(energy, angle))\n for energy, angle in zip(E_atom, angles_atom)]\n\n reflection_coeff_ions = np.array(reflection_coeff_ions)\n reflection_coeff_atoms = np.array(reflection_coeff_atoms)\n\n c_max_ions = (1 - reflection_coeff_ions) * \\\n ion_flux*implantation_range_ions/D\n c_max_atoms = (1 - reflection_coeff_atoms) * \\\n atom_flux*implantation_range_atoms/D\n c_max_expected = c_max_ions + c_max_atoms\n\n assert c_max.all() == c_max_expected.all()", "def _core_calc_design(self,prof) :\n\t\tlp_list,ld_list = [],[]\n\t\tcp_list,cd_list = [],[]\n\t\t\t\t\n\t\tfor eqnid,eqn in enumerate(self.equations) : \n\t\t\treg_p = self.regressors[eqnid]['prod']\n\t\t\treg_d = self.regressors[eqnid]['degrad']\n\t\t\t\n\t\t\tLp = np.ones(prof.n_sample)\n\t\t\tLd = 
np.ones(prof.n_sample)\n\t\t\n\t\t\t# Get regressor values\n\t\t\tX_p = [np.log(prof.var[:,reg-1]) for reg in reg_p ]\n\t\t\tX_d = [np.log(prof.var[:,reg-1]) for reg in reg_d ]\n\t\t\t\n\t\t\tLp = np.vstack((Lp,np.array(X_p))).T\n\t\t\tLd = np.vstack((Ld,np.array(X_d))).T\t\t\t\n\n\t\t\t# Calculate Cp\n\t\t\tCp = np.dot(LA.inv(np.dot(Lp.T,Lp)),Lp.T)\n\t\t\tCd = np.dot(LA.inv(np.dot(Ld.T,Ld)),Ld.T)\n\t\t\t# Append Lp,Ld,Cp and Cd to relevant lists\n\t\t\tlp_list.append(Lp)\n\t\t\tld_list.append(Ld)\n\t\t\tcp_list.append(Cp)\n\t\t\tcd_list.append(Cd)\t\t\t\n\t\treturn (lp_list,ld_list,cp_list,cd_list)", "def energy_balance_deriv(self, increment_filter, k):\n i = self.inl[0].get_flow()\n dT_dp_in = dT_mix_dph(i)\n dT_dh_in = dT_mix_pdh(i)\n dT_dfluid_in = dT_mix_ph_dfluid(i)\n j = 0\n for c in self.outl:\n o = c.get_flow()\n self.jacobian[k, 0, 1] = dT_dp_in\n self.jacobian[k, 0, 2] = dT_dh_in\n self.jacobian[k, 0, 3:] = dT_dfluid_in\n self.jacobian[k, j + 1, 1] = -dT_mix_dph(o)\n self.jacobian[k, j + 1, 2] = -dT_mix_pdh(o)\n self.jacobian[k, j + 1, 3:] = -np.array(dT_mix_ph_dfluid(o))\n j += 1\n k += 1", "def dE_mdn(self, x, y, t, w1 = None, w2 = None):\n if w2 == None:\n w2 = self.w2\n M = int(self.M)\n # avoid underrun\n \n alpha, sigma, mu = self.getMixtureParams(y.T)\n #import pdb; pdb.set_trace()\n \n #T = t.T[None, None, :] # note: np.tile is slower than this notation\n T = t.T[None, :]\n \n phi = self._phi(T, mu, sigma)\n aphi = alpha*phi\n pi = aphi / np.sum(aphi, 0)\n \n # derivatives of E with respect to the output variables (s. Bishop 1995, chp. 6.4)\n dE_dy_alpha = alpha - pi\n dE_dy_sigma = - 0.5 * pi * ((np.sum((T-mu)**2 , 1) / sigma) - self.c)\n dE_dy_mu = pi[:,np.newaxis,:] * (mu - T) / sigma[:,np.newaxis,:]\n\n dk = np.zeros([self.ny, x.shape[0]])\n dk[0:M,:] = dE_dy_alpha\n dk[M:2*M,:] = dE_dy_sigma\n \n dk[2*M:] = np.reshape(dE_dy_mu, [M*self.c, x.shape[0]])\n \n # back-propagate the dks\n #t0=datetime.now()\n dEnw1, dEnw2 = self.backward(x, dk, None, w2)\n #print 'eval of dE_mdn:' + str((datetime.now()-t0))\n #dj = (1 - self.z[1:]**2) * np.dot(w2[:,1:].T, dk)\n # evaluate derivatives with respect to the weights\n #dEnw1 = (dj[:,:,np.newaxis]*x[np.newaxis,:,:]).transpose(1,0,2)\n #dEnw2 = (dk[:,:,np.newaxis]*self.z.T[np.newaxis,:,:]).transpose(1,0,2)\n return dEnw1, dEnw2", "def bv_data():\n heights = [1000., 1500., 2000., 2500.] * units('m')\n potential_temperatures = [[290., 290., 290., 290.],\n [292., 293., 293., 292.],\n [294., 296., 293., 293.],\n [296., 295., 293., 296.]] * units('K')\n return heights, potential_temperatures", "def Get_params(numparams, dt, D):\n # bounds from table 1 Kowalek et al 2020\n Nmin, Nmax = 30, 600\n Bmin, Bmax = 1, 6\n Rmin, Rmax = 1, 17\n alphamin, alphamax = 0.3, 0.7\n Qmin, Qmax = 1, 9\n\n # Gen parameters\n Q = np.random.uniform(Qmin, Qmax, size=numparams)\n Q1, Q2 = Q, Q\n\n NsND = np.random.randint(Nmin, Nmax + 1, size=numparams)\n NsAD = np.random.randint(Nmin, Nmax + 1, size=numparams)\n NsCD = np.random.randint(Nmin, Nmax + 1, size=numparams)\n NsDM = np.random.randint(Nmin, Nmax + 1, size=numparams)\n TDM = NsDM * dt\n\n B = np.random.uniform(Bmin, Bmax, size=numparams)\n r_c = np.sqrt(D * NsCD * dt / B) # solving for r_c in eq. 8 Kowalek\n\n R = np.random.uniform(Rmin, Rmax, size=numparams)\n v = np.sqrt(R * 4 * D / TDM) # solving for v in eq. 7 Kowalek\n\n alpha = np.random.uniform(alphamin, alphamax, size=numparams)\n\n # Compute sigma for ND, AD, CD from eq. 
12 Kowalek\n sigmaND = np.sqrt(D * dt) / Q1\n sigmaAD = np.sqrt(D * dt) / Q1\n sigmaCD = np.sqrt(D * dt) / Q1\n\n # Compute sigma for DM from eq. 12 Kowalek\n sigmaDM = np.sqrt(D * dt + v ** 2 * dt ** 2) / Q2\n\n return np.array(\n [\n NsND,\n NsAD,\n NsCD,\n NsDM,\n D * np.ones(numparams),\n dt * np.ones(numparams),\n r_c,\n v,\n alpha,\n sigmaND,\n sigmaAD,\n sigmaCD,\n sigmaDM,\n ]\n ).T", "def test_svd_sharpness(self):\n \t\t\t\n\t\tesd_before = self.watcher.get_ESD(layer=self.fc2_layer) \n\t\t\n\t\tself.watcher.SVDSharpness(layers=[self.fc2_layer])\n\t\tesd_after = self.watcher.get_ESD(layer=self.fc2_layer) \n\t\t\n\t\tprint(\"max esd before {}\".format(np.max(esd_before)))\n\t\tprint(\"max esd after {}\".format(np.max(esd_after)))\n\n\t\tself.assertGreater(np.max(esd_before)-2.0,np.max(esd_after))", "def uncertainty_ee(self,e1,e2):\n # reco\n unc = (self._eleRecoWeight[(e1.pt(),e1.eta())][1]/self._eleRecoWeight[(e1.pt(),e1.eta())][0] + \\\n self._eleRecoWeight[(e2.pt(),e2.eta())][1]/self._eleRecoWeight[(e2.pt(),e2.eta())][0])**2\n # id-isolation\n unc += (self._eleIdIsoWeight[(e1.pt(),e1.eta())][1]/self._eleIdIsoWeight[(e1.pt(),e1.eta())][0] + \\\n self._eleIdIsoWeight[(e2.pt(),e2.eta())][1]/self._eleIdIsoWeight[(e2.pt(),e2.eta())][0])**2\n # trigger (approximate)\n unc += (abs(self._ele8TrgWeight[(e1.pt(),e1.eta())][0]*self._ele17TrgWeight[(e2.pt(),e2.eta())][1]+ \\\n self._ele17TrgWeight[(e1.pt(),e1.eta())][1]*self._ele8TrgWeight[(e2.pt(),e2.eta())][0]- \\\n self._ele17TrgWeight[(e1.pt(),e1.eta())][1]*self._ele17TrgWeight[(e2.pt(),e2.eta())][0]- \\\n self._ele17TrgWeight[(e1.pt(),e1.eta())][0]*self._ele17TrgWeight[(e2.pt(),e2.eta())][1])/ \\\n (self._ele8TrgWeight[(e1.pt(),e1.eta())][0]*self._ele17TrgWeight[(e2.pt(),e2.eta())][0]+ \\\n self._ele17TrgWeight[(e1.pt(),e1.eta())][0]*self._ele8TrgWeight[(e2.pt(),e2.eta())][0]- \\\n self._ele17TrgWeight[(e1.pt(),e1.eta())][0]*self._ele17TrgWeight[(e2.pt(),e2.eta())][0]))**2\n unc += ((self._ele8TrgWeight[(e1.pt(),e1.eta())][1]*self._ele17TrgWeight[(e2.pt(),e2.eta())][0]+ \\\n self._ele17TrgWeight[(e1.pt(),e1.eta())][0]*self._ele8TrgWeight[(e2.pt(),e2.eta())][1])/ \\\n (self._ele8TrgWeight[(e1.pt(),e1.eta())][0]*self._ele17TrgWeight[(e2.pt(),e2.eta())][0]+ \\\n self._ele17TrgWeight[(e1.pt(),e1.eta())][0]*self._ele8TrgWeight[(e2.pt(),e2.eta())][0]- \\\n self._ele17TrgWeight[(e1.pt(),e1.eta())][0]*self._ele17TrgWeight[(e2.pt(),e2.eta())][0]))**2\n #outcome\n return sqrt(unc)", "def calcECDF(self,arr):\n res=np.zeros_like(arr)\n for index, value in np.ndenumerate(arr):\n res[index]=self.calc(value)\n return res", "def calcECDF(self,arr):\n res=np.zeros_like(arr)\n for index, value in np.ndenumerate(arr):\n res[index]=self.calc(value)\n return res", "def pre_processing(self):\n while self.number_of_dmax() < 1:\n self.dmax -= 1\n __edges = self.current_edges()\n print('current edges =', __edges, ' expected edges =', self.edges)\n if __edges < self.edges:\n __temp = self.dmax\n __l = self.dmax\n self.dmax *= 2\n __r = self.dmax\n while self.number_of_dmax() >= 1 and __r < self.nodes:\n __l = __r\n self.dmax *= 2\n __r = self.dmax\n while __l < __r:\n self.dmax = int((__l + __r) / 2)\n if self.number_of_dmax() < 1:\n __r = self.dmax\n else:\n __l = self.dmax + 1\n self.dmax = __l - 1\n __edges = self.current_edges()\n if __edges > self.edges:\n __l = __temp\n __r = self.dmax\n while __l < __r:\n self.dmax = int((__l + __r) / 2)\n __edges = self.current_edges()\n if __edges > self.edges:\n __r = self.dmax\n else:\n __l = self.dmax + 1\n self.dmax 
= __l - 1\n print('adjust dmax =', self.dmax, ' edges =', int(__edges))\n elif __edges > self.edges:\n __temp1 = [_ ** self.lmd for _ in range(self.dmin, self.dmax + 1)]\n __temp2 = [_ * __ for _, __ in zip(__temp1, list(range(self.dmin, self.dmax+1)))]\n c = self.edges / sum(__temp2)\n n = c * sum(__temp1)\n self.select_p = n / self.nodes\n print('reduce select p =', self.select_p)", "def Map_Gradients(post_eval,q,InvV,m_points):\n m = InvV.n\n N = m_points.num\n d = InvV.d\n \n ds_dq = np.zeros([m,N])\n dr_dq = np.zeros([m,N])\n \n ds_db = np.zeros([m,d,N])\n dr_db = np.zeros([m,d,N])\n \n ds_dL = np.zeros([m,d,d,N])\n dr_dL = np.zeros([m,d,d,N])\n \n dB_dL = np.zeros([m,d,d,d,N])\n dM_dL = Cholesky_Derivs(InvV,m_points)\n Q = Partitioner(q, InvV, post_eval, m_points)\n \n for j in range(m):\n backtrack = m_points.map(InvV,j)\n ds_dq[j,:] = - Q[j,:] / q[j]\n dr_dq[j,:] = ds_dq[j,:] - np.mean(ds_dq[j,:])\n \n for k in range(d):\n ds_db[j,k,:] = Q[j,:] * backtrack.all[:,k].T\n dr_db[j,k,:] = ds_db[j,k,:] - np.mean(ds_db[j,k,:])\n \n for l in range(d):\n for i in range(N):\n for row in range(d):\n for col in range(d):\n dB_dL[j,row,k,l,i] += m_points.pick(i)[col] * dM_dL[j,row,col,k,l]\n ds_dL[j,k,l,i] = Q[j,i] * np.inner(backtrack.pick(i),dB_dL[j,:,k,l,i])\n if k == l:\n ds_dL[j,k,l,:] += (2/InvV.L[j,k,l])\n \n dr_dL[j,k,l,:] = ds_dL[j,k,l,:] - np.mean(ds_dL[j,k,l,:])\n \n return dr_dq, dr_db, dr_dL", "def get_edfdv_center_differenced(dv):\n\n def step_edfdv_center_difference(f, e, dt):\n \"\"\"\n This method calculates the f + dt * e * df/dv using naive\n 2nd-order center differencing\n\n :param f: (float array (nx, nv)) distribution function\n :param e: (float array (nx, )) the electric field in real space\n :param dt: (float) timestep\n :return: (float array (nx, nv)) updated distribution function\n \"\"\"\n return f - e[:, None] * np.gradient(f, dv, axis=1, edge_order=2) * dt\n\n return step_edfdv_center_difference", "def bayes_cov_col(Y,X,cols,lm):\n\n #EM iterateit\n Yhat=pd.DataFrame(lm.predict(X))\n Yhat.index=Y.index\n Yhat.columns=Y.columns\n SSE_all=np.square(Y.subtract(Yhat))\n X_adjust=X.copy()\n\n\n df_SSE = []\n df_logit = []\n\n for curcov in cols:\n\n curcells=X[X[curcov]>0].index\n\n if len(curcells)>2:\n\n X_notcur=X.copy()\n X_notcur[curcov]=[0]*len(X_notcur)\n\n X_sub=X_notcur.loc[curcells]\n\n Y_sub=Y.loc[curcells]\n\n GENE_var=2.0*Y_sub.var(axis=0)\n vargenes=GENE_var[GENE_var>0].index\n\n Yhat_notcur=pd.DataFrame(lm.predict(X_sub))\n Yhat_notcur.index=Y_sub.index\n Yhat_notcur.columns=Y_sub.columns\n\n SSE_notcur=np.square(Y_sub.subtract(Yhat_notcur))\n SSE=SSE_all.loc[curcells].subtract(SSE_notcur)\n SSE_sum=SSE.sum(axis=1)\n\n SSE_transform=SSE.div(GENE_var+0.5)[vargenes].sum(axis=1)\n logitify=np.divide(1.0,1.0+np.exp(SSE_transform))#sum))\n\n df_SSE.append(SSE_sum)\n df_logit.append(logitify)\n\n X_adjust[curcov].loc[curcells]=logitify\n\n return X_adjust", "def V2E(V):\n# for v in m/s returns energy in meV\n return 5.227e-6*V*V", "def compute_divergence(self):\n d_tr_a = []\n d_te_a = []\n for k in self.synth_keys:\n d_tr_a.append(self.divergence('tr', k))\n d_te_a.append(self.divergence('te', k))\n\n training = np.mean(np.array(d_tr_a))\n testing = np.mean(np.array(d_te_a))\n return training, testing", "def detEvals(self, targets):\n res = []\n res2 = []\n for f in targets:\n tmp = np.array([np.nan] * len(self.bestfinalfunvals))\n tmp2 = None\n for i, line in enumerate(self.evals):\n if line[0] <= f:\n tmp = line[1:]\n tmp2 = self.algs[i]\n break\n 
res.append(tmp)\n res2.append(tmp2)\n return res, res2", "def updateBD(self):\r\n # itereigenupdated is always up-to-date in the diagonal case\r\n # just double check here\r\n if self.itereigenupdated == self.countiter:\r\n return\r\n\r\n if self.sp.neg.cmuexp: # cave:\r\n self.update_exponential(self.Zneg, -self.sp.neg.cmuexp)\r\n # self.C += self.Zpos # pos update after Zneg would be the correct update, overall:\r\n # self.C = self.Zpos + Cs * Mh.expms(-self.sp.neg.cmuexp*Csi*self.Zneg*Csi) * Cs\r\n self.Zneg = np.zeros((self.N, self.N))\r\n\r\n if self.sigma_vec is not 1 and not np.all(self.sigma_vec == 1):\r\n self.C = dot(dot(np.diag(self.sigma_vec), self.C), np.diag(self.sigma_vec))\r\n self.sigma_vec[:] = 1\r\n\r\n if self.opts['CMA_const_trace'] in (True, 1, 2): # normalize trace of C\r\n if self.opts['CMA_const_trace'] == 2:\r\n s = np.exp(np.mean(np.log(self.dC)))\r\n else:\r\n s = np.mean(self.dC)\r\n self.C /= s\r\n self.dC /= s\r\n self.C = (self.C + self.C.T) / 2\r\n # self.C = np.triu(self.C) + np.triu(self.C,1).T # should work as well\r\n # self.D, self.B = eigh(self.C) # hermitian, ie symmetric C is assumed\r\n\r\n if type(self.opts['CMA_eigenmethod']) == type(1):\r\n print('WARNING: option CMA_eigenmethod should be a function, not an integer')\r\n if self.opts['CMA_eigenmethod'] == -1:\r\n # pygsl\r\n # easy to install (well, in Windows install gsl binaries first,\r\n # set system path to respective libgsl-0.dll (or cp the dll to\r\n # python\\DLLS ?), in unzipped pygsl edit\r\n # gsl_dist/gsl_site_example.py into gsl_dist/gsl_site.py\r\n # and run \"python setup.py build\" and \"python setup.py install\"\r\n # in MINGW32)\r\n if 1 < 3: # import pygsl on the fly\r\n try:\r\n import pygsl.eigen.eigenvectors # TODO efficient enough?\r\n except ImportError:\r\n print('WARNING: could not find pygsl.eigen module, either install pygsl \\n' +\r\n ' or set option CMA_eigenmethod=1 (is much slower), option set to 1')\r\n self.opts['CMA_eigenmethod'] = 0 # use 0 if 1 is too slow\r\n\r\n self.D, self.B = pygsl.eigen.eigenvectors(self.C)\r\n\r\n elif self.opts['CMA_eigenmethod'] == 0:\r\n # TODO: thoroughly test np.linalg.eigh\r\n # numpy.linalg.eig crashes in 200-D\r\n # and EVecs with same EVals are not orthogonal\r\n self.D, self.B = np.linalg.eigh(self.C) # self.B[i] is a row and not an eigenvector\r\n else: # is overall two;ten times slower in 10;20-D\r\n self.D, self.B = Misc.eig(self.C) # def eig, see below\r\n else:\r\n self.D, self.B = self.opts['CMA_eigenmethod'](self.C)\r\n\r\n\r\n # assert(sum(self.D-DD) < 1e-6)\r\n # assert(sum(sum(np.dot(BB, BB.T)-np.eye(self.N))) < 1e-6)\r\n # assert(sum(sum(np.dot(BB * DD, BB.T) - self.C)) < 1e-6)\r\n idx = np.argsort(self.D)\r\n self.D = self.D[idx]\r\n self.B = self.B[:,idx] # self.B[i] is a row, columns self.B[:,i] are eigenvectors\r\n # assert(all(self.B[self.countiter % self.N] == self.B[self.countiter % self.N,:]))\r\n\r\n # qqqqqqqqqq\r\n if 11 < 3: # limit condition number to 1e13\r\n climit = 1e13 # cave: conditioncov termination is 1e14\r\n if self.D[-1] / self.D[0] > climit:\r\n self.D += self.D[-1] / climit\r\n for i in xrange(self.N):\r\n self.C[i][i] += self.D[-1] / climit\r\n\r\n if 11 < 3 and any(abs(sum(self.B[:,0:self.N-1] * self.B[:,1:], 0)) > 1e-6):\r\n print('B is not orthogonal')\r\n print(self.D)\r\n print(sum(self.B[:,0:self.N-1] * self.B[:,1:], 0))\r\n else:\r\n # is O(N^3)\r\n # assert(sum(abs(self.C - np.dot(self.D * self.B, self.B.T))) < N**2*1e-11)\r\n pass\r\n self.D **= 0.5\r\n self.itereigenupdated = 
self.countiter", "def diff_effector2(state, th0, alpha, beta, beta_p, p, d):\n dt_state = np.zeros_like(state)\n #print(len(state))\n if alpha == 1:\n for j in range(len(state)):\n if j == 0:\n dt_state[j] = p*beta*th0+2*beta_p*state[-1]-(beta_p+d[\"d_eff\"])*state[j]\n else:\n dt_state[j] = beta_p*state[j-1]- (beta_p+d[\"d_eff\"])*state[j] \n \n else: \n for j in range(len(state)):\n if j == 0:\n dt_state[j] = p*beta*th0 - (beta+d[\"d_prec\"])*state[j] \n elif j < (alpha-1):\n dt_state[j] = beta*state[j-1]-(beta+d[\"d_prec\"])*state[j] \n elif j == (alpha-1):\n # the problem with the 4 and 2 is that since differentiation takes 1 day it should divide twice giving 4 cells\n # however, if it has arrived in the final states if should double every half day\n dt_state[j] = beta*state[j-1]+2*beta_p*state[-1] - (d[\"d_eff\"]+beta_p)*state[j] \n\n else:\n assert j >= alpha\n dt_state[j] = beta_p*state[j-1]- (beta_p+d[\"d_eff\"])*state[j] \n \n return dt_state", "def emissivity_calc (pv, ndvi):\n ndvi_dest = ndvi.copy()\n ndvi_dest[np.where(ndvi < 0)] = 0.991\n ndvi_dest[np.where((0 <= ndvi) & (ndvi < 0.2)) ] = 0.966\n ndvi_dest[np.where((0.2 <= ndvi) & (ndvi < 0.5)) ] = (0.973 * pv[np.where((0.2 <= ndvi) & (ndvi < 0.5)) ]) + (0.966 * (1 - pv[np.where((0.2 <= ndvi) & (ndvi < 0.5)) ]) + 0.005)\n ndvi_dest[np.where(ndvi >= 0.5)] = 0.973\n return ndvi_dest", "def epidote():\n\n rho = 3465.\n\n C = np.zeros((6,6), dtype=float)\n C[0,0] = 211.5; C[0,1] = 65.6; C[0,2] = 43.2; C[0,3] = 0.; C[0,4] = -6.5; C[0,5] = 0.\n C[1,0] = C[0,1]; C[1,1] = 239.; C[1,2] = 43.6; C[1,3] = 0.; C[1,4] = -10.4; C[1,5] = 0.\n C[2,0] = C[0,2]; C[2,1] = C[1,2]; C[2,2] = 202.1; C[2,3] = 0.; C[2,4] = -20.; C[2,5] = 0.\n C[3,0] = C[0,3]; C[3,1] = C[1,3]; C[3,2] = C[2,3]; C[3,3] = 39.1; C[3,4] = 0.; C[3,5] = -2.3\n C[4,0] = C[0,4]; C[4,1] = C[1,4]; C[4,2] = C[2,4]; C[4,3] = C[3,4]; C[4,4] = 43.4; C[4,5] = 0.\n C[5,0] = C[0,5]; C[5,1] = C[1,5]; C[5,2] = C[2,5]; C[5,3] = C[3,5]; C[5,4] = C[4,5]; C[5,5] = 79.5\n\n return C, rho", "def calculate_v6_L(chi_6222, chi_6222_err, chi_633, chi_633_err, vn_array):\n dN = real(vn_array[:, 0])\n v2_array = vn_array[:, 2]\n v3_array = vn_array[:, 3]\n v6_array = vn_array[:, 6]\n nev = len(dN)\n\n v6_Psi6_sq = mean(abs(v6_array)**2.)\n v6_Psi6_sq_err = std(abs(v6_array)**2.)/sqrt(nev)\n v2_6 = mean(abs(v2_array)**6.)\n v2_6_err = std(abs(v2_array)**6.)/sqrt(nev)\n v3_4 = mean(abs(v3_array)**4.)\n v3_4_err = std(abs(v3_array)**4.)/sqrt(nev)\n v23 = real(mean(v2_array**3.*conj(v3_array)**2.))\n v23_err = real(std(v2_array**3.*conj(v3_array)**2.))/sqrt(nev)\n v6_L = (v6_Psi6_sq - chi_6222**2.*v2_6 - chi_633**2.*v3_4\n - 2.*chi_6222*chi_633*v23)\n v6_L_err = sqrt(\n v6_Psi6_sq_err**2.\n + (2.*chi_6222*chi_6222_err*v2_6)**2. + (chi_6222**2.*v2_6_err)**2.\n + (2.*chi_633*chi_633_err*v3_4)**2. 
+ (chi_633**2.*v3_4_err)**2.\n + (2.*chi_6222_err*chi_633*v23)**2.\n + (2.*chi_6222*chi_633_err*v23)**2.\n + (2.*chi_6222*chi_633*v23_err)**2.)\n return(v6_L, v6_L_err)", "def TDGradientFunction(Prof,x,Trx,rb_spec,abs_spec,dr,inu0,bsrMult,base_T,base_P,r0,lam=[0,0,0,0,0,0]): \n \n iR = Prof['WV Online'].size # range index for a profile into 1D x array\n x2 = np.reshape(x,(iR+1,6))\n xK = x2[0,:] # constants [HSRL Mol HSRL Comb, WV On, WV Off, O2 On ,O2 Off]\n xS = x2[1:,:] # state vector [T, nWV, BSR, phi_HSRL, phi_WV, phi_O2]\n \n grad2 = np.zeros(x2.shape) \n \n #N,dNdB,dNdT = HSRLDerivative(T,BSR,phi,rb_spec,Trx,inu0,K,base_T,base_P)\n HSRL_mol,dHmdB,dHmdT = HSRLDerivative(xS[:,0],xS[:,2],xS[:,3],rb_spec['HSRL'],Trx['HSRL Mol'],inu0['HSRL'],xK[0],base_T,base_P)\n HSRL_comb,dHcdB,dHcdT = HSRLDerivative(xS[:,0],xS[:,2],xS[:,3],rb_spec['HSRL'],Trx['HSRL Comb'],inu0['HSRL'],xK[1],base_T,base_P)\n \n # N,dNdB,dNdnWV,dNdT = WVDIALDerivative(T,nWV,BSR,phi,rb_spec,abs_spec,Trx,inu0,K,base_T,base_P,dr)\n WV_on,dWVndB,dWVndnWV,dWVndT = WVDIALDerivative(xS[:,0],xS[:,1],xS[:,2]+bsrMult['WV'],xS[:,4],rb_spec['WV Online'],abs_spec['WV Online'],Trx['WV Online'],inu0['WV Online'],xK[2],base_T,base_P,dr,r0)\n WV_off,dWVfdB,dWVfdnWV,dWVfdT = WVDIALDerivative(xS[:,0],xS[:,1],xS[:,2]+bsrMult['WV'],xS[:,4],rb_spec['WV Offline'],abs_spec['WV Offline'],Trx['WV Offline'],inu0['WV Offline'],xK[3],base_T,base_P,dr,r0) \n \n # N,dNdB,dNdnWV,dNdT = O2DIALDerivative(T,nWV,BSR,phi,rb_spec,abs_spec,Trx,inu0,K,base_T,base_P,dr)\n O2_on,dO2ndB,dO2ndnWV,dO2ndT = O2DIALDerivative(xS[:,0],xS[:,1],xS[:,2]+bsrMult['O2'],xS[:,5],rb_spec['O2 Online'],abs_spec['O2 Online'],Trx['O2 Online'],inu0['O2 Online'],xK[4],base_T,base_P,dr,r0)\n O2_off,dO2fdB,dO2fdnWV,dO2fdT = O2DIALDerivative(xS[:,0],xS[:,1],xS[:,2]+bsrMult['O2'],xS[:,5],rb_spec['O2 Offline'],abs_spec['O2 Offline'],Trx['O2 Offline'],inu0['O2 Offline'],xK[5],base_T,base_P,dr,r0)\n \n# HSRLModel,dHSdB,dHSdT = HSRLProfileRatioDeriv(xS[:,0],P,xS[:,2], \\\n# Trx['HSRL Mol'],Trx['HSRL Comb'], \\\n# rb_spec['HSRL'],inu0['HSRL'],GainRatio=xK[0])\n#\n# WVModel,dWVdB,dWVdnWV,dWVdT = WaterVaporProfileRatioDeriv(xS[:,0],P,xS[:,1],xS[:,2]*bsrMult['WV'],\n# Trx['WV Online'], Trx['WV Offline'], \\\n# rb_spec['WV Online'],rb_spec['WV Offline'], \\\n# abs_spec['WV Online'],abs_spec['WV Offline'],dr, \\\n# inu0['WV Online'],inu0['WV Offline'],GainRatio=xK[1])\n# \n# O2Model,dO2dB,dO2dnWV,dO2dT = OxygenProfileRatioDeriv(xS[:,0],P,xS[:,1],xS[:,2]*bsrMult['O2'],\n# Trx['O2 Online'], Trx['O2 Offline'], \\\n# rb_spec['O2 Online'],rb_spec['O2 Offline'], \\\n# abs_spec['O2 Online'],abs_spec['O2 Offline'],dr, \\\n# inu0['O2 Online'],inu0['O2 Offline'],GainRatio=xK[2])\n \n HSRLmolBase = 1-(Prof['HSRL Mol'])/(HSRL_mol+Prof['HSRL Mol BG'])\n HSRLcombBase = 1-(Prof['HSRL Comb'])/(HSRL_comb+Prof['HSRL Comb BG'])\n WVonBase = 1-(Prof['WV Online'])/(WV_on+Prof['WV Online BG'])\n WVoffBase = 1-(Prof['WV Offline'])/(WV_off+Prof['WV Offline BG'])\n O2onBase = 1-(Prof['O2 Online'])/(O2_on+Prof['O2 Online BG'])\n O2offBase = 1-(Prof['O2 Offline'])/(O2_off+Prof['O2 Offline BG'])\n \n \n# HSRLbase = 2*(HSRLModel-Prof['HSRL'])/ProfVar['HSRL']\n# WVbase = 2*(WVModel-Prof['WV'])/ProfVar['WV']\n# O2base = 2*(O2Model-Prof['O2'])/ProfVar['O2']\n \n # temperature gradient\n grad2[1:,0] = np.nansum(HSRLmolBase[np.newaxis]*dHmdT,axis=1) \\\n + np.nansum(HSRLcombBase[np.newaxis]*dHcdT,axis=1) \\\n + np.nansum(WVonBase[np.newaxis]*dWVndT,axis=1) \\\n + np.nansum(WVoffBase[np.newaxis]*dWVfdT,axis=1) \\\n + 
np.nansum(O2onBase[np.newaxis]*dO2ndT,axis=1) \\\n + np.nansum(O2offBase[np.newaxis]*dO2fdT,axis=1)\n# # piece wise penalty function \n# gradpen = lam[0]*np.sign(np.diff(xS[:,0]))\n# gradpen[np.nonzero(np.isnan(gradpen))] = 0\n# grad2[2:,0] = grad2[2:,0] + gradpen\n# grad2[1:-1,0] = grad2[1:-1,0] - gradpen\n# piece wise slope penalty function \n gradpen = lam[0]*np.sign(np.diff(np.diff(xS[:,0])))\n gradpen[np.nonzero(np.isnan(gradpen))] = 0\n grad2[3:,0] = grad2[3:,0] + gradpen\n grad2[2:-1,0] = grad2[2:-1,0] - 2*gradpen\n grad2[1:-2,0] = grad2[1:-2,0] + gradpen\n \n # water vapor gradient\n grad2[1:,1] = np.nansum(WVonBase[np.newaxis]*dWVndnWV,axis=1) \\\n + np.nansum(WVoffBase[np.newaxis]*dWVfdnWV,axis=1) \\\n + np.nansum(O2onBase[np.newaxis]*dO2ndnWV,axis=1) \\\n + np.nansum(O2offBase[np.newaxis]*dO2fdnWV,axis=1)\n # piecewise penalty function\n gradpen = lam[1]*np.sign(np.diff(xS[:,1]))\n gradpen[np.nonzero(np.isnan(gradpen))] = 0\n grad2[2:,1] = grad2[2:,1] + gradpen\n grad2[1:-1,1] = grad2[1:-1,1] - gradpen\n \n # backscatter gradient\n grad2[1:,2] = np.nansum(HSRLmolBase[np.newaxis]*dHmdB,axis=1) \\\n + np.nansum(HSRLcombBase[np.newaxis]*dHcdB,axis=1) \\\n + np.nansum(WVonBase[np.newaxis]*dWVndB,axis=1) \\\n + np.nansum(WVoffBase[np.newaxis]*dWVfdB,axis=1) \\\n + np.nansum(O2onBase[np.newaxis]*dO2ndB,axis=1) \\\n + np.nansum(O2offBase[np.newaxis]*dO2fdB,axis=1) \n# #piecewise penalty function\n# gradpen = lam[2]*np.sign(np.diff(xS[:,2]))\n# gradpen[np.nonzero(np.isnan(gradpen))] = 0\n# grad2[2:,2] = grad2[2:,2] + gradpen\n# grad2[1:-1,2] = grad2[1:-1,2] - gradpen\n \n\n # *bsrMult['WV']\n # *bsrMult['WV']\n # *bsrMult['O2']\n # *bsrMult['O2']\n\n # HSRL Common terms\n grad2[1:,3] = np.nansum(HSRLmolBase[np.newaxis]*HSRL_mol,axis=0) + np.nansum(HSRLcombBase[np.newaxis]*HSRL_comb,axis=0)\n# # piece wise penalty function \n# gradpen = lam[3]*np.sign(np.diff(xS[:,3]))\n# gradpen[np.nonzero(np.isnan(gradpen))] = 0\n# grad2[2:,3] = grad2[2:,3] + gradpen\n# grad2[1:-1,3] = grad2[1:-1,3] - gradpen\n \n # WV Common terms\n grad2[1:,4] = np.nansum(WVonBase[np.newaxis]*WV_on,axis=0) + np.nansum(WVoffBase[np.newaxis]*WV_off,axis=0)\n# # piece wise penalty function \n# gradpen = lam[4]*np.sign(np.diff(xS[:,4]))\n# gradpen[np.nonzero(np.isnan(gradpen))] = 0\n# grad2[2:,4] = grad2[2:,4] + gradpen\n# grad2[1:-1,4] = grad2[1:-1,4] - gradpen\n \n # O2 Common terms\n grad2[1:,5] = np.nansum(O2onBase[np.newaxis]*O2_on,axis=0) + np.nansum(O2offBase[np.newaxis]*O2_off,axis=0)\n# # piece wise penalty function \n# gradpen = lam[5]*np.sign(np.diff(xS[:,5]))\n# gradpen[np.nonzero(np.isnan(gradpen))] = 0\n# grad2[2:,5] = grad2[2:,5] + gradpen\n# grad2[1:-1,5] = grad2[1:-1,5] - gradpen\n\n grad2[0,0] = np.nansum(HSRLmolBase*HSRL_mol/xK[0])\n grad2[0,1] = np.nansum(HSRLcombBase*HSRL_comb/xK[1])\n grad2[0,2] = np.nansum(WVonBase*WV_on/xK[2])\n grad2[0,3] = np.nansum(WVoffBase*WV_off/xK[3])\n grad2[0,4] = np.nansum(O2onBase*O2_on/xK[4])\n grad2[0,5] = np.nansum(O2offBase*O2_off/xK[5])\n \n# grad2[0,1] = np.nansum(WVbase*WVModel/xK[1])\n# grad2[0,2] = np.nansum(O2base*O2Model/xK[2])\n \n# OptError = np.nansum(2*(HSRLModel-Prof['HSRL'])/ProfVar['HSRL']*) \\\n# +np.nansum((WVModel-Prof['WV'])**2/ProfVar['WV']) \\\n# +np.sum((O2Model-Prof['O2'])**2/ProfVar['O2'])\n \n return grad2.flatten()", "def test_uv_degrid_gaussian_kernel():\n\n layout = read_layout(layout_path=f\"{test_data}/test_mwa.txt\")\n xyz = enh_xyz(layout=layout, latitude=mwa_geo.latitude.radians)\n uvw = xyz_uvw(xyz=xyz, freq=freq, 
dec0=mwa_geo.latitude.radians, ha0=0)\n uv = uv_degrid(\n max_lambda=1400, nside=20, uvw=uvw, sigma=3, kersize=21, kernel=\"gaussian\"\n )\n\n assert uv.shape == (20, 20)\n assert uv[0, 0] == 1.295932713086053e-05", "def compute_FD(mvm_orig,convert_rot = False,radius = 50):\n\n # if needed, convert rotation parameters to mm\n if convert_rot:\n mvm = np.zeros(mvm_orig.shape) #initialize an empty array\n mvm[:,:3] = mvm_orig[:,:3] #translation parameters stay the same\n mvm[:,3:] = mvm_orig[:,3:]*(2*radius*np.pi/360) #rotation params are converted\n else:\n mvm = mvm_orig\n\n # take original movement parameters, demean and detrend\n ddt_mvm = np.diff(mvm,axis=0)\n ddt_mvm = np.vstack((np.zeros((1,6)),ddt_mvm)) #0 pad to make same shape; by def first val is 0\n\n #compute FD\n FD=np.sum(np.abs(ddt_mvm),axis=1)\n\n # return output values\n return mvm, ddt_mvm, FD", "def MC_step(particles,chosen_one,dr,R_cut,v):\n \n #%%Calculate the difference in energy\n other_particles = np.delete(particles,chosen_one,0)\n old_particle = particles[chosen_one]\n new_particle = copy.copy(old_particle) # otherwise I move it anyway\n new_particle[:MC_par['dim']] = (old_particle[:MC_par['dim']] + dr) % MC_par['L_box']\n #%% Apply periodic Boundary conditions and exclude particles outside R_cut\n # Particles at a distance > R_cut don't contribute to the energy\n dE = 0\n #%%\n if(MC_par['charge']):\n for charge_prod in ['same','opp']:\n #%%\n sel_other_particles = other_particles[other_particles[:,-1] * old_particle[-1] == word2sign[charge_prod]]\n old_distances = calc_distances(sel_other_particles[:,:MC_par['dim']],old_particle[:MC_par['dim']],R_cut) \n new_distances = calc_distances(sel_other_particles[:,:MC_par['dim']],new_particle[:MC_par['dim']],R_cut)\n old_histo,bins = np.histogram(old_distances, bins = v_bin)\n new_histo,bins = np.histogram(new_distances, bins = v_bin)\n dE += np.sum( (new_histo-old_histo) * v[charge_prod] )\n #%%\n else:\n old_distances = calc_distances(other_particles,old_particle,R_cut) \n new_distances = calc_distances(other_particles,new_particle,R_cut)\n old_histo,bins = np.histogram(old_distances, bins = v_bin)\n new_histo,bins = np.histogram(new_distances, bins = v_bin)\n dE += np.sum((new_histo-old_histo)*v['unsigned'])\n #%%dE = np.sum(potential(new_distances)) - np.sum(potential(old_distances))\n #Accept or decline the movement\n acc_prob = np.min([1,np.exp(-dE)])\n if np.random.rand() < acc_prob:\n # perform the movement\n particles[chosen_one] = new_particle\n move = 1\n else:\n # Decline\n move = 0\n dE = 0\n\n return dE,move", "def test_realistic_max_dose(self):\n\n # min and max dose can only really hope to be within half a bin width\n\n for struct, data in self.test_structs.items():\n dvh = DVH(data[\"doses\"], data[\"volumes\"])\n diff = dvh.max_dose - data[\"monaco_dvh_max_dose\"]\n self.assertLessEqual(abs(diff), 5.)", "def method1(self):\n cres=0. # Variable for storing Chern number.\n # The U matrices from Fukui's method; storage...\n Ux=np.zeros((self.kS.Nx+1,self.kS.Ny+1),dtype=complex)\n Uy=np.zeros((self.kS.Nx+1,self.kS.Ny+1),dtype=complex)\n \n # ... 
and calculation of U matrices\n for ix in range(self.kS.Nx+1):\n for iy in range(self.kS.Ny+1):\n mat1=self.alleigvecs[:,:,ix ,iy ]\n if ix<self.kS.Nx:\n mat2=self.alleigvecs[:,:,ix+1,iy ]\n else:\n mat2=self.alleigvecs[:,:,1 ,iy ]\n if iy<self.kS.Ny:\n mat3=self.alleigvecs[:,:,ix ,iy+1]\n else:\n mat3=self.alleigvecs[:,:,ix ,1 ]\n Ux[ix,iy]=np.linalg.det(np.dot(np.conj(mat1.T),mat2)[:self.NL,:self.NL])\n Uy[ix,iy]=np.linalg.det(np.dot(np.conj(mat1.T),mat3)[:self.NL,:self.NL])\n \n # Local estimates of Berry curvature; storage ...\n ftempall=np.zeros((self.kS.Nx,self.kS.Ny),complex)\n # ... and calculation\n for ix in range(self.kS.Nx):\n for iy in range(self.kS.Ny):\n ftemp=np.log(Ux[ix,iy]*Uy[ix+1,iy]/Ux[ix,iy+1]/Uy[ix,iy])\n ftempall[ix,iy]=ftemp # ... of local Berry curvature ...\n cres+=ftemp/2./pi/1j # ... and of Berry phase (Chern number).\n\n return cres.real, ftempall", "def A99_BC_V(Teff, FeH):\n X = np.ravel(np.log10(Teff) - 3.52); FeH = np.ravel(FeH)\n # Equations 17 and 18\n BC17 = -5.531e-2/X - 0.6177 + 4.420*X - 2.669*X**2. + 0.6943*X*FeH - 0.1071*FeH - 8.612e-3*FeH**2.\n BC18 = -9.930e-2/X + 2.887e-2 + 2.275*X - 4.425*X**2. + 0.3505*X*FeH - 5.558e-2*FeH - 5.375e-3*FeH**2\n BC = BC17.copy()\n ii = np.log10(Teff) >= 3.65\n BC[ii] = BC18[ii]\n return BC" ]
[ "0.6110026", "0.5988009", "0.5985353", "0.5741001", "0.5596539", "0.5589506", "0.55479646", "0.5542845", "0.55302805", "0.5492529", "0.5491629", "0.54801345", "0.5469861", "0.54658246", "0.54637504", "0.54476047", "0.5442807", "0.543293", "0.5415737", "0.5393574", "0.53840727", "0.5382802", "0.537793", "0.53678936", "0.53610164", "0.53528124", "0.53499603", "0.53430337", "0.53276354", "0.5318927", "0.53188264", "0.53134847", "0.5307181", "0.53002185", "0.5299309", "0.5281984", "0.5260962", "0.52581936", "0.5247731", "0.5243564", "0.52413577", "0.5236462", "0.5231547", "0.52258176", "0.5219103", "0.5218155", "0.5212286", "0.52100796", "0.5205764", "0.5197318", "0.51956075", "0.51927364", "0.51911473", "0.51878035", "0.5179993", "0.5174075", "0.51653653", "0.5163811", "0.51606745", "0.51525205", "0.51525205", "0.5145524", "0.51376384", "0.51364547", "0.5135736", "0.51348007", "0.51346695", "0.5125735", "0.51209974", "0.511888", "0.51087683", "0.51059693", "0.51059234", "0.51051384", "0.51001", "0.5095037", "0.50939465", "0.5090627", "0.5089715", "0.5082392", "0.5082392", "0.50807065", "0.5080033", "0.50781256", "0.50747645", "0.5074087", "0.50709015", "0.5066003", "0.5065716", "0.5064309", "0.5060172", "0.50592047", "0.50569636", "0.5055836", "0.5047182", "0.50464386", "0.50438666", "0.5043525", "0.5041699", "0.5041641" ]
0.59492534
3
plot a Cepheid at its reddened position on the HR diagram (assume that deredden_cepheids() has been used)
def plot_dereddening():
    extinction_coefficients = {'2365-2764-1': np.array([0.2622, 0.844]), '4109-638-1': np.array([0.0524, 0.1576]),
                               '2058-56-1': np.array([0.0751, 0.248]), '3642-2459-1': np.array([0.1907, 0.608]),
                               '3999-1391-1': np.array([0.3911, 1.2480]), '2607-1448-1': np.array([0.0430, 0.1310])}
    cepheids = {'2365-2764-1': np.array([0.959, 2.09]), '4109-638-1': np.array([0.705, 2.385]),
                '2058-56-1': np.array([1.222, 1.333]), '3642-2459-1': np.array([1.088, 2.0518]),
                '3999-1391-1': np.array([1.360, 1.2567]), '2607-1448-1': np.array([1.484, 0.6963])}
    periods = {'2365-2764-1': 1.61, '4109-638-1': 15.31, '2058-56-1': 63.08, '3642-2459-1': 1.86,
               '3999-1391-1': 24.98, '2607-1448-1': 8.54}
    max_periods = max(periods.values())
    new_positions_bv_mv = []  # in M_V vs B-V space
    colors = []
    theoretical_position = []
    for obj in extinction_coefficients.keys():
        # new_positions_bv_mv.append(cepheids[obj]-extinction_coefficients[obj])
        new_positions_bv_mv.append(cepheids[obj])
        colors.append(periods[obj]/max_periods)
        theoretical_position.append(-2.78*np.log10(periods[obj])-1.35)
    for pos in range(len(new_positions_bv_mv)):
        plt.scatter(new_positions_bv_mv[pos][0], new_positions_bv_mv[pos][1], marker='^', facecolor='w', s=40)
        plt.scatter(new_positions_bv_mv[pos][0], theoretical_position[pos], marker='o', facecolor='r', s=50)
    return new_positions_bv_mv, colors
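A minimal usage sketch (an editorial addition, not part of the original record). It assumes numpy is imported as np and matplotlib.pyplot as plt, which the function body above already relies on at call time; the axis labels and the inverted y-axis are assumptions based on the "in M_V vs B-V space" comment and the usual HR-diagram convention, not on anything stated in the source.

import numpy as np
import matplotlib.pyplot as plt

positions, colors = plot_dereddening()   # white triangles: catalog positions; red dots: P-L prediction
plt.xlabel('B-V (mag)')                  # assumed label: B-V color on the x axis
plt.ylabel('M_V (mag)')                  # assumed label: absolute V magnitude on the y axis
plt.gca().invert_yaxis()                 # assumed convention: brighter stars (smaller M_V) plotted upward
plt.show()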
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_1d_path(self):\n\n fig = plt.figure(figsize=(8,5))\n \n matches = (self.a_scale == self.c_scale)\n plt.plot(self.a_scale[matches], self.E_coh[matches])\n plt.xlabel('linear deformation coefficient: 0=fcc, 1=bcc')\n plt.ylabel('Cohesive energy (eV/atom)')\n \n return fig", "def plot_hr_diag(hr_df, x='B_V', y='M_V', cutoff=0.2, bvcutoff=0.05):\n plt.figure(figsize=(11., 10.))\n print \"Plotting background stars..\"\n plt.set_cmap('gray_r')\n plt.hist2d(hr_df[x].tolist(), hr_df[y].tolist(), (200, 200), norm=LogNorm(), cmin=10)\n plt.axis([-0.2, 2.35, -3., 7.])\n plt.gca().invert_yaxis()\n plt.xlabel(r'$BT-VT$ (mag)')\n plt.ylabel(r'$M_{VT}$ (mag)') # Plotting M_{VT}\n plt.title(r'$\\sigma_\\pi / \\pi < %s, \\sigma_{BT-VT}< %s$ mag' % (cutoff, bvcutoff))\n print \"..Done\"\n return", "def fig_coh_ph(coh, ph, direc):\n\n colors = plt.cm.cividis(np.linspace(0, 1, coh.shape[0]))\n\n if coh.ndim > 1:\n f, (ax1, ax2) = plt.subplots(1, 2)\n for i, (co, p) in enumerate(zip(coh, ph)):\n ax1.plot(direc, co, c=colors[i])\n ax2.plot(direc, p*180./np.pi, c=colors[i])\n ax1.set_ylabel('Coherence')\n ax1.set_ylim((0, 1.))\n ax2.set_ylabel('Phase')\n ax1.set_xlabel('Angle from H1')\n ax2.set_xlabel('Angle from H1')\n plt.tight_layout()\n\n else:\n plt.figure()\n plt.subplot(121)\n plt.plot(direc, coh, c=colors[0])\n plt.ylim((0, 1.))\n plt.subplot(122)\n plt.plot(direc, ph*180./np.pi, c=colors[0])\n plt.tight_layout()\n\n return plt", "def plotEig(mu, C, axis):\n Q,S = decomposeCov(C)\n axis.arrow(mu[0,0], mu[1,0], .15*Q[0,0], .15*Q[1,0])\n axis.arrow(mu[0,0], mu[1,0], .05*Q[0,1], .05*Q[1,1])", "def plotDihedralEnergy(self, phys, forces, step): \r\n self.plotQuantity(step, phys.app.energies.getTable(4), 'dihedralenergy')", "def plot(self, c='k'):\n plt.plot(self.geometry.convex_hull.exterior.xy[0], self.geometry.convex_hull.exterior.xy[1], c)\n plt.axis('equal')", "def Diag(Fprime, Cprime, E):\n #\n import math\n # Angle for heteronuclear diatonic\n Theta = 0.5 * math.atan(2.0 * Fprime[0, 1] / (Fprime[0, 0] - Fprime[1, 1]))\n # print('Theta', Theta)\n\n Cprime[0, 0] = np.cos(Theta)\n Cprime[1, 0] = np.sin(Theta)\n Cprime[0, 1] = np.sin(Theta)\n Cprime[1, 1] = -np.cos(Theta)\n\n E[0, 0] = Fprime[0, 0] * np.cos(Theta) ** 2 + Fprime[1, 1] * np.sin(Theta) ** 2 + Fprime[0, 1] * np.sin(2.0 * Theta)\n E[1, 1] = Fprime[1, 1] * np.cos(Theta) ** 2 + Fprime[0, 0] * np.sin(Theta) ** 2 - Fprime[0, 1] * np.sin(2.0 * Theta)\n\n if (E[1, 1] <= E[0, 0]):\n Temp = E[1, 1]\n E[1, 1] = E[0, 0]\n E[0, 0] = Temp\n Temp = Cprime[0, 1]\n Cprime[0, 1] = Cprime[0, 0]\n Cprime[0, 0] = Temp\n Temp = Cprime[1, 1]\n Cprime[1, 1] = Cprime[1, 0]\n Cprime[1, 0] = Temp\n return", "def dendogram(self):\r\n \r\n plt.figure(figsize=(20, 7))\r\n dendrogram = sch.dendrogram(sch.linkage(self.X, method='ward'))\r\n plt.title(\"Dendograms\")\r\n plt.axhline(linestyle='--', y=5) \r\n plt.show()", "def draw_rh_lines(data):\n #hnd = extract_right_hand(data);\n hnd = np.array(data['crop']);\n hand.draw_hand_lines(hnd,data['rhkpss'][data['i']]);\n return hnd;", "def plot_dmd(self):\n n_modes = 10\n U = self.uf\n # put the decomposition axis last\n UT = U.transpose(0, 2, 1)\n # create the matrix of snapshots by flattening the non\n # decomp axes so we have a 2d array where we index the\n # decomp axis like snapshots[:,i]\n snapshots = UT.reshape((-1, UT.shape[-1]))\n\n # remove nans\n # TODO: remove nans by interpolation earlier on\n snapshots[np.where(np.isnan(snapshots))] = 0\n\n modes, ritz_values, norms \\\n = 
mr.compute_DMD_matrices_snaps_method(snapshots, range(n_modes))\n\n # as array, reshape to data dims\n reshaped_modes = modes.A.T.reshape((-1,) + UT.shape[:-1])\n\n fig, ax = plt.subplots(nrows=3)\n c0 = self.mean_velocity_Uf(ax[0])\n\n ax[1].set_title('First mode of DMD')\n ax[1].set_xlabel('time after front passage')\n ax[1].set_ylabel('height')\n c1 = ax[1].contourf(reshaped_modes[0], 100)\n\n ax[2].set_title('Second mode of DMD')\n ax[2].set_xlabel('time after front passage')\n ax[2].set_ylabel('height')\n # TODO: why does reshaped_modes seem to have a list of\n # duplicates?\n # Seems to be complex conjugates - why is this??\n c2 = ax[2].contourf(reshaped_modes[2], 100, levels=c1.levels)\n\n fig.colorbar(c0, ax=ax[0], use_gridspec=True)\n fig.colorbar(c1, ax=ax[1], use_gridspec=True)\n fig.colorbar(c2, ax=ax[2], use_gridspec=True)\n\n fig.tight_layout()\n\n return fig", "def plot_eccentricity(self, z=0):\n p = figure(\n title=\"Cut in plane Z=\" + str(z),\n x_axis_label=\"X axis\",\n y_axis_label=\"Y axis\",\n )\n for j in range(0, self.ntheta):\n p.circle(self.xre[z][j], self.yre[z][j], color=\"red\")\n p.circle(self.xri[z][j], self.yri[z][j], color=\"blue\")\n p.circle(0, 0, color=\"blue\")\n p.circle(self.xi, self.yi, color=\"red\")\n p.circle(0, 0, color=\"black\")\n return p", "def plot_phase_diagram(self):\n t_max = np.log(max(self.temperatures))\n d_min = np.log(min(self.distortions))\n y_axis = [np.log(i) - d_min for i in self.distortions]\n x_axis = [t_max - np.log(i) for i in self.temperatures]\n\n plt.figure(figsize=(12, 9))\n plt.plot(x_axis, y_axis)\n\n region = {}\n for i, c in list(enumerate(self.n_eff_clusters)):\n if c not in region:\n region[c] = {}\n region[c]['min'] = x_axis[i]\n region[c]['max'] = x_axis[i]\n for c in region:\n if c == 0:\n continue\n plt.text((region[c]['min'] + region[c]['max']) / 2, 0.2,\n 'K={}'.format(c), rotation=90)\n plt.axvspan(region[c]['min'], region[c]['max'], color='C' + str(c),\n alpha=0.2)\n plt.title('Phases diagram (log)')\n plt.xlabel('Temperature')\n plt.ylabel('Distortion')\n plt.show()", "def _plot(self, rewards, losses, epsilons):\n plt.figure(figsize=(20,5))\n plt.subplot(131)\n plt.title('Episodic Reward')\n plt.plot(rewards)\n plt.subplot(132)\n plt.title('TD Loss')\n plt.plot(losses)\n plt.subplot(133)\n plt.title('Epsilon')\n plt.plot(epsilons)\n plt.tight_layout()\n plt.show()", "def showCl(ell,temps,title='CAMB ISWout power spectrum'):\n plt.plot(ell,temps*ell*(ell+1)/(2*np.pi) *1e12) #1e12 to convert to microK**2\n plt.xlabel('multipole moment l')\n plt.ylabel('l(l+1)C_l/(2pi) [microK**2]')\n plt.title(title)\n plt.show()", "def show_dcr_results(dg):\n cycle = dg.fileDB['cycle'].values[0]\n df_dsp = pd.read_hdf(f'./temp_{cycle}.h5', 'opt_dcr')\n # print(df_dsp.describe()) \n\n # compare DCR and A/E distributions\n fig, (p0, p1) = plt.subplots(2, 1, figsize=(8, 8))\n \n elo, ehi, epb = 0, 25000, 100\n \n # aoe distribution\n # ylo, yhi, ypb = -1, 2, 0.1\n # ylo, yhi, ypb = -0.1, 0.3, 0.005\n ylo, yhi, ypb = 0.05, 0.08, 0.0005\n nbx = int((ehi-elo)/epb)\n nby = int((yhi-ylo)/ypb)\n h = p0.hist2d(df_dsp['trapEmax'], df_dsp['aoe'], bins=[nbx,nby],\n range=[[elo, ehi], [ylo, yhi]], cmap='jet',\n norm=LogNorm())\n # p0.set_xlabel('Energy (uncal)', ha='right', x=1)\n p0.set_ylabel('A/E', ha='right', y=1)\n\n # dcr distribution\n # ylo, yhi, ypb = -20, 20, 1 # dcr_raw\n # ylo, yhi, ypb = -5, 2.5, 0.1 # dcr = dcr_raw / trapEmax\n # ylo, yhi, ypb = -3, 2, 0.1\n ylo, yhi, ypb = 0.9, 1.08, 0.001\n ylo, yhi, ypb = 1.034, 1.0425, 
0.00005 # best for 64.4 us pz\n # ylo, yhi, ypb = 1.05, 1.056, 0.00005 # best for 50 us pz\n # ylo, yhi, ypb = 1.016, 1.022, 0.00005 # best for 100 us pz\n nbx = int((ehi-elo)/epb)\n nby = int((yhi-ylo)/ypb)\n h = p1.hist2d(df_dsp['trapEmax'], df_dsp['dcr'], bins=[nbx,nby],\n range=[[elo, ehi], [ylo, yhi]], cmap='jet',\n norm=LogNorm())\n p1.set_xlabel('Energy (uncal)', ha='right', x=1)\n p1.set_ylabel('DCR', ha='right', y=1)\n \n # plt.show()\n plt.savefig(f'./plots/dcr_cyc{cycle}.png', dpi=300)\n plt.cla()", "def scree_plot(self, ev):\n plt.scatter(range(1,len(ev)+1), ev)\n plt.plot(range(1,len(ev)+1), ev)\n plt.title(\"Scree Plot\")\n plt.xlabel(\"Factors\")\n plt.ylabel(\"Eigenvalue\")\n plt.grid()\n plt.show()", "def display_energy_levels_0d(diagram, num_atoms, atoms, h_poly):\n h = eval_hamiltonian(num_atoms, h_poly, (1, 1))\n\n e, v = eigensystem(h)\n\n left = 0\n bottom = 0\n right = max([len(row) for row in diagram.split('\\n')])\n top = len(diagram.split('\\n'))\n\n plot_rows = numpy.ceil(math.sqrt(num_atoms+1))\n plot_cols = plot_rows\n\n for i in range(num_atoms):\n matplotlib.pyplot.subplot(plot_rows, plot_cols, i+1, axisbg=\"#000000\")\n y = [atom[0] for atom in atoms]\n x = [atom[1] for atom in atoms]\n c = numpy.abs(v[i]*v[i])\n\n matplotlib.pyplot.title('E = %f' % numpy.real(e[i]), fontsize = 10)\n norm = matplotlib.colors.Normalize(vmin = min(c),\n vmax = max(0.0001, max(c)))\n #x = [0,0,1,1]\n #y = [0,1,0,1]\n #c = [1,2,3,4]\n matplotlib.pyplot.hexbin(x, y, C = c,\n gridsize = (right-left, top-bottom),\n extent = (left, right, bottom, top),\n cmap = matplotlib.pyplot.get_cmap(\"gray\"),\n norm = norm\n )\n\n matplotlib.pyplot.subplot(plot_rows, plot_cols, num_atoms+1)\n matplotlib.pyplot.scatter(num_atoms*[0], e, s = 0.1)", "def rhombic_dodecahedron(self):\n v = [ [1, 1, 1], [1, 1, -1], [1, -1, 1], [1, -1, -1], [-1, 1, 1], \n [-1, 1, -1], [-1, -1, 1], [-1, -1, -1], [0, 0, 2], [0, 2, 0],\n [2, 0, 0], [0, 0, -2], [0, -2, 0], [-2, 0, 0] ]\n return Polyhedron(vertices = v)", "def energy_kde_paperplot(fields,df):\n plt.figure()\n i = 0\n colorList = ['dodgerblue','tomato']\n lw = 2\n\n meanE_2 = []\n meanE_3 = []\n mup = np.min(df['energy [eV]']) - pp.mu\n chi_0 = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '2_' + \"E_{:.1e}.npy\".format(fields[0]))\n g_en_axis, _, _, _, _, _, _, _, _, _, _, _, _, _ = \\\n occupation_plotter.occupation_v_energy_sep(chi_0, df['energy [eV]'].values, df)\n plt.plot(g_en_axis - np.min(df['energy [eV]']), np.zeros(len(g_en_axis)), '-', color='black', lineWidth=lw,label='Equilibrium')\n\n for ee in fields:\n chi_2_i = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '2_' + \"E_{:.1e}.npy\".format(ee))\n # meanE_2 = utilities.mean_energy(chi_2_i,df)\n g_en_axis, g_ftot, g_chiax, g_f0ax, _, _, _, _, _, _, _, _,_,_ = \\\n occupation_plotter.occupation_v_energy_sep(chi_2_i, df['energy [eV]'].values, df)\n plt.plot(g_en_axis - np.min(df['energy [eV]']), g_chiax,'--',color = colorList[i],lineWidth=lw,label=r'Low Field {:.0f} '.format(ee/100)+r'$V \\, cm^{-1}$')\n print(integrate.trapz(g_chiax,g_en_axis))\n\n # plt.plot(meanE_2-np.min(df['energy [eV]']),0,'.')\n chi_3_i = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '3_' + \"E_{:.1e}.npy\".format(ee))\n g_en_axis, g_ftot, g_chiax, g_f0ax, _, _, _, _, _, _, _, _,_,_ = \\\n occupation_plotter.occupation_v_energy_sep(chi_3_i, df['energy [eV]'].values, df)\n plt.plot(g_en_axis - np.min(df['energy [eV]']), g_chiax,color = colorList[i],lineWidth=lw,label=r'Full Drift {:.0f} '.format(ee/100)+r'$V \\, cm^{-1}$')\n 
print(integrate.trapz(g_chiax,g_en_axis))\n\n i = i + 1\n # plt.plot(g_en_axis - np.min(df['energy [eV]']), g_f0ax, '--', color='black', lineWidth=lw,label=r'$f_0$')\n\n plt.legend()\n # plt.ylim([-0.02, 0.015])\n plt.xlabel(r'Energy above CBM ($eV$)')\n plt.ylabel(r'Deviational occupation $\\delta f_{\\mathbf{k}}$ (norm.)')\n # plt.ylabel(r'$\\delta f_{\\mathbf{k}}/f_{\\mathbf{k}}^0$')\n plt.savefig(pp.figureLoc+'energy_KDE.png', bbox_inches='tight',dpi=600)\n\n plt.figure()\n plt.plot(g_en_axis,g_chiax)\n\n plt.figure()\n Z, xedges, yedges = np.histogram2d(df['kx [1/A]']*chi_3_i,df['ky [1/A]']*chi_3_i)\n plt.pcolormesh(xedges, yedges, Z.T)\n\n from scipy.stats.kde import gaussian_kde\n g_inds,_,_ = utilities.gaas_split_valleys(df,False)\n g_df = df.loc[g_inds]\n\n x = g_df['kx [1/A]']*(chi_3_i[g_inds]+g_df['k_FD'])\n y = g_df['ky [1/A]']*(chi_3_i[g_inds]+g_df['k_FD'])\n\n # y = g_df['energy [eV]']*(chi_3_i[g_inds]+g_df['k_FD'])\n k = gaussian_kde(np.vstack([x, y]))\n xi, yi = np.mgrid[x.min():x.max():x.size ** 0.5 * 1j, y.min():y.max():y.size ** 0.5 * 1j]\n zi = k(np.vstack([xi.flatten(), yi.flatten()]))\n\n fig = plt.figure(figsize=(7, 8))\n ax1 = fig.add_subplot(211)\n ax2 = fig.add_subplot(212)\n\n # alpha=0.5 will make the plots semitransparent\n ax1.pcolormesh(xi, yi, zi.reshape(xi.shape), alpha=0.5)\n ax2.contourf(xi, yi, zi.reshape(xi.shape), alpha=0.5)\n\n ax1.set_xlim(x.min(), x.max())\n ax1.set_ylim(y.min(), y.max())\n ax2.set_xlim(x.min(), x.max())\n ax2.set_ylim(y.min(), y.max())", "def interactive_hess(gr,g):\n def plot(size=100):\n fig,ax = plt.subplots()\n fig.set_size_inches(8,6)\n ax.hexbin(gr, g, gridsize=size, bins='log', cmap='inferno', label=\"Relative stellar density\")\n ax.set_title(\"HESS DIAGRAM, gridsize={0:d}\".format(size), fontsize = 15)\n ax.set_xlabel(r\"$g-r$\",fontsize = 25)\n ax.set_ylabel(r\"$g$\",fontsize = 25)\n ax.legend(loc='upper left')\n ax.set_ylim(ax.get_ylim()[::-1])\n plt.show()\n interact(plot, size=(50,300,1),continuous_update=False);", "def plot_1D_edp(self, start=(-10,25), end=(30,-20), N=100):\n rho = []\n x0, z0 = start\n x1, z1 = end\n xpoints = np.linspace(x0, x1, N)\n zpoints = np.linspace(z0, z1, N)\n for x, z in zip(xpoints, zpoints):\n tmp = self.phase * self.F * np.cos(self.qx*x+self.qz*z)\n dist = np.sqrt((x-x0)**2 + (z-z0)**2)\n rho.append([dist, tmp.sum(axis=0)])\n rho = np.array(rho, float)\n X = rho[:,0]\n Y = rho[:,1]\n plt.figure()\n plt.plot(X, Y)", "def keynesian_cross(T, I, G, C):\n # The data vector to be plotted for production and aggregate expenditure:\n Y_arrey = np.linspace(0,300)\n PE_arrey = (C * (Y_arrey - T) + I + G)\n degree = Y_arrey\n\n # The figure\n fig = plt.figure(figsize=(10,5))\n ax = fig.add_subplot(1,1,1)\n\n ax.plot(Y_arrey, degree, label=\"45-degree line\", color='lightblue',linewidth=3)\n ax.plot(Y_arrey, AD_arrey, label=\"AD=C+I+G+NX\", color='darkorange',linewidth=3)\n\n ax.set_xlabel(\"Y\")\n ax.set_ylabel(\"PE\")\n ax.legend(loc=\"upper left\")\n\n ax.grid()\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.spines['bottom'].set_visible(False)\n ax.spines['left'].set_visible(False)\n return", "def plot_dist(z, dz, om, dom, dist, dh, name, mathname, filename=None):\n # Grid of redshift and matter density values.\n x, y = numpy.meshgrid(z, om)\n pylab.figure(figsize=(5.5,4.5)) \n pylab.imshow(dist/dh, \n extent=(z.min() - dz/2., \n z.max() + dz/2.,\n om.max() + dom/2.,\n om.min() - dom/2., \n ),\n interpolation='nearest',\n aspect = z.max()/om.max(),\n cmap = 
cm.Spectral,\n )\n cb = pylab.colorbar()\n cb.ax.set_ylabel(r'$' + mathname + '/D_H$')\n\n pylab.contour(x, y, dist/dh, 10, colors='k')\n pylab.xlim(z.min(), z.max())\n pylab.ylim(om.min(), om.max()) \n pylab.xlabel(\"redshift z\")\n pylab.ylabel(r\"$\\Omega_M = 1 - \\Omega_\\lambda$\")\n pylab.title(name)\n if filename is not None:\n prefix, extension = filename.split('.')\n pylab.savefig(prefix + '_' + mathname + '.' + extension,\n bbox_inches=\"tight\")", "def plot_specific_discharge(self, spdis, head=None, kstep=1,\n hstep=1, normalize=False, **kwargs):\n if 'pivot' in kwargs:\n pivot = kwargs.pop('pivot')\n else:\n pivot = 'middle'\n\n if 'ax' in kwargs:\n ax = kwargs.pop('ax')\n else:\n ax = self.ax\n\n if isinstance(spdis, list):\n print(\"Warning: Selecting the final stress period from Specific\"\n \" Discharge list\")\n spdis = spdis[-1]\n\n if self.mg.grid_type == \"structured\":\n ncpl = self.mg.nrow * self.mg.ncol\n\n else:\n ncpl = self.mg.ncpl\n\n nlay = self.mg.nlay\n\n qx = np.zeros((nlay * ncpl))\n qz = np.zeros((nlay * ncpl))\n ib = np.zeros((nlay * ncpl), dtype=bool)\n\n idx = np.array(spdis['node']) - 1\n\n # check that vertex grid cross sections are not arbitrary\n # within a tolerance!\n if self.mg.grid_type != 'structured':\n pts = self.pts\n xuniform = [True if abs(pts.T[0, 0] - i) < 1\n else False for i in pts.T[0]]\n yuniform = [True if abs(pts.T[1, 0] - i) < 1\n else False for i in pts.T[1]]\n if not np.all(xuniform):\n if not np.all(yuniform):\n err_msg = \"plot_specific_discharge does not \" \\\n \"support aribtrary cross sections\"\n raise AssertionError(err_msg)\n\n if self.direction == 'x':\n qx[idx] = spdis['qx']\n elif self.direction == 'y':\n qx[idx] = spdis['qy']\n else:\n err_msg = 'plot_specific_discharge does not ' \\\n 'support arbitrary cross-sections'\n raise AssertionError(err_msg)\n\n qz[idx] = spdis[\"qz\"]\n ib[idx] = True\n\n if self.mg.grid_type == \"structured\":\n qx.shape = (self.mg.nlay, self.mg.nrow, self.mg.ncol)\n qz.shape = (self.mg.nlay, self.mg.nrow, self.mg.ncol)\n ib.shape = (self.mg.nlay, self.mg.nrow, self.mg.ncol)\n\n if isinstance(head, np.ndarray):\n zcentergrid = self.__cls.set_zcentergrid(head)\n else:\n zcentergrid = self.zcentergrid\n\n if nlay == 1:\n x = []\n z = []\n for k in range(nlay):\n for i in range(self.xcentergrid.shape[1]):\n x.append(self.xcentergrid[k, i])\n z.append(0.5 * (zcentergrid[k, i] + zcentergrid[k + 1, i]))\n x = np.array(x).reshape((1, self.xcentergrid.shape[1]))\n z = np.array(z).reshape((1, self.xcentergrid.shape[1]))\n else:\n x = self.xcentergrid\n z = zcentergrid\n\n u = []\n v = []\n ibx = []\n xedge, yedge = self.mg.xyedges\n for k in range(self.mg.nlay):\n u.append(plotutil.cell_value_points(self.xpts, xedge,\n yedge, qx[k, :, :]))\n v.append(plotutil.cell_value_points(self.xpts, xedge,\n yedge, qz[k, :, :]))\n ibx.append(plotutil.cell_value_points(self.xpts, xedge,\n yedge, ib[k, :, :]))\n u = np.array(u)\n v = np.array(v)\n ibx = np.array(ibx)\n x = x[::kstep, ::hstep]\n z = z[::kstep, ::hstep]\n u = u[::kstep, ::hstep]\n v = v[::kstep, ::hstep]\n ib = ibx[::kstep, ::hstep]\n\n # upts and vpts has a value for the left and right\n # sides of a cell. 
Sample every other value for quiver\n u = u[:, ::2]\n v = v[:, ::2]\n ib = ib[:, ::2]\n\n else:\n # kstep implementation for vertex grid\n projpts = {key: value for key, value in self.__cls.projpts.items()\n if (key // ncpl) % kstep == 0}\n\n # set x and z centers\n if isinstance(head, np.ndarray):\n # pipe kstep to set_zcentergrid to assure consistent array size\n zcenters = self.__cls.set_zcentergrid(np.ravel(head), kstep=kstep)\n else:\n zcenters = [np.mean(np.array(v).T[1]) for i, v\n in sorted(projpts.items())]\n\n u = np.array([qx[cell] for cell in sorted(projpts)])\n\n if self.direction == \"x\":\n x = np.array([np.mean(np.array(v).T[0]) for i, v\n in sorted(projpts.items())])\n else:\n x = np.array([np.mean(np.array(v).T[1]) for i, v\n in sorted(projpts.items())])\n\n z = np.ravel(zcenters)\n v = np.array([qz[cell] for cell\n in sorted(projpts)])\n ib = np.array([ib[cell] for cell\n in sorted(projpts)])\n\n x = x[::hstep]\n z = z[::hstep]\n u = u[::hstep]\n v = v[::hstep]\n ib = ib[::hstep]\n\n if normalize:\n vmag = np.sqrt(u ** 2. + v ** 2.)\n idx = vmag > 0.\n u[idx] /= vmag[idx]\n v[idx] /= vmag[idx]\n\n # mask with an ibound array\n u[~ib] = np.nan\n v[~ib] = np.nan\n\n quiver = ax.quiver(x, z, u, v, pivot=pivot, **kwargs)\n\n return quiver", "def plot_cf(self, **options):\n n = len(self.hs)\n xs = np.arange(-n//2, n//2)\n hs = np.roll(self.hs, len(self.hs) // 2)\n plt.plot(xs, hs.real, label='real', **options)\n plt.plot(xs, hs.imag, label='imag', **options)\n plt.legend()", "def plot_derivatives_divided(self, show=False):\n\n fig, ax = plt.subplots(3, 2, figsize = (15, 10))\n # plt.subplots_adjust(wspace = 0, hspace = 0.1)\n plt.subplots_adjust(hspace=0.5)\n training_index = np.random.randint(self.n_train * self.n_p)\n \n if self.flatten:\n print ('Plotting derivatives... reshaping the flattened data to %s'%str(input_shape))\n # TODO\n temp = self.data['x_p'][training_index].reshape(len(theta_fid),*input_shape)\n x, y = temp.T[:,0]\n else:\n print ('Plotting derivatives... 
reshaping the flattened data to power spectra')\n temp = self.data['x_p'][training_index].reshape(len(theta_fid),ncombinations,len(ells))\n # temp has shape (num_params, ncombinations, len(ells))\n Cl = temp[:,0,:] # plot the (0,0) autocorrelation bin\n \n # Cl has shape (1,10) since it is the data vector for the \n # upper training image for both params\n labels =[r'$θ_1$ ($\\Omega_M$)']\n\n # we loop over them in this plot to assign labels\n for i in range(Cl.shape[0]):\n if self.rescaled:\n ax[0, 0].plot(ells, Cl[i],label=labels[i])\n else:\n ax[0, 0].loglog(ells, ells*(ells+1)*Cl[i],label=labels[i])\n ax[0, 0].set_title('One upper training example, Cl 0,0')\n ax[0, 0].set_xlabel(r'$\\ell$')\n if self.rescaled:\n ax[0, 0].set_ylabel(r'$C_\\ell$')\n else:\n ax[0, 0].set_ylabel(r'$\\ell(\\ell+1) C_\\ell$')\n\n ax[0, 0].legend(frameon=False)\n\n if self.flatten:\n # TODO\n temp = self.data['x_m'][training_index].reshape(len(theta_fid),*input_shape)\n x, y = temp.T[:,0]\n else:\n temp = self.data['x_m'][training_index].reshape(len(theta_fid),ncombinations,len(ells))\n # temp has shape (num_params, ncombinations, len(ells))\n Cl = temp[:,0,:] # plot the (0,0) autocorrelation bin\n\n for i in range(Cl.shape[0]):\n if self.rescaled:\n ax[1, 0].plot(ells, Cl[i])\n else:\n ax[1, 0].loglog(ells, ells*(ells+1)*Cl[i])\n ax[1, 0].set_title('One lower training example, Cl 0,0')\n ax[1, 0].set_xlabel(r'$\\ell$')\n if self.rescaled:\n ax[1, 0].set_ylabel(r'$C_\\ell$')\n else:\n ax[1, 0].set_ylabel(r'$\\ell(\\ell+1) C_\\ell$')\n\n if self.flatten:\n # TODO\n temp = self.data[\"x_m\"][training_index].reshape(len(theta_fid),*input_shape)\n xm, ym = temp.T[:,0]\n\n temp = self.data[\"x_p\"][training_index].reshape(len(theta_fid),*input_shape)\n xp, yp = temp.T[:,0]\n else:\n temp = self.data['x_m'][training_index].reshape(len(theta_fid),ncombinations,len(ells))\n Cl_lower = temp[:,0,:]\n temp = self.data['x_p'][training_index].reshape(len(theta_fid),ncombinations,len(ells))\n Cl_upper = temp[:,0,:]\n\n for i in range(Cl_lower.shape[0]):\n ax[2, 0].plot(ells, (Cl_upper[i]-Cl_lower[i])/self.Cl_noiseless)\n ax[2, 0].set_title('Difference between upper and lower training examples');\n ax[2, 0].set_xlabel(r'$\\ell$')\n ax[2, 0].set_ylabel(r'$\\Delta C_\\ell$ / $C_{\\ell,thr}$')\n ax[2, 0].axhline(xmin = 0., xmax = 1., y = 0.\n , linestyle = 'dashed', color = 'black')\n ax[2, 0].set_xscale('log')\n\n # also plot sigma_cl / CL\n sigma_cl = np.sqrt(self.covariance)\n ax[2, 0].plot(ells, sigma_cl/self.Cl_noiseless, label=r'$\\sigma_{Cl} / C_{\\ell,thr}$')\n ax[2, 0].legend(frameon=False)\n\n test_index = np.random.randint(self.n_p)\n\n if self.flatten:\n # TODO\n temp = self.data['x_p_test'][test_index].reshape(len(theta_fid),*input_shape)\n x, y = temp.T[:,0]\n else:\n temp = self.data['x_p_test'][test_index].reshape(len(theta_fid),ncombinations,len(ells))\n Cl = temp[:,0,:] # plot the (0,0) autocorrelation bin\n \n for i in range(Cl.shape[0]):\n if self.rescaled:\n ax[0, 1].plot(ells, Cl[i])\n else:\n ax[0, 1].loglog(ells, ells*(ells+1)*Cl[i])\n ax[0, 1].set_title('One upper test example Cl 0,0')\n ax[0, 1].set_xlabel(r'$\\ell$')\n if self.rescaled:\n ax[0, 1].set_ylabel(r'$C_\\ell$')\n else:\n ax[0, 1].set_ylabel(r'$\\ell(\\ell+1) C_\\ell$')\n\n if self.flatten:\n # TODO\n temp = self.data['x_m_test'][test_index].reshape(len(theta_fid),*input_shape)\n x, y = temp.T[:,0]\n else:\n temp = self.data['x_m_test'][test_index].reshape(len(theta_fid),ncombinations,len(ells))\n Cl = temp[:,0,:] # plot the (0,0) 
autocorrelation bin\n\n for i in range(Cl.shape[0]):\n if self.rescaled:\n ax[1, 1].plot(ells, Cl[i])\n else:\n ax[1, 1].loglog(ells, ells*(ells+1)*Cl[i])\n ax[1, 1].set_title('One lower test example Cl 0,0')\n ax[1, 1].set_xlabel(r'$\\ell$')\n if self.rescaled:\n ax[1, 1].set_ylabel(r'$C_\\ell$')\n else:\n ax[1, 1].set_ylabel(r'$\\ell(\\ell+1) C_\\ell$')\n\n if self.flatten:\n # TODO\n temp = self.data[\"x_m_test\"][test_index].reshape(len(theta_fid),*input_shape)\n xm, ym = temp.T[:,0]\n\n temp = self.data[\"x_p_test\"][test_index].reshape(len(theta_fid),*input_shape)\n xp, yp = temp.T[:,0]\n else:\n temp = self.data['x_m_test'][test_index].reshape(len(theta_fid),ncombinations,len(ells))\n Cl_lower = temp[:,0,:]\n temp = self.data['x_p_test'][test_index].reshape(len(theta_fid),ncombinations,len(ells))\n Cl_upper = temp[:,0,:]\n \n for i in range(Cl_lower.shape[0]):\n ax[2, 1].plot(ells, (Cl_upper[i]-Cl_lower[i]) / self.Cl_noiseless)\n ax[2, 1].set_title('Difference between upper and lower test samples');\n ax[2, 1].set_xlabel(r'$\\ell$')\n ax[2, 1].set_ylabel(r'$\\Delta C_\\ell$ / $C_{\\ell,thr}$')\n ax[2, 1].axhline(xmin = 0., xmax = 1., y = 0.\n , linestyle = 'dashed', color = 'black')\n ax[2, 1].set_xscale('log')\n\n # also plot sigma_cl / CL\n sigma_cl = np.sqrt(self.covariance)\n ax[2, 1].plot(ells, sigma_cl/self.Cl_noiseless, label=r'$\\sigma_{Cl} / C_{\\ell,thr}$')\n\n plt.savefig(f'{self.figuredir}derivatives_visualization_divided_{self.modelversion}.png')\n if show: plt.show()\n plt.close()", "def plot_IVS(self, parent_figure=None):\n nivs = len(FD.figure_AllIVs)\n cprint(\"c\", \"plot_IVS.\")\n rows = nivs\n cols = 5\n height = 1.5 * nivs\n width = 8.5\n PD = PData()\n ymin = -125.0\n ymax = 40.0\n calx = 120.0\n\n self.P = PH.regular_grid(\n rows,\n cols,\n order=\"rowsfirst\",\n figsize=(width, height),\n showgrid=False,\n verticalspacing=0.01,\n horizontalspacing=0.05,\n margins={\n \"bottommargin\": 0.1,\n \"leftmargin\": 0.07,\n \"rightmargin\": 0.05,\n \"topmargin\": 0.08,\n },\n labelposition=(-0.05, 1.06),\n parent_figure=parent_figure,\n # panel_labels=['A', 'B', 'C', 'D', 'E', 'F'],\n )\n cellpath = config[\"cellDataDirectory\"]\n png_path = Path(config[\"baseDataDirectory\"], config[\"pngDirectory\"])\n cprint(\"c\", \"prepping fo run\")\n\n for rax, iv in enumerate(FD.figure_AllIVs.keys()):\n cprint(\"r\", f\"Doing Cell VCN_c{iv:02d} -----------------------------------\")\n celln = Path(png_path, f\"VCN_c{iv:02d}.png\")\n if celln.is_file(): # add images from png files\n img = mpimg.imread(str(celln))\n self.P.axarr[rax, 0].imshow(img, aspect=\"equal\")\n ylim = self.P.axarr[rax, 0].get_ylim()\n self.P.axarr[rax, 0].set_xlim(900, 1500)\n PH.noaxes(self.P.axarr[rax, 0])\n # plot 3 dendrite decorations\n for iax, dendmode in enumerate([\"passive\", \"normal\", \"active\"]):\n dendm = self.get_dendmode(dendmode)\n sfi = Path(\n cellpath,\n f\"VCN_c{iv:02d}\",\n \"Simulations\",\n \"IV\",\n FD.figure_AllIVs[iv][dendm],\n )\n if not sfi.is_dir():\n cprint(\"r\", f\"Unable to find dir: {str(sfi):s}\")\n continue\n fn = list(sfi.glob(\"*\"))\n sfi = Path(sfi, fn[0])\n if rax > 0:\n calx = None # only one cal bar on this plot, top row.\n self.parent.PLT.plot_traces(\n self.P.axarr[rax, iax + 1],\n sfi,\n PD,\n protocol=\"IV\",\n ymin=ymin,\n ymax=ymax,\n iax=iax,\n figure=self.P.figure_handle,\n ivaxis=self.P.axarr[rax, 4], # accumulate IV's in right side\n ivcolor=colors[iax],\n iv_spike_color=spike_colors[dendmode],\n spike_marker_size=1.5,\n 
spike_marker_color=spike_colors[dendmode],\n calx=calx,\n caly=-10.0,\n )\n if rax == 0:\n self.P.axarr[rax, iax + 1].set_title(dendmode)\n if iax == 0:\n self.P.axarr[rax, 0].text(-0.1, 0.5, str(iv))\n if parent_figure is None:\n fig = FigInfo()\n fig.P = self.P\n fig.filename = f\"Fig_M1A_Supplemental.pdf\"\n timestamp_str = datetime.datetime.now().strftime(\"%Y-%m-%d-%H:%M\")\n fig.title[\n \"title\"\n ] = f\"SBEM Project Figure 1 Modeling (Supplemental A) ({timestamp_str:s})\"\n return fig\n else:\n return self.P", "def _plot_ecdf(self, numerator_name, denominator_name):\n x = self.ecdf[numerator_name][denominator_name]['x']\n y = self.ecdf[numerator_name][denominator_name]['y']\n\n lower_bound = x[y.index(min(y,\n key=lambda x:\n abs(x-self.confidence_level)))]\n median = x[y.index(min(y, key=lambda x:abs(x-0.5)))]\n upper_bound = x[y.index(min(y,\n key=lambda x:\n abs(x-(1-self.confidence_level))))]\n\n sns.lineplot(x=x, y=y)\n ci = 1 - self.confidence_level\n title = ('Median Lift was {0:.2%}, with a '\n '{1:.0%} CI of [{2:.2%}, {3:.2%}]'.format(median,\n ci,\n lower_bound,\n upper_bound))\n title = self._format_title(title)\n plt.title(title)\n plt.xlabel('Lift')\n plt.ylabel('Cumulative Probability')\n plt.axvline(x=lower_bound, linestyle='dotted', color='black')\n plt.axvline(x=median, linestyle='dotted', color='black')\n plt.axvline(x=upper_bound, linestyle='dotted', color='black')\n sns.despine(left=True)\n locs, labels = plt.xticks()\n labels = self._format_axis_as_percent(locs, labels)\n plt.xticks(locs, labels=labels)", "def plot(\n ecg, \n sample_rate = 500, \n title = 'ECG 12', \n lead_index = lead_index, \n lead_order = None,\n style = None,\n columns = 2,\n row_height = 6,\n show_lead_name = True,\n show_grid = True,\n show_separate_line = True,\n ):\n\n if not lead_order:\n lead_order = list(range(0,len(ecg)))\n secs = len(ecg[0])/sample_rate\n leads = len(lead_order)\n rows = ceil(leads/columns)\n # display_factor = 2.5\n display_factor = 1\n line_width = 0.5\n fig, ax = plt.subplots(figsize=(secs*columns * display_factor, rows * row_height / 5 * display_factor))\n display_factor = display_factor ** 0.5\n fig.subplots_adjust(\n hspace = 0, \n wspace = 0,\n left = 0, # the left side of the subplots of the figure\n right = 1, # the right side of the subplots of the figure\n bottom = 0, # the bottom of the subplots of the figure\n top = 1\n )\n\n fig.suptitle(title)\n\n x_min = 0\n x_max = columns*secs\n y_min = row_height/4 - (rows/2)*row_height\n y_max = row_height/4\n\n if (style == 'bw'):\n color_major = (0.4,0.4,0.4)\n color_minor = (0.75, 0.75, 0.75)\n color_line = (0,0,0)\n else:\n color_major = (1,0,0)\n color_minor = (1, 0.7, 0.7)\n color_line = (0,0,0.7)\n\n if(show_grid):\n ax.set_xticks(np.arange(x_min,x_max,0.2)) \n ax.set_yticks(np.arange(y_min,y_max,0.5))\n\n ax.minorticks_on()\n \n ax.xaxis.set_minor_locator(AutoMinorLocator(5))\n\n ax.grid(which='major', linestyle='-', linewidth=0.5 * display_factor, color=color_major)\n ax.grid(which='minor', linestyle='-', linewidth=0.5 * display_factor, color=color_minor)\n\n ax.set_ylim(y_min,y_max)\n ax.set_xlim(x_min,x_max)\n\n\n for c in range(0, columns):\n for i in range(0, rows):\n if (c * rows + i < leads):\n y_offset = -(row_height/2) * ceil(i%rows)\n # if (y_offset < -5):\n # y_offset = y_offset + 0.25\n\n x_offset = 0\n if(c > 0):\n x_offset = secs * c\n if(show_separate_line):\n ax.plot([x_offset, x_offset], [ecg[t_lead][0] + y_offset - 0.3, ecg[t_lead][0] + y_offset + 0.3], linewidth=line_width * 
display_factor, color=color_line)\n\n \n t_lead = lead_order[c * rows + i]\n \n step = 1.0/sample_rate\n if(show_lead_name):\n ax.text(x_offset + 0.07, y_offset - 0.5, lead_index[t_lead], fontsize=9 * display_factor)\n ax.plot(\n np.arange(0, len(ecg[t_lead])*step, step) + x_offset, \n ecg[t_lead] + y_offset,\n linewidth=line_width * display_factor, \n color=color_line\n )", "def plot_front(quadrant, ring):\n max_ring = 16\n pos = (quadrant-1)*max_ring + ring-1\n ch = f_ch[:, pos]\n E = f_E[:, pos]\n #if pos in front_special_case:\n # ch = f_ch[1:, pos]\n # E = f_E[1:, pos]\n fig = plt.figure()\n plt.plot(ch, E, color='blue', marker=\"x\", linestyle='None')\n plt.plot(ch, f_gain[pos]*ch+f_offset[pos], color='red')\n plt.title(\"Quadrant {}, ring {}\".format(quadrant, ring))\n plt.xlabel(\"Channel\")\n plt.ylabel(\"E (keV)\")\n #plt.legend([\"Ni, Pb, Sm\", \"Lin. fit\"], loc=0)\n plt.legend([\"Pb, Sm\", \"Lin. fit\"], loc=0)\n fig.set_tight_layout(True)\n plt.show()", "def create_ECDF(data_dict,\r\n x_label=\"X\",\r\n y_label=\"Y\",\r\n title=\"ECDF Plot\",\r\n log_x=False,\r\n marks=[]):\r\n\r\n # copy the dict so we don't change it\r\n data = copy.copy(data_dict)\r\n\r\n # take the log of the data if appropriate\r\n if log_x:\r\n for key in data.keys():\r\n if not data[key] == 0:\r\n data[key] = np.log(data[key])\r\n\r\n # create the list of X and Y data to plot\r\n X = sorted(data.values())\r\n Y = np.arange(len(X)) / len(X)\r\n\r\n # plot the ECDF\r\n plt.style.use('Solarize_Light2')\r\n plt.plot(X, Y, '.', markersize=20)\r\n\r\n # plot vertical lines and a legend marking nations of interest\r\n prop_cycle = plt.rcParams['axes.prop_cycle']\r\n colors = prop_cycle.by_key()['color']\r\n if len(marks) > 0:\r\n for color, nation in zip(colors, marks):\r\n plt.axvline(data[nation], color=color, label=nation)\r\n plt.legend()\r\n\r\n # add titles\r\n plt.xlabel(x_label)\r\n plt.ylabel(y_label)\r\n plt.title(title)\r\n\r\n # display the plot\r\n plt.show()", "def plotERP(self, ep):\n import os \n import matplotlib.pyplot as plt\n \n try:\n filename = ep.filename.split('\\\\')[-1].split('.fif')[0]\n filename = 'plotsEEG_'+filename.split('_')[0] \n except Exception as err: \n filename = 'plots_eeg_file' \n print(err) \n finally:\n print('Saving ERP plots at >>>>', os.getcwd())\n \n try:\n os.mkdir(os.path.join(os.getcwd(), filename)) \n os.chdir(os.path.join(os.getcwd(), filename)) \n except Exception as err:\n print(err) \n \n \n ep = ep.interpolate_bads(reset_bads='True', mode = 'accurate')\n ep.info['bads'] = []\n \n ep.plot_psd(area_mode='range',fmin=0, fmax=40, tmax=10.0).savefig(filename + '_psd')\n\n# picks = ['FC2', 'C4', 'Cz', 'C5', 'FC1'] \n \n ep.plot_image(picks = None, cmap='interactive', sigma=1) \n \n plt.savefig(filename + '_image') \n \n bands = [(0, 4, 'Delta'), (4, 8, 'Theta'), (8, 12, 'Alpha'),\n (12, 30, 'Beta'), (30, 45, 'Gamma')] \n \n ep.plot_psd_topomap(bands=bands, vmin=None, vmax=None, \n tmin=0, tmax=0.5).savefig(filename + '_psd_topo')\n \n ep.plot_sensors().savefig(filename + '_sensors_') \n \n ep.plot_topo_image(vmin=-25, vmax=25, title='ERF images', sigma=3.,\n fig_facecolor='w', font_color='k').savefig(filename + '_image_topo') \n \n ep.average().plot().savefig(filename + 'erp_average_')\n ep.average().plot_image().savefig(filename + '_erp_average_image')\n print('Saving ERP plots at >>>>', os.getcwd())", "def plot(self):\n\t\tself.plotOfCos1().plot()", "def fig_craco_fiducial(outfile='fig_craco_fiducial.png',\n zmax=2.5,DMmax=2500,\n show_Macquart=False,\n 
log=True,\n label='$\\\\log_{10} \\; p(DM_{\\\\rm EG},z)$',\n Aconts=[0.01, 0.1, 0.5],\n cmap='jet', show=False, figsize=None,\n vmnx=(None,None),\n grid=None, survey=None):\n # Generate the grid\n if grid is None or survey is None:\n survey, grid = analy_H0_I.craco_mc_survey_grid()\n\n # Unpack\n full_zDMgrid, zvals, dmvals = grid.rates, grid.zvals, grid.dmvals\n FRBZ=survey.frbs['Z']\n FRBDM=survey.DMEGs\n \n ##### imshow of grid #######\n fsize = 14.\n plt.figure(figsize=figsize)\n ax1=plt.axes()\n plt.sca(ax1)\n \n plt.xlabel('z')\n plt.ylabel('${\\\\rm DM}_{\\\\rm EG}$')\n #plt.title(title+str(H0))\n \n # Cut down grid\n zvals, dmvals, zDMgrid = figures.proc_pgrid(\n full_zDMgrid, \n zvals, (0, zmax),\n dmvals, (0, DMmax))\n ddm=dmvals[1]-dmvals[0]\n dz=zvals[1]-zvals[0]\n nz, ndm = zDMgrid.shape\n\n # Contours\n alevels = figures.find_Alevels(full_zDMgrid, Aconts, log=True)\n \n # Ticks\n tvals, ticks = figures.ticks_pgrid(zvals)# , fmt='str4')\n plt.xticks(tvals, ticks)\n tvals, ticks = figures.ticks_pgrid(dmvals, fmt='int')# , fmt='str4')\n plt.yticks(tvals, ticks)\n\n # Image \n im=plt.imshow(zDMgrid.T,cmap=cmap,origin='lower', \n vmin=vmnx[0], vmax=vmnx[1],\n interpolation='None',\n aspect='auto')\n \n styles=['--','-.',':']\n ax=plt.gca()\n cs=ax.contour(zDMgrid.T,levels=alevels,origin='lower',colors=\"white\",linestyles=styles)\n\n ax=plt.gca()\n \n muDMhost=np.log(10**grid.state.host.lmean)\n sigmaDMhost=np.log(10**grid.state.host.lsigma)\n meanHost = np.exp(muDMhost + sigmaDMhost**2/2.)\n medianHost = np.exp(muDMhost) \n print(f\"Host: mean={meanHost}, median={medianHost}\")\n plt.ylim(0,ndm-1)\n plt.xlim(0,nz-1)\n zmax=zvals[-1]\n nz=zvals.size\n #DMbar, zeval = igm.average_DM(zmax, cumul=True, neval=nz+1)\n DM_cosmic = pcosmic.get_mean_DM(zvals, grid.state)\n\n \n #idea is that 1 point is 1, hence...\n zeval = zvals/dz\n DMEG_mean = (DM_cosmic+meanHost)/ddm\n DMEG_median = (DM_cosmic+medianHost)/ddm\n\n # Check median\n f_median = scipy.interpolate.interp1d(\n zvals, DM_cosmic+medianHost, \n fill_value='extrapolate')\n eval_DMEG = f_median(FRBZ)\n above = FRBDM > eval_DMEG\n print(f\"There are {np.sum(above)/len(FRBZ)} above the median\")\n\n if show_Macquart:\n plt.plot(zeval,DMEG_mean,color='gray',linewidth=2,\n label='Macquart relation (mean)')\n plt.plot(zeval,DMEG_median,color='gray',\n linewidth=2, ls='--',\n label='Macquart relation (median)')\n l=plt.legend(loc='lower right',fontsize=12)\n #l=plt.legend(bbox_to_anchor=(0.2, 0.8),fontsize=8)\n #for text in l.get_texts():\n #\ttext.set_color(\"white\")\n \n # limit to a reasonable range if logscale\n if log and vmnx[0] is None:\n themax=zDMgrid.max()\n themin=int(themax-4)\n themax=int(themax)\n plt.clim(themin,themax)\n \n ##### add FRB host galaxies at some DM/redshift #####\n if FRBZ is not None:\n iDMs=FRBDM/ddm\n iZ=FRBZ/dz\n # Restrict to plot range\n gd = (FRBDM < DMmax) & (FRBZ < zmax)\n plt.plot(iZ[gd],iDMs[gd],'ko',linestyle=\"\",markersize=2.)\n\n cbar=plt.colorbar(im,fraction=0.046, shrink=1.2,aspect=15,pad=0.05)\n cbar.set_label(label)\n\n fig_utils.set_fontsize(ax, fsize)\n \n plt.tight_layout()\n \n if show:\n plt.show()\n else:\n plt.savefig(outfile, dpi=300)\n print(f\"Wrote: {outfile}\")\n plt.close()", "def do_data_plots(cat, subdir):\n dla_data.noterdaeme_12_data()\n (l_N, cddf, cddf68, cddf95) = cat.plot_cddf(zmax=5,color=\"blue\")\n np.savetxt(path.join(subdir,\"cddf_all.txt\"), (l_N, cddf, cddf68[:,0], cddf68[:,1], cddf95[:,0],cddf95[:,1]))\n plt.xlim(1e20, 1e23)\n plt.ylim(1e-28, 5e-21)\n 
plt.legend(loc=0)\n save_figure(path.join(subdir, \"cddf_gp\"))\n plt.clf()\n\n (l_N, cddf, cddf68, cddf95) = cat.plot_cddf(zmax=5,color=\"blue\", moment=True)\n plt.xlim(1e20, 1e23)\n plt.legend(loc=0)\n save_figure(path.join(subdir, \"cddf_moment_gp\"))\n plt.clf()\n\n #Evolution with redshift\n (l_N, cddf, cddf68, cddf95) = cat.plot_cddf(4,5, label=\"4-5\", color=\"brown\")\n np.savetxt(path.join(subdir,\"cddf_z45.txt\"), (l_N, cddf, cddf68[:,0], cddf68[:,1], cddf95[:,0],cddf95[:,1]))\n (l_N, cddf, cddf68, cddf95) = cat.plot_cddf(3,4, label=\"3-4\", color=\"black\")\n np.savetxt(path.join(subdir,\"cddf_z34.txt\"), (l_N, cddf, cddf68[:,0], cddf68[:,1], cddf95[:,0],cddf95[:,1]))\n (l_N, cddf, cddf68, cddf95) = cat.plot_cddf(2.5,3, label=\"2.5-3\", color=\"green\")\n np.savetxt(path.join(subdir,\"cddf_z253.txt\"), (l_N, cddf, cddf68[:,0], cddf68[:,1], cddf95[:,0],cddf95[:,1]))\n (l_N, cddf, cddf68, cddf95) = cat.plot_cddf(2,2.5, label=\"2-2.5\", color=\"blue\")\n np.savetxt(path.join(subdir,\"cddf_z225.txt\"), (l_N, cddf, cddf68[:,0], cddf68[:,1], cddf95[:,0],cddf95[:,1]))\n plt.xlim(1e20, 1e23)\n plt.ylim(1e-28, 5e-21)\n plt.legend(loc=0)\n save_figure(path.join(subdir,\"cddf_zz_gp\"))\n plt.clf()\n\n #dNdX\n dla_data.dndx_not()\n dla_data.dndx_pro()\n (z_cent, dNdX, dndx68, dndx95) = cat.plot_line_density(zmax=5)\n np.savetxt(path.join(subdir,\"dndx_all.txt\"), (z_cent, dNdX, dndx68[:,0],dndx68[:,1], dndx95[:,0],dndx95[:,1]) )\n plt.legend(loc=0)\n plt.ylim(0,0.16)\n save_figure(path.join(subdir,\"dndx_gp\"))\n plt.clf()\n\n #Omega_DLA\n dla_data.omegahi_not()\n dla_data.omegahi_pro()\n dla_data.crighton_omega()\n (z_cent, omega_dla, omega_dla_68, omega_dla_95) = cat.plot_omega_dla(zmax=5)\n# cat.tophat_prior = True\n# cat.plot_omega_dla(zmax=5, label=\"Tophat Prior\", twosigma=False)\n# cat.tophat_prior = False\n np.savetxt(path.join(subdir,\"omega_dla_all.txt\"), (z_cent, omega_dla, omega_dla_68[:,0],omega_dla_68[:,1], omega_dla_95[:,0], omega_dla_95[:,1]))\n plt.legend(loc=0)\n plt.xlim(2,5)\n plt.ylim(0,2.5)\n save_figure(path.join(subdir,\"omega_gp\"))\n plt.clf()", "def deredden_cepheids(df_variables):\n extinction_coefficients = {'2365-2764-1': np.array([0.2622, 0.844]), '4109-638-1': np.array([0.0524, 0.1576]),\n '2058-56-1': np.array([0.0751, 0.248]), '3642-2459-1': np.array([0.1907, 0.608]),\n '3999-1391-1': np.array([0.3911, 1.2480]), '2607-1448-1': np.array([0.0430, 0.1310])}\n print \"Dereddening Cepheids:\"\n for tyc in extinction_coefficients.keys():\n print \"%s..\" % tyc\n b_minus_v = df_variables[df_variables.tycho2_id == tyc].B_V\n m_v = df_variables[df_variables.tycho2_id == tyc].M_V\n extinc = extinction_coefficients[tyc]\n df_variables.set_value(df_variables.tycho2_id == tyc, 'B_V', b_minus_v - extinc[0])\n df_variables.set_value(df_variables.tycho2_id == tyc, 'M_V', m_v - extinc[1])\n print \"..Done\\n----------\"\n\n return df_variables", "def make_plot(range_km, unfolded_phidp, refl, phidp, kdp, filename):\n\n from matplotlib import pyplot as plt\n\n fig = plt.figure(figsize=[10, 5])\n ax = fig.add_subplot(111)\n\n # filtered phidp and unfolded phidp\n (p1,) = ax.plot(range_km, phidp[\"data\"][0], \"b-\")\n (p2,) = ax.plot(range_km, unfolded_phidp[\"data\"][0], \"g-\")\n\n # set labels\n ax.set_ylim(0, 250)\n ax.set_ylabel(\"Differential phase shift (degrees)\")\n ax.set_xlabel(\"Range (km)\")\n\n # plot KDP and reflectivity on second axis\n ax2 = ax.twinx()\n (p3,) = ax2.plot(range_km, kdp[\"data\"][0], \"r-\")\n (p4,) = ax2.plot(range_km, refl[\"data\"][0] / 
10.0)\n\n # decorate and save\n ax2.yaxis.grid(color=\"gray\", linestyle=\"dashed\")\n ax.legend(\n [p1, p2, p3, p4],\n [\"Filtered phiDP\", \"Unfolded phiDP\", \"KDP\", \"Z/10.0\"],\n loc=\"upper left\",\n )\n fig.savefig(filename)", "def plot(sigma, strikes, dips):\n values, vectors = principal(sigma)\n sigma1, sigma2, sigma3 = vectors\n\n fig = plt.figure()\n ax = fig.add_subplot(111, projection='stereonet')\n plt.hold(True)\n ax.density_contourf(strikes, dips)\n #ax.pole(strikes, dips, 'b.')\n ax.line(sigma1[0],sigma1[1], 'r^', label='sigma1', markersize=18)\n ax.line(sigma2[0],sigma2[1], 'g^', label='sigma2', markersize=18)\n ax.line(sigma3[0],sigma3[1], 'b^', label='sigma3', markersize=18)", "def cplot(self, figure, i, n):\n xx, yy = np.meshgrid(range(self.L), range(self.L))\n ax = figure.add_subplot(2,2,n)\n plt.setp(ax.get_yticklabels(), visible=False)\n plt.setp(ax.get_xticklabels(), visible=False) \n plt.pcolormesh(xx, yy, self.config, cmap=plt.cm.RdBu);\n plt.title('Time=%d'%i, fontsize=20)\n plt.xlabel('X', fontsize=12)\n plt.ylabel('Y',fontsize=12) \n plt.axis('tight') \n self.ax = ax", "def plot_XDR_PDR_XCLASS():\n\n fig,axes = plt.subplots(nrows=2, ncols=2, squeeze=True, sharex='col', sharey='row', figsize=(6,6))\n fig.subplots_adjust(hspace=0, wspace=0) #, top=0.80, bottom=0.04, left=0.04, right=0.93)\n\n # get data\n sscs = [SSC['no'] for SSC in SSCs]\n colors = [plt.cm.inferno(i/(len(SSCs)+1)) for i in SSCs['no']]\n HCO_HCN, HNC_HCN, HNC_HCO = [],[],[]\n HCO_HCN_err, HNC_HCN_err, HNC_HCO_err = [],[],[]\n for SSC in SSCs:\n try:\n hco_hcn_med = ratios_XCLASS['HCO+/HCN'][SSC['num']]['median']\n hco_hcn_p16 = ratios_XCLASS['HCO+/HCN'][SSC['num']]['16th']\n hco_hcn_p84 = ratios_XCLASS['HCO+/HCN'][SSC['num']]['84th']\n hco_hcn_low = hco_hcn_med-hco_hcn_p16\n hco_hcn_hig = hco_hcn_p84-hco_hcn_med\n HCO_HCN.append( np.log10(hco_hcn_med) )\n HCO_HCN_err.append( [0.434*hco_hcn_low/hco_hcn_med,0.434*hco_hcn_hig/hco_hcn_med] )\n except:\n HCO_HCN.append( np.nan )\n HCO_HCN_err.append( [np.nan,np.nan] )\n try:\n hnc_hcn_med = ratios_XCLASS['HNC/HCN'][SSC['num']]['median']\n hnc_hcn_p16 = ratios_XCLASS['HNC/HCN'][SSC['num']]['16th']\n hnc_hcn_p84 = ratios_XCLASS['HNC/HCN'][SSC['num']]['84th']\n hnc_hcn_low = hnc_hcn_med-hnc_hcn_p16\n hnc_hcn_hig = hnc_hcn_p84-hnc_hcn_med\n HNC_HCN.append( np.log10(hnc_hcn_med) )\n HNC_HCN_err.append( [0.434*hnc_hcn_low/hco_hcn_med,0.434*hnc_hcn_hig/hco_hcn_med] )\n except:\n HCO_HCN.append( np.nan )\n HCO_HCN_err.append( [np.nan,np.nan] )\n try:\n hnc_hco_med = ratios_XCLASS['H15NC/HCO+'][SSC['num']]['median']*ratios_XCLASS['14N/15N'][SSC['num']]['median']\n hnc_hco_p16 = ratios_XCLASS['H15NC/HCO+'][SSC['num']]['16th']*ratios_XCLASS['14N/15N'][SSC['num']]['median']\n hnc_hco_p84 = ratios_XCLASS['H15NC/HCO+'][SSC['num']]['84th']*ratios_XCLASS['14N/15N'][SSC['num']]['median']\n hnc_hco_low = hnc_hco_med-hnc_hco_p16\n hnc_hco_hig = hnc_hco_p84=hnc_hco_med\n HNC_HCO.append( np.log10(hnc_hco_med) )\n HNC_HCO_err.append( [0.434*hnc_hco_low/hnc_hco_med,0.434*hnc_hco_hig/hnc_hco_med] )\n except:\n HCO_HCN.append( np.nan )\n HCO_HCN_err.append( [np.nan,np.nan] )\n\n # comparison from Baan+08\n B_hcn = [318.2, 14]\n B_hnc = [234.0, 7]\n B_hco = [276.1, 14]\n B_hco_hcn = [B_hco[0]/B_hcn[0], B_hco[0]/B_hcn[0]*np.sqrt((B_hco[1]/B_hco[0])**2+(B_hcn[1]/B_hcn[0])**2)]\n B_hnc_hcn = [B_hnc[0]/B_hcn[0], B_hnc[0]/B_hcn[0]*np.sqrt((B_hnc[1]/B_hnc[0])**2+(B_hcn[1]/B_hcn[0])**2)]\n B_hnc_hco = [B_hnc[0]/B_hco[0], 
B_hnc[0]/B_hco[0]*np.sqrt((B_hnc[1]/B_hnc[0])**2+(B_hco[1]/B_hco[0])**2)]\n B_HCO_HCN = [np.log10(B_hco_hcn[0]), 0.434*B_hco_hcn[1]/B_hco_hcn[0]]\n B_HNC_HCN = [np.log10(B_hnc_hcn[0]), 0.434*B_hnc_hcn[1]/B_hnc_hcn[0]]\n B_HNC_HCO = [np.log10(B_hnc_hco[0]), 0.434*B_hnc_hco[1]/B_hnc_hco[0]]\n\n def format_panel(ax):\n ax.xaxis.set_major_locator(MultipleLocator(0.5))\n ax.xaxis.set_minor_locator(MultipleLocator(0.25))\n ax.yaxis.set_major_locator(MultipleLocator(0.5))\n ax.yaxis.set_minor_locator(MultipleLocator(0.25))\n ax.set_axisbelow(True)\n ax.grid(axis='both', which='both')\n\n def label_regions(ax):\n ax.text(0.95, 0.9, 'XDR', color='k', transform=ax.transAxes, ha='right', va='top', weight='bold', fontsize=16)\n ax.text(0.05, 0.1, 'PDR', color='k', transform=ax.transAxes, ha='left', va='bottom', weight='bold', fontsize=16)\n\n # panel 1: HCO+/HCN over HNC/HCO+\n ax = axes[0][0]\n ax.plot([-10,10],[10,-10], ls='-', lw=1, c='grey', zorder=2)\n ax.fill_between([-10,10],[10,-10],[10,10], color='lightgrey', alpha=0.5, zorder=1)\n label_regions(ax)\n for a,b,a_err,b_err,c,s in zip(HNC_HCO, HCO_HCN, HNC_HCO_err, HCO_HCN_err, colors, SSCs):\n if np.isfinite(a) and np.isfinite(b):\n ax.errorbar(a,b, xerr=[[a_err[0]],[a_err[1]]], yerr=[[b_err[0]],[b_err[1]]], marker='o', ms=5, lw=0, color=c, elinewidth=1, ecolor=c, label='SSC '+str(s['no']), zorder=3)\n ax.errorbar(B_HCO_HCN[0],B_HNC_HCO[0], xerr=B_HCO_HCN[1], yerr=B_HNC_HCO[1], marker='o', ms=5, lw=0, color='lime', elinewidth=1, ecolor='lime', label=r'NGC 253 (Baan +08)', zorder=4)\n ax.set_xlim(-0.75,0.75)\n ax.set_ylim(-0.85,0.65)\n format_panel(ax)\n ax.set_ylabel(r'log N(HCO$^+$) / N(HCN)', fontsize=12)\n\n # panel 2: HNC/HCN over HCO/HCN\n ax = axes[0][1]\n ax.plot([0,0],[-10,10], ls='-', lw=1, c='grey', zorder=2)\n ax.fill_between([0,10],[-10,-10],[10,10], color='lightgrey', alpha=0.5, zorder=1)\n label_regions(ax)\n for a,b,a_err,b_err,c in zip(HNC_HCN, HCO_HCN, HNC_HCN_err, HCO_HCN_err, colors):\n if np.isfinite(a) and np.isfinite(b):\n ax.errorbar(a,b, xerr=[[a_err[0]],[a_err[1]]], yerr=[[b_err[0]],[b_err[1]]], marker='o', ms=5, lw=0, color=c, elinewidth=1, ecolor=c, zorder=3)\n ax.errorbar(B_HNC_HCN[0],B_HCO_HCN[0], xerr=B_HCO_HCN[1], yerr=B_HNC_HCO[1], marker='o', ms=5, lw=0, color='lime', elinewidth=1, ecolor='lime', zorder=4)\n ax.set_xlim(-0.95,0.55)\n ax.set_ylim(-0.85,0.65)\n format_panel(ax)\n ax.tick_params(labelbottom=True)\n ax.set_xlabel(r'log N(HNC) / N(HCN)', fontsize=12)\n\n # panel 3: HNC/HCO over HNC/HCN\n ax = axes[1][0]\n ax.plot([-10,10],[0,0], ls='-', lw=1, c='grey', zorder=2)\n ax.fill_between([-10,10],[0,0],[10,10], color='lightgrey', alpha=0.5, zorder=1)\n label_regions(ax)\n for a,b,a_err,b_err,c in zip(HNC_HCO, HNC_HCN, HNC_HCO_err, HNC_HCN_err, colors):\n if np.isfinite(a) and np.isfinite(b):\n ax.errorbar(a,b, xerr=[[a_err[0]],[a_err[1]]], yerr=[[b_err[0]],[b_err[1]]], marker='o', ms=5, lw=0, color=c, elinewidth=1, ecolor=c, zorder=3)\n ax.errorbar(B_HNC_HCO[0],B_HNC_HCN[0], xerr=B_HCO_HCN[1], yerr=B_HNC_HCO[1], marker='o', ms=5, lw=0, color='lime', elinewidth=1, ecolor='lime', zorder=4)\n ax.set_xlim(-0.75,0.75)\n ax.set_ylim(-1.05,0.45)\n format_panel(ax)\n ax.set_xlabel(r'log N(HNC$^{**}$) / N(HCO$^+$)', fontsize=12)\n ax.set_ylabel(r'log N(HNC) / N(HCN)', fontsize=12)\n\n # panel 4: legend\n ax = axes[1][1]\n ax.set_axis_off()\n fig.legend(loc=3, bbox_to_anchor=(0.55,0.05,0.14,0.3), ncol=1, mode=\"expand\", borderaxespad=0., fontsize=12, frameon=False)\n\n savepath = escape_fname(os.path.join(plotdir, 
'10.results', 'XDR-PDR_column_density.pdf'))\n os.system('mkdir -p '+os.path.dirname(savepath))\n fig.savefig(savepath, dpi=300, bbox_inches='tight')", "def draw_cd_diagram(df_perf=None, alpha=0.05, title=None, width=10, labels=False, path=None, highlight=None):\n p_values, average_ranks, _ = _wilcoxon_holm(df_perf=df_perf, alpha=alpha)\n if p_values is not None:\n _graph_ranks(average_ranks.values, average_ranks.keys(), p_values, \n cd=None, reverse=True, width=width, textspace=1.25, labels=labels,\n highlight=highlight)\n\n font = {'family': 'sans-serif',\n 'color': 'black',\n 'weight': 'normal',\n 'size': 18,\n }\n if title:\n plt.title(title, fontdict=font, y=0.9, x=0.5)\n if path is None:\n plt.savefig('cd-diagram.png', bbox_inches='tight')", "def matplot_eccentricity(self, z=0, ax=None):\n if ax is None:\n ax = plt.gca()\n for j in range(0, self.ntheta):\n ax.plot(self.xre[z][j], self.yre[z][j], \"r.\")\n ax.plot(self.xri[z][j], self.yri[z][j], \"b.\")\n ax.plot(0, 0, \"r*\")\n ax.plot(self.xi, self.yi, \"b*\")\n ax.set_title(\"Cut in plane Z=\" + str(z))\n ax.set_xlabel(\"X axis\")\n ax.set_ylabel(\"Y axis\")\n plt.axis(\"equal\")\n return ax", "def add_degree_days(self, col='OAT', hdh_cpoint=65, cdh_cpoint=65):\n\n if self.preprocessed_data.empty:\n data = self.original_data\n else:\n data = self.preprocessed_data\n\n # Calculate hdh\n data['hdh'] = data[col]\n over_hdh = data.loc[:, col] > hdh_cpoint\n data.loc[over_hdh, 'hdh'] = 0\n data.loc[~over_hdh, 'hdh'] = hdh_cpoint - data.loc[~over_hdh, col]\n\n # Calculate cdh\n data['cdh'] = data[col]\n under_cdh = data.loc[:, col] < cdh_cpoint\n data.loc[under_cdh, 'cdh'] = 0\n data.loc[~under_cdh, 'cdh'] = data.loc[~under_cdh, col] - cdh_cpoint\n\n self.preprocessed_data = data", "def plotLine(self):\n minc = 0\n maxc = 500\n num = 500\n levels = np.linspace(minc,maxc,num+1)\n title = textwrap.dedent(\"\"\"\\\n Orography difference between LGM and Modern ICE-5G data\n using {0} meter contour interval\"\"\").format((maxc-minc)/num)\n plt.figure()\n plt.contour(self.difference_in_ice_5g_orography,levels=levels)\n plt.title(title)\n pts.set_ticks_to_zero()\n #if self.save:\n #plt.savefig('something')\n print(\"Line contour plot created\")", "def plot4(self, plog=False):\n\n probs = pd.read_csv(self.probfile)\n\n plt.rc('font', size=14)\n fig, ax = plt.subplots()\n plt.plot(self.ds.freq, self.snr, 'k-', alpha=0.5, zorder=1)\n\n # plot the SNR range to search across when finding snr_modes\n for idx, line in enumerate(self.ds.mode_id['f0']):\n w = np.exp(self.ds.mode_id['w0'][idx])\n plt.axvline(x=line-w, color='b', linestyle='-', alpha=0.4)\n plt.axvline(x=line+w, color='b', linestyle='-', alpha=0.4)\n\n # overplot the predicted SNR values at the modes\n plt.scatter(probs['f0'], probs['SNR_Kepler'], label='Kepler - 4yrs', alpha=1, zorder=2)\n plt.scatter(probs['f0'], probs['SNR_TESS365'], label='TESS - 1 yr', alpha=1, zorder=3)\n plt.scatter(probs['f0'], probs['SNR_TESS27'], label='TESS - 27 days', alpha=1, zorder=4)\n\n if plog:\n plt.xscale('log')\n plt.yscale('log')\n plt.xlabel(r'$\\nu$ / $\\rm \\mu Hz$')\n plt.ylabel(r'SNR')\n\n mn = min(self.ds.mode_id['f0']) -\\\n (max(self.ds.mode_id['f0'])-min(self.ds.mode_id['f0']))/7.\n mx = max(self.ds.mode_id['f0']) +\\\n (max(self.ds.mode_id['f0'])-min(self.ds.mode_id['f0']))/7.\n plt.xlim([mn,mx])\n\n plt.legend()\n plt.title('KIC ' + str(self.ds.epic))\n plt.show()\n fig.savefig(os.getcwd() + os.sep + 'DetTest1_plots' + os.sep +\\\n 'plot4_SNR' + self.ds.epic + '.pdf')", "def 
__plot_convex_hull(self, ax=None) -> None:\n ax.plot(self.points[:, 0], self.points[:, 1], \"o\")\n for simplex in self.hull.simplices:\n ax.plot(self.points[simplex, 0], self.points[simplex, 1], \"k-\")", "def plot_2D_edp(self, xmin=-100, xmax=100, zmin=-100, zmax=100, N=201):\n rho_xz = []\n xgrid = np.linspace(xmin, xmax, num=N)\n zgrid = np.linspace(zmin, zmax, num=N)\n for x in xgrid:\n for z in zgrid:\n tmp = self.phase * self.F * np.cos(self.qx*x+self.qz*z)\n rho_xz.append([x, z, tmp.sum(axis=0)])\n rho_xz = np.array(rho_xz, float) \n X, Y, Z= rho_xz[:,0], rho_xz[:,1], rho_xz[:,2]\n #Y = rho_xz[:,1]\n #Z = rho_xz[:,2]\n X.shape = (N, N)\n Y.shape = (N, N)\n Z.shape = (N, N)\n plt.figure()\n plt.contourf(X, Y, Z)", "def plotErr(self):\n if self.xp and self.wp:\n # plot the spectra\n w=self.ws.value(np.array(self.xp))\n self.errcurve,=self.erraxes.plot(self.xp,self.wp-w,linewidth=0.5,linestyle='',marker='o',color='b')\n if self.dxp and self.dwp:\n # plot the spectra\n dw=self.ws.value(np.array(self.dxp))\n self.delerrcurve,=self.erraxes.plot(self.dxp,self.dwp-dw,linewidth=0.5,linestyle='',marker='x',color='b')", "def plot(self):\n\t\tself.plotOfHeatingCurrent().plot()", "def plotMultiClumps(discs,IDs):\n\n\t\t#Initially plot polytropic indices n=1.5 for all clumps\n\t\tpolyx, polytheta = Analyser.polytropicProfile(n=np.ones(len(IDs))*1.5)\n\n\t\t#plot colors\n\t\tcolors = plt.rcParams['axes.prop_cycle'].by_key()['color']\n\t\tdfrac_cbar = None\n\n\t\tkeepplotting = True\n\t\twhile keepplotting:\n\t\t\ttimelegend = []\n\t\t\tfor i, [d, ID] in enumerate(zip(discs,IDs)):\n\t\t\t\tclumpxyz = d.clumpcat[ID][:3]\n\t\t\t\tr2 = (d.disc.xyzh[0]-clumpxyz[0])**2 + (d.disc.xyzh[1]-clumpxyz[1])**2\n\t\t\t\tmembers = np.sqrt(r2) < d.annulus \t\t\t#members are all particles within radial annulus\n\t\t\t\tinner = np.sqrt(r2) < 0.25\n\n\t\t\t\tgas = d.disc.itype == 1\n\t\t\t\tdust = d.disc.itype == 2\n\t\t\t\tdustfrac = d.disc.dustfrac*1e8 #as I originally set dust-to-gas=1e-10\n\n\t\t\t\t#Calculate temperatures from thermal energies\n\t\t\t\tk = 1.38064852e-16 #ergs\n\t\t\t\tmH = 1.6735575e-24 #grams\n\t\t\t\tgmw = 2.381 #mean mass taken from Phantom\n\t\t\t\tN = sum(gas*d.disc.massofgas)*d.umass/mH/gmw #number of atoms\n\t\t\t\ttemp = 2.*d.disc.utherm*d.uenerg/3./k/N\n\n\t\t\t\tradplot, densav = d.azAverage(np.sqrt(r2[members&gas]), d.disc.density[members&gas]*d.udens)\n\t\t\t\t_, tempav = d.azAverage(np.sqrt(r2[members&gas]), temp[members&gas])\n\t\t\t\traddust, fdustav = d.azAverage(np.sqrt(r2[members&gas]), dustfrac[members&gas],nbins=20)\n\n\t\t\t\ttimelegend.append('%.2f yrs' %d.disc.time)\n\n\t\t\t\t#Plot density vs. radius\n\t\t\t\tplt.figure(1)\n\t\t\t\t#plt.scatter(np.sqrt(r2[members & gas]), d.disc.density[members & gas]*d.udens, s=0.1)\n\t\t\t\tplt.plot(radplot, densav,'-',c=colors[i], label=timelegend[i])\n\t\t\t\tplt.plot(polyx[i], max(d.disc.density[members&gas&inner]*d.udens)*polytheta[i]**1.5,'--',c=colors[i]) #polytropic profile\n\t\t\t\tplt.ylim([1e-13, max(d.disc.density[members&gas]*d.udens*2)])\n\t\t\t\tplt.xlim([0,3.])\n\t\t\t\tplt.ylabel(r'$\\rho$ (g cm$^{-3}$)')\n\t\t\t\tplt.xlabel('Dist from clump centre (au)')\n\t\t\t\tplt.title('Gas density vs. radius')\n\t\t\t\tplt.yscale('log')\n\t\t\t\tplt.legend()\n\n\t\t\t\t#Plot temperature vs. 
radius\n\t\t\t\tplt.figure(2)\n\t\t\t\t#plt.scatter(np.sqrt(r2[members & gas]), temp[members & gas], s=0.1)\n\t\t\t\tplt.plot(radplot, tempav, '-',c=colors[i], label=timelegend[i])\n\t\t\t\tplt.plot(polyx[i], max(temp[members&gas&inner])*polytheta[i], '--', c=colors[i]) #polytropic profile\n\t\t\t\tplt.ylim([10, max(temp[members&gas]*1.1)])\n\t\t\t\tplt.xlim([0,3.])\n\t\t\t\tplt.ylabel('Temp (K)')\n\t\t\t\tplt.xlabel('Dist from clump centre (au)')\n\t\t\t\tplt.yscale('log')\n\t\t\t\tplt.title('Gas temp vs. radius')\n\t\t\t\tplt.legend()\n\t\t\t\t\n\t\t\t\t#Plot dust fraction vs. radius\n\t\t\t\tplt.figure(3)\n\t\t\t\t#plt.scatter(np.sqrt(r2[members & gas]), dustfrac[members & gas], s=1.0, \n\t\t\t\t#\t c=temp[members & gas], cmap=cm.magma)\n\t\t\t\tplt.plot(raddust, fdustav, '-', c=colors[i], label=timelegend[i])\n\t\t\t\tplt.ylim([0, max(dustfrac[members&gas]*1.1)])\n\t\t\t\tplt.xlim([0,3.])\n\t\t\t\tplt.ylabel('Dust-to-gas ratio')\n\t\t\t\tplt.xlabel('Dist from clump centre (au)')\n\t\t\t\t#plt.yscale('log')\n\t\t\t\tplt.title('Dust fraction vs. radius')\n\t\t\t\t#if dfrac_cbar is None:\n\t\t\t\t#\tdfrac_cbar = plt.colorbar()\n\t\t\t\t#\tdfrac_cbar.set_label('Temp (K)')\n\t\t\t\t#\tplt.clim([0,600])\n\t\t\t\tplt.legend()\n\t\t\t\t\n\t\t\tplt.show()\n\t\t\tplt.clf()\n\n\t\t\tkeepplotting = bool(input('Change polytropic indices and replot? (y=1,n=0)?'))\n\t\t\tif keepplotting:\n\t\t\t\tpolyi = input('Select polytropic indices for dumps (1 - %i): ' %len(IDs))\n\t\t\t\ttry:\n\t\t\t\t\tpolyi = [float(polyi)]\n\t\t\t\texcept:\n\t\t\t\t\tpolyi = map(float, polyi)\n\t\t\t\tpolyx, polytheta = Analyser.polytropicProfile(n=polyi)", "def diagonals_in_hd():\n number_of_pairs = 100000\n angles_for_d = {}\n for d in (10, 100, 1000):\n number_of_corners = 2 ** d - 1\n first_corner = [random.randint(0, number_of_corners) for _ in range(0, number_of_pairs)]\n second_corner = [random.randint(0, number_of_corners) for _ in range(0, number_of_pairs)]\n\n dummy_d = [d for _ in range(0, number_of_pairs)]\n angles = []\n with cf.ProcessPoolExecutor() as executor:\n results = executor.map(find_angle, first_corner, second_corner, dummy_d)\n for result in results:\n angles.append(result)\n ser = pd.Series(angles)\n print(f\"Angles between diagonals for {d} dimensions\")\n print(ser.describe())\n angles_for_d[d] = ser\n\n plot_pmfs_for_ds(angles_for_d)", "def plot_disks(self, ax=None, color=None, alpha=0.4, reshape=True):\n import matplotlib as mpl\n from itertools import cycle\n import numpy as np\n\n if color == 'diameter':\n dset = sorted(set(self.diameters))\n cold = dict(zip(dset, cycle([d['color'] for d in mpl.rcParams['axes.prop_cycle']])))\n color = [cold[d] for d in self.diameters]\n if not np.iterable(color):\n color = cycle((color,))\n\n if ax is None:\n ax = mpl.pyplot.gca()\n\n # rs = np.remainder(self.rs+.5, 1)-.5\n L = self.L\n dloc = (0, 1)\n circs = []\n for (x0, y0), d, c in zip(self.rs, self.diameters, color):\n for x, y in [np.array((x0 + dx, y0 + dy)) for dx in dloc for dy in dloc]:\n if (x + d > 0 and x - d < 1 and y + d > 0 and y - d < 1):\n circ = mpl.patches.Circle((x * L, y * L), d * L / 2,\n axes=ax, ec='none', fc=c, alpha=alpha)\n ax.add_patch(circ)\n circs.append(circ)\n\n if reshape:\n ax.axis([0, L, 0, L])\n ax.set_aspect('equal')\n ax.set_xticks([], [])\n ax.set_yticks([], [])\n return circs", "def plot(self, **kwds):\n c0 = 'blue' # self.latex_options()[\"color_increasing\"]\n c1 = 'red' # self.latex_options()[\"color_decreasing\"]\n G = self.poset().hasse_diagram()\n 
G.set_pos(self._find_node_positions())\n for a, b, c in G.edges():\n if a < b:\n G.set_edge_label(a, b, 0)\n else:\n G.set_edge_label(a, b, 1)\n return G.plot(color_by_label={0: c0, 1: c1}, **kwds)", "def plot_spectrumxichange(self):\n countgood = 0 ; countbad = 0\n for idata in self.datarg:\n if idata[-1, 0] == 1.: \n self.fig.axes[0].plot(idata[0:,0], idata[0: ,1] ,'b') \n countgood += 1\n print countgood , 'good solution'\n else: \n self.fig.axes[0].plot(idata[0:,0], idata[0: ,1] ,'r') \n countbad += 1\n print countbad, 'bad solution'\n print 'We found %g good solutions and %g tda startdistributions that broke down before xi = 1, we hope that\\'s what you expected' %(countgood,countbad)\n #Create custom artists for the legend: [goodline,badline], ['solution','breakdown']\n goodline = pl.Line2D((0,1),(0,0), color='b') \n badline = pl.Line2D((0,1),(0,0), color='r')\n self.layout(self.reader.depvar['depvar'] , r'energy spectrum (a.u.)' , tit = r'All tda start distributions $\\xi$' , legendhand = [goodline , badline] , legendlab = ['solution', 'breakdown'] )\n self.savefig('xispec')", "def cell_data_diag(xval,yval,**kwargs):\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n gal_ob = gal.galaxy(p.gal_index)\n cell_data = gal_ob.cell_data.get_dataframe()\n cell_data['alpha_CO'] = cell_data.m.values / aux.Lsun_to_K_km_s_pc2(cell_data['L_CO(1-0)'].values,'CO(1-0)') \n cell_data['m_H2'] = cell_data.m.values * cell_data.mf_H2_grid.values\n if (xval == 'L_CO(1-0)') | (yval == 'L_CO(1-0)'): cell_data['L_CO(1-0)'] = aux.Lsun_to_K_km_s_pc2(cell_data['L_CO(1-0)'].values,'CO(1-0)')\n if p.xlim: cell_data = cell_data[(cell_data[xval] > p.xlim[0]) & (cell_data[xval] < p.xlim[1])]\n if p.ylim: cell_data = cell_data[(cell_data[yval] > p.ylim[0]) & (cell_data[yval] < p.ylim[1])]\n print(xval,cell_data[xval].values.min(),cell_data[xval].values.max())\n print(yval,cell_data[yval].values.min(),cell_data[yval].values.max())\n x = cell_data[xval].values\n y = cell_data[yval].values\n x[x == 0] = np.min(x[x > 0])\n y[y == 0] = np.min(y[y > 0])\n fig,ax = plt.subplots(figsize=(16,7),facecolor='w')\n if p.hexbin:\n hx = ax.hexbin(cell_data[xval].values,cell_data[yval].values,bins='log',cmap='inferno',mincnt=1,gridsize=100)\n else:\n if p.color != 'k':\n sc = ax.scatter(cell_data[xval].values,cell_data[yval].values,c=np.log10(cell_data[p.color]),s=5,alpha=0.6,vmax=p.vmax)\n plt.colorbar(sc,label=p.color)\n else:\n ax.scatter(cell_data[xval].values,cell_data[yval].values,s=5,alpha=0.6)\n ax.set_xlabel(xval)\n ax.set_ylabel(yval)\n xlim = np.array(ax.get_xlim())\n ax.plot(xlim,xlim,'--g',lw=2,label='1-to-1 relation')\n #ax.plot(xlim,xlim*4,'--g',lw=2,label='alpha_C0 = 4')\n #ax.plot(xlim,xlim*0.18,'--r',lw=2,label='alpha_C0 = 0.18')\n ax.legend()\n if p.log:\n ax.set_xscale('log')\n ax.set_yscale('log')\n if p.xlim:\n ax.set_xlim(p.xlim)\n if p.ylim:\n ax.set_ylim(p.ylim)\n\n if p.savefig:\n if not os.path.isdir(p.d_plot + 'cell_data/'): os.mkdir(p.d_plot + 'cell_data/')\n if not p.color: p.color = '' \n plt.savefig(p.d_plot + 'cell_data/G%i_%s_%s_%s' % (p.gal_index,xval,yval,p.color),dpi=250, facecolor='w')", "def plotPersistenceDiagrams(dgm, **args):\n plot_diagrams(dgm, **args)\n jtplot.style(ticks=True, grid=True, gridlines='--') # Ugh", "def draw_glycoprotein(self, chid):\n fig = mpl.figure.Figure(figsize=(300/self.dpi, 1000/self.dpi), dpi = 100) \n ax = fig.add_axes([0, 0, 1, 1])\n\n if type(chid) is not str:\n chid = chid.get()\n\n l = len(self.myGlycosylator.sequences[chid])\n \n trees = 
self.original_glycans.copy()\n trees.update(self.linked_glycans)\n \n sequons = [k for k in self.myGlycosylator.sequons.keys() if chid in k[:len(chid)]]\n self.myDrawer.draw_glycoprotein(l, self.myGlycosylator.get_start_resnum(chid), sequons, ax = ax, axis = 1,\n trees = trees, names = self.names, sequon_color = self.sequon_colors)\n ax.axis('equal')\n ax.axis('off')\n figure_canvas_agg = FigureCanvasAgg(fig)\n figure_canvas_agg.draw()\n figure_x, figure_y, figure_w, figure_h = fig.bbox.bounds\n figure_w, figure_h = int(figure_w), int(figure_h)\n # attaching figure to canvas\n self.glycoprotein_image = tk.PhotoImage(master=self.glycan_2D, width=figure_w, height=figure_h)\n self.glycoprotein_2D.create_image(figure_w/2, figure_h/2, image=self.glycoprotein_image)\n tkagg.blit(self.glycoprotein_image, figure_canvas_agg.get_renderer()._renderer, colormode=2)", "def draw(p, layout=\"rd\"):\n import matplotlib.pyplot as plt\n from matplotlib.patches import Wedge\n from matplotlib.font_manager import FontManager\n\n if not isinstance(p, Pharmacophore):\n raise TypeError(\"Expected Pharmacophore, got %s instead\" %\n type(p).__name__)\n\n if not isinstance(layout, str):\n raise TypeError(\"Invalid layout! Expected str, got %s instead.\" %\n type(layout).__name__)\n\n if p.numnodes == 0:\n raise ValueError(\"Pharmacophore is empty!\")\n\n if layout == \"rd\":\n try:\n from decaf.toolkits.rd import layout\n pos = layout(p)\n except Exception as e:\n raise ImportError(\"Cannot use 'rd' layout! Use 'ob' or 'spring'\"\n \"instead\", e)\n\n elif layout == \"ob\":\n try:\n from decaf.toolkits.ob import layout\n pos = layout(p)\n except Exception as e:\n raise ImportError(\"Cannot use 'ob' layout! Use 'rd' or 'spring'\"\n \"instead\", e)\n\n elif layout == \"spring\":\n try:\n pos = spring_layout(p)\n except Exception as e:\n raise ImportError(\"Cannot use spring layout!\", e)\n else:\n raise ValueError(\"Wrong layout specified! Use 'rd', 'ob' or 'spring'\"\n \"instead.\")\n\n ax_coeff = 1.\n\n def fontsize(idx, default=FontManager.get_default_size()):\n coeff = p.nodes[idx][\"freq\"] / p.molecules\n size = default * coeff * ax_coeff\n return size\n\n fig, ax = plt.subplots()\n plt.axis(\"equal\")\n plt.axis(\"off\")\n\n axis = (np.min(pos[:, 0])-1,\n np.max(pos[:, 0])+1,\n np.min(pos[:, 1])-1,\n np.max(pos[:, 1])+1)\n plt.axis(axis)\n\n # calculate scaling ratio for font\n ax_coeff = 12. 
/ max((axis[1]-axis[0]), (axis[3]-axis[2]))\n\n for i in range(p.numnodes):\n for j in range(i):\n if p.edges[i, j] > 0:\n tmp = np.array([pos[i], pos[j]])\n ax.plot(tmp[:, 0], tmp[:, 1], color=\"#000000\", zorder=1)\n\n r = p.nodes[i][\"freq\"] / p.molecules * 0.3\n fsize = fontsize(i)\n nfreq = sum(p.nodes[i][\"type\"].values())\n theta1 = 0.0\n for t in p.nodes[i][\"type\"]:\n delta = 360 * p.nodes[i][\"type\"][t] / nfreq\n theta2 = theta1+delta\n w = Wedge(pos[i], r, theta1, theta2, ec=\"none\", fc=COLORS[t])\n ax.add_artist(w)\n ax.text(pos[i][0], pos[i][1], str(p.nodes[i][\"label\"]),\n color=\"#000000\", ha=\"center\", va=\"center\", size=fsize)\n theta1 = theta2\n\n plt.show()\n return fig, ax", "def momentum_kde2_paperplot(fields):\n plt.figure(figsize=(2.65, 2.5))\n ax = plt.axes([0.18, 0.17, 0.8, 0.8])\n colorList = [med_color, high_color]\n lw = 1.5\n i = 0\n meankx_2 = []\n meankx_3 = []\n k_ax = np.load(pp.outputLoc + 'Momentum_KDE/' + 'k_ax_' + '2_' + \"E_{:.1e}.npy\".format(fields[0]))\n # ax.plot(k_ax, np.zeros(len(k_ax)), '-', linewidth=lw, color=eq_color, label='Equilibrium')\n # ax.plot(k_ax, np.zeros(len(k_ax)), '-', linewidth=lw, color=eq_color)\n ax.axhline(0, color='black', linestyle='--', linewidth=0.5)\n # ax.axvline(0, color='gray', linewidth=0.8, alpha=0.5)\n for ee in fields:\n ee_Vcm = ee/100\n k_ax = np.load(pp.outputLoc + 'Momentum_KDE/' + 'k_ax_' + '2_' + \"E_{:.1e}.npy\".format(ee))\n kdist_f0_2 = np.load(pp.outputLoc + 'Momentum_KDE/' + 'k_dist_f0_' + '2_' + \"E_{:.1e}.npy\".format(ee))\n kdist_2 = np.load(pp.outputLoc + 'Momentum_KDE/' + 'k_dist' + '2_' + \"E_{:.1e}.npy\".format(ee))\n kdist_f0_3 = np.load(pp.outputLoc + 'Momentum_KDE/' + 'k_dist_f0_' + '3_' + \"E_{:.1e}.npy\".format(ee))\n kdist_3 = np.load(pp.outputLoc + 'Momentum_KDE/' + 'k_dist' + '3_' + \"E_{:.1e}.npy\".format(ee))\n\n chi_2_i = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '2_' + \"E_{:.1e}.npy\".format(ee))\n meankx_2.append(utilities.mean_kx(chi_2_i, electron_df))\n chi_3_i = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '3_' + \"E_{:.1e}.npy\".format(ee))\n meankx_3.append(utilities.mean_kx(chi_3_i, electron_df))\n\n ax.plot(k_ax, kdist_2, '--', linewidth=lw, color=colorList[i], label='Cold '+r'{:.0f} '.format(ee/100)+r'$\\rm V cm^{-1}$')\n ax.plot(k_ax, kdist_3, '-', linewidth=lw,color=colorList[i], label='Warm '+r'{:.0f} '.format(ee/100)+r'$\\rm V cm^{-1}$')\n i = i + 1\n # ax.plot(k_ax, kdist_f0_3, '--', linewidth=lw, color='black', label=r'$f_0$')\n # ax.plot(meankx_2,np.mean(abs(kdist_2))*np.ones(len(meankx_3)), '-', linewidth=lw, color='black')\n # ax.plot(meankx_3,np.mean(abs(kdist_3))*np.ones(len(meankx_3)), '-', linewidth=lw, color='black')\n\n ax.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))\n ax.locator_params(axis='y', nbins=6)\n ax.locator_params(axis='x', nbins=6)\n # ax.tick_params(direction='in')\n ax.set_xlim(-0.085, 0.081)\n\n plt.xlabel(r'$\\rm k_x \\, \\, (\\AA^{-1})$')\n plt.ylabel(r'Deviational occupation $\\rm \\Delta f_{\\mathbf{k}}$')\n # plt.grid(lw=0.8, linestyle='dotted')\n # plt.ylabel(r'$\\delta f_{\\mathbf{k}}/f_{\\mathbf{k}}^0$')\n # plt.ylim([-1,1])\n plt.legend(frameon=False,prop={'size':different_small_size})\n plt.savefig(pp.figureLoc+'momentum_KDE2.png', dpi=600)", "def plot(self):\n # -- plotting\n fig = plt.figure(figsize=figsize)\n ax = fig.add_axes([0.08, 0.12, 0.55, 0.85])\n ax.plot(self.raw['stress'][1:], self.raw['e'][1:], ls=(0, (1, 1)),\n marker='o', lw=1.5, c='k', mfc='w', label='Experimental data')\n ax.plot(self.sigmaV, 
self.eSigmaV, ls='', marker='|', c='r', ms=15,\n mfc='w', mew=1.5,\n label=str().join([r'$\\sigma^\\prime_\\mathrm{v_0}=$ ',\n f'{self.sigmaV:.0f} kPa']))\n # Compression index\n x4Cc = np.linspace(\n self.cleaned['stress'].iloc[-4], self.cleaned['stress'].iloc[-1])\n y4Cc = -self.idxCc * np.log10(x4Cc) + self.idxCcInt\n ax.plot(x4Cc, y4Cc, ls='-', lw=1.125, color=colors[1],\n label=str().join([r'$C_\\mathrm{c}=$', f'{self.idxCc:.3f}']))\n if self.fitCc:\n ax.plot(self.cleaned['stress'].iloc[self.maskCc],\n self.cleaned['e'].iloc[self.maskCc], ls='', marker='x',\n color=colors[1],\n label=f'Data for linear fit\\n(R$^2={self.r2Cc:.3f}$)')\n # Recompression index\n x4Cr = np.linspace(self.raw['stress'].iloc[self.maskCr].min(),\n self.raw['stress'].iloc[self.maskCr].max())\n y4Cr = -self.idxCr * np.log10(x4Cr) + self.idxCrInt\n ax.plot(x4Cr, y4Cr, ls='-', lw=1.125, color=colors[2],\n label=str().join([r'$C_\\mathrm{r}=$', f'{self.idxCr:.3f}']))\n ax.plot(self.raw['stress'].iloc[self.maskCr],\n self.raw['e'].iloc[self.maskCr], ls='', marker='+',\n color=colors[2],\n label=f'Data for linear fit\\n(R$^2={self.r2Cr:.3f}$)')\n # other details\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.set(xscale='log', ylabel='Void ratio, $e$',\n xlabel=str().join(['Effective vertical stress, ',\n r'$\\sigma^\\prime_\\mathrm{v}$ [kPa]']))\n ax.xaxis.set_major_formatter(mtick.ScalarFormatter())\n ax.yaxis.set_minor_locator(mtick.AutoMinorLocator())\n ax.grid(False)\n ax.legend(bbox_to_anchor=(1.125, 0.5), loc=6,\n title=r\"\\textbf{Compressibility curve}\")\n return fig", "def plot_vanHove_dt(comp,conn,start,step_size,steps):\n \n (fin,) = conn.execute(\"select fout from comps where comp_key = ?\",comp).fetchone()\n (max_step,) = conn.execute(\"select max_step from vanHove_prams where comp_key = ?\",comp).fetchone()\n Fin = h5py.File(fin,'r')\n g = Fin[fd('vanHove',comp[0])]\n\n temp = g.attrs['temperature']\n dtime = g.attrs['dtime']\n\n\n # istatus = plots.non_i_plot_start()\n \n fig = mplt.figure()\n fig.suptitle(r'van Hove dist temp: %.2f dtime: %d'% (temp,dtime))\n dims = figure_out_grid(steps)\n \n plt_count = 1\n outs = []\n tmps = []\n for j in range(start,start+step_size*steps, step_size):\n (edges,count,x_lim) = _extract_vanHove(g,j+1,1,5)\n if len(count) < 50:\n plt_count += 1\n continue\n #count = count/np.sum(count)\n \n sp_arg = dims +(plt_count,)\n ax = fig.add_subplot(*sp_arg)\n ax.grid(True)\n\n \n alpha = _alpha2(edges,count)\n \n ax.set_ylabel(r'$\\log{P(N)}$')\n ax.step(edges,np.log((count/np.sum(count))),lw=2)\n ax.set_title(r'$\\alpha_2 = %.2f$'%alpha + ' j:%d '%j )\n ax.set_xlim(x_lim)\n plt_count += 1\n\n mplt.draw()\n\n # plots.non_i_plot_start(istatus)\n\n del g\n Fin.close()\n del Fin", "def _plot_ecdf(data, label='Value', alpha=1):\n data = np.array(data)\n data = np.sort(data)\n t = len(data)\n prob = np.arange(t) / t\n plt.plot(data, prob, label=label, alpha=alpha)", "def plot(self): \n\t\txandy = sep_xy(self.start, self.end)\n\t\tplt.plot(xandy[0], xandy[1], 'k-', lw=1, color='red')", "def plotdivers(dat, iabscissa, foffset):\r\n from matplotlib.pylab import semilogy, hold, grid, \\\r\n axis, title, text\r\n fontsize = pylab.rcParams['font.size']\r\n\r\n hold(False)\r\n\r\n dfit = dat.f[:,5]-min(dat.f[:,5])\r\n dfit[dfit<1e-98] = np.NaN\r\n\r\n if dat.f.shape[1] > 7:\r\n # semilogy(dat.f[:, iabscissa], abs(dat.f[:,[6, 7, 10, 12]])+foffset,'-k')\r\n semilogy(dat.f[:, iabscissa], abs(dat.f[:,[6, 7]])+foffset,'-k')\r\n hold(True)\r\n\r\n # (larger 
indices): additional fitness data, for example constraints values\r\n if dat.f.shape[1] > 8:\r\n # dd = abs(dat.f[:,7:]) + 10*foffset\r\n # dd = np.where(dat.f[:,7:]==0, np.NaN, dd) # cannot be\r\n semilogy(dat.f[:, iabscissa], np.abs(dat.f[:,8:]) + 10*foffset, 'm')\r\n hold(True)\r\n\r\n idx = np.where(dat.f[:,5]>1e-98)[0] # positive values\r\n semilogy(dat.f[idx, iabscissa], dat.f[idx,5]+foffset, '.b')\r\n hold(True)\r\n grid(True)\r\n\r\n idx = np.where(dat.f[:,5] < -1e-98) # negative values\r\n semilogy(dat.f[idx, iabscissa], abs(dat.f[idx,5])+foffset,'.r')\r\n\r\n semilogy(dat.f[:, iabscissa],abs(dat.f[:,5])+foffset,'-b')\r\n semilogy(dat.f[:, iabscissa], dfit, '-c')\r\n\r\n if 11 < 3: # delta-fitness as points\r\n dfit = dat.f[1:, 5] - dat.f[:-1,5] # should be negative usually\r\n semilogy(dat.f[1:,iabscissa], # abs(fit(g) - fit(g-1))\r\n np.abs(dfit)+foffset, '.c')\r\n i = dfit > 0\r\n # print(np.sum(i) / float(len(dat.f[1:,iabscissa])))\r\n semilogy(dat.f[1:,iabscissa][i], # abs(fit(g) - fit(g-1))\r\n np.abs(dfit[i])+foffset, '.r')\r\n\r\n # overall minimum\r\n i = np.argmin(dat.f[:,5])\r\n semilogy(dat.f[i, iabscissa]*np.ones(2), dat.f[i,5]*np.ones(2), 'rd')\r\n # semilogy(dat.f[-1, iabscissa]*np.ones(2), dat.f[-1,4]*np.ones(2), 'rd')\r\n\r\n # AR and sigma\r\n semilogy(dat.f[:, iabscissa], dat.f[:,3], '-r') # AR\r\n semilogy(dat.f[:, iabscissa], dat.f[:,2],'-g') # sigma\r\n semilogy(dat.std[:-1, iabscissa], np.vstack([list(map(max, dat.std[:-1,5:])), list(map(min, dat.std[:-1,5:]))]).T,\r\n '-m', linewidth=2)\r\n text(dat.std[-2, iabscissa], max(dat.std[-2, 5:]), 'max std', fontsize=fontsize)\r\n text(dat.std[-2, iabscissa], min(dat.std[-2, 5:]), 'min std', fontsize=fontsize)\r\n ax = array(axis())\r\n # ax[1] = max(minxend, ax[1])\r\n axis(ax)\r\n text(ax[0]+0.01, ax[2], # 10**(log10(ax[2])+0.05*(log10(ax[3])-log10(ax[2]))),\r\n '.f_recent=' + repr(dat.f[-1,5]) )\r\n\r\n # title('abs(f) (blue), f-min(f) (cyan), Sigma (green), Axis Ratio (red)')\r\n title('blue:abs(f), cyan:f-min(f), green:sigma, red:axis ratio', fontsize=fontsize-1)\r\n # pylab.xticks(xticklocs)\r", 
"def plot(self):\n #prepare the marker list\n marker = itertools.cycle((',', '+', '.', 'o', '*',\n '^', 'v', '<', '>', '8',\n 's', 'p', 'h', 'H', 'D',\n 'd'))\n # first categorised with plane\n for each_plane in self.plane_list:\n if self.is_literal:\n label = \"[\" + \"{0} {1} {2}\".format(each_plane[0], each_plane[1], each_plane[2]) + \"]\"\n else:\n label = \"{\"+\"{0}, {1}, {2}\".format(each_plane[0], each_plane[1], each_plane[2]) + \"}\"\n x_list = []\n y_list = []\n if self.is_literal:\n tmp = [each_plane]\n opposite_plane = [-item for item in each_plane]\n tmp.append(opposite_plane)\n else:\n tmp = PoleFigure.get_permutations(each_plane)\n # second categorised with grain ID\n my_marker = \".\" # default marker\n for i in range(len(self.__data)):\n each_euler = self.__data[i]\n if self.unique_marker:\n my_marker = marker.next()\n plt.rcParams['text.usetex'] = False # otherwise, '^' will cause trouble\n euler = EulerAngle(each_euler[0], each_euler[1], each_euler[2])\n rot_m = np.dot(self.__ref, euler.rotation_matrix)\n self.__data[i] = RotationMatrix(rot_m).euler_angle\n for each_pole in tmp:\n tmp_pole = np.array(each_pole) / self.lattice_vector\n tmp_pole /= np.linalg.norm(tmp_pole)\n coord = np.dot(rot_m, tmp_pole)\n if coord[2] < 0:\n continue # not pointing up, moving on\n else:\n x = coord[0] / (1.0 + float(coord[2]))\n y = coord[1] / (1.0 + float(coord[2]))\n # need to rotate 90 degree\n x_list.append(y)\n y_list.append(-x)\n # start plotting\n if self.__clr_list is not None:\n clr = self.__clr_list.next()\n else:\n clr = np.random.rand(3, 1)\n plt.scatter(x_list, y_list, marker=my_marker, c=clr, label=label, edgecolor='none')\n # label x/y axis\n plt.text(1.1, 0.0, \"y\", horizontalalignment='center', verticalalignment='center', fontsize=15)\n plt.text(0.0, -1.1, \"x\", horizontalalignment='center', verticalalignment='center', fontsize=15)\n # set legend\n plt.legend(loc='upper left', numpoints=1, ncol=6, fontsize=8, bbox_to_anchor=(0, 0))\n plt.title(self.title)\n plt.savefig(self.title + \".\" + self.output)\n plt.close()", "def depsdh(eps, H, Th):\n # fmt: off\n return (\n eps**2 / ((1 - eps**2) * (eps**2 - Th) *\n sqrt(1 - (3 * eps**4 + eps**2 * (H - 9 * Th - 5) + 15 * Th)**2 /\n (225 * (1 - eps**2)**2 * (eps**2 - Th)**2)))\n )\n # fmt: on", "def plotTI():\n min_dl = dlam[dlam != 0].min()\n S = int(0.4/min_dl)\n fig = pl.figure(figsize = 
(8,6))\n ax = fig.add_subplot(1,1,1)\n ax.spines['bottom'].set_position('zero')\n ax.spines['top'].set_color('none')\n ax.spines['right'].set_color('none')\n ax.xaxis.set_ticks_position('bottom')\n ax.yaxis.set_ticks_position('left')\n\n for k, spine in ax.spines.items():\n spine.set_zorder(12.2)\n\n xs, ndx, dx = [0], 0, 0.001\n colors = ['r', 'g', '#7F38EC', '#9F000F', 'b', 'y']\n min_y, max_y = 0, 0\n\n lines = tuple()\n ## lv_names2 = [r'$Coulomb$', r'$vdWaals$'] ## for the paper\n lv_names2 = []\n for j in range(n_components):\n y = ave_dhdl[:,j]\n if not (y == 0).all():\n lv_names2.append(r'$%s$' % P.lv_names[j].capitalize())\n\n for j in range(n_components):\n\n y = ave_dhdl[:,j]\n if not (y == 0).all():\n\n # Get the coordinates.\n lj = lchange[:,j]\n x = lv[:,j][lj]\n y = y[lj]/P.beta_report\n\n if 'TI' in P.methods:\n # Plot the TI integration area.\n ss = 'TI'\n for i in range(len(x)-1):\n min_y = min(y.min(), min_y)\n max_y = max(y.max(), max_y)\n #pl.plot(x,y)\n if i%2==0:\n pl.fill_between(x[i:i+2]+ndx, 0, y[i:i+2], color=colors[ndx], alpha=1.0)\n else:\n pl.fill_between(x[i:i+2]+ndx, 0, y[i:i+2], color=colors[ndx], alpha=0.5)\n xlegend = [-100*wnum for wnum in range(len(lv_names2))]\n pl.plot(xlegend, [0*wnum for wnum in xlegend], ls='-', color=colors[ndx], label=lv_names2[ndx]) ## for the paper\n\n if 'TI-CUBIC' in P.methods and not cubspl[j]==0:\n # Plot the TI-CUBIC interpolation curve.\n ss += ' and TI-CUBIC'\n xnew = numpy.arange(0, 1+dx, dx)\n ynew = cubspl[j].interpolate(y, xnew)\n min_y = min(ynew.min(), min_y)\n max_y = max(ynew.max(), max_y)\n pl.plot(xnew+ndx, ynew, color='#B6B6B4', ls ='-', solid_capstyle='round', lw=3.0)\n\n else:\n # Plot the TI-CUBIC integration area.\n ss = 'TI-CUBIC'\n for i in range(len(x)-1):\n xnew = numpy.arange(x[i], x[i+1]+dx, dx)\n ynew = cubspl[j].interpolate(y, xnew)\n ynew[0], ynew[-1] = y[i], y[i+1]\n min_y = min(ynew.min(), min_y)\n max_y = max(ynew.max(), max_y)\n if i%2==0:\n pl.fill_between(xnew+ndx, 0, ynew, color=colors[ndx], alpha=1.0)\n else:\n pl.fill_between(xnew+ndx, 0, ynew, color=colors[ndx], alpha=0.5)\n\n # Store the abscissa values and update the subplot index.\n xs += (x+ndx).tolist()[1:]\n ndx += 1\n\n # Make sure the tick labels are not overcrowded.\n xs = numpy.array(xs)\n dl_mat = numpy.array([xs-i for i in xs])\n ri = range(len(xs))\n\n def getInd(r=ri, z=[0]):\n primo = r[0]\n min_dl=ndx*0.02*2**(primo>10)\n if dl_mat[primo].max()<min_dl:\n return z\n for i in r:\n for j in range(len(xs)):\n if dl_mat[i,j]>min_dl:\n z.append(j)\n return getInd(ri[j:], z)\n\n xt = [i if (i in getInd()) else '' for i in range(K)]\n pl.xticks(xs[1:], xt[1:], fontsize=10)\n pl.yticks(fontsize=10)\n #ax = pl.gca()\n #for label in ax.get_xticklabels():\n # label.set_bbox(dict(fc='w', ec='None', alpha=0.5))\n\n # Remove the abscissa ticks and set up the axes limits.\n for tick in ax.get_xticklines():\n tick.set_visible(False)\n pl.xlim(0, ndx)\n min_y *= 1.01\n max_y *= 1.01\n pl.ylim(min_y, max_y)\n\n for i,j in zip(xs[1:], xt[1:]):\n pl.annotate(('%.2f' % (i-1.0 if i>1.0 else i) if not j=='' else ''), xy=(i, 0), xytext=(i, 0.01), size=10, rotation=90, textcoords=('data', 'axes fraction'), va='bottom', ha='center', color='#151B54')\n if ndx>1:\n lenticks = len(ax.get_ymajorticklabels()) - 1\n if min_y<0: lenticks -= 1\n if lenticks < 5:\n from matplotlib.ticker import AutoMinorLocator as AML\n ax.yaxis.set_minor_locator(AML())\n pl.grid(which='both', color='w', lw=0.25, axis='y', zorder=12)\n pl.ylabel(r'$\\mathrm{\\langle{\\frac{ 
\\partial U } { \\partial \\lambda }}\\rangle_{\\lambda}\\/%s}$' % P.units, fontsize=20, color='#151B54')\n pl.annotate('$\\mathit{\\lambda}$', xy=(0, 0), xytext=(0.5, -0.05), size=18, textcoords='axes fraction', va='top', ha='center', color='#151B54')\n if not P.software.title()=='Sire':\n lege = ax.legend(prop=FP(size=14), frameon=False, loc=1)\n for l in lege.legendHandles:\n l.set_linewidth(10)\n pl.savefig(os.path.join(P.output_directory, 'dhdl_TI.pdf'))\n pl.close(fig)\n return", "def plot_eigpers(self, cdf_cut=0.90, figsize=(9, 5), tick_fontsize=20,\n label_fontsize=32, show_cutoff=True, title=None, savepath=None, annotate=False):\n\n sns.set(style=\"ticks\")\n f2, ax_scat2 = plt.subplots(1, sharex=True, figsize=figsize)\n sns.scatterplot(self.eigenval, self.pers, ax=ax_scat2)\n\n pcut, cdf_name = self.get_pcutoff(cdf_cut=cdf_cut)\n print(f\"p_cut: {pcut} \\n cdf_name: {cdf_name}\")\n # Append new plots\n divider = make_axes_locatable(ax_scat2)\n\n # Add plot in right\n axHisty = divider.append_axes(\"right\", 1.5, pad=0.1, sharey=ax_scat2)\n axHisty.tick_params(labelleft=False, left=False, labelsize=tick_fontsize,\n bottom=True, labelbottom=True, top=False, labeltop=False)\n axHisty.set_xlabel(\"counts\", fontsize=label_fontsize)\n\n # add plot above eighist\n axBox = divider.append_axes(\"top\", 0.5, pad=0.1)\n axBox.tick_params(labelleft=False, left=False, labelsize=tick_fontsize,\n bottom=False, labelbottom=False, top=False, labeltop=False)\n\n ax_scat2.set_ylabel(r\"$r_{\\alpha}$\", fontsize=label_fontsize)\n ax_scat2.set_xlabel(r\"$\\overline{\\lambda}_{\\alpha}$\", fontsize=label_fontsize)\n ax_scat2.tick_params(labelsize=tick_fontsize)\n ax_scat2.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))\n ax_scat2.set_ylim(0, 1.2)\n\n # Plot pers distribution with cutoff\n if annotate:\n sns.distplot(self.pers, bins=max(20, int(len(self.eigenval) / 10)), vertical=True, norm_hist=True,\n kde=False, fit=eval(\"scipy.stats.\" + cdf_name), ax=axHisty, color='k', label=cdf_name)\n axHisty.legend()\n else:\n sns.distplot(self.pers, bins=max(20, int(len(self.eigenval) / 10)), vertical=True, norm_hist=True,\n kde=False, fit=eval(\"scipy.stats.\" + cdf_name), ax=axHisty, color='k')\n # plot eigenvalue box plot\n sns.boxplot(self.eigenval, ax=axBox)\n\n if show_cutoff:\n # Add pers cutoff lines\n axHisty.axhline(pcut, color='g', linestyle='--')\n ax_scat2.axhline(pcut, color='g', linestyle='--')\n\n # retrieve eigval cutoff from boxplot\n Q3 = np.quantile(self.eigenval, 0.75)\n max_whisker = Q3 + (1.5 * scipy.stats.iqr(self.eigenval)) # Q3+1.5IQR\n maxwhisk = self.eigenval[self.eigenval <= max_whisker].max()\n\n _q3 = self.eigenval[self.eigenval <= Q3].max()\n\n if isinstance(self.eigcut, int) or isinstance(self.eigcut, float):\n print(f\"eigcut: {self.eigcut}\")\n ax_scat2.axvline(self.eigcut, color='r', linestyle='--')\n axBox.axvline(self.eigcut, color='r', linestyle='--')\n if annotate:\n for a in range(len(self.eigenval)):\n if self.pers[a] > pcut and self.eigenval[a] >= np.round(self.eigcut, 2):\n ax_scat2.annotate(str(a + 1), xy=(self.eigenval[a], self.pers[a]), fontsize=14)\n\n if self.eigcut == \"outliers\":\n print(f\"maxwhisk: {maxwhisk}\")\n ax_scat2.axvline(maxwhisk, color='r', linestyle='--')\n axBox.axvline(maxwhisk, color='r', linestyle='--')\n\n if annotate:\n for a in range(len(self.eigenval)):\n if self.pers[a] > pcut and self.eigenval[a] >= np.round(maxwhisk, 2):\n ax_scat2.annotate(str(a + 1), xy=(self.eigenval[a], self.pers[a]), fontsize=14)\n\n # ev = 
np.asarray(self.eigenval)\n # ps = np.asarray(self.pers)\n # col = np.where((ps > pcut) & (ev > maxwhisk), 'r',\n # np.where((ps < pcut) & (ev < maxwhisk), 'gray',\n # np.where(ps < pcut, 'gray', \"gray\")))\n # sns.scatterplot(ev, ps, ax=ax_scat2, c=col)\n if self.eigcut == \"Q3\":\n print(f\"Q3: {Q3}\")\n ax_scat2.axvline(Q3, color='r', linestyle='--')\n axBox.axvline(Q3, color='r', linestyle='--')\n if annotate:\n for a in range(len(self.eigenval)):\n if self.pers[a] > pcut and self.eigenval[a] >= np.round(_q3, 2):\n ax_scat2.annotate(str(a + 1), xy=(self.eigenval[a], self.pers[a]), fontsize=14)\n f2.tight_layout()\n if title:\n f2.suptitle(title, fontsize=label_fontsize)\n plt.show()\n if savepath is not None:\n f2.savefig(savepath, dpi=300)", "def plotDustFrac(self,ID,ifile):\n clumpxyz = self.clumpcat[ID][:3]\n r2 = (self.disc.xyzh[0]-clumpxyz[0])**2 + (self.disc.xyzh[1]-clumpxyz[1])**2\n members = np.sqrt(r2) < self.annulus #members are all particles within radial annulus\n inner = np.sqrt(r2) < 0.25\n\n gas = self.disc.itype == 1\n dust = self.disc.itype == 2\n dustfrac = self.disc.dustfrac*1e8 #as I originally set dust-to-gas=1e-10\n\n #Calculate temperatures from thermal energies\n k = 1.38064852e-16 #ergs\n mH = 1.6735575e-24 #grams\n gmw = 2.381 #mean mass taken from Phantom\n N = sum(gas*self.disc.massofgas)*self.umass/mH/gmw #number of atoms\n temp = 2.*self.disc.utherm*self.uenerg/3./k/N\n\n #Plot dust fraction vs. radius\n plt.scatter(np.sqrt(r2[members & gas]), dustfrac[members & gas], s=1.0,\n c=temp[members & gas], cmap=cm.magma)\n plt.yscale('log')\n plt.ylim([0.001,1.0])\n plt.xlim([0,3.])\n plt.ylabel('Dust-to-gas ratio')\n plt.xlabel('Dist from clump centre (au)')\n plt.title('Dust fraction vs. radius')\n cbar = plt.colorbar()\n cbar.set_label('Temp (K)')\n plt.clim([0,600])\n plt.legend(['%.2f yrs' %self.disc.time])\n plt.savefig('%s/fragplots/%i.png' %(self.wd,ifile))\n #plt.show()\n plt.clf()", "def Excentricity_graph(self,ccenter,cradius,icenter):\n \n ellip = 0.\n q = 1. 
- ellip\n pa = 0.\n \n stamp = self['STAMP'].copy()\n mask = self['MASK'].copy()\n sky = self['BACKGROUND']\n Img = stamp - sky\n Img[num.where(mask != 0)] = 0.\n \n id = self._getGraphId()\n root = 'Excentricity_%s' % (id,)\n pngname = root + '.png' ; epsname = root + '.eps'\n jpgname = root + '.jpg'\n doStamp(Img,pngname,format='PNG')\n Convert(pngname,jpgname)\n \n Painted = Paint(jpgname)\n Painted.load()\n \n Painted.DrawEllipse(ccenter,cradius,q,pa,color='red',linewidth=2)\n Painted.DrawCross(ccenter,length=20,color='red')\n Painted.DrawCross(icenter,length=20,color='green')\n \n text = 'E=%.2f o/o' % self['M_E']\n \n # Painted.Graffiti(text,commtextpos)\n \n Painted.save(jpgname) \n Painted.release()\n \n Convert(jpgname,epsname)\n os.system('rm %s %s' % (pngname,jpgname))\n \n self['figures']['Excentricity'] = epsname\n self['figcomms']['Excentricity'] = text", "def plot_dispatch(pv, demand, E, week=30):\n\n sliced_index = (pv.index.week==week)\n pv_sliced = pv[sliced_index]\n demand_sliced = demand[sliced_index]\n self_consumption = E['inv2load'][sliced_index]\n \n direct_self_consumption = np.minimum(pv_sliced,demand_sliced)# E['inv2load'][sliced_index]\n indirect_self_consumption = self_consumption-direct_self_consumption\n res_pv_sliced = E['res_pv'][sliced_index]\n grid2load_sliced = E['grid2load'][sliced_index]\n store2inv_sliced = E['store2inv'][sliced_index]\n LevelOfCharge = E['LevelOfCharge'][sliced_index]\n inv2grid = E['inv2grid'][sliced_index]\n grid2load = E['grid2load'][sliced_index]\n aux=np.maximum(0,self_consumption)\n\n fig, axes = plt.subplots(nrows=3, ncols=1, sharex=True, figsize=(17, 4*3), frameon=False,\n gridspec_kw={'height_ratios': [3, 1, 1], 'hspace': 0.04})\n\n #fig, ax = plt.subplots(figsize=(17, 4))\n axes[0].plot(demand_sliced.index, demand_sliced, color='black', lw=2,label='demand')\n axes[0].plot(pv_sliced.index, pv_sliced, color='black',ls='--', lw=2,label='PV')\n axes[0].fill_between(direct_self_consumption.index, 0, direct_self_consumption, color='orange', alpha=.8, label='DSC')\n axes[0].fill_between(pv_sliced.index, self_consumption, pv_sliced , where=pv_sliced<demand_sliced,color='blue', hatch='//',\n alpha=.3,label='ISC')\n axes[0].fill_between(pv_sliced.index, direct_self_consumption, pv_sliced ,where=pv_sliced>demand_sliced, color='gold', alpha=.3,label='Excess PV')\n\n axes[0].fill_between(grid2load_sliced.index,self_consumption,demand_sliced,color='red',alpha=.2, label='grid2load')\n \n\n #axes[0].plot(grid2load_sliced.index, grid2load_sliced, color='red', ls=\":\", lw=1)\n axes[0].set_ylim([0, axes[0].get_ylim()[1] ])\n axes[0].set_ylabel('Power (kW)')\n\n axes[1].fill_between(LevelOfCharge.index, 0, LevelOfCharge, color='grey', alpha=.2, label='SOC')\n axes[1].set_ylabel('State of Charge (kWh)')\n\n axes[2].fill_between(inv2grid.index, 0, inv2grid, color='green', alpha=.2,label='injected2grid')\n axes[2].fill_between(inv2grid.index, 0, -grid2load, color='red', alpha=.2,label='grid drawn')\n axes[2].set_ylabel('In/out from grid (kW)')\n axes[0].legend()\n axes[1].legend()\n axes[2].legend()\n return", "def interactions_plot():\n data_CC, data_CD = load_data('ints_CC'), load_data('ints_CD')\n fig,ax = plt.subplots()\n plot_mean_std(data_CC,ax,'C-C interactions')\n plot_mean_std(data_CD,ax,'C-D interactions')\n plt.xlabel('cluster size, n')\n plt.legend(loc='best')\n plt.savefig('interactions.pdf')", "def test_plot_hid(self):\n # also produce a light curve with the same binning\n command = ('{0} -b 100 --e-interval {1} {2}').format(\n 
os.path.join(self.datadir, 'monol_testA_nustar_fpma_ev_calib' +\n HEN_FILE_EXTENSION), 3, 10)\n\n hen.lcurve.main(command.split())\n lname = os.path.join(self.datadir,\n 'monol_testA_nustar_fpma_E3-10_lc') + \\\n HEN_FILE_EXTENSION\n os.path.exists(lname)\n cname = os.path.join(self.datadir,\n 'monol_testA_nustar_fpma_E_10-5_over_5-3') + \\\n HEN_FILE_EXTENSION\n hen.plot.main([cname, lname, '--noplot', '--xlog', '--ylog', '--HID',\n '-o', 'dummy.qdp'])", "def plot_2d(self):\n fig = plt.figure(figsize=(10,8))\n \n d = int(len(self.a_scale.flat)**0.5)\n a_scale = self.a_scale.reshape(d,d)\n c_scale = self.c_scale.reshape(d,d)\n E_coh = self.E_coh.reshape(d,d)\n plt.pcolormesh(a_scale, c_scale, E_coh)\n plt.xlabel('xy linear deformation coefficient')\n plt.ylabel('z linear deformation coefficient')\n cbar = plt.colorbar()\n cbar.ax.set_ylabel('cohesive energy (eV/atom)',\n fontsize='x-large')\n plt.show()\n \n return fig", "def findzpd(self):\n dc=0.5*self.rms*self.ndstep\n #fixed at 0.1 of the dispersion\n dd=0.1*self.ws.coef[1]\n\n #set up the dcoef values\n dcoef=self.ws.coef*0.0\n dcoef[0]=dc\n dcoef[1]=dd\n self.ws=st.findxcor(self.xarr, self.farr, self.swarr, self.sfarr, self.ws, \n dcoef=dcoef, ndstep=self.ndstep, best=False, inttype='interp')\n self.plotArt()\n self.redraw_canvas()", "def plot_dispatch_comm(pv, demand, E, week=30,flag=False):\n\n sliced_index = (pv.index.week==week)\n pv_sliced = pv[sliced_index]\n demand_sliced = demand[sliced_index]\n self_consumption = E['inv2load'][sliced_index]\n \n direct_self_consumption = np.minimum(pv_sliced,demand_sliced)# E['inv2load'][sliced_index]\n indirect_self_consumption = self_consumption-direct_self_consumption\n res_pv_sliced = E['res_pv'][sliced_index]\n grid2load_sliced = E['grid2load'][sliced_index]\n store2inv_sliced = E['store2inv'][sliced_index]\n LevelOfCharge = E['LevelOfCharge'][sliced_index]\n inv2grid = E['inv2grid'][sliced_index]\n grid2load = E['grid2load'][sliced_index]\n aux=np.maximum(0,self_consumption)\n\n fig, axes = plt.subplots(nrows=3, ncols=1, sharex=True, figsize=(17, 4*3), frameon=False,\n gridspec_kw={'height_ratios': [3, 1, 1], 'hspace': 0.04})\n\n #fig, ax = plt.subplots(figsize=(17, 4))\n axes[0].plot(demand_sliced.index, demand_sliced, color='black', lw=2,label='demand')\n \n if flag:\n axes[0].plot(pv_sliced.index, pv_sliced, color='green', lw=2,label='pv')\n axes[0].plot(direct_self_consumption.index, direct_self_consumption, color='yellow', lw=2,label='DSC')\n axes[0].plot(indirect_self_consumption.index, indirect_self_consumption, color='orange', lw=2,label='ISC')\n axes[0].plot(grid2load_sliced.index, grid2load_sliced, color='red', lw=2,label='grid')\n\n else:\n axes[0].fill_between(direct_self_consumption.index, 0, direct_self_consumption, color='orange', alpha=.8, label='DSC')\n axes[0].fill_between(pv_sliced.index, self_consumption, pv_sliced ,where=pv_sliced<demand_sliced, color='blue', hatch='//',\n alpha=.3,label='ISC')\n axes[0].fill_between(pv_sliced.index, direct_self_consumption, pv_sliced , color='gold', alpha=.3,label='Excess PV')\n\n axes[0].fill_between(grid2load_sliced.index,self_consumption,demand_sliced,color='red',alpha=.2, label='grid2load')\n axes[0].set_ylim([0, axes[0].get_ylim()[1] ])\n axes[0].set_ylabel('Power (kW)')\n\n axes[1].fill_between(LevelOfCharge.index, 0, LevelOfCharge, color='grey', alpha=.2, label='SOC')\n axes[1].set_ylabel('State of Charge (kWh)')\n\n axes[2].fill_between(inv2grid.index, 0, inv2grid, color='green', alpha=.2,label='injected2grid')\n 
axes[2].fill_between(inv2grid.index, 0, -grid2load, color='red', alpha=.2,label='grid drawn')\n axes[2].set_ylabel('In/out from grid (kW)')\n axes[0].legend()\n axes[1].legend()\n axes[2].legend()\n return", "def test_2d_plot(self):\n db = pd.HDFStore('test.h5')\n df_iv = db['iv']\n dates = df_iv[df_iv['dte'] == 30]['date']\n impl_vols = df_iv[df_iv['dte'] == 30]['impl_vol']\n db.close()\n\n print df_iv.sort_values('impl_vol').head()\n\n plt.plot(dates, impl_vols)\n plt.xlabel('date')\n plt.ylabel('impl_vols')\n plt.show()", "def plot(self): \n\t\txandy = sep_xy(self.start, self.end)\n\t\tplt.plot(xandy[0], xandy[1], 'k-', lw=1, color='blue')", "def plot_path_estimate(self, ax, q, d, title=True):\n q_data = self.data['EM_data/{}'.format(q)]\n est_mean = q_data['path_means']\n est_sdev = q_data['path_sdevs']\n\n if (d == 0):\n path = self.xr\n label = 'X'\n # dxy = self.dx\n elif (d == 1):\n path = self.yr\n label = 'Y'\n # dxy = self.dy\n else:\n raise ValueError('d must be either 0 or 1')\n\n tt = self.DT * np.arange(self.N_T)\n ax.fill_between(tt,\n est_mean[:, d] - est_sdev[:, d],\n est_mean[:, d] + est_sdev[:, d],\n alpha=0.5, linewidth=0.25, color='r')\n ax.plot(tt,\n est_mean[:, d], label='estimate', c='r')\n ax.plot(tt,\n path, label='actual', c='b')\n ax.set_xlim([0, self.DT * self.N_T])\n ax.set_xlabel('Time (s)')\n ax.set_ylabel('Relative position (arcmin)')\n if title:\n # ax.set_title(label + ' Pos., shift = %.2f' % dxy)\n ax.set_title('{} Position'.format(label))", "def _draw_ephemeris_info(self) -> None:\n\n ephemerides = self.ephemerides\n basic_annotations = self.basic_annotations\n center_ra, center_dec = EphemerisService.center_position(ephemerides)\n\n # is the target moving much?\n ra_min = min(ephemerides, key=lambda e: e.ra).ra\n ra_max = max(ephemerides, key=lambda e: e.ra).ra\n dec_min = min(ephemerides, key=lambda e: e.dec).dec\n dec_max = max(ephemerides, key=lambda e: e.dec).dec\n\n ra_width = ra_max - ra_min\n dec_width = dec_max - dec_min\n if (\n ra_width > FinderChart.MINIMUM_PATH_BOX_WIDTH\n or dec_width > FinderChart.MINIMUM_PATH_BOX_WIDTH\n ):\n significant_movement = True\n else:\n significant_movement = False\n\n # we have to convert angles to floats, as the vstack function does not accept\n # Quantity values\n right_ascensions = [e.ra for e in ephemerides]\n declinations = [e.dec for e in ephemerides]\n epochs = [e.epoch for e in ephemerides]\n start_time = epochs[0]\n end_time = epochs[-1]\n\n dra_start_to_end = right_ascensions[-1] - right_ascensions[0]\n ddec_start_to_end = declinations[-1] - declinations[0]\n\n # plot the target's path\n if significant_movement and not basic_annotations:\n # we have to convert angles to floats, as the vstack function does not accept\n # Quantity values\n right_ascensions_deg = [e.ra.to_value(u.deg) for e in ephemerides]\n declinations_deg = [e.dec.to_value(u.deg) for e in ephemerides]\n lv = np.vstack([right_ascensions_deg, declinations_deg])\n self.plot.show_lines(\n [lv], layer=\"object_path_lines\", color=\"b\", linewidth=1, alpha=1\n )\n\n # direction at the start and end\n ddec_start = declinations[1] - declinations[0]\n dra_end = right_ascensions[-1] - right_ascensions[-2]\n ddec_end = declinations[-1] - declinations[-2]\n\n if not basic_annotations:\n if significant_movement:\n # plot the arrow at the end time to show the direction\n self._draw_arrow_head(\n right_ascensions[-1], declinations[-1], dra_end, ddec_end\n )\n else:\n ra_correction = abs(np.cos(center_dec))\n v_x, v_y = dra_start_to_end * 
ra_correction, ddec_start_to_end\n length = np.sqrt(v_x ** 2 + v_y ** 2)\n v_x, v_y = (\n v_x.to_value(u.deg) / length.to_value(u.deg),\n v_y.to_value(u.deg) / length.to_value(u.deg),\n )\n self._draw_arrow_head(\n center_ra + 0.0013 * u.deg * v_x / ra_correction,\n center_dec + 0.0013 * u.deg * v_y,\n dra_start_to_end,\n ddec_start_to_end,\n )\n\n # the labels shouldn't overlap with the path\n abs_vertical_shift = 0.002 * u.deg\n if significant_movement:\n label_position_start = {\n \"horizontal_alignment\": \"center\",\n \"horizontal_position\": right_ascensions[0],\n \"vertical_alignment\": \"top\" if ddec_start > 0 else \"bottom\",\n \"vertical_position\": declinations[0],\n \"vertical_shift\": (-1 if ddec_start > 0 else 1)\n * abs_vertical_shift,\n }\n label_position_end = {\n \"horizontal_alignment\": \"center\",\n \"horizontal_position\": right_ascensions[-1],\n \"vertical_alignment\": \"bottom\" if ddec_start > 0 else \"top\",\n \"vertical_position\": declinations[-1],\n \"vertical_shift\": (1 if ddec_end > 0 else -1) * abs_vertical_shift,\n }\n else:\n radius = 0.5 * FinderChart.MINIMUM_PATH_BOX_WIDTH\n abs_vertical_position_offset = radius\n label_position_start = {\n \"horizontal_alignment\": \"center\",\n \"horizontal_position\": center_ra,\n \"vertical_alignment\": \"top\" if ddec_start_to_end > 0 else \"bottom\",\n \"vertical_position\": center_dec\n + (-1 if ddec_start_to_end > 0 else 1)\n * abs_vertical_position_offset,\n \"vertical_shift\": (-1 if ddec_start_to_end > 0 else 1)\n * abs_vertical_shift,\n }\n label_position_end = {\n \"horizontal_alignment\": \"center\",\n \"horizontal_position\": center_ra,\n \"vertical_alignment\": \"bottom\" if ddec_start_to_end > 0 else \"top\",\n \"vertical_position\": center_dec\n + (1 if ddec_start_to_end > 0 else -1)\n * abs_vertical_position_offset,\n \"vertical_shift\": (1 if ddec_start_to_end > 0 else -1)\n * abs_vertical_shift,\n }\n\n # add the start time label\n self.draw_label(\n label_position_start[\"horizontal_position\"],\n label_position_start[\"vertical_position\"]\n + label_position_start[\"vertical_shift\"],\n start_time.strftime(\"%Y-%m-%d %H:%M UT\"),\n size=\"8\",\n horizontalalignment=label_position_start[\"horizontal_alignment\"],\n verticalalignment=label_position_start[\"vertical_alignment\"],\n color=(0, 0, 1),\n )\n\n # add the end time label\n self.draw_label(\n label_position_end[\"horizontal_position\"],\n label_position_end[\"vertical_position\"]\n + label_position_end[\"vertical_shift\"],\n end_time.strftime(\"%Y-%m-%d %H:%M UT\"),\n size=\"8\",\n horizontalalignment=label_position_end[\"horizontal_alignment\"],\n verticalalignment=label_position_end[\"vertical_alignment\"],\n color=(0, 0, 1),\n )\n\n # add a \"target circle\" if the movement isn't significant\n if not significant_movement:\n self.draw_circle(\n center_ra, center_dec, FinderChart.MINIMUM_PATH_BOX_WIDTH / 2.0, \"b\"\n )\n else:\n # output the time range\n self.draw_label(\n center_ra,\n center_dec - 4 * u.arcmin,\n start_time.strftime(\"%Y-%m-%d %H:%M UT\")\n + \" - \"\n + end_time.strftime(\"%Y-%m-%d %H:%M UT\"),\n size=\"large\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n color=(0, 0.5, 1),\n )", "def plot_ipr_DOS(self, outdir=None, title=r'$D(\\omega)$ for gyroscopic network', fname='ipr_magneticgyro_hist',\n alpha=1.0, FSFS=12, show=True, inverse_PR=True, save=True, **kwargs):\n # First make sure all eigvals are accounted for\n # Don't attribute the eigenvectors and eigenvalues for a method that simply plots 
them\n eigval = self.get_eigval(attribute=False)\n\n # Also make sure ipr is defined\n if self.ipr is None:\n ipr = self.calc_ipr(attribute=False)\n else:\n ipr = self.ipr\n\n # Register cmap if necessary\n if inverse_PR:\n if 'viridis' not in plt.colormaps():\n cmaps.register_colormaps()\n\n fig, DOS_ax, cbar_ax, cbar = leplt.initialize_colored_DOS_plot(eigval, 'gyro', alpha=alpha,\n colorV=ipr, colormap='viridis', linewidth=0,\n cax_label=r'$p^{-1}$', climbars=True,\n **kwargs)\n else:\n if 'viridis_r' not in plt.colormaps():\n cmaps.register_colormaps()\n print '1/ipr = ', 1. / ipr\n fig, DOS_ax, cbar_ax, cbar = leplt.initialize_colored_DOS_plot(eigval, 'gyro', alpha=alpha,\n colorV=1. / ipr, colormap='viridis_r',\n linewidth=0, cax_label=r'$p$',\n climbars=False, **kwargs)\n\n plt.title(title, fontsize=FSFS)\n if save:\n if outdir is None:\n outdir = dio.prepdir(self.lp['meshfn'])\n plt.savefig(outdir + fname + self.lp['meshfn_exten'] + '.png')\n\n if show:\n plt.show()\n else:\n return DOS_ax", "def visualize_data(dqn_rewards, ddqn_rewards):\n \n fig, ax = plt.subplots()\n x_values = list(range(1, dqn_rewards.size + 1))\n ax.plot(x_values, dqn_rewards, label='dqn rewards')\n ax.plot(x_values, ddqn_rewards, label='ddqn rewards')\n plt.xlabel('episodes')\n plt.title('Cumulative Reward per Game')\n plt.legend()\n plt.show()", "def plot(self):\n\t\tplot_chain(self.database_path, self.temp_folder)\n\t\tplot_density(self.database_path, self.temp_folder, self.cal_params)", "def epidote():\n\n rho = 3465.\n\n C = np.zeros((6,6), dtype=float)\n C[0,0] = 211.5; C[0,1] = 65.6; C[0,2] = 43.2; C[0,3] = 0.; C[0,4] = -6.5; C[0,5] = 0.\n C[1,0] = C[0,1]; C[1,1] = 239.; C[1,2] = 43.6; C[1,3] = 0.; C[1,4] = -10.4; C[1,5] = 0.\n C[2,0] = C[0,2]; C[2,1] = C[1,2]; C[2,2] = 202.1; C[2,3] = 0.; C[2,4] = -20.; C[2,5] = 0.\n C[3,0] = C[0,3]; C[3,1] = C[1,3]; C[3,2] = C[2,3]; C[3,3] = 39.1; C[3,4] = 0.; C[3,5] = -2.3\n C[4,0] = C[0,4]; C[4,1] = C[1,4]; C[4,2] = C[2,4]; C[4,3] = C[3,4]; C[4,4] = 43.4; C[4,5] = 0.\n C[5,0] = C[0,5]; C[5,1] = C[1,5]; C[5,2] = C[2,5]; C[5,3] = C[3,5]; C[5,4] = C[4,5]; C[5,5] = 79.5\n\n return C, rho", "def plot_covid():\n\n norway, usa = get_covid()\n _plot_covid(norway[0], norway[1], \"Norway\", \"12 Mar 2020\", \"#636efa\", \"#ef553b\")\n _plot_covid(usa[0], usa[1], \"USA\", \"22 Mar 2020\", \"#ef553b\", \"#636efa\")\n\n plot_relative_covid(norway, usa)", "def plot_ece(confidences, accuracies, label='', ax=None, **kwargs):\n if not ax:\n plt.figure()\n ax = plt.gca()\n # ax.figure.set_size_inches(6, 4)\n ax.plot(confidences, accuracies, label=label, **kwargs)\n xbins = [i / 10. 
for i in range(11)]\n ax.plot(xbins, xbins, linestyle=':', color='black')\n ax.set_xlabel('Model Confidence')\n ax.set_ylabel('Model Accuracy')\n # ax.set_title(\n # f\"Reliability Diagram Trained on {train_name}, Evaluated on {ood_name}\")\n # ax.set_title(f\"Reliability Diagram\")\n ax.legend(loc=4, facecolor='white')\n return ax", "def depolarizer(dp):\n return np.array([[1,0,0,0],[0,dp,0,0],[0,0,dp,0],[0,0,0,dp]])", "def eeg_fdr(p_array,q,plot='false'):\t\n\tN = len(p_array) \n\tp_array_srtind = np.argsort(p_array)\n\tp_array_srt = p_array[p_array_srtind]\n\tp_bound=np.zeros((N))\n\tfor i in range(N):\n\t\tp_bound[i] = i * q/N\n\t\n\tidx = p_array_srt < p_bound\n\t\n\tif np.sum(idx) > 0:\n\t\tp_thresh_fdr = np.max(p_array_srt[idx])\n\t\ti_max_fdr = p_array_srt[idx].argmax()\t\t\t\n\t\tif plot == 'true':\n\t\t\tfig = plt.figure(19,figsize=[5,5])\n\t\t\tax = fig.add_subplot(111, autoscale_on=False, xlim=[0,120], ylim=[0,10])\n\t\t\tplt.plot(N*p_array_srt,'bo-',lw=2)\n\t\t\tplt.plot(N*p_bound,'k--',lw=2)\n\t\t\tplt.plot([10,20],[9,9],'bo-',lw=2)\n\t\t\tplt.plot([10,20],[8,8],'k--',lw=2)\n\t\t\tplt.text(25,9, r'$p_{i} \\cdot N$', fontsize = 13, va='center')\n\t\t\tplt.text(25,8, r'$q \\cdot i$', fontsize = 13,va='center')\n\t\t\tplt.text(1.2 * i_max_fdr,0.5 * p_thresh_fdr*N,'FDR (q = ' + str('%.2f' %q) + ') = ' + str('%.4f' %p_thresh_fdr),va = 'center')\n\t\t\tplt.fill_between([0,i_max_fdr],[p_thresh_fdr*N,p_thresh_fdr*N],0,color='k',alpha=0.3,lw=0)\n\t\t\tplt.xlabel('i',fontsize = 13)\n\t\t\tplt.ylabel(r'$N_{FP}$',fontsize = 13)\t\n\telse:\n\t\tprint \"None of the p-values exceeds the FDR threshold (there is no p-value (p_i) for which p_i < i*q/N) \"\n\t\tp_thresh_fdr = []\n\t\n\treturn p_thresh_fdr", "def __draw_rhombus(img, rhombus):\n for i, point in enumerate(rhombus):\n p1 = tuple(rhombus[i][0])\n p2 = tuple(rhombus[(i+1) % 4][0])\n cv2.line(img, p1, p2, color=(29, 131, 255), thickness=2)\n return img", "def DewPoint(e):\n\n ln_ratio=np.log(e/611.2)\n Td=((17.67-ln_ratio)*degCtoK+243.5*ln_ratio)/(17.67-ln_ratio)\n return Td-degCtoK", "def hkdr_chd(age, female, cur_smoker, diab_dur,\n egfr, acr, nonhdl_mmol):\n xFeat = np.array([clean_age(age),\n female,\n cur_smoker,\n clean_diab_dur(diab_dur),\n np.log10(clean_egfr(egfr)),\n np.log10(1+clean_acr(acr)),\n clean_nonhdl(nonhdl_mmol, meas=\"mmol\")])\n return cox_surv(xFeat,\n HKDR_CHD[\"coef\"],\n HKDR_CHD[\"sm\"],\n HKDR_CHD[\"const\"],\n HKDR_CHD[\"shrink\"])", "def test_1_2_dimethylcyclohexane(self):\n def draw(image: ShapeImage):\n image.add_regular_hexagon(\n 100, start_coord=(400, 400)\n )\n image.add_line((487, 350), (487, 250))\n image.add_line((574, 400), (661, 350))\n\n self._test_shape(\n image_size=(1000, 1000),\n expected_corners=np.array([\n [[400, 400]],\n [[487, 350]],\n [[574, 400]],\n [[574, 500]],\n [[487, 550]],\n [[400, 500]],\n # Methyl groups\n [[487, 250]],\n [[661, 350]]\n ]),\n drawer=draw,\n expected_edges=np.array([\n [[400, 400, 487, 350]],\n [[487, 350, 574, 400]],\n [[574, 400, 574, 500]],\n [[574, 500, 487, 550]],\n [[487, 550, 400, 500]],\n [[400, 500, 400, 400]],\n # To methyl groups\n [[487, 350, 487, 250]],\n [[574, 400, 661, 350]]\n ])\n )", "def draw_heaters(ax, windtunnel):\n draw_heater(ax, windtunnel.heater_l)\n draw_heater(ax, windtunnel.heater_r)", "def plot_dist_ony(z, dz, om, dom, dist, dh, name, mathname, filename=None):\n\n\n dist = dist/dh\n z = z * numpy.ones(dist.shape)\n om = om * numpy.ones(dist.shape)\n\n pylab.figure(figsize=(5.5,4.5)) \n\n\n pylab.contour(z, dist, om, 
50)\n cb = pylab.colorbar()\n cb.ax.set_ylabel(r'$\\Omega_M = 1 - \\Omega_\\lambda$')\n \n pylab.xlim(z.min(), z.max())\n pylab.ylim(dist.min(), dist.max()) \n pylab.xlabel(\"redshift z\")\n pylab.ylabel(name + r': $'+mathname+'/D_H$')\n pylab.title(name)\n if filename is not None:\n prefix, extension = filename.split('.')\n pylab.savefig(prefix + '_' + mathname + '_ony.' + extension,\n bbox_inches=\"tight\")", "def plot_gheat_g(seed=1):\n fig, ax = plt.subplots(figsize=[2.5*plotdl.latex_width_inch, 3*plotdl.latex_height_inch])\n \n r = Factory_psi1_psiN( \"aapta_of_s_N{number_of_points[0]}.npz\", N=400)\n ckg = r.create_if_missing(dict(model_name= [\"Anderson\",], \n number_of_points=[400,], bandwidth=[1,],\n dis_param=np.linspace(0,1,100),c=[1,], k=[1.57,], seed=np.arange(1,6))) \n color_seq = itertools.cycle(['b', 'g', 'r', 'c', 'm', 'y', 'k'])\n for (seed,c) in zip(np.arange(1,6),color_seq):\n ck = ckg[ckg['seed']==seed]\n g, psi_1, psi_N = ck['g'], ck['psi_N'], ck['psi_1']\n\n psi_heat = 2*(abs(psi_1)**2)*(abs(psi_N)**2) / ((abs(psi_1)**2) + (abs(psi_N)**2))\n \n phs = np.nansum(psi_heat,axis=1)\n \n psi1psiN = np.nansum(abs(psi_1*psi_N), axis=1)\n #print(ckg['dis_param'], phs)\n ax.plot(ck['dis_param'], phs,'.', color=c)\n ax.plot(ck['dis_param'], abs(g),'+', color=c)\n ax.plot(ck['dis_param'], psi1psiN,'d', color=c)\n ax.set_xlabel('dis_param')\n mkdir_and_savefig(fig, 'pta_comparison_of_s_N400.png')\n plt.close(fig)\n ## use last ck\n fig1, axes1 = plt.subplots(3,2,figsize=[2*plotdl.latex_width_inch, 3*plotdl.latex_height_inch],\n sharex=True, sharey=True)\n axes1.flat[0].xaxis.set_major_locator(MaxNLocator(4))\n axes1.flat[0].yaxis.set_major_locator(MaxNLocator(4))\n for n, ax1 in zip(range(1,20,3), axes1.flat):\n ax1.plot(abs(ck['psi_1'][n]), abs(ck['psi_N'][n]), '.') \n ax1.set_title(\"W = {:0.2}\".format(ck['dis_param'][n]))\n fig1.savefig('pta_psi_1_psi_2_N400.png')\n \n ax.cla()\n ax.plot(ck['dis_param'], np.real(g), label='real')\n ax.plot(ck['dis_param'], np.imag(g), label='imag')\n ax.plot(ck['dis_param'], np.abs(g), label='abs')\n ax.legend(loc = 'upper right')\n ax.set_xlabel('dis_param')\n ax.set_ylabel('g')\n mkdir_and_savefig(fig, 'pta_real_imag_g_s_N400')", "def plot(self): \n\t\txandy = sep_xy(self.start, self.end)\n\t\tplt.plot(xandy[0], xandy[1], 'k-', lw=1, color='green')", "def plot_connector(ax, arc_df):\n # gene = get_property(arc_df, 'gene_name')\n clvs = arc_df['aclv_t'].values.tolist()\n scs = arc_df['sc_t'].values.tolist()\n\n # lowest = -0.1 if (gene, dise) in MAIN_PLOT_GD_PAIRS_COMPLEX else -0.8\n lowest = -1\n # for i in clvs + scs:\n for i in clvs:\n # -0.25 is fragile, but seems to be good for now. 
It's affect by hspace\n # when specifying the grid and ymin, but the exact relationship is unclear, not sure\n # what unit does hspace use\n\n # or if zorders of different axes are pro, then it's fine.\n ax.plot([i, i], [lowest, 0], ':', linewidth=0.5, color='#333333', clip_on=False)", "def plot(self,nwins,show=True,fname=None):\n #setup figure and axes ...\n maxcolumns = 5\n ncolumns = min(maxcolumns,nwins)\n nrows = np.ceil(nwins/ncolumns).astype(int)\n figsize = ncolumns * 1.2, nrows * 1.2 + 0.5\n fig, axes = plt.subplots(nrows, ncolumns, figsize=figsize)\n for ax in axes[:-1,:].flatten():\n for xlabel_i in ax.get_xticklabels():\n xlabel_i.set_visible(False)\n for ax in axes[:,1:].flatten():\n for ylabel_i in ax.get_yticklabels():\n ylabel_i.set_visible(False)\n\n #loop through tapers and plot them\n for itaper in range( min(self.nwins, nwins) ):\n evalue = self.eigenvalues[itaper]\n coeffs = self._coeffs(itaper)\n ax = axes.flatten()[itaper]\n grid = MakeGridDH(coeffs)\n ax.imshow(grid)\n ax.set_title('concentration: {:2.2f}'.format(evalue))\n fig.tight_layout(pad=0.5)\n\n if show: plt.show()\n if fname is not None:\n fig.savefig(fname)", "def plot_dichro(scans_plus, scans_minus, positioner=None, detector=None,\n monitor=None, fluo=False, title=''):\n\n if detector is None:\n detector = []\n for det in counters.detectors:\n detector.extend(det.hints['fields'])\n if len(detector) > 1:\n warn(f\"Found multiple hinted detectors: {detector}, using the \"\n f\"first one: {detector[0]}\")\n detector = detector[0]\n\n if monitor is None:\n monitor = counters.monitor\n\n fig, axs = plt.subplots(2, 2, figsize=(8, 6))\n\n results = []\n for scans, label in zip([scans_plus, scans_minus], ['plus', 'minus']):\n if scans is not None:\n try:\n scans = list(scans)\n except TypeError:\n scans = [scans]\n\n if title != '':\n title += '\\n'\n title += f'{label}: {scans}\\n'\n\n results.append(load_multi_dichro(\n scans, db, positioner=positioner, detector=detector,\n monitor=monitor, transmission=not fluo\n ))\n\n _plot_one_xmcd(results[-1], axs[0], label)\n\n if len(results) > 1:\n combined = (\n results[0][0], # energy\n (results[0][3] - results[0][3])/2., # xmcd\n (results[0][3] + results[0][3])/2., # artifact\n )\n\n _plot_combined_xmcd(combined, axs[1])\n\n plt.suptitle(title)\n plt.tight_layout()\n\n return fig, axs" ]
[ "0.563348", "0.5631599", "0.5533224", "0.5502739", "0.5487151", "0.5443683", "0.5419953", "0.5403358", "0.5385931", "0.53529364", "0.5345485", "0.53156674", "0.53142434", "0.53062516", "0.5289918", "0.52762717", "0.527185", "0.5271523", "0.524425", "0.52201706", "0.5217703", "0.5190057", "0.5187612", "0.51803994", "0.51727897", "0.51549786", "0.5152855", "0.5112868", "0.51081824", "0.51024467", "0.50999206", "0.509904", "0.5083173", "0.50759196", "0.506512", "0.505442", "0.50320673", "0.50166273", "0.50160915", "0.50121194", "0.5005173", "0.49989876", "0.49962506", "0.49848726", "0.49806425", "0.4978418", "0.49688265", "0.4961558", "0.49601555", "0.49570155", "0.4950464", "0.49437487", "0.49364913", "0.49358457", "0.49192744", "0.49161917", "0.49131453", "0.4908418", "0.4898755", "0.48880985", "0.48823687", "0.48811847", "0.4878161", "0.48660168", "0.48660168", "0.48597416", "0.4858185", "0.485284", "0.48504078", "0.4848098", "0.48478335", "0.48288718", "0.48283184", "0.48242107", "0.48236346", "0.48197892", "0.48192593", "0.48145753", "0.4812265", "0.48122334", "0.4811265", "0.47974557", "0.47889498", "0.47866985", "0.47847745", "0.4784565", "0.47772047", "0.47758955", "0.47755587", "0.47724354", "0.4772132", "0.4770261", "0.4769311", "0.47667545", "0.47614193", "0.4761389", "0.47526452", "0.47487345", "0.47485936", "0.47484118" ]
0.6225962
0
Initializes the connection with IMDb.
def initialize_connection():
    session = imdb.IMDb()
    return session
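A minimal usage sketch for the function above, assuming the IMDbPY package (imdb.IMDb(), search_movie(), and the movieID attribute are real IMDbPY APIs; the sample title is purely illustrative):

import imdb  # IMDbPY package, implied by the imdb.IMDb() call above

# Open an IMDb session and run a sample title search
session = initialize_connection()
results = session.search_movie('The Matrix')  # returns IMDbPY Movie objects
print(results[0].movieID, results[0]['long imdb canonical title'])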
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _setup_connection(cls):\n try:\n cls.imdb_access = imdb.IMDb()\n except imdb.IMDbError, err:\n print \"Problem with connectivity to imdb.com due to %s \" \\\n % (err)", "def initialize():\n\t\tDBHelper.con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')", "def init(self):\n return self.conn.init()", "def initialize(self):\n if not self.connection.is_closed():\n self.connection.close()\n\n self.connection.connect()", "def init_connection(self, connection):", "def __init__(self):\n self._connection = get_db_connection()", "def __init__(self):\n\n\t\tself.connection = self.get_connection()", "def __init__(self):\n\t\tself.obtainDatabaseConnection()", "def init(self, userdata, conn):\r\n pass", "def __init__(self):\n self.dbcon = DbConnection.get_con()", "def __init__(self):\n self.dbconnect = dbConnection.connection", "def init(self):\n self.db.connect()\n try:\n self.db.create_tables([JambiModel], safe=True)\n JambiModel.create(ref='0')\n self.logger.info('Database initialized')\n except IntegrityError:\n self.logger.info('Database was already initialized')\n self.db.close()", "def bootstrap(self):\n\n self.db = connection_manager.get(DbConnection, host=self.ip, port=3306, user=self.user, password=self.password)\n\n self.connected = True", "def __post_init__(self):\n self.dbase = databases.Database(\n self.dsn,\n min_size=self.min_size,\n max_size=self.max_size\n )\n self.engine, self.meta = self.get_engine_metadata()", "def init(self):\n self.conn = None\n\n return True", "def __init__(self):\r\n self.conn = create_connection(DATABASE_PATH)", "def __init__(self, con_uri=None, db_name=\"douyin\"):\n super().__init__()\n self.con_uri = con_uri or 'localhost'\n self.client = AsyncIOMotorClient(self.con_uri)\n self.db = self.client[db_name]", "def init_connection(self, db):\n log.info(\"== Stage 1: Init ==\")\n self.use_db(db)\n self.set_no_binlog()\n self.get_mysql_settings()\n self.init_mysql_version()\n self.sanity_checks()\n self.set_tx_isolation()\n self.set_sql_mode()\n self.enable_priority_ddl()\n self.skip_cache_fill_for_myrocks()\n self.enable_sql_wsenv()\n self.override_session_vars()\n self.get_osc_lock()", "def __init__(self):\n\n self.db = ImageDB()\n self.vitess = VitessConn()\n self.minio = MinioConn()", "def connect(self):\n self.conn.connect()", "def __init__(self, app_database):\n try:\n self.database_configuration = app_database\n self.conn = None\n self.cursor = None\n except Exception as error:\n print(f\"DBCM::__init__::{error}\")", "def init_database(self):\n # init_database(self.engine)", "def __init__(self, dbconnect):\n self.dbconnect = dbconnect", "async def conn(self) -> None:\n self.bot.db = await aiosqlite.connect('database.db')", "def setup(cls):\n super().setup()\n cls.db = DBCommunication()", "def connect(self):\n super(NERDmLoader, self).connect()\n self.lateloadr._client = self._client\n self.lateloadr._db = self._db\n self.relloadr._client = self._client\n self.relloadr._db = self._db", "def __connect(self):\n self.conn = pymysql.connect(self.opts.DB_HOST, self.opts.DB_USER,\n self.opts.DB_PASSWORD, self.opts.DB_NAME)", "def __init__(self, wm) -> None:\n conf_dict = wm.context.config.arango_storage._to_dict()\n\n log.debug(conf_dict)\n client = ArangoClient(hosts=conf_dict['hosts'])\n db = client.db(conf_dict['database'],\n username=conf_dict['username'],\n password=conf_dict['password'])\n\n self.db = db\n self.client = client", "async def init(self):\n self.init_connection_params()\n self._pool = await self._create_pool()\n\n 
return self", "def init_database(self):\n init_database(self.engine)", "def __init__(self, connection):\n\n self._conn = connection", "def __init__(self, connection):\n self.conn = connection", "def __init__(self, session):\n self.session = session\n self.dbi = DBInterface(self.session)", "def memb_init(self):\n self.initialize()", "def connect(self):\n self.engine = create_engine(self.connection_string)\n self.conn = self.engine.connect()\n self.connected = True", "def __init__(self, *args):\n _table.Connection_swiginit(self, _table.new_Connection(*args))", "def connect_db(self):\n try:\n self.connection = self.engine.connect()\n except Exception:\n self.print_std_error()", "def initialize(self) -> None:\n # First, establish a connection to the specified database\n try:\n self._connect_to_db()\n except psycopg2.OperationalError: # specified database does not exist\n with psycopg2.connect(database=DATABASE_ENV[\"POSTGRES_DB\"],\n user=self.dbuser, password=self.dbpassword,\n host=self.dbhost, port=str(self.dbport)) as con:\n with con.cursor() as cur:\n con.autocommit = True # cannot create db inside a transaction\n cur.execute(f'CREATE DATABASE \"{self.dbname}\"')\n con.autocommit = False\n self._connect_to_db() # try again\n\n # Second, create the necessary database table, only if required\n with self._connection.cursor() as cur:\n cur.execute(f\"\"\"\n CREATE TABLE IF NOT EXISTS \"{self.MESSAGE_TABLE_NAME}\" (\n id SERIAL PRIMARY KEY,\n key CHAR(4) NOT NULL,\n value REAL NOT NULL,\n ts TIMESTAMP NOT NULL,\n tz TEXT NOT NULL\n );\n \"\"\")\n self._connection.commit()", "async def on_startup(self):\n await self.controller.database.connect()", "def initialize(self):\n if self.real:\n self.agent.connect(self)\n else:\n self.connect() # Connect python client to VREP\n self.agent.connect(self)", "def __init__(self):\n\t\tDBHelper.initialize() #initiate dababase helper", "def initialize_database():\n # TODO: Refactor the funtime library\n this.db = Store(this.host).create_lib(this.store_name).get_store()", "def setUp(self):\n self.a = backend.dbconnection.DBConnect()", "def __init__(self):\n self.connection = DbConnector()\n self.db_connection = self.connection.db_connection\n self.cursor = self.connection.cursor\n\n self.ACTIVITY_ID = 1\n self.TRACKPOINT_ID = 1", "def __init__(self):\n\n self.connection = sqlite3.connect(self.dabatabase_name, uri=True)\n self.connection.cursor()\n self.connection.execute(self._create_table_stm)\n self.connection.commit()", "def __init__(self):\n self.__connection = pymysql.connect(host=vars.host,\n user = vars.username,\n password = vars.password,\n db = vars.db,\n charset = \"utf8mb4\",\n cursorclass = pymysql.cursors.DictCursor\n )", "def __init__(self):\n self.db = Databank()\n self.db.connection()\n # self.db.cursor.execute('USE library')", "def __init__(self, db_client):\n self.db_client = db_client\n self.db = ''", "def _initialize(self):\n self.send_init_command()", "def connect(self, dbapi_connection, connection_record):", "def open_connection(self):\n self.conn = pymysql.connect(host=self.host, user=self.user, passwd=self.passwd, db=self.db)", "def _connect(self):\n self.connection = RedisConnection(self.host, self.port, self.dbname)", "def __init__(self):\n self._masquarade('odbc')", "def __init__(self,schema_name = 'null'):\n\t\tself.connected = False\n\t\tself.__schema_name = ''\n\t\tself.__db = ''\n\t\tself.__cursor = ''\n\t\tself.__engine = ''\n\t\tif schema_name != 'null':\n\t\t\tself.connect(schema_name)", "def 
obtainDatabaseConnection(self):\n\t\tself.databaseConnector = DatabaseConnector()", "def __init__(self):\n\n self.tableConnString = os.environ['ENTITYTABLE_CONNECTIONSTRING'];\n self.__table = None", "def __init__(self):\n self.database = Database()\n self.load_config()", "def __init__(self):\n self._setup()\n # Encryption/decryption cipher handler\n self.__cipher = self.__get_cipher()\n # Setup the engine for the sqlite database\n self._engine = create_engine(self.db_uri)\n # Configure the SQLAlchemy metadata\n self._metadata = MetaData()\n self._metadata.bind = self._engine\n self._load_db()\n # Configure the auto-mapping base model\n self._base = automap_base(metadata=self._metadata)\n self._base.prepare()\n # Setup a session generator for database connections\n self._session = sessionmaker(bind=self._engine)", "def init(self):\n # IMPORTANT: create a new gob database model entry for this object\n self.gobify()", "def connect(self):\n self.client = MongoClient(self.mongo_uri)\n self.db = self.client[self.db_name]", "def connect(self):\n\t\t# PostgreSQL PyPgSQL\n\t#\tcp = adbapi.ConnectionPool(\"pyPgSQL.PgSQL\", database=\"test\")\n\t\t# MySQL\n\t\tself.dbpool = adbapi.ConnectionPool('MySQLdb',\n\t\t\thost = self.settings.get('hostname', 'localhost'),\n\t\t\tport = self.settings.get('port', 3306),\n\t\t\tdb = self.settings.get('database'),\n\t\t\tuser = self.settings.get('username'),\n\t\t\tpasswd = self.settings.get('password'),\n\t\t\tcursorclass = MySQLdb.cursors.DictCursor,\n\t\t\tcharset = 'utf8',\n\t\t\tuse_unicode = True,\n\t\t)", "def __init__(self):\n self.try_to_connect()", "def __init__(self):\n\n self.Connection = None\n self.logger = LogFactory().getLibLogger()", "def connect(self):\n self.db = pymysql.connect(self.db_ip, self.uid, self.pwd, self.db_name)\n self.cursor = self.db.cursor()", "async def initialize(self, *, only_init_tables: bool=False):\n self.pool = await aiomysql.create_pool(\n host=self.host, user=self.user, password=self.passwd,\n loop=self.loop)\n db_exists = await self._check_db_exists()\n if not db_exists:\n await self._create_db()\n db_initialized = False\n else:\n db_initialized = await self._check_db_initialized()\n # We close the pool and create a new one because aiomysql doesn't\n # provide an easy way to change the active database for an entire\n # pool, just individual connections.\n self.pool.terminate()\n self.pool = await aiomysql.create_pool(\n host=self.host, user=self.user, password=self.passwd,\n db=self.dbname, loop=self.loop)\n if not db_initialized:\n await self._init_db(only_init_tables)\n await self._upgrade_db()\n log.msg('Database initialized')", "def __init__(self):\n with open('config.json') as config:\n data = json.load(config)\n\n password = self.decode_password(data['db']['password'])\n db_conn_string = 'postgresql://' + data['db']['username'] + ':' + password + '@' + \\\n data['db']['hostname'] + ':' + data['db']['port'] + '/' + data['db']['database']\n\n self.engine = create_engine(db_conn_string)\n try:\n conn = self.engine.connect()\n if conn is not None:\n print(\"-I- Successful Database Connection\")\n except Exception as e:\n print(\"-W- \" + str(e))", "def __init__(self, backend=None, dbstring=None):\n self.connected = False\n if backend is None:\n global tgis_backend\n if decode(tgis_backend) == \"sqlite\":\n self.dbmi = sqlite3\n else:\n self.dbmi = psycopg2\n else:\n if decode(backend) == \"sqlite\":\n self.dbmi = sqlite3\n else:\n self.dbmi = psycopg2\n\n if dbstring is None:\n global tgis_database_string\n 
self.dbstring = tgis_database_string\n\n self.dbstring = dbstring\n\n self.msgr = get_tgis_message_interface()\n self.msgr.debug(1, \"DBConnection constructor:\"\\\n \"\\n backend: %s\"\\\n \"\\n dbstring: %s\"%(backend, self.dbstring))\n #\"\\n traceback:%s\"%(backend, self.dbstring,\n #str(\" \\n\".join(traceback.format_stack()))))", "def __open(self):\n\n try:\n cnx = mysql.connector.connect(\n host=self.__host,\n user=self.__user,\n password=self.__password,\n database=self.__database\n )\n self.__connection = cnx\n self.__cursor = cnx.cursor(buffered=True, dictionary=True)\n\n except mysql.connector.Error as err:\n print(\"Something went wrong: {}\".format(err))", "def __init__(self, connection):\n super().__init__(connection)", "def __init__(self):\n self.username = CONF.get('zerodb', 'username')\n self.password = CONF.get('zerodb', 'password')\n self.host = CONF.get('zerodb', 'host')\n self.port = int(CONF.get('zerodb', 'port'))\n self.db = zerodb.DB((self.host, self.port),\n username=self.username,\n password=self.password)", "def __init__(self):\n\t\tConnectorMySQL.__init__(self)", "def __init__(self, connection_str: str = None, **kwargs):\n self._ip = get_ipython()\n self._debug = kwargs.get(\"debug\", False)\n super().__init__()\n\n self.formatters = {\"datetime\": self._format_datetime, \"list\": self._format_list}\n self._loaded = self._is_kqlmagic_loaded()\n\n if not self._loaded:\n self._load_kql_magic()\n\n self._schema: Dict[str, Any] = {}\n\n if connection_str:\n self.current_connection = connection_str\n self.connect(connection_str)", "def __init__(self, user, password, database='mesomat', host='localhost'): \n \n \n self.config = {\n 'user' : user,\n 'password' : password,\n 'host' : host,\n 'database' : database,\n 'raise_on_warnings' : True,\n 'auth_plugin' : 'mysql_native_password'\n }\n \n self.INSERT_SAMPLE_COLUMN_COMMAND = ()\n \n \n self.connected = False\n self.cursor = None\n self.cnx = None", "def set_connection(self, vsm_conn):\n conn = Connection(vsm_conn.ip,\n vsm_conn.username,\n vsm_conn.password,\n \"api/2.0\", vsm_conn.type)\n self.connection = conn", "def initialize(self):\n\n db = dict()\n\n db['meta'] = Meta(None)\n db['race'] = Race(None, None, None, None, None)\n db['track'] = Track(None, None)\n db['classes'] = set([])\n db['teams'] = set([])\n db['drivers'] = set([])\n\n self.db = db", "def initialize(self):\r\n if not self.context:\r\n self.context = SQLContext(self.url, self.connection, self.schema)\r\n if self.table is None:\r\n self.table = self.context.table(self.table_name)\r\n if not self.fields:\r\n self.read_fields()\r\n self.field_names = self.fields.names()", "def connect(self):\n\t\tself._entity_server_connection.attempt_connection()", "def init_connection_state(self):\r\n # if 'mars connection=true' in self.__connection_string.lower():\r\n # # Issue #41 - Cannot use MARS with savepoints\r\n # self.features.uses_savepoints = False\r\n # cache the properties on the connection\r\n self.connection.adoConnProperties = dict([(x.Name, x.Value) for x in self.connection.adoConn.Properties])\r\n\r\n unsupported_sql = False\r\n if self.is_sql2000(make_connection=False):\r\n # SQL 2000 doesn't support the OUTPUT clause\r\n self.features.can_return_id_from_insert = False\r\n unsupported_sql = True\r\n elif self.is_sql2005(make_connection=False):\r\n unsupported_sql = True\r\n\r\n if unsupported_sql:\r\n warnings.warn(\r\n \"This version of MS SQL server is no longer tested with \"\r\n \"django-mssql and not officially supported/maintained.\",\r\n 
DeprecationWarning)", "def connect(self):\n if self.db is not None:\n self.disconnect()\n\n self.db = MySQLdb.connect(host=self.conn.host, port=self.conn.port, db=self.conn.db, user=self.conn.user, passwd=self.conn.pwd, use_unicode=True, charset='utf8', cursorclass=MySQLdb.cursors.DictCursor)\n self.db.autocommit(self.conn.auto_commit)", "def test_init(self):\n self.assertIsNotNone(DatabaseIntermediary(), self.ec.db)", "def db_init(self):\n if self.platform == 'Linux':\n print(self.db)\n conn = sqlite3.connect(self.db)\n # if debug\n conn.set_trace_callback(print)\n # converter = Converter(self.db)\n # converter.mdb2sqlite()\n # return converter.conn\n return conn, conn.cursor()\n elif self.platform == 'Windows':\n # Todo: update database config.\n user = ''\n password = ''\n odbc_conn_str = \"DRIVER={Microsoft Access Driver (*.mdb, *.accdb)};\" \\\n \"DBQ=%s;UID=%s;PWD=%s\" % (self.db, user, password)\n import pypyodbc\n conn = pypyodbc.connect(odbc_conn_str)\n return conn, conn.cursor()\n else:\n raise OSError('Unsupported OS')", "def connect(self):\n if not self.is_connected:\n self._init_cec_connection()", "def connect(cls):\n cls.conn = MySQLdb.connect(**cls.configs)\n cls.conn.autocommit(cls.autocommit)", "async def connect(self) -> None:\n if hasattr(self.db, \"connect\"):\n await self.db.connect()", "def __init__(self) -> None:\n settings = get_project_settings()\n self.db = pymysql.connect(\n host=settings['MYSQL_SERVER'],\n port=settings['MYSQL_PORT'],\n user=settings['MYSQL_USERNAME'],\n password=settings['MYSQL_PASSWORD'],\n db=settings['MYSQL_DB']\n ) \n self.cursor = self.db.cursor()", "def configure(self):\n # Defaults\n self.db_type = DB_TYPE.POSTGRES\n self.db_name = \"ambari\"\n self.db_user = \"ambari\"\n self.db_password = \"bigdata\"\n self.db_host = \"localhost\"\n self.db_url = None\n\n if os.path.exists(AMBARI_PROPERTIES_LOCATION):\n self.ambari_props = self.read_conf_file(AMBARI_PROPERTIES_LOCATION)\n\n if \"server.jdbc.database\" in self.ambari_props:\n self.db_type = self.ambari_props[\"server.jdbc.database\"].upper()\n if \"server.jdbc.database_name\" in self.ambari_props:\n self.db_name = self.ambari_props[\"server.jdbc.database_name\"]\n if \"server.jdbc.user.name\" in self.ambari_props:\n self.db_user = self.ambari_props[\"server.jdbc.user.name\"]\n if \"server.jdbc.user.passwd\" in self.ambari_props:\n self.db_password = self.read_file(self.ambari_props[\"server.jdbc.user.passwd\"])\n if \"server.jdbc.hostname\" in self.ambari_props:\n self.db_host = self.ambari_props[\"server.jdbc.hostname\"]\n if \"server.jdbc.url\" in self.ambari_props:\n self.db_url = self.ambari_props[\"server.jdbc.url\"]\n if \"ambari-server.user\" in self.ambari_props:\n self.ambari_server_user = self.ambari_props[\"ambari-server.user\"]\n\n #Logger.info(\"Using database type: {0}, name: {1}, host: {2}\".format(self.db_type, self.db_name, self.db_host))\n connection_string = \"dbname='{0}' user='{1}' host='{2}' password='{3}'\".format(self.db_name, self.db_user, self.db_host, self.db_password)\n\n if self.db_type == DB_TYPE.POSTGRES:\n try:\n import psycopg2 # covered by GNU Lesser General Public License\n except Exception, e:\n Logger.error(\"Need to install python-psycopg2 package for Postgres DB. E.g., yum install python-psycopg2\\n\")\n self.terminate()\n elif self.db_type == DB_TYPE.MYSQL:\n try:\n import pymysql # covered by MIT License\n except Exception, e:\n Logger.error(\"Need to install PyMySQL package for Python. 
E.g., yum install python-setuptools && easy_install pip && pip install PyMySQL\\n\")\n self.terminate()\n else:\n Logger.error(\"Unknown database type: {0}.\".format(self.db_type))\n self.terminate()\n\n self.conn = None\n self.cursor = None\n try:\n Logger.debug(\"Initializing database connection and cursor.\")\n if self.db_type == DB_TYPE.POSTGRES:\n self.conn = psycopg2.connect(connection_string)\n self.cursor = self.conn.cursor()\n elif self.db_type == DB_TYPE.MYSQL:\n self.conn = pymysql.connect(self.db_host, self.db_user, self.db_password, self.db_name)\n self.cursor = self.conn.cursor()\n\n Logger.debug(\"Created database connection and cursor.\")\n self.cursor.execute(\"SELECT metainfo_key, metainfo_value FROM metainfo WHERE metainfo_key='version';\")\n rows = self.cursor.fetchall()\n if rows and len(rows) == 1:\n self.ambari_version = rows[0][1]\n # Logger.info(\"Connected to database!!! Ambari version is {0}\\n\".format(self.ambari_version))\n\n # Must be Ambari 2.0.0 or higher\n if self.compare_versions(self.ambari_version, MIN_AMBARI_VERSION) < 0:\n Logger.error(\"Must be running Ambari Version {0} or higher.\\n\".format(MIN_AMBARI_VERSION))\n self.terminate()\n else:\n Logger.error(\"Unable to determine Ambari version.\")\n self.terminate()\n\n self.set_cluster()\n except Exception, e:\n Logger.error(\"I am unable to connect to the database. Error: {0}\\n\".format(e))\n self.terminate()\n else:\n raise Exception(\"Could not find file {0}\".format(AMBARI_PROPERTIES_LOCATION))", "def __setup_conn__(self, **kwargs):\n self.ext_conn = setup_conn(**kwargs)", "def _db_connection(self):\n pass", "def __init__(self):\n\n # For now, we'll connect to the target via the Apollo debug controller.\n # This should be replaced by a high-speed USB link soon; but for now\n # we'll use the slow debug connection.\n self._debugger = ApolloDebugger()\n self._serial = self._find_serial_connection()", "def initialise(self):\n self.set_up()", "def dbinit( *args, **kwargs ):", "def __init__(self, client, db_name):\n self.client = client\n self.db_name = db_name", "def __init__(self, connection):\n self.con = connection\n self.recordset = None\n self.recordset_df = None", "def do_init(self):\n\n pass", "def connect_to_db(self):\n self.read_config()\n print('Connecting to database...', end=\"\")\n self.db_conn = pymysql.connect(host=self.host, user=self.user, db=self.db)\n self.db_cur = self.db_conn.cursor()\n print('[DONE]')", "def initDbConnection():\n try:\n db_user = os.environ[\"BDB_DB_USER\"]\n db_pass = os.environ[\"BDB_DB_PASS\"]\n db_name = os.environ[\"BDB_DB_NAME\"]\n db_host = os.environ[\"BDB_DB_HOST\"]\n db_port = os.environ[\"BDB_DB_PORT\"]\n except Exception as e:\n logger.critical(\"could not parse environment for DB connection parameters\")\n return ERR\n db_config = {\n \"pool_size\": 5,\n \"max_overflow\": 2,\n \"pool_timeout\": 30,\n \"pool_recycle\": 1800\n }\n logger.info(\"attempting MySQL connection to %s:%s\" % (db_host, db_port))\n logger.debug(\"connection parameters: DB name %s, DB user %s\" % (db_name, db_user))\n logger.debug(\"connection config: %s\" % db_config)\n try:\n dbConn = sqlalchemy.create_engine(\n sqlalchemy.engine.url.URL.create(\n drivername=\"mysql+pymysql\",\n username=db_user,\n password=db_pass,\n host=db_host,\n port=db_port,\n database=db_name\n ),\n **db_config\n )\n except Exception as e:\n logger.critical(\"could not connect to DB - %s\" % e)\n return ERR\n logger.info(\"connection established to MySQL server at %s:%s\" % (db_host, db_port))\n return 
dbConn", "def init_db():\n db.drop_all()\n db.create_all()\n\n print(\"Initialized Connect 4 Database.\")", "def __init__(self, conn):\n self.conn = conn\n self.tx_id = None", "def init_post_connection(self):\n\n if self.authorized and not self.post_initiated:\n self.create_tables_and_apply_patches()\n self.post_initiated = True\n\n PyFunceble.INTERN[\"mysql\"] = self.__dict__.copy()", "def connect_db(self) -> sqlite3.Connection:\n self.connection = sqlite3.connect(self.database)\n self.connection.row_factory = sqlite3.Row\n\n self.get_cursor()" ]
[ "0.75613075", "0.7126608", "0.70486325", "0.70344806", "0.6966653", "0.6887101", "0.6868013", "0.6741305", "0.6721468", "0.67194366", "0.6630532", "0.65871525", "0.6585673", "0.6537051", "0.6491186", "0.64337355", "0.6409378", "0.6344977", "0.6321777", "0.63136494", "0.6289494", "0.62547606", "0.62376785", "0.62314355", "0.621757", "0.62145686", "0.62050384", "0.61879784", "0.61844724", "0.6172804", "0.6171649", "0.6163888", "0.6148798", "0.6082627", "0.6058399", "0.6048978", "0.6045933", "0.60402054", "0.603941", "0.60361046", "0.6023141", "0.6021331", "0.6020797", "0.6019185", "0.60177034", "0.60126305", "0.6010238", "0.6002608", "0.60019547", "0.60012007", "0.59788555", "0.596723", "0.5965498", "0.59644854", "0.594474", "0.59394604", "0.5931768", "0.5928973", "0.59152764", "0.5914996", "0.591189", "0.59102285", "0.5900658", "0.5897105", "0.5895567", "0.58915627", "0.58874345", "0.58702594", "0.58596194", "0.5859578", "0.58572024", "0.5837545", "0.58280474", "0.58272386", "0.5825578", "0.5822524", "0.58213687", "0.5809634", "0.5808344", "0.5804727", "0.5794381", "0.57937217", "0.5793631", "0.5789391", "0.5788393", "0.5780928", "0.57777864", "0.5768781", "0.5764527", "0.57601357", "0.57498306", "0.57402515", "0.57284105", "0.5726495", "0.57235163", "0.5723086", "0.5715796", "0.57135", "0.5712338", "0.57122827" ]
0.75485164
1
Given an imdb session object, will search the site for the given search term.
def search_for_title(session, search_term):
    try:
        s_result = session.search_movie(search_term)
        shows = {}
        # made the keys of the namedtuple a digit for ease of selecting the correct one later
        for count, result in enumerate(s_result):
            show_id = count
            movie_id = result.movieID
            title = result['long imdb canonical title']
            url = f'http://www.imdb.com/title/tt{movie_id}/parentalguide'
            shows[count] = Show(show_id, movie_id, title, url)
        return shows
    except imdb._exceptions.IMDbDataAccessError:
        display_error()
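A short usage sketch for search_for_title. The Show namedtuple and the display_error helper are not included in this row, so the definitions below are reconstructions (assumptions) based only on how the snippet uses them:

from collections import namedtuple

import imdb

# Reconstructed from the fields the snippet passes in: (show_id, movie_id, title, url)
Show = namedtuple('Show', ['show_id', 'movie_id', 'title', 'url'])

def display_error():
    # Stand-in for the snippet's undefined error handler (assumption)
    print('Could not reach IMDb; please try again later.')

session = imdb.IMDb()
shows = search_for_title(session, 'Breaking Bad')  # sample search term
if shows:
    for key, show in shows.items():
        print(f'{key}: {show.title} -> {show.url}')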
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def search(self, term):", "def run_search(term, imdb_page, debug = False):\n\n # confirm function call\n if debug:\n print(\"run_search()\")\n\n # scrub search term for imdb\n formatted_term = \"+\".join(term.split())\n\n # add page information to search term\n if imdb_page > 0:\n page_specifier = f\"&start={ (imdb_page * 50) + 1 }\"\n else:\n page_specifier = \"\"\n\n # get BeautifulSoup data for search term\n search_string = \"https://www.imdb.com/search/title?title=\" + formatted_term + \"&title_type=tv_series\" + page_specifier\n if debug:\n print(f\"search_string: {search_string}\")\n search_soup = bs4.BeautifulSoup(requests.get(search_string).text, features=\"html.parser\")\n\n #get max page\n if imdb_page < 1:\n\n # identify element that states range and number of results\n desc = search_soup.select(\".desc\")[0]\n span = desc.select(\"span\")[0].contents[0][0:-8]\n\n # get number of results\n if span[:8] == \"1-50 of \":\n span = span[8:]\n try:\n result_num = float(span)\n except:\n result_num = 0\n\n # calculate max_pages\n max_pages = int(ceil(result_num / 5))\n if debug:\n print(result_num)\n print(max_pages)\n\n else:\n max_pages = None;\n\n # get valid pages for no_results\n low = imdb_page * 10;\n high = low + 9\n page_range = [low, high]\n\n # cultivate return list\n links = search_soup.select(\"h3 > a\")\n\n if debug:\n print(links)\n\n search_results = []\n\n print(len(links))\n\n for i in range(len(links)):\n if debug:\n print(f\"result: {i}\")\n\n try:\n show_div = links[i]\n except:\n break\n s = (show_div.contents[0], show_div.get(\"href\"))\n search_results.append(s)\n\n if debug:\n print(f\"search results length: {len(search_results)}\")\n\n return {\"results\": search_results, \"max\": max_pages, \"range\": page_range}", "def search(self, q):\n self.__query = q\n self.scrape_page()", "def test_IMDB_Search_TC_002_search_for_a_movie(self):\n # to load a given URL in browser window\n self.driver.get(self.base_url) \n # to enter search term, we need to locate the search textbox\n searchTextBox=self.driver.find_element_by_id(\"suggestion-search\")\n # to clear any text in the search textbox\n searchTextBox.clear()\n # to enter the search term in the search textbox via send_keys() function\n searchTextBox.send_keys(self.search_term)\n # to search for the entered search term\n searchTextBox.send_keys(Keys.RETURN)\n # to see movie title element\n searchMovieBox = self.driver.find_element_by_link_text(\"Silicon Valley\")\n # to verify if the search results page loaded\n self.assertIn(\"Find - IMDb\",self.driver.title)\n # to verify if the search results page contains any results or no results were found.\n self.assertNotIn(\"No results found.\",self.driver.page_source)", "def search(query_string):", "def search(search_term, page=1):\n if len(search_term) < 1:\n raise InvalidSearchException(\"Search term not detailed enough.\")\n if len(search_term) > 2000:\n raise InvalidSearchException(\"Search term is too long. 
Max length is 2000 characters.\")\n search_results = tmdb.search_for_movie_by_title(search_term, page)\n matched_movies = search_results['results']\n num_items = search_results['total_results']\n num_pages = search_results['total_pages']\n response_page = search_results['page']\n if response_page != page:\n logger.error(\"Response page does not match requested page: %s != %s\",\n response_page, page)\n logger.info('Found list of movies in db: ' + str(matched_movies))\n return {\n 'items': [Movie.convert_to_movie(a) for a in matched_movies if a is not None],\n 'total_items': num_items,\n 'total_pages': num_pages,\n 'page': page,\n 'search_term': search_term,\n }", "def search(text):\n s = Search()\n result = _search(s, text)\n _print_results(result)\n return result", "def search(self, query, maxhits=100):", "def search(self, search):\n raise NotImplementedError", "def search(searchTerm):\n api = twitter.Api()\n tweets = api.GetSearch(searchTerm)\n for tweet in tweets:\n util.safe_print(tweet.GetText())", "def search(api_key, term, location):\n\n\n\n url_params = {\n\n 'term': term.replace(' ', '+'),\n\n 'location': location.replace(' ', '+'),\n\n 'limit': SEARCH_LIMIT\n\n }\n\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)", "def search(api_key, term, location):\r\n\r\n url_params = {\r\n 'term': term.replace(' ', '+'),\r\n 'location': location.replace(' ', '+'),\r\n 'limit': SEARCH_LIMIT\r\n }\r\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)", "def search(self, term):\n return self._search(self._root, term, 0)", "def do_imdb_title_search(query):\n url = baseurl + '/search/title?' + query\n logging.warn(url)\n return geturl2(url)", "def search(api_key, term, location):\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': SEARCH_LIMIT\n }\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)", "def search(self, query):", "def search():\n pass", "def search(self, keyword):\n from modules.pages.search_page import SearchPage\n self.button_click(self.SEARCH_BUTTON)\n search_item = self.get_element(self.SEARCH_INPUT)\n search_item.send_keys(keyword, Keys.RETURN)\n return SearchPage(self.driver)", "def search(term, location):\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': SEARCH_LIMIT\n }\n return request(API_HOST, SEARCH_PATH, url_params=url_params)", "def search(term, location):\n \n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': SEARCH_LIMIT\n }\n return request(API_HOST, SEARCH_PATH, url_params=url_params)", "def search(self, term, start=None, limit=None):\r\n params = base.get_params(None, locals())\r\n url = '{0}/searchResults'.format(self.get_url())\r\n return http.Request('GET', url, params), parsers.parse_json", "def wikimedia_request(search_term, continue_val):\n S = requests.Session()\n\n URL = \"https://en.wikipedia.org/w/api.php\"\n\n # SEARCHPAGE = \"matrix\"\n\n PARAMS = {\n \"action\": \"query\",\n \"format\": \"json\",\n \"list\": \"search\",\n \"srsearch\": search_term,\n \"srlimit\": 400\n }\n if continue_val:\n PARAMS.update(continue_val)\n R = S.get(url=URL, params=PARAMS).json()\n return R", "async def search(self, keyword):\n await self.coordinator.data.search_media(self.zone_id, keyword)", "def search_videos(self, search_term):\n print(\"search_videos needs implementation\")", "def search_from_terms(api, term, **kwargs):\n 
tweets=api.GetSearch(term=term)\n return {\"tweets\":tweets}", "def search(term, location, search_limit):\n\n payload = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': search_limit\n }\n\n return request(SEARCH_PATH, payload)", "def search(self, query_id, query_str):\n pass", "def search():\n\tif not request.vars.search_term:\n\t\tredirect(URL('index'))\n\tterm = request.vars.search_term\n\torigterm = term\n\tterm = term.replace(' ','|')\n\tartists = db.executesql(\"select distinct(m1.id), m1.art_name, m1.artist_type, m1.country, m1.b_year,m1.b_month,m1.b_date,m1.e_year,m1.e_month,m1.e_day,ts_rank(to_tsvector(m1.art_name),to_tsquery('\"+term+\"')) rank from art_info m1 where to_tsvector('english',m1.art_name) @@ to_tsquery('\"+term+\"') order by rank desc limit 20;\")\n\talbums = db.executesql(\"select distinct(m1.id),m2.name,m1.art_id,m1.art_name,m1.rel_type,m1.count,ts_rank(to_tsvector(m2.name),to_tsquery('\"+term+\"')) rank from rel_art m1, release_name m2, release_group m3 where m3.name = m2.id and m3.id = m1.id and to_tsvector('english',m2.name) @@ to_tsquery('\"+term+\"') order by rank desc limit 20;\")\n\tsongs = db.executesql(\"select m2.id, m1.name, m3.art_id, m3.art_name, m3.rel_id, m3.rel_name from track_name m1, recording m2, rec_rel_art m3 where m1.id = m2.name and m2.id = m3.rec_id and lower(m1.name) LIKE lower('%%\"+origterm+\"%%') limit 20;\")\n\treturn dict(songs=songs, albums=albums, artists=artists)", "def search_movie_data(search_term, page, connected_user_uuid):\n if not (UserModel.query.filter_by(uuid=connected_user_uuid).first()):\n return err_resp(\"User not found!\", 404)\n movies, total_pages = Paginator.get_from(\n MovieModel.query.filter(MovieModel.title.ilike(search_term+\"%\")).union(\n MovieModel.query.filter(MovieModel.title.ilike(\"%\"+search_term+\"%\"))),\n page,\n )\n\n try:\n movie_data = MovieBase.loads(movies)\n\n return pagination_resp(\n message=\"Movie data sent\",\n content=movie_data,\n page=page,\n total_pages=total_pages\n )\n\n except Exception as error:\n current_app.logger.error(error)\n return internal_err_resp()", "def search(self, word):", "def search(self, query, limit=10):\n word_ids, url_ids = self.query(query, limit)\n selected_url = random.choice(url_ids)\n print(\"User selected url \\\"{}\\\"\".format(self.get_url_name(selected_url)))\n return SearchNet().train_query(word_ids, url_ids, selected_url)", "def search(self, search_word: str, base_url: str, depth_limit: int=0):\n self.__search(search_word,base_url,depth_limit)\n print(json.dumps(self.__result, ensure_ascii=False))", "def search(self, query, engine=\"duckduckgo\"):\n response = self.get(\"https://duckduckgo.com/html/?q=%s&ia=web\" % query)\n if not response.text:\n return {}\n\n parsed = self.parse(response)\n results = parsed.duckduckgo_results()\n ret = {\n \"response\": response,\n \"query\": query,\n \"results\": results,\n \"parsed\": parsed,\n }\n\n return ret", "def search(request):\n if 'q' in request.GET:\n term = request.GET['q']\n story_list = Story.objects.filter(Q(title__contains=term)|Q(markdown_content__contains=term))\n heading = \"Search results\"\n return render_to_response(\"cms/story_list.html\",locals())", "def search(self, search_params):\n if self.db.is_data_set():\n return self.db.search(search_params)\n else:\n self.crawler.initialize()\n # return self.db.search(search_params)", "def _mw_search(self, baseurl, searchquery):\n params = urllib.parse.urlencode({\n 'action': 'opensearch',\n 'search': 
searchquery,\n 'format': 'json',\n })\n api_data = self._mw_api_call(baseurl, params)\n\n search_result_titles = api_data[1]\n if not search_result_titles:\n raise callbacks.Error(f\"No search results for {searchquery!r}\")\n return search_result_titles", "def search(search_term):\r\n if search_term:\r\n if 'list' in search_term:\r\n search_term = search_term.split('?list')[0]\r\n\r\n url = SEARCH_URL.format(API_KEY, util.web.quote(search_term.encode('ascii', 'ignore')))\r\n response = util.web.http_get(url=url, json=True, referer='http://tinychat.com')\r\n\r\n if response['json'] is not None:\r\n try:\r\n if 'items' in response['json']:\r\n for item in response['json']['items']:\r\n video_id = item['id']['videoId']\r\n details = video_details(video_id)\r\n if details is not None:\r\n return {\r\n 'type': 'youTube',\r\n 'video_id': video_id,\r\n 'video_time': details['video_time'],\r\n 'video_title': details['video_title']\r\n }\r\n except KeyError as ke:\r\n log.error(ke, exc_info=True)\r\n return None", "def search(self, text: str, category: str = None):\n if category:\n # ensuring that it will be in lowercase\n category = category.lower()\n\n if not category or not category in self.search_categories:\n category = \"all\"\n\n search_url = f\"{SITE_URL}/{self.search_categories[category]}/?adb.search={text}\"\n\n # return answer.text\n return self.fetch_url(search_url)", "def _do_search(self,search_terms,order_by='relevance'):\n try:\n self._logger.info('Hey!listen up!...Attempting to perform a search in MegaVideo')\n self._logger.debug('Search Terms: %s',search_terms)\n srv = MegaVideoService(self._logger)\n query = MegaVideoQuery()\n query.query = search_terms\n query.sort = order_by\n query.max_results=int(self.search_max_results)\n return srv.query(query)\n except:\n self._logger.exception('Dammit!...An error ocurred while searching in MegaVideo...')\n return None\n else:\n self._logger.info('Great!...The MegaVideo search was succesfull...')", "def search_videos(self, search_term):\n all_videos = self._video_library.get_all_videos()\n all_videos.sort(key=lambda x: x.title)\n matching_videos = []\n for video in all_videos:\n if search_term.lower() in video.title.lower():\n matching_videos.append(video)\n\n matching_videos.sort(key=lambda x: x.title)\n\n if len(matching_videos) == 0:\n print(f\"No search results for {search_term}\")\n return\n\n print(\"Here are the results for cat:\")\n for i, matching_video in enumerate(matching_videos):\n print(f\"{i + 1}) {str(matching_video)}\")\n\n print(\n \"Would you like to play any of the above? If yes, specify the number of the video.\\nIf your answer is not a valid number, we will assume it's a no.\")\n video_number = input()\n\n # print(video_number)\n\n try:\n int_video_number = int(video_number)\n if int_video_number > len(matching_videos) or int_video_number < 0:\n return\n else:\n self.play_video(matching_videos[int_video_number - 1].video_id)\n except ValueError:\n return", "def search_videos(self, search_term):\n recommendations = []\n for video in self.videos_dict:\n if not video.flagged and search_term in self.videos_dict[video]:\n recommendations.append(self.videos_dict[video])\n \n recommendations.sort()\n n = len(recommendations)\n\n\n if n == 0:\n print(f\"No search results for {search_term}\")\n else:\n print(f\"Here are the results for {search_term}:\")\n for i in range(n):\n print(f\"{i+1}) {recommendations[i]}\")\n print(\"Would you like to play any of the above? 
If yes, specify the number of the video.\")\n print(\"If your answer is not a valid number, we will assume it's a no.\")\n\n try:\n response = int(input())\n if response in range(1,n+1):\n wanted_video_info = recommendations[response-1]\n #print(wanted_video_info)\n s = wanted_video_info\n result = re.search(r\"\\(([A-Za-z0-9_]+)\\)\", s)\n #print(result.group(1))\n self.play_video(result.group(1))\n except ValueError:\n pass", "def search(self, *args, **kwargs): # real signature unknown\n pass", "def search(api_key, term, location, limit, offset):\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': limit,\n 'offset': offset\n }\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)", "def search(self, query):\n logger.debug('Performing search for: '+query)\n write_textfield('queryString', query+\"\\n\", check=False)\n self.waitForLoaderToDisappear()", "def search(self, **kwargs):\n return keyword_search(self._rq_list, **kwargs)", "def search_by_title(title):\n\turl = tmdb_api(\"search/movie\")+\"&query=\"+urllib.quote_plus(title)\n\tresponse = json.load(urllib2.urlopen(url))\n\treturn JSONResponse(response)", "def search(bearer_token, term, location, offset = None, SEARCH_LIMIT = 3):\n #'limit': SEARCH_LIMIT,\n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': None,\n 'offset':offset\n }\n return request(API_HOST, SEARCH_PATH, bearer_token, url_params=url_params)", "def lookup(self, search_string):\n url = self.create_search_url(search_string)\n self.logger.debug(\"lookup: using search url: %s\" % url)\n search_results = self.get_search_results(url)\n results = []\n # Search results is an XML string with basic top level info about\n # all the entities that matched our search string..\n #\n dom = parseString(search_results).firstChild\n entity = first_child(dom, \"entity\")\n while entity:\n if self.parser.content == \"movies\":\n results.append(Movie(entity, self))\n else:\n results.append(Series(entity, self))\n entity = next_sibling(entity, \"entity\")\n return results", "def _search(self, query):\n return self._request(query)", "def search_term(self, search_term: str):\n\n self._search_term = search_term", "def search(self, query):\n launch_gs_app('search',\n self.browser,\n GoogleSuite.SEARCH_URL.format(_urlencode([('q', query)])))", "def full_search(pw, *arg, **kw):\n return pw.search(*arg, **kw)", "def search(api_key, term, location, offset, RADIUS_SIZE):\n #DEBUG\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'offset': offset,\n 'location': location.replace(' ', '+'),\n 'radius': RADIUS_SIZE,\n 'limit': 50\n }\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)", "def search():\n\n # Make sure user is logged in\n if \"username\" not in session:\n return render_template(\"index.html\", message=\"Please login to view that page!\")\n\n if request.method == \"POST\":\n # Get search form\n search = request.form.get(\"search\").strip()\n search_partial = search + '%'\n\n books = db.execute(\"SELECT * FROM books WHERE isbn LIKE :search OR author LIKE :search OR title LIKE :search\", {\"search\": search_partial}).fetchall()\n\n return render_template(\"search.html\", username=session[\"username\"], search=search, books=books)\n # Actually you cant get this without being logged in\n return render_template(\"search.html\")", "def dirty_yt_search(keyword):\n yt_url = 'https://www.youtube.com/results'\n search_args = {'search_query': 
keyword}\n\n resp = requests.get(yt_url, search_args)\n print(resp.text)\n search_results = re.findall(r'href=\\\"\\/watch\\?v=(.{11})', resp.text)\n return 'http://www.youtube.com/watch?v=' + search_results[0]", "def search(request):\n\n # get form data \n searchItem = request.GET.get(\"q\")\n # if searchItem is an exact match redirect to that page\n if (util.get_entry(searchItem) is not None):\n return HttpResponseRedirect(reverse(\"entry\", kwargs={\n \"title\": searchItem\n }))\n # add any pages with the string in it to results list \n else: \n results = []\n substring = False\n for title in util.list_entries():\n if searchItem.upper() in title.upper():\n results.append(title)\n if results:\n substring = True\n # return results\n return render(request, \"encyclopedia/search.html\", {\n \"searchItem\": searchItem,\n \"substring\": substring,\n \"results\": results\n })", "def search():\n # Check for database tables\n check_db()\n # Check for GET data\n search_query = request.args.get(\"q\", None)\n # Format search results as HTML\n search_results = get_search_results_html(search_query)\n # Format recent searches as HTML\n recent_searches = get_recent_searches_html()\n\n return html_wrapper('<h1>' + SITE_NAME + '''</h1>\n <form action=\"/\" method=\"GET\">\n <input type=\"text\" name=\"q\">\n <input type=\"submit\" value=\"search\">\n </form>''' + search_results + recent_searches)", "def search(term: str):\n api_key = current_app.config[\"GIPHY_API_KEY\"]\n search_result = giphy_api_bridge.search_gif(api_key, term, 5)\n return json.dumps(search_result, default=lambda o: o.__dict__, indent=2)", "def search():\n url = create_search_url()\n links = make_selenium_search(url)\n\n return links", "def search(self, id):\n\n db = self.connection(\"imdb\")\n\n try:\n cur = db.cursor()\n sql = \"SELECT * FROM film WHERE id = %s;\"\n cur.execute(sql, (id,))\n return cur.fetchall()\n except:\n print(\"Cannot find the film!\")\n\n db.close()", "def search(self, *args, **kwargs):", "def search(self, term):\n data = self.__get_data_from_db(term)\n\n if not data:\n data = self.__get_data_from_store(term)\n self.__set_data_to_db(term, data)\n print(data)\n return data", "def test_IMDB_Search_TC_003_see_movie_details(self):\n # to load a given URL in browser window\n self.driver.get(self.base_url) \n # to enter search term, we need to locate the search textbox\n searchTextBox=self.driver.find_element_by_id(\"suggestion-search\")\n # to clear any text in the search textbox\n searchTextBox.clear()\n # to enter the search term in the search textbox via send_keys() function\n searchTextBox.send_keys(self.search_term)\n # to search for the entered search term\n searchTextBox.send_keys(Keys.RETURN)\n # to click on movie title \n searchMovieBox = self.driver.find_element_by_link_text(\"Silicon Valley\").click()\n #self.driver.implicitly_wait(5)\n self.driver.find_element_by_xpath('//head/title[1]')\n # to verify if the search results page loaded\n self.assertIn(\"Silicon Valley (TV Series 2014–2019) - IMDb\",self.driver.title)\n # to verify if the search results page contains any results or no results were found.\n self.assertNotIn(\"No results found.\",self.driver.page_source)", "def search():\n query = input('Please enter your search query\\n')\n # For now, we will just print the whole database\n #db_actions.display()\n db_actions.search(query)", "def search(self, keyword, options=None, max_req=1000):\n # TODO add filter options\n search_box = self.driver.find_element_by_xpath(\n 
\"//form[@id='extended-nav-search']//input[@placeholder='Search']\")\n search_box.send_keys(keyword)\n search_box.send_keys(Keys.RETURN)\n self.default_wait.until(url_changed(self.driver.current_url))\n\n self.logger.info(\"Searching keyword {} \\nwith options {}\".format(keyword, options))\n\n req_count = 0\n ret = []\n has_next = True\n while has_next and req_count < max_req:\n req_count += 1\n self.logger.info(\"Request #{} to url {}\".format(req_count, self.driver.current_url))\n page_info, has_next = self._scrape_single_page(self._extract_search_results)\n ret.extend(page_info)\n\n return ret", "def search(self, search_phrase, start_index=1):\n search_params = {'key': self.search_auth_key,\n 'cx': self.search_auth_cx,\n 'q': search_phrase,\n 'start': start_index}\n app_log.info(\"Searching google with query: %s\", search_phrase)\n url = url_concat(self.google_api_url, search_params)\n app_log.info(\"Search url: %s\", url)\n future = self._fetch_results(url)\n return future", "async def search(self, *args, **kwargs):\n pass", "def search():\n query = request.args['query']\n # find instances of the entered word in title, tags or ingredients\n results = mongo.db.places.find({\n '$or': [\n {'name': {'$regex': query, '$options': 'i'}},\n {'tags': {'$regex': query, '$options': 'i'}},\n {'city': {'$regex': query, '$options': 'i'}},\n ]\n })\n return render_template('search.html', query=query, results=results)", "def search(query):\n\tprint('-> \tSeraching -> {}'.format(query))\n\told_html = driver.find_element_by_tag_name('html').text\n\ts = driver.find_element_by_name('q')\n\ts.send_keys(query)\n\ts.send_keys(Keys.ENTER) \n\treturn wait_for(old_html)", "def _search(client, search_string):\n if search_string is None:\n logger.info(uxstring.UxString.list_all, fg=\"green\")\n\n current_page = 0\n total_pages = get_search_results(client, search_string, current_page)\n if total_pages < 1:\n return\n\n while 0 <= current_page < total_pages:\n try:\n prompt_resp = click.prompt(uxstring.UxString.pagination,\n type=str)\n next_page = get_next_page(prompt_resp, current_page)\n if next_page == -1:\n model_id = prompt_resp\n display_search_info(client, model_id)\n elif next_page >= total_pages or next_page < 0:\n continue\n elif next_page != current_page:\n get_search_results(client, search_string, next_page)\n current_page = next_page\n\n except click.exceptions.Abort:\n return", "def _search(self, searchterm, pred, **args):\n # TODO: DRY with sparql_ontol_utils\n searchterm = searchterm.replace('%','.*')\n namedGraph = get_named_graph(self.handle)\n query = \"\"\"\n prefix oboInOwl: <http://www.geneontology.org/formats/oboInOwl#>\n SELECT ?c WHERE {{\n GRAPH <{g}> {{\n ?c {pred} ?l\n FILTER regex(?l,'{s}','i')\n }}\n }}\n \"\"\".format(pred=pred, s=searchterm, g=namedGraph)\n bindings = run_sparql(query)\n return [r['c']['value'] for r in bindings]", "def search(bearer_token, term, location):\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': SEARCH_LIMIT\n }\n return request_from_yelp(API_HOST, SEARCH_PATH, bearer_token, url_params=url_params)", "def initiateSearch(search_context, text):\n searches = []\n log.msg(\"Searching: %s\" % text)\n\n if not isinstance(text, dict) :\n \n artistStart = text.find(SearchKeys.ARTIST)\n titleStart = text.find(SearchKeys.TITLE)\n albumStart = text.find(SearchKeys.ALBUM)\n\n if max(artistStart, titleStart, albumStart) != -1:\n d = dict()\n \n def pullout(text2):\n if text2.find(SearchKeys.ARTIST) != -1:\n text2 
= text2[0:text2.find(SearchKeys.ARTIST)]\n\n if text2.find(SearchKeys.ALBUM) != -1:\n text2 = text2[0:text2.find(SearchKeys.ALBUM)]\n\n if text2.find(SearchKeys.TITLE) != -1:\n text2 = text2[0:text2.find(SearchKeys.TITLE)]\n return text2.strip()\n\n if artistStart != -1:\n d['artist'] = pullout(text[artistStart+len(SearchKeys.ARTIST) : len(text)])\n\n if titleStart != -1:\n d['title'] = pullout(text[titleStart+len(SearchKeys.TITLE) : len(text)])\n \n if albumStart != -1:\n d['album'] = pullout(text[albumStart+len(SearchKeys.ALBUM) : len(text)])\n log.msg(\"Parsed Search: \" + str(d))\n text = d\n\n # Fire off search in parallel\n for key, mediasrc in __mediaSources.items():\n log.msg(\"\\tSending Search Request to %s\" % key)\n searches.append(deferToThread(mediasrc.search, text))\n\n # When all searches return combine them. The lists will be\n # returned as a list of a touples consisting of a sucess/failure\n # boolean followed by the results returned by the individual source\n dl = DeferredList(searches)\n\n def sendResults(results):\n \"\"\"\n Combines the results returned by the deferred list\n into master_result and passes it to all the registered\n controllers.\n \"\"\"\n log.msg(\"Search Returned from all sources\")\n master_result = []\n for status, result in results:\n if status:\n master_result += result\n\n for key, mediactr in __controllers.items():\n log.msg(\"\\tSending Result to %s\" % key)\n mediactr.searchCompleted(search_context, master_result)\n\n dl.addCallback(sendResults)", "def search(bearer_token, term, offset, location):\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'offset': offset\n }\n\n return request(API_HOST, SEARCH_PATH, bearer_token, url_params=url_params)", "def keyword_search(keywords):\n try:\n return itunespy.search(keywords)[0]\n except LookupError:\n return None", "def search(self, sstrings, **kwargs):\n si = self.allinfo()\n return _search(si, sstrings, **kwargs)", "def Search(self, request, global_params=None):\n config = self.GetMethodConfig('Search')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Search(self, request, global_params=None):\n config = self.GetMethodConfig('Search')\n return self._RunMethod(\n config, request, global_params=global_params)", "def lookup(title):\n\n # Contact API\n try:\n api_key = os.environ.get(\"API_KEY\")\n response = requests.get(\n f\"http://www.omdbapi.com/?s={title}&apikey=ced7be9a\")\n response.raise_for_status()\n except requests.RequestException:\n return None\n\n # parse response\n try:\n movie = response.json()\n search = movie[\"Search\"]\n search_list = []\n for i in range(len(search)):\n search_prop = {\"title\": search[i][\"Title\"],\n \"year\": search[i][\"Year\"], \n \"poster\": search[i][\"Poster\"],\n \"id\": search[i][\"imdbID\"]}\n search_list.append(search_prop)\n\n return search_list\n\n except (KeyError, TypeError, ValueError):\n return None", "def search_by_keyword(self, keyword):\n if self.indexes and keyword in self.indexes.keys():\n articles_indexes = self.indexes[keyword][:self.no_returned_articles]\n return retrieve_articles(articles_indexes)\n else:\n return None", "def media_search(query, term):\n table = Media.__table__\n search_statement = or_(\n cast(table.c.id, Text).ilike('%' + term + '%'),\n cast(table.c.type, Text).ilike('%' + term + '%'),\n table.c.name.ilike('%' + term + '%'),\n table.c.cast.ilike('%' + term + '%'),\n cast(table.c.seasons, Text).ilike('%' + term + '%'),\n 
cast(table.c.release_date, Text).ilike('%' + term + '%'),\n table.c.last_aired.ilike('%' + term + '%'),\n table.c.image.ilike('%' + term + '%'),\n cast(table.c.running, Text).ilike('%' + term + '%'),\n table.c.overview.ilike('%' + term + '%'),\n table.c.other_images.ilike('%' + term + '%'),\n table.c.videos.ilike('%' + term + '%'),\n cast(table.c.imdb_id, Text).ilike('%' + term + '%'),\n cast(table.c.tmdb_id, Text).ilike('%' + term + '%'),\n cast(table.c.runtime, Text).ilike('%' + term + '%'),\n table.c.tagline.ilike('%' + term + '%'),\n cast(table.c.popularity, Text).ilike('%' + term + '%'),\n cast(table.c.average_rating, Text).ilike('%' + term + '%'))\n return query.filter(search_statement)", "def perform_search(self):\n\n self.implicitly_wait(5)\n html_element = self.find_element_by_xpath(\n '/html/body').get_attribute('outerHTML')\n soup = Scraper(html_element)\n target = soup.find_search_field()\n\n for elem in target:\n for attr, value in elem.items():\n placeholder = self.find_elements_by_css_selector(\n f'input[{attr}=\"{value}\"]'\n )\n for element in placeholder:\n try:\n element.send_keys(self.keywords)\n element.send_keys(Keys.RETURN)\n print(colored(':: Placeholder fullfilled ::', 'green'))\n return\n except:\n print(\n colored('Can\\'t type inside the search input', 'yellow'))", "def search(self, value):\n self.base_selenium.set_text(element='general:search', value=value)\n self.base_selenium.click(element='general:search')\n time.sleep(self.base_selenium.TIME_MEDIUM)\n return self.result_table()", "def search_db(term):\n session = get_session()\n try:\n search_statement = or_(\n search.c.name.ilike('%' + term + '%'),\n search.c.about.ilike('%' + term + '%'),\n search.c.kind.ilike('%' + term + '%'),\n search.c.image.ilike('%' + term + '%'),\n cast(search.c.id, Text).ilike('%' + term + '%'),\n cast(search.c.release_date, Text).ilike('%' + term + '%'))\n query = session.query(search).filter(search_statement)\n query = search_filter(request.args, query)\n query = order_query(search, request.args, query)\n final_query = query\n\n query = set_limit_offset(request.args, query)\n\n data = query.all()\n count = final_query.count()\n return jsonify({\n 'items': search_schema.dump(data, many=True).data,\n 'count': count\n })\n finally:\n session.close()", "def search():\n\n # TO DO: refine with wildcard to curb superfluous results\n \n # logged in users can search for books\n # via 'isbn', 'author', or 'title'\n query = request.form.get(\"search\")\n if not query:\n return render_template(\"home.html\", result=0, name=session[\"name\"],result_head=\"Results\")\n \n # query 'isbn'\n if query.isdigit():\n res = db.execute(\"SELECT * FROM books WHERE isbn LIKE :query\",\n {\"query\": f\"{query}%\"}).fetchall()\n else:\n # query 'author'\n res = db.execute(\"SELECT * FROM books WHERE author LIKE :query\",\n {\"query\": f\"{query}%\"}).fetchall()\n # If no result from author, query 'title'\n if len(res) == 0:\n res = db.execute(\"SELECT * FROM books WHERE title LIKE :query\",\n {\"query\": f\"{query}%\"}).fetchall()\n if len(res) == 0:\n res = 0\n return render_template(\"home.html\", result=res, name=session[\"name\"], result_head=\"Results\")", "def search_koodous_db(self, term, page=1, page_size=100):\n url = self.API_URL % ('apks', '?search=%s&page=%i&page_size=%i' % (term, page, page_size), \"\" )\n return requests.get(url=url, headers=self.headers, proxies=self.proxies, verify=self.verify_ssl)", "def search():\n if request.method == \"GET\":\n mongo_collection = 
mongo_database[\"questions\"]\n query = request.args.get(\"keyword\")\n result = mongo_collection.find({\"$text\": {\"$search\": query}})\n objects = []\n for object in result:\n objects.append(object)\n return render_template(\"search.html\", cards=objects)\n else:\n return start()", "def search_results(request):\r\n mdict = request.matchdict\r\n rdict = request.GET\r\n\r\n if 'terms' in mdict:\r\n phrase = \" \".join(mdict['terms'])\r\n else:\r\n phrase = rdict.get('search', '')\r\n\r\n if rdict.get('search_mine') or 'username' in mdict:\r\n with_user = True\r\n else:\r\n with_user = False\r\n\r\n username = None\r\n if with_user:\r\n if 'username' in mdict:\r\n username = mdict.get('username')\r\n elif request.user and request.user.username:\r\n username = request.user.username\r\n\r\n # with content is always in the get string\r\n search_content = asbool(rdict.get('with_content', False))\r\n\r\n conn_str = request.registry.settings.get('sqlalchemy.url', False)\r\n searcher = get_fulltext_handler(conn_str)\r\n\r\n # check if we have a page count submitted\r\n page = rdict.get('page', 0)\r\n count = rdict.get('count', 10)\r\n\r\n try:\r\n res_list = searcher.search(\r\n phrase,\r\n content=search_content,\r\n username=username if with_user else None,\r\n ct=count,\r\n page=page\r\n )\r\n except ValueError:\r\n request.response.status_int = 404\r\n ret = {'error': \"Bad Request: Page number out of bound\"}\r\n return _api_response(request, ret)\r\n\r\n constructed_results = []\r\n for res in res_list:\r\n return_obj = dict(res)\r\n return_obj['tags'] = [dict(tag[1]) for tag in res.tags.items()]\r\n\r\n # the hashed object is there as well, we need to pull the url and\r\n # clicks from it as total_clicks\r\n return_obj['url'] = res.hashed.url\r\n return_obj['total_clicks'] = res.hashed.clicks\r\n\r\n constructed_results.append(return_obj)\r\n\r\n return _api_response(request, {\r\n 'search_results': constructed_results,\r\n 'result_count': len(constructed_results),\r\n 'phrase': phrase,\r\n 'page': page,\r\n 'with_content': search_content,\r\n 'username': username,\r\n })", "def _search(progtext, qs=None, splash=True, pre_load=True):\n g.message = \"Searching for '%s%s%s'\" % (c.y, progtext, c.w)\n\n # show splash screen during fetch\n if splash:\n g.content = logo(c.b) + \"\\n\\n\"\n screen_update()\n\n # perform fetch\n wdata = call_gdata('search', qs)\n songs = get_tracks_from_json(wdata)\n\n if songs and pre_load:\n # preload first result url\n kwa = {\"song\": songs[0], \"delay\": 0}\n t = threading.Thread(target=preload, kwargs=kwa)\n t.start()\n\n if songs:\n g.model.songs = songs\n return True\n\n return False", "def search():\n search = request.form.get(\"search\")\n results = mongo.db.recipes.find({\"$text\": {\"$search\": search}}).limit(2)\n result_count = mongo.db.recipes.find(\n {\"$text\": {\"$search\": search}}).count()\n if result_count > 0:\n return render_template(\"pages/search.html\", results=results, search=search, isFooter=True)\n else:\n flash(\"No results found.\")\n return render_template(\"pages/search.html\", results=results, search=search, isFooter=True)", "def do_search(search):\n import StringIO\n from x84.bbs import echo, getch\n disp_msg('SEARChiNG')\n resp = requests.get(u'http://apple.accuweather.com'\n + u'/adcbin/apple/Apple_find_city.asp',\n params=(('location', search),))\n locations = list()\n if resp is None:\n disp_notfound()\n elif resp.status_code != 200:\n # todo: logger.error\n echo(u'\\r\\n' + u'StAtUS COdE: %s\\r\\n\\r\\n' % 
(resp.status_code,))\n echo(repr(resp.content))\n echo(u'\\r\\n\\r\\n' + 'PRESS ANY kEY')\n getch()\n else:\n xml_stream = StringIO.StringIO(resp.content)\n locations = list([dict(elem.attrib.items())\n for _event, elem in ET.iterparse(xml_stream)\n if elem.tag == 'location'])\n if 0 == len(locations):\n disp_notfound()\n else:\n disp_found(len(locations))\n return locations", "def search(q):\n if q in DB.SEARCH_CACHE.keys():\n return DB.SEARCH_CACHE[q]\n else:\n DB.SEARCH_CACHE[q] = {}\n for k, v in DB.MUSIC.iteritems():\n if v.is_like(q):\n DB.SEARCH_CACHE[q][v.uuid] = v.__dict__\n for k, v in DB.VIDEOS.iteritems():\n if v.is_like(q):\n DB.SEARCH_CACHE[q][v.uuid] = v.__dict__\n for k, v in DB.IMAGES.iteritems():\n if v.is_like(q):\n DB.SEARCH_CACHE[q][v.uuid] = v.__dict__\n return DB.SEARCH_CACHE[q]", "def search(request):\n\n term = \"\"\n organizations = None\n memberships = None\n events = None\n persons = None\n airports = None\n training_requests = None\n comments = None\n only_result = None\n\n if request.method == \"GET\" and \"term\" in request.GET:\n form = SearchForm(request.GET)\n if form.is_valid():\n term = form.cleaned_data.get(\"term\", \"\")\n tokens = re.split(r\"\\s+\", term)\n\n organizations = Organization.objects.filter(\n Q(domain__icontains=term) | Q(fullname__icontains=term)\n ).order_by(\"fullname\")\n if len(organizations) == 1 and not only_result:\n only_result = organizations[0]\n\n memberships = Membership.objects.filter(\n registration_code__icontains=term\n ).order_by(\"-agreement_start\")\n if len(memberships) == 1 and not only_result:\n only_result = memberships[0]\n\n events = Event.objects.filter(\n Q(slug__icontains=term)\n | Q(host__domain__icontains=term)\n | Q(host__fullname__icontains=term)\n | Q(url__icontains=term)\n | Q(contact__icontains=term)\n | Q(venue__icontains=term)\n | Q(address__icontains=term)\n ).order_by(\"-slug\")\n if len(events) == 1 and not only_result:\n only_result = events[0]\n\n # if user searches for two words, assume they mean a person\n # name\n if len(tokens) == 2:\n name1, name2 = tokens\n complex_q = (\n (Q(personal__icontains=name1) & Q(family__icontains=name2))\n | (Q(personal__icontains=name2) & Q(family__icontains=name1))\n | Q(email__icontains=term)\n | Q(secondary_email__icontains=term)\n | Q(github__icontains=term)\n )\n persons = Person.objects.filter(complex_q)\n else:\n persons = Person.objects.filter(\n Q(personal__icontains=term)\n | Q(family__icontains=term)\n | Q(email__icontains=term)\n | Q(secondary_email__icontains=term)\n | Q(github__icontains=term)\n ).order_by(\"family\")\n\n if len(persons) == 1 and not only_result:\n only_result = persons[0]\n\n airports = Airport.objects.filter(\n Q(iata__icontains=term) | Q(fullname__icontains=term)\n ).order_by(\"iata\")\n if len(airports) == 1 and not only_result:\n only_result = airports[0]\n\n training_requests = TrainingRequest.objects.filter(\n Q(group_name__icontains=term)\n | Q(family__icontains=term)\n | Q(email__icontains=term)\n | Q(github__icontains=term)\n | Q(affiliation__icontains=term)\n | Q(location__icontains=term)\n | Q(user_notes__icontains=term)\n )\n if len(training_requests) == 1 and not only_result:\n only_result = training_requests[0]\n\n comments = Comment.objects.filter(\n Q(comment__icontains=term)\n | Q(user_name__icontains=term)\n | Q(user_email__icontains=term)\n | Q(user__personal__icontains=term)\n | Q(user__family__icontains=term)\n | Q(user__email__icontains=term)\n | Q(user__github__icontains=term)\n 
).prefetch_related(\"content_object\")\n if len(comments) == 1 and not only_result:\n only_result = comments[0]\n\n # only 1 record found? Let's move to it immediately\n if only_result and not form.cleaned_data[\"no_redirect\"]:\n msg = format_html(\n \"You were moved to this page, because your search <i>{}</i> \"\n \"yields only this result.\",\n term,\n )\n if isinstance(only_result, Comment):\n messages.success(request, msg)\n return redirect(\n only_result.content_object.get_absolute_url()\n + \"#c{}\".format(only_result.id)\n )\n elif hasattr(only_result, \"get_absolute_url\"):\n messages.success(request, msg)\n return redirect(only_result.get_absolute_url())\n\n else:\n messages.error(request, \"Fix errors below.\")\n\n # if empty GET, we'll create a blank form\n else:\n form = SearchForm()\n\n context = {\n \"title\": \"Search\",\n \"form\": form,\n \"term\": term,\n \"organisations\": organizations,\n \"memberships\": memberships,\n \"events\": events,\n \"persons\": persons,\n \"airports\": airports,\n \"comments\": comments,\n \"training_requests\": training_requests,\n }\n return render(request, \"dashboard/search.html\", context)", "def search(self, sstrings, **kwargs):\n if self._info is None or self._info is False:\n self._info = self.allinfo()\n return _search(self._info, sstrings, **kwargs)", "def run_search(search_object: str, query: str, scope: Optional[str] = None, size: str = None, sort: str = None,\n order: str = None, artifact_source: str = None) -> dict:\n result = do_search(search_object, query=json.loads(query), scope=scope, size=size, sort=sort, order=order,\n artifact_source=artifact_source, err_operation='Search operation failed')\n in_progress = result.get('af_in_progress')\n status = 'in progress' if in_progress else 'complete'\n search_info = {\n 'AFCookie': result.get('af_cookie'),\n 'Status': status,\n 'SessionStart': datetime.now().strftime(\"%Y-%m-%dT%H:%M:%S\")\n }\n return search_info", "def search(keyword=None, **kwargs):\n\n instance = Ceic._get_instance()\n\n if keyword is not None and keyword.strip() != \"\":\n kwargs[\"keyword\"] = keyword\n\n search_series_method = instance._series_facade.search_series\n result = instance._make_request(search_series_method, **kwargs)\n\n return result", "def search(request):\n raise NotImplementedError", "def search_text(self, search_text):\n\n self._search_text = search_text", "def search(request):\n title = \"Voices search\"\n search_term = request.params.get('search_term','')\n form = Form(request)\n searchstring = u'%%%s%%' % search_term\n\n # generic_filter can be applied to all Node (and subclassed) objects\n\n generic_filter = or_(\n Content.title.like(searchstring),\n Content.body.like(searchstring),\n )\n\n results = DBSession.query(Content).filter(Content.type !='listing').filter(generic_filter).\\\n order_by(Content.title.asc()).all()\n\n\n page_url = PageURL_WebOb(request)\n page = int(request.params.get(\"page\", 1))\n paginator = Page(results,\n page=page,\n items_per_page=10,\n url=page_url)\n\n return render_to_response(\"buddy:templates/home/searchresult.mako\",\n dict(paginator=paginator,title=title,\n form=FormRenderer(form)),request=request)", "def search(request):\r\n\tinput_text = request.GET.get('search-text', '')\r\n\tgames = Game.objects.filter(name__icontains=input_text)\r\n\treturn render(request, 'home.html', {'games': games, 'MEDIA_URL': settings.MEDIA_URL})" ]
[ "0.67246073", "0.6698477", "0.66643846", "0.64279336", "0.64254475", "0.6390886", "0.6354886", "0.6213453", "0.619048", "0.6099298", "0.60874957", "0.60230345", "0.60201484", "0.60025597", "0.5991887", "0.59684485", "0.5968053", "0.59504765", "0.5949628", "0.5926764", "0.5893906", "0.5893904", "0.58898854", "0.5888839", "0.5882262", "0.587834", "0.5868911", "0.5849583", "0.5847064", "0.5830924", "0.5822395", "0.58023596", "0.5771059", "0.5748871", "0.57415104", "0.5740893", "0.5732189", "0.5721185", "0.5682231", "0.56818795", "0.5679789", "0.5670192", "0.5622222", "0.56145364", "0.5612819", "0.56106", "0.56077564", "0.5606392", "0.5589905", "0.5573218", "0.5572508", "0.55601335", "0.55491257", "0.55477715", "0.5544229", "0.55385995", "0.55348676", "0.5534533", "0.5532488", "0.5523514", "0.5522371", "0.5520872", "0.5516574", "0.54972494", "0.54941785", "0.54914516", "0.548372", "0.5479756", "0.5476757", "0.5476178", "0.5474251", "0.54705983", "0.5470565", "0.54666424", "0.546546", "0.5458669", "0.5454572", "0.5454572", "0.5452467", "0.5452077", "0.54246587", "0.5414", "0.5401063", "0.5398951", "0.5397162", "0.53966665", "0.5394451", "0.5393746", "0.5385302", "0.5381739", "0.5372393", "0.5370658", "0.536914", "0.535701", "0.53376347", "0.53075784", "0.530356", "0.53020877", "0.5298151", "0.52974415" ]
0.5997695
14
Displays a generic error message when there is a connection error.
def display_error():
    clear_screen()
    line = '#' * 20
    print(f'{line}\n# CONNECTION ERROR #\n{line}')
    exit(1)
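The document above calls a clear_screen helper that is not part of this row. A minimal sketch of what such a helper could look like, assuming a cross-platform os.system call (the implementation is an illustration, not taken from the dataset):

import os

def clear_screen():
    # Assumed helper: clears the terminal ('cls' on Windows, 'clear' elsewhere).
    os.system('cls' if os.name == 'nt' else 'clear')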
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_error(self, conn, msg):\n print(\"ERROR PLACEHOLDER\")\n\n return", "def __display_error(self, socket_error):\r\n\t\tif socket_error == QAbstractSocket.RemoteHostClosedError:\r\n\t\t\tself._window.open_dialog(\"Serveur déconnecté\", \"Le serveur s'est déconnecté !\")\r\n\t\t\t# Add signal to be emitted that pops up a dialog window\r\n\t\telif socket_error == QAbstractSocket.OperationError: # Raised when the socket already is connected\r\n\t\t\tpass\r\n\t\telse:\r\n\t\t\tself._window.open_dialog(\"Erreur de connection\",\r\n\t\t\t\t\t\t\t\t\t \"L'erreur suivante est survenue : {}.\".format(self.__tcpSocket.errorString()),\r\n\t\t\t\t\t\t\t\t\t type=\"error\")", "def _connection_failed(self, link_uri, msg):\n print \"Connection to %s failed: %s\" % (link_uri, msg)", "def _connection_failed(self, link_uri, msg):\n print('Connection to %s failed: %s' % (link_uri, msg))", "def _connect_failed(self):\n\t\tself.root.stdout.write(\"Error: Connection Failed!\\n\")\n\t\tself.client = False", "def error(self, msg=None):\n\t\tdebug(\"Connection Error:\", True)\n\n\t\tif msg is not None:\n\t\t\tdebug(msg, True)\n\t\t\n\t\tif self.port is not None:\n\t\t\tself.port.close()\n\t\t\n\t\tself.state = State.Unconnected", "def error_handler(msg):\n print \"Server Error: %s\" % msg", "def error_handler(msg):\n print \"Server Error: %s\" % msg", "def error_handler(msg):\n print(\"Server Error: %s\" % msg)", "def connection_failed(self, connection, error):\n assert False", "def db_connection_error(error):\n return internal_server_error(error)", "def _connection_failed(self, link_uri, msg):\n\t\tprint \"Connection to %s failed: %s\" % (link_uri, msg)\n\t\tself.is_connected = False", "def display_error(self, message):\n self.ui_widget.display_error(message=message)", "def offline_error():\n\n colored('No available internet connection\\n', 'red')", "def error(self, msg):\n self.send_command('error', {\n 'msg': msg,\n })", "def show_error(title, message, print_message=False):\n\n pass", "def send_server_error(self):\n\n self.send_message(\n Message(\n Codes.SERVER_ERROR,\n { 'message': 'The server has encountered an internal error.' 
}\n )\n )", "def __connect_failed__(self):\n # Ask the user what to do with the error\n choice = input(\"[A]bort, [C]hange address and port, or [R]etry?\")\n if (choice.lower() == \"a\"):\n exit()\n elif (choice.lower() == \"c\"):\n address = input(\"Please enter the address:\")\n port_number = input(\"Please enter the port:\")", "def onConnectError(self, fetcher, error): #$NON-NLS-1$\r", "def error(self, message):\n print message", "def print_requests_connectionerror(cls, class_name):\n print(\n f\"{cls.ERROR_PREFIX} {cls.REQUESTS_PACKAGE_CONNECTIONERROR_MESSAGE} '{class_name}'.\"\n )", "def error(self, message=None, show_help=True):", "def _on_server_error(server, *_):\n exception = sys.exc_info()[1]\n if isinstance(exception, ConnectionError):\n # These are expected errors when the browser closes the connection.\n return\n # Other errors would be unexpected, so print them.\n traceback.print_exc()", "def error(cls, message):\n print('[ERROR] {0}'.format(message))", "def on_connection_error(self):\n log.error(\"Stream connection has errored or timed out\")", "def error():\n title = session.get('title', 'Error')\n error_message = session.get('error_message', 'An error has occurred.')\n level = session.get('level', 'error')\n logger.error(\"Displaying error to the user\", error_message=error_message, level=level)\n return render_template('errors/error.html', title=title, error_message=error_message, level=level)", "def error(msg):\n click.secho(f'[ERROR] {msg}', fg='red')", "def _on_error(self, error):\n print(error + \" for \" + self.session_name)", "def error(self, error_msg):\n print(\"ERROR DETECTED\")\n print(error_msg)", "async def help_error(self, ctx, error):\n await self.log_error_and_apologize(ctx, error)", "def onCheckConnectionError(self):\r\n\r\n # show the error message\r\n msgBox = QMessageBox(self)\r\n msgBox.setWindowTitle(conf_parser.get(\"APP\", \"name\"))\r\n msgBox.setText(\"Internet connection not detected.\")\r\n msgBox.setStandardButtons(QMessageBox.Retry | QMessageBox.Close)\r\n msgBox.setDefaultButton(QMessageBox.Close)\r\n ret = msgBox.exec()\r\n\r\n # interact user\r\n if(ret == QMessageBox.Close):\r\n # exit program\r\n sys.exit()\r\n if(ret == QMessageBox.Retry):\r\n # retry connection\r\n self.thread = threading.Thread(target=self.checkServerThread)\r\n self.thread.setDaemon(True)\r\n self.thread.start()\r\n self.pros = 0\r\n self.check_timer.start(100)", "def handle_error(self):\n self.cmd_channel.debug(\"DTPHandler.handle_error()\")\n try:\n raise\n # if error is connection related we provide a detailed\n # information about it\n except socket.error, err:\n if err[0] in errno.errorcode:\n error = err[1]\n else:\n error = \"Unknown connection error\"\n # an error could occur in case we fail reading / writing\n # from / to file (e.g. 
file system gets full)\n except EnvironmentError, err:\n error = _strerror(err)\n except:\n # some other exception occurred; we don't want to provide\n # confidential error messages to user so we return a\n # generic \"unknown error\" response.\n logerror(traceback.format_exc()) \n error = \"Unknown error\"\n self.cmd_channel.respond(\"426 %s; transfer aborted.\" %error)\n self.close()", "def show_error(self, message: str):\n QMessageBox.about(self, 'Error!', message)", "def error(self, request):\n if self.debug:\n import cgitb\n request.stdout.write('Content-Type: text/html\\r\\n\\r\\n' +\n cgitb.html(sys.exc_info()))\n else:\n errorpage = \"\"\"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">\n<html><head>\n<title>Unhandled Exception</title>\n</head><body>\n<h1>Unhandled Exception</h1>\n<p>An unhandled exception was thrown by the application.</p>\n</body></html>\n\"\"\"\n request.stdout.write('Content-Type: text/html\\r\\n\\r\\n' +\n errorpage)", "def show_error(self):\n print('LSE Error : {}'.format(self._error))", "def send_error(msg):\n\n print(msg)", "def server_error(request):\n return defaults.server_error(request, template_name=get_template_name(request, \"500.html\"))", "def bcp_error(self, **kwargs):\n self.log.warning('Received error command from client')", "def error(message):\n print str(message)", "def error():\n return render_template(\"404.html\")", "def show_error(self):\n if self.error is None:\n return\n from PartSeg.common_gui.error_report import ErrorDialog\n\n if isinstance(self.error, TiffFileException):\n mess = QMessageBox()\n mess.setIcon(QMessageBox.Critical)\n mess.setText(\"During read file there is an error: \" + self.error.args[0])\n mess.setWindowTitle(\"Tiff error\")\n mess.exec()\n return\n if isinstance(self.error, SegmentationLimitException):\n mess = QMessageBox()\n mess.setIcon(QMessageBox.Critical)\n mess.setText(\"During segmentation process algorithm meet limitations:\\n\" + \"\\n\".join(self.error.args))\n mess.setWindowTitle(\"Segmentation limitations\")\n mess.exec()\n return\n dial = ErrorDialog(self.error, \"Exception during program run\")\n # TODO check\n # dial.moveToThread(QApplication.instance().thread())\n dial.exec()", "def error_mess():\n print(\"Sorry, I didn't understand that.\")", "def error(message, code=400):\n return render_template(\"error.html\", top=code, bottom=message)", "def error(self, message):\n sys.stderr.write('error: %s\\n' % message)\n self.print_help()\n sys.exit(2)", "def error(self, msg):\n self._seen_error = msg\n print(\"***\", msg, file=self.stdout)\n\n if not self.config.show_traceback_on_error:\n return\n\n etype, evalue, tb = sys.exc_info()\n if tb and tb.tb_frame.f_code.co_name == \"default\":\n tb = tb.tb_next\n if tb and tb.tb_frame.f_code.co_filename == \"<stdin>\":\n tb = tb.tb_next\n if tb: # only display with actual traceback.\n self._remove_bdb_context(evalue)\n tb_limit = self.config.show_traceback_on_error_limit\n fmt_exc = traceback.format_exception(\n etype, evalue, tb, limit=tb_limit\n )\n\n # Remove last line (exception string again).\n if len(fmt_exc) > 1 and fmt_exc[-1][0] != \" \":\n fmt_exc.pop()\n\n print(\"\".join(fmt_exc).rstrip(), file=self.stdout)", "def _display_error(message: str) -> None:\n print()\n print(message, end='\\n\\n')", "def error(self, msg, *args, **kwargs):\n pass", "def _on_connection_error(self, exception):\n print(\"connection failed: {}\".format(exception))\n time.sleep(1)\n self.connect()", "async def about_error(self, ctx, error):\n await 
self.log_error_and_apologize(ctx, error)", "def server_error(e):\n return render_template('500.html'), 500", "def __init__(self, error_msg):\n super(ConnectionException, self).__init__(error_msg)", "def server_error(error):\n error_message = str(error)\n return render_template('error-pages/500-page.html', error_message=error_message, isFooter=True), 500", "def client_error_view(request, err_msg: str, err_code: int):\n\tctxt = ctxt_cat({})\n\tctxt[\"error_code\"] = err_code\n\tctxt[\"error_msg\"] = err_msg\n\n\treturn render(request, 'error_template.html', ctxt, status=err_code)", "def server_error(e):\n return 'Error while serving request', 500", "def database_connection_error(error):\n message = str(error)\n app.logger.critical(message)\n return {\n 'status_code': status.HTTP_503_SERVICE_UNAVAILABLE,\n 'error': 'Service Unavailable',\n 'message': message\n }, status.HTTP_503_SERVICE_UNAVAILABLE", "def error():\n return render_template(\"error.html\", **locals())", "def _raise_unknown_error(ex):\n raise MsticpyKqlConnectionError(\n \"Another exception was returned by the service\",\n *ex.args,\n f\"Full exception:\\n{str(ex)}\",\n title=\"connection failed\",\n )", "def error_open_mess(url: str) -> None:\n meta = MainData()\n print(('{0}Can not open URL: {1} {2}{3}').format(meta.clrs['red'],\n meta.clrs['lblue'],\n url,\n meta.clrs['reset']))", "def on_connection_open_error(self, _unused_connection, err):\n self.logger.info('Connection open failed: %s', err)\n self.reconnect()", "def server_error(e):\n return 'Eftirfarandi villa kom upp: {}'.format(e), 500", "def error_page(self, error_message: str, status: int = 400):\n self.set_status(status)\n self.render('error.jinja2', error=error_message)", "def error(msg):\n sys.stdout.write('%s[ ERROR ]%s %s\\n' % (colors.RED, colors.RESET, msg))", "def hostname_error(self, msg):\n raise NotImplementedError('hostname_error')", "def error_message(self):\n return u'Something wrong with {}, ' \\\n u'try switch to another series provider'\\\n .format(type(self).__name__)", "def error_page(e):\n \n return render_template('error-page.html'), 404", "def internal_server_error(error):\n return render_template('error.html', error_msg=\"500 Internal Server error\", pagetitle=\"500 Internal Server error\"), 500", "def print_server_error(e):\n\n print('Error:')\n print('>>>')\n print(e)\n print('^^^')\n print('Traceback:\\n\\n')\n traceback.print_exc()\n print('^^^')\n print('\\n\\n\\n')", "def clientConnectionFailed(self, connector, reason):\n\n moduleCoordinator.ModuleCoordinator().putError(\"Error connecting to \" + self.config['botnet'], self.module)", "def errorview(request):\n \n #return a page indicating an error has occured\n return render(request, 'SmellGuessTemplate/error.html')", "def server_error(request, template_name='500.html'):\n # don't risk running context processors\n context = dict(settings.TEMPLATE_CONSTANTS)\n context['MEDIA_URL'] = settings.MEDIA_URL\n context['STATIC_URL'] = settings.STATIC_URL\n return render_to_response(template_name, context)", "def error(self, message):\n self._clear()\n print(\"ERROR:\", message)\n self._draw()", "def display_error(msg, *args):\n msg = _concat_message(msg, *args)\n errmsg = \"ERROR: %s\" % msg\n if verbose > 0:\n print >> sys.stderr, errmsg.encode(\"UTF-8\")\n munkilog.log(errmsg)\n # append this error to our errors log\n munkilog.log(errmsg, \"errors.log\")\n # collect the errors for later reporting\n if \"Errors\" not in reports.report:\n reports.report[\"Errors\"] = []\n 
reports.report[\"Errors\"].append(\"%s\" % msg)", "def error(str):\n\n Utils.send('error', str)", "def internal_error_handler(error):\r\n return render_template('error.500.html')", "def _handle_error(cls, e, request):\r\n if e.check(InvalidRequest):\r\n msg = e.getErrorMessage()\r\n code = httpstatus.HTTP_STATUS_CODE_BAD_REQUEST[0]\r\n else:\r\n e.printTraceback()\r\n msg = 'Fatal Error'\r\n code = httpstatus.HTTP_STATUS_CODE_INTERNAL_SERVER_ERROR[0]\r\n\r\n cls._render(request, code, 'text/plain; charset=utf-8', msg)", "def err(msg):\n print(colored.red(\"[ERROR]: {0}\".format(msg)))", "def renderError(self, error_code):\n\n self.error(error_code)\n self.response.write(\"Oops! Something went wrong.\")", "def renderError(self, error_code):\n\n self.error(error_code)\n self.response.write(\"Oops! Something went wrong.\")", "async def test_connection_error(hass: HomeAssistant, conn_error) -> None:\n\n result = await hass.config_entries.flow.async_init(\n DOMAIN, context={\"source\": config_entries.SOURCE_USER}\n )\n result = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"], user_input=DEMO_USER_INPUT\n )\n assert result[\"type\"] == data_entry_flow.FlowResultType.FORM\n assert result[\"errors\"] == {\"base\": \"cannot_connect\"}", "def error(self, *args):\n\n self.log(\"ERROR:\", args)\n if not self.transport.connected:\n Timer(5, connect(self.host, self.port)).register(self)", "def _send_database_problem(self):\n template_filename = self._get_config_template('databaseerror')\n text = read_template(\n template_filename,\n title='%s - Datebase error' % SERVER_NAME,\n header='Database error')\n if not text:\n self._send_internal_server_error()\n return\n self._send_head(text, 500)\n if not self._header_only:\n self.wfile.write(text)", "def sendError(self, errorMessage):\n LOG_ERROR(\"Client.sendError: \" + errorMessage, \"EDEN\")\n # default implementation: disconnect from server\n self.disconnectFromServer()", "def bad_request(error):\r\n\treturn render_template('error_template.html' , title = \"Aaaah ...\", \r\n\t\t\t\t\t\t\t\t\t\t\t\t\tmessage = \"나는 이해하지 못한다.\",\r\n \t\t\t\t\t\t\t\t\t\t\t\tsubline = \"Yeah, the server couldn't understand what you asked for, probably because you didn't give a choice of download.\", \r\n \t\t\t\t\t\t\t\t\t\t\t\timage_location = url_for('static', filename = 'images/simpson-gangam.jpg')), 400", "def error(message):\n if DEBUG:\n with print_lock:\n print((Colours.FAIL + 'ERROR: ' + Colours.END_COLOUR + message).strip())", "def handle_error(error):\n return render_template('errors/{}.html'.format(error.code)), error.code", "def ConnectByNameError(self) -> _n_0_t_14:", "def error(errornum):\n return render_template('error.html', errornum=errornum)", "def error(message='Ops, there are some error...'):\n print(colorful_text(message, Fore.RED))", "def error():\n if \"last_error\" in session.keys():\n error_msg = session[\"last_error\"]\n del session[\"last_error\"]\n else:\n error_msg = \"An unspecified error occurred.\"\n if \"username\" in session.keys():\n buttons = [\n NavButton(url_for(\"logout\"), \"Log Out\"),\n NavButton(url_for(\"show_main\"), \"Main\"),\n NavButton(url_for(\"show_help\"), \"Help\"),\n ]\n else:\n buttons = [\n NavButton(url_for(\"login\") + \"?sso\", \"Log In\"),\n NavButton(url_for(\"show_main\"), \"Main\"),\n NavButton(url_for(\"show_help\"), \"Help\"),\n ]\n return render_template(\"error.html\", message=error_msg, buttons=buttons)", "def _error(msg):\n\n error(None, msg)", "def error(self, message):\n 
ErrorExit('error: {}\\n'.format(message), 2)", "def _error(message):\n\n current.session.error = current.T(message)\n redirect(URL(c=\"default\", f=\"index\"))", "def hearError(self, errcode, *args):\n print \"recieved error:\",errcode", "def errorDialog(self, errormessage):\r\n Tk.tkMessageBox.showerror(self, 'Error', errormessage)", "async def error(cls, description, **kwargs):\n return await cls.message(description, color = discord.Color(0xff72bb))", "def error(self, error):\n pass", "def server_error(err):\n log.error(err)\n return err.msg, 500", "def error_msg(error):\n if request.path.startswith(\"/api\"):\n return jsonify({\"message\": str(error)}), 500\n else:\n return render_template(\"error.html\", message=error), 500", "def server_fault(e):\n return \"Something went wrong, and it is our fault. Try reloading the page.\"", "def httperror( status_code=500, message=b'' ):" ]
[ "0.7256641", "0.70974416", "0.7031372", "0.70112437", "0.69415045", "0.6857179", "0.6755222", "0.6755222", "0.67330766", "0.6692993", "0.66359854", "0.65511245", "0.6525908", "0.6504783", "0.6490381", "0.64878225", "0.64797825", "0.64601153", "0.64406806", "0.64355856", "0.64314383", "0.6419784", "0.6413911", "0.63966876", "0.6359918", "0.6353594", "0.63503057", "0.63500124", "0.63422966", "0.6341983", "0.6330458", "0.63229525", "0.63149244", "0.6289267", "0.6272679", "0.6248507", "0.6232306", "0.6225434", "0.6223931", "0.62087715", "0.6203021", "0.6189145", "0.61871123", "0.6181708", "0.617383", "0.6172184", "0.61703855", "0.61655277", "0.6155882", "0.6153348", "0.61512977", "0.6137671", "0.6132929", "0.61269504", "0.61238897", "0.61108774", "0.61055905", "0.61029845", "0.60794973", "0.6077308", "0.60741585", "0.60679835", "0.6059675", "0.60592455", "0.60582805", "0.6056131", "0.6044153", "0.60396993", "0.603286", "0.6027679", "0.60222244", "0.6017726", "0.6013907", "0.59966534", "0.5991204", "0.59889185", "0.5987545", "0.5987545", "0.5984615", "0.5979172", "0.59771216", "0.5974473", "0.5966165", "0.59467256", "0.5941925", "0.5940607", "0.59330595", "0.59231704", "0.59174895", "0.5911864", "0.59099895", "0.59021765", "0.5902134", "0.58979136", "0.589728", "0.5891569", "0.5888966", "0.5885824", "0.58854365", "0.58832914" ]
0.738177
0
Given a dictionary of show objects, displays them and gives you a choice of which one to get further details for.
def display_shows(shows):
    another = False
    # put this portion into an infinite loop to allow for reviewing other listed media
    while True:
        # display a different question on the second or more pass
        if another:
            again = input('\nWould you like to go back and review a different one? ([y]/n)')
            # if user replies with anything starting with a n, break out of the loop
            if again.lower().startswith('n'):
                clear_screen()
                break
            else:
                clear_screen()
        # list all of the shows that were found
        for n in range(len(shows)):
            print(f'[{n:2}] {shows[n].title}')
        # fix bug where a non-digit is given
        try:
            choice = int(input('\nWhich one would you like to review? '))
            another = True  # after first pass, set this flag
        except ValueError:
            clear_screen()
            break
        if choice in shows.keys():
            clear_screen()
            print(f'Retrieving additional information for {shows[choice].title}')
            # the plot is not on the same page as the other information so requires its own scrape
            plot = get_plot(shows[choice].url)
            # if the plot was found, display it, otherwise skip past it
            if plot:
                print('\n[PLOT]\n')
                wrapped = textwrap.dedent(plot).strip()
                print(textwrap.fill(wrapped, initial_indent=' ', subsequent_indent=' ', width=110))
            # scrape the actual content that we need
            scrape_movie(shows[choice].url)
        else:
            print(f'\n{choice} is not a valid choice!')
            break
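The query describes shows as a dictionary of show objects; the code indexes it with small integers and reads a .title and a .url attribute from each value. A hypothetical usage sketch under those assumptions (the Show namedtuple and the stubbed clear_screen, get_plot, and scrape_movie helpers are illustrations, not part of the dataset):

from collections import namedtuple
import textwrap  # used by display_shows to wrap the plot text

Show = namedtuple('Show', ['title', 'url'])

def clear_screen():
    # Stand-in for the real helper: ANSI escape to clear the terminal.
    print('\x1b[2J\x1b[H', end='')

def get_plot(url):
    # Stub: the real function scrapes the plot from the show's page.
    return 'Plot text fetched from ' + url

def scrape_movie(url):
    # Stub: the real function scrapes the remaining details.
    print('scraping', url)

shows = {0: Show('Example Show', 'https://example.com/show/0'),
         1: Show('Another Show', 'https://example.com/show/1')}
display_shows(shows)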
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_show_information():\n\n #getting the guidebox_id variable from show_page.html\n guidebox_id = request.args.get(\"guidebox_id\")\n\n #get the show from the database\n show = Show.find_show_with_guidebox_id(guidebox_id)\n\n #check if show has a description, if it does then just pass the show on\n if show.description and show.network:\n print \"\\n\\n\\nShow description and network in database.\\n\\n\\n\"\n #if not, call API to get the show description, add description to show information in the database\n\n else:\n #API call to get the show information\n show_data = guidebox_show_info(guidebox_id)\n \n #add show description to table\n Show.add_description_network_to_show(show, show_data)\n print \"\\n\\n\\nAdded show description and network to the database.\\n\\n\\n\"\n \n\n show_info = Show.find_show_with_guidebox_id(guidebox_id)\n\n return jsonify(show_info.as_dict())", "def fetch_show_information (self, id, type):\n # check if we have a show or a movie, the request made depends on this\n if type == 'show':\n paths = [\n ['videos', id, ['requestId', 'regularSynopsis', 'evidence']],\n ['videos', id, 'seasonList', 'current', 'summary']\n ]\n else:\n paths = [['videos', id, ['requestId', 'regularSynopsis', 'evidence']]]\n response = self._path_request(paths=paths)\n return self._process_response(response=response, component='Show information')", "def do_show(self, *args):\n args = [ele for ele in args[0].split(' ')]\n if args[0] == '':\n print(\"** class name missing **\")\n return\n if args[0] not in self.list_classes:\n print(\"** class doesn't exist **\")\n return\n if len(args) != 2:\n print(\"** instance id missing **\")\n return\n\n storage.reload()\n dict_objs = storage.all()\n if dict_objs is None or dict_objs == []:\n print(\"** no instance found **\")\n return\n\n key = \"{}.{}\".format(args[0], args[1])\n if key in dict_objs.keys():\n print(dict_objs[key])\n else:\n print(\"** no instance found **\")", "def getShowDetails(self):\n searchURL = \"http://api.tvmaze.com/shows/\" + str(self.__showID) \\\n + \"?embed=cast\"\n\n response = requests.get(searchURL)\n data = response.json()\n\n self.__detailsJSON = self.parseShowDetails(data)", "def get_show_info(self, id, **kwargs):\n kwargs['id'] = id\n return self.get('info/show.json', **kwargs)", "def do_show(self, arg):\n arg = arg.split()\n try:\n args = arg[0] + \".\" + arg[1]\n except:\n pass\n objects = storage.all()\n if len(arg) is 0:\n print(\"** class name missing **\")\n elif len(arg) == 1 and arg[0] in self.dict.keys():\n print(\"** instance id missing **\")\n elif arg[0] not in self.dict.keys():\n print(\"** class doesn't exist **\")\n elif args not in objects:\n print(\"** no instance found **\")\n else:\n print(objects[args])", "def do_show(self, arg):\n args = arg.split()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n elif len(args) < 2 and args[0] in self.class_dict:\n print(\"** instance id missing **\")\n return\n\n object_dict = storage.all()\n if args[0] in self.class_dict:\n for full_key in object_dict:\n key = full_key.split(\".\")\n if key[1] == args[1]:\n print(object_dict[full_key])\n return\n print(\"** no instance found **\")\n else:\n print(\"** class doesn't exist **\")", "def show(list_of_dicts, key):\n print(\"\\nHere are the stocks I have considered for you:\")\n for i in list_of_dicts: # iterates through list_of_dicts and prints Name and Market Cap\n print(f\" - {i['Name']} - {key} is {i[key]} \")", "def generate_show_details_label(shows, show_id):\n print 
shows[show_id]\n return \"{starttime:<4}: {name: >40}\\n{details}\".format(**shows[show_id])", "def get_show_list():\n\n url = 'https://api.transistor.fm/v1/shows'\n r = httpx.get(url, headers=header)\n typer.echo([(x['id'], x['attributes']['title']) for x in r.json()['data']])", "def do_show(self, arg, opts):\n # If arguments are being passed as a list instead of as a string\n if USE_ARG_LIST:\n if arg:\n arg = arg[0]\n else:\n arg = ''\n\n param = arg.strip().lower()\n result = {}\n maxlen = 0\n for p in self.settable:\n if (not param) or p.startswith(param):\n result[p] = '%s: %s' % (p, str(getattr(self, p)))\n maxlen = max(maxlen, len(result[p]))\n if result:\n for p in sorted(result):\n if opts.long:\n self.poutput('{} # {}'.format(result[p].ljust(maxlen), self.settable[p]))\n else:\n self.poutput(result[p])\n else:\n raise LookupError(\"Parameter '%s' not supported (type 'show' for list of parameters).\" % param)", "def do_show(self, args):\n temp = args.split()\n\n if len(temp) == 0:\n print(\"** class name missing **\")\n return\n elif temp[0] not in self.myclasses:\n print(\"** class doesn't exist **\")\n return\n elif len(temp) < 2:\n print('** instance id missing **')\n return\n else:\n all_objs = storage.all()\n for i in all_objs.keys():\n if i == \"{}.{}\".format(temp[0], temp[1]):\n print(all_objs[i])\n return\n print('** no instance found **')", "def do_show(self, arg, opts):\n # If arguments are being passed as a list instead of as a string\n if USE_ARG_LIST:\n if arg:\n arg = arg[0]\n else:\n arg = ''\n\n param = arg.strip().lower()\n result = {}\n maxlen = 0\n for p in self.settable:\n if (not param) or p.startswith(param):\n result[p] = '%s: %s' % (p, str(getattr(self, p)))\n maxlen = max(maxlen, len(result[p]))\n if result:\n for p in sorted(result):\n if opts.long:\n self.poutput('%s # %s' % (result[p].ljust(maxlen), self.settable[p]))\n else:\n self.poutput(result[p])\n else:\n raise LookupError(\"Parameter '%s' not supported (type 'show' for list of parameters).\" % param)", "def do_show(self, args):\n args = args.split()\n print(args)\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n if len(args) == 1:\n print(\"** instance id missing **\")\n return\n if args[0] not in HBNBCommand.class_check:\n print(\"** class doesn't exist **\")\n return\n\n all_objs = storage.all()\n key = args[0] + '.' 
+ args[1]\n if key in all_objs:\n print(all_objs[key])\n else:\n print(\"** no instance found **\")", "def series_information(guidebox_id):\n\n \n #query the database to check if the show's already there\n show = Show.find_show_with_guidebox_id(guidebox_id)\n\n #if show is in database, pass jinja the show result\n if show:\n show_info = show\n print \"Show in the database.\"\n else:\n #else, run API call to get show info\n show_info = guidebox_show_info(guidebox_id)\n\n #insert new show into table\n show = Show.add_show(show_info)\n print \"%s has been added to database.\" % (show_info[\"title\"])\n \n show_info = Show.find_show_with_guidebox_id(guidebox_id)\n \n if \"current_user\" in session:\n #find the current logged in user\n email = session.get(\"current_user\")\n\n #use email to find the user_id\n user_id = User.find_user_id_with_email(email)\n\n #check if user has favorited show already\n favorite = Favorite.find_show_favorites_list(guidebox_id, user_id)\n\n #if user has favorited\n if favorite:\n favorited = True\n #if has not favorited yet\n else:\n favorited = False\n else:\n favorited = False\n\n return render_template(\"show_page.html\",\n show_info=show_info,\n favorited=favorited)", "def do_show(self, args):\n args = shlex.split(args)\n dicti = storage.all()\n if not args:\n print(\"** class name missing **\")\n elif not args[0] in name_of_class:\n print(\"** class doesn't exist **\")\n elif len(args) == 1:\n print(\"** instance id missing **\")\n elif \"{}.{}\".format(args[0], args[1]) in dicti:\n print(dicti[\"{}.{}\".format(args[0], args[1])])\n else:\n print(\"** no instance found **\")", "def show(*args, **kwargs):\n from . import core\n\n return core.show(*args, **kwargs)", "def do_show(self, line):\n\n args = line.split()\n\n if not args:\n print(\"** class name missing **\")\n elif args[0] not in HBNBCommand.class_list:\n print(\"** class doesn't exist **\")\n elif len(args) < 2:\n print(\"** instance id missing **\")\n else:\n key = args[0] + \".\" + args[1]\n dict_objects = storage.all()\n obj = dict_objects.get(key)\n if obj:\n print(obj)\n else:\n print(\"** no instance found **\")", "def show(self, class_name, inst_id, stored_objects):\n instance = \"{}.{}\".format(class_name, inst_id)\n if instance not in stored_objects:\n print(\"** no instance found **\")\n else:\n print(stored_objects[instance])", "def displayShow(app, *options):\n\n inNb = app.inNb\n _browse = app._browse\n display = app.display\n display.setup()\n data = display.current\n return showDict(\"<b>current display options</b>\", data, _browse, inNb, *options)", "def show(*args):", "def show(*args):", "def show(*args):", "def show(*args):", "def show(self) -> None:", "def do_show(self, args):\n args = args.split()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n if len(args) == 1:\n print(\"** instance id missing **\")\n return\n if args[0] not in HBNBCommand.valid_classes:\n print(\"** class doesn't exist **\")\n return\n all_objs = storage.all()\n for objs_id in all_objs.keys():\n if objs_id == args[1] and args[0] in str(type(all_objs[objs_id])):\n print(all_objs[objs_id])\n return\n print(\"** no instance found **\")", "def show(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"show\"), kwargs)", "def show(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"show\"), kwargs)", "def show(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"show\"), kwargs)", "def show(self, **kwargs):\n\n return 
self.api_request(self._get_method_fullname(\"show\"), kwargs)", "def show(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"show\"), kwargs)", "def show(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"show\"), kwargs)", "def show(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"show\"), kwargs)", "def show(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"show\"), kwargs)", "def do_show(self, arg):\n if len(arg) == 0:\n print(\"** class name missing **\")\n return\n coms = tuple(arg.split())\n if coms[0] not in self.cls:\n print(\"** class doesn't exist **\")\n elif len(coms) < 2:\n print(\"** instance id missing **\")\n else:\n obj = coms[0] + \".\" + coms[1]\n if obj not in storage.all().keys():\n print(\"** no instance found **\")\n else:\n print(storage.all()[obj])", "def show(self, *args, **kwargs) -> None:\n pass", "def show(self, *args, **kwargs) -> None:\n pass", "def show(self, *args, **kwargs) -> None:\n pass", "def show(self,id, **kw):\n r = validate_get(id)\n return dict(name=name, namepl=namepl, record=r)", "def delegation_show(request, pk):\n delegation = Delegation.objects.get(pk=pk)\n\n delegates = Delegate.objects.filter(delegation_id=pk).order_by(\"committee__name\")\n\n context = {\"delegation\": delegation, \"delegates\": delegates, \"delegation_show\": True}\n template = \"jurycore/delegation_show.html\"\n return render(request, template, context)", "def show_info(self, occurrence_item, all_events_opt, all_metrics_opt,\n all_opt):\n if all_opt:\n self.__show_all()\n sys.exit(0)\n\n if all_events_opt:\n self.__show_all_events()\n sys.exit(0)\n\n if all_metrics_opt:\n self.__show_all_metrics()\n sys.exit(0)\n\n occurrence = occurrence_item[0]\n if occurrence in self.events_list:\n self.__print_events_info(occurrence)\n elif occurrence in self.metrics_list:\n self.__print_metrics_info(occurrence)\n else:\n print(\"Event or Metric \\\"{}\\\" not found.\".format(occurrence))\n sys.exit(1)\n sys.exit(0)", "def display(self, *args, **kwargs):\n return self.show(*args, **kwargs)", "def t1_show(**kwargs):\n sessiontoken = kwargs['sessiontoken']\n proxy = kwargs['proxy']\n json_response = get_t1_json(proxy, sessiontoken)\n if json_response != False:\n t1_gateways = json_response['results']\n table = PrettyTable(['Name', 'id', 'Type'])\n for i in t1_gateways:\n if 'type' not in i:\n i['type'] = None\n table.add_row([i[\"display_name\"], i[\"id\"], i[\"type\"]])\n print(table)\n else:\n print(\"Something went wrong, please try again.\")\n sys.exit(1)", "def show_data():", "def do_show(self, argv):\n argument_split = argv.split()\n aux = 0\n if len(argument_split) == 0:\n print(\"** class name missing **\")\n elif not argument_split[0] in self.__names:\n print(\"** class doesn't exist **\")\n elif len(argument_split) < 2:\n print(\"** instance id missing **\")\n elif argument_split[0] in self.__names:\n for key, obj in models.storage.all().items():\n if key == argument_split[0]+\".\"+argument_split[1]:\n aux = 1\n print(obj)\n if aux == 0:\n print(\"** no instance found **\")", "def show(self, show_id=False, show_prob=False, cut=None, crit=None):\n return self._root.show(\n show_id=show_id, show_prob=show_prob, cut=cut, crit=crit)", "def __init__(self, showID):\n self.__showID = showID\n self.__detailsJSON = None", "def get(self, show_id):\r\n show = Shows.query.filter_by(ShowID=show_id).first_or_404()\r\n content = jsonify({\r\n \"shows\": [{\r\n \"date\": 
get_iso_format(show.ShowDate),\r\n \"countryCode\": show.CountryCode,\r\n \"country\": show.Country,\r\n \"city\": show.City,\r\n \"venue\": show.Venue,\r\n \"setlist\": self.get_setlist(show.ShowID),\r\n \"otherBands\": self.get_other_bands(show.ShowID),\r\n \"people\": self.get_show_people(show.ShowID),\r\n }]\r\n })\r\n\r\n return make_response(content, 200)", "def view_details_complete():\n curItem = complete_tereeview.focus().strip('#')\n\n with open(\"images_url_dict.json\", \"r\") as images_dict_fo_complete:\n imgs_dict = json.load(images_dict_fo_complete)\n name = \"-\".join(curItem.lower().split())\n\n url, title, ID = imgs_dict[name]\n\n webbrowser.open_new_tab(\"https://eztv.io/shows/{}/{}/\".format(ID, title))", "def shows():\n \n\n # displays list of shows at /shows\n shows = Shows.query.all()\n\n data = []\n\n for show in shows :\n \n v = Venue.query.get(show.venue_id)\n a = Artist.query.get(show.artist_id)\n\n t = str(datetime.strptime(show.start_time, \"%Y-%m-%d %H:%M:%S\"))\n\n data.append({\n \"venue_id\": show.id,\n \"venue_name\": v.name,\n \"artist_id\": a.id,\n \"artist_name\": a.name,\n \"artist_image_link\": a.image_link,\n \"start_time\": t\n })\n\n # TODO: replace with real venues data.\n # num_shows should be aggregated based on number of upcoming shows per venue.\n \n return render_template(\"pages/shows.html\", shows=data)", "def _on_details_navigating(self, evt):\n \n # get URL\n url = evt.url\n \n # parse URL\n match = DETAILS_URL_PATTERN.search(url)\n if not match:\n return\n \n # get match\n parameter = match.group('parameter')\n value = match.group('value').replace(\"%20\", \" \")\n \n # check value\n if not value:\n return\n \n # show article by DOI\n if parameter == 'doi':\n link = \"https://dx.doi.org/%s\" % value\n try: webbrowser.open(link, autoraise=1)\n except: pass\n \n # show article by PMID (in PubMed)\n elif parameter == 'pmid':\n link = \"https://ncbi.nlm.nih.gov/pubmed/%s\" % value\n try: webbrowser.open(link, autoraise=1)\n except: pass\n \n # search by author (in PubMed)\n elif parameter == 'author':\n query = \"%s[AU]\" % value\n self._search_repository(query)\n \n # search by journal (in PubMed)\n elif parameter == 'journal':\n query = \"%s[JT]\" % value\n self._search_repository(query)\n \n # show articles by author (in library)\n elif parameter == 'authorid':\n query = \"%s[AUID]\" % value\n self._articles_view.SetMasterQuery(None)\n self._articles_view.SetQuery(query)\n self._articles_view.ShowArticles()\n \n # show articles by label (in library)\n elif parameter == 'labelid':\n query = \"%s[LABELID]\" % value\n self._articles_view.SetMasterQuery(None)\n self._articles_view.SetQuery(query)\n self._articles_view.ShowArticles()\n \n # show articles by collection (in library)\n elif parameter == 'collectionid':\n query = \"%s[COLLECTIONID]\" % value\n self._articles_view.SetMasterQuery(None)\n self._articles_view.SetQuery(query)\n self._articles_view.ShowArticles()\n \n # set article rating\n elif parameter == 'rating':\n if value in \"012345\":\n self._on_articles_rating(rating=int(value))\n \n # set article colour\n elif parameter == 'colour':\n colour = mwx.COLOUR_BULLETS.get(value, None)\n if colour is not None:\n self._on_articles_colour(colour=colour)\n \n # reveal PDF file\n elif parameter == 'pdf':\n path = os.path.join(self._library.library_path, value+\".pdf\")\n self._on_articles_reveal_pdf(path=path)", "def show(self, keys=None, sort_keys_function=None):\n output_keys = keys or self.keys\n if not self.items:\n print(\"No items 
to show\")\n else:\n for item in self.__get_items(sort_keys_function):\n for output_key in output_keys:\n print(\"{0:25}: {1!s}\".format(output_key, getattr(item, self.mapping[output_key])))\n print(\"-\" * 25)", "def show(self):\n\n pass", "def ShowObject(object_id):\n return ShowObjects(object_id)==1", "def test_detail(self):\n response = Tmdb.detail(69740)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(data['id'])\n self.assertTrue(data['name'])\n # TODO check if all the shows are in the good format (can be from_dict/to_dict)", "def shows():\n # displays list of shows at /shows\n # replace with real venues data.\n # num_shows should be aggregated based on number of upcoming shows per venue.\n\n # This is 3 tuple with (Show, Artist, Venue) objects\n show_query = db.session.query(Show, Artist, Venue).filter(Show.artist_id ==\n Artist.id).filter(Show.venue_id==Venue.id).all()\n data = [\n {\"venue_id\": sav[2].id, \"venue_name\": sav[2].name,\n \"artist_id\":sav[1].id, \"artist_name\": sav[1].name,\n \"artist_image_link\": sav[1].image_link,\n \"start_time\": sav[0].start_time\n }\n for sav in show_query\n ]\n # print(data)\n return render_template('pages/shows.html', shows=data)", "def _show(self, **kwargs):\n\n resource_name = self._get_resource_name(**kwargs)\n\n return self._make_request(\n uri='%s/%s' % (self._metadata['uri'], resource_name)\n )", "def cat_details(cat_id, shelter_id):\n\n shelter = petfinder.shelter_data_map(shelter_id)\n shelter = list(shelter.values())\n cat = petfinder.cat_data_map(cat_id)\n cat = list(cat.values())\n\n return render_template('more_details.html',\n shelter=shelter,\n cat=cat)\n\n #if user selects <3 to favorite a cat then redirct to the login page", "def do_event_show(client, args):\n kwargs = {}\n if args.type is not None:\n kwargs['obj_type'] = args.type\n if args.id is not None:\n kwargs['obj_id'] = args.id\n if args.since is not None:\n kwargs['since'] = args.since\n if args.until is not None:\n kwargs['until'] = args.until\n if args.limit and int(args.limit) > 0:\n kwargs['limit'] = int(args.limit)\n if args.offset and int(args.offset) > 0:\n kwargs['offset'] = int(args.offset)\n if args.ascending:\n kwargs['order'] = 'asc'\n if args.action is not None:\n kwargs['action'] = args.action\n logs = client.logs.list(**kwargs)\n utils.print_list(logs, client.logs.columns)", "def info(self):\n past_shows = self.get_shows(Show.start_time <= datetime.now())\n upcoming_shows = self.get_shows(Show.start_time > datetime.now())\n\n return {\n 'id': self.id,\n 'name': self.name,\n 'genres': self.genres,\n 'address': self.address,\n 'city': self.city,\n 'state': self.state,\n 'phone': self.phone,\n 'website': self.website,\n 'facebook_link': self.facebook_link,\n 'seeking_talent': self.seeking_talent,\n 'seeking_description': self.seeking_description,\n 'image_link': self.image_link,\n 'past_shows': past_shows,\n 'upcoming_shows': upcoming_shows,\n 'past_shows_count': len(past_shows),\n 'upcoming_shows_count': len(upcoming_shows)\n }", "def show(args, syn):\n \n ent = syn.get(args.id, downloadFile=False)\n syn.printEntity(ent)", "def do_show(self, args):\n args = shlex.split(args)\n if len(args) == 0:\n print(\"** class name missing **\")\n return False\n if args[0] in classes:\n if len(args) > 1:\n key = args[0] + \".\" + args[1] # args 0 is name, args 1 es id\n if key in models.storage.all():\n print(models.storage.all()[key])\n else:\n print(\"** no instance found **\")\n else:\n print(\"** instance id 
missing **\")\n else:\n print(\"** class doesn't exist **\")", "def info(self):\n past_shows = self.get_shows(Show.start_time <= datetime.now())\n upcoming_shows = self.get_shows(Show.start_time > datetime.now())\n\n return {\n 'id': self.id,\n 'name': self.name,\n 'genres': self.genres,\n 'city': self.city,\n 'state': self.state,\n 'phone': self.phone,\n 'website': self.website,\n 'facebook_link': self.facebook_link,\n 'seeking_venue': self.seeking_venue,\n 'seeking_description': self.seeking_description,\n 'image_link': self.image_link,\n 'past_shows': past_shows,\n 'upcoming_shows': upcoming_shows,\n 'past_shows_count': len(past_shows),\n 'upcoming_shows_count': len(upcoming_shows)\n }", "def do_show(self, args):\n\n func = getattr(args, \"func\", None)\n\n if func is not None:\n func(self, args)\n else:\n self.do_help(\"show\")", "def detail(): \n\n # get contentid\n content_id = request.args.get('contentid')\n\n # get shortest places\n title, places = get_shortest(content_id)\n print(content_id)\n\n return render_template('detail.html', \n title=title,\n content_id=content_id,\n places=places, \n count=len(places))", "def show(self):", "def parse_show(self, res, date, movie_id, theater_id):\n times = res.css(SELECTORS['SHOW_TIMES']).re(r'[0-9]{1,2}[:][0-9]{2}')\n obj = {\n 'movie_id': movie_id,\n 'theater_id': theater_id,\n 'date': DateHelper.strtodatetime(date),\n 'start': DateHelper.strtoseconds(times[0]),\n 'end': DateHelper.strtoseconds(times[1]),\n 'type': SelectHelper.get_array(res, SELECTORS['SHOW_TYPE']),\n 'url': BASE_URL + SelectHelper.get(res, SELECTORS['SHOW_URL']),\n }\n return Show(obj)", "def parseShowDetails(self, data):\n cast = []\n\n title = data[\"name\"]\n\n year = data[\"premiered\"]\n try:\n year = year[:4] # just get the year from premiere date\n except TypeError:\n year = \"N/A\" # premiere date unavailable\n\n imdbRating = data[\"rating\"][\"average\"]\n\n try:\n network = data[\"network\"][\"name\"]\n except TypeError:\n network = \"N/A\" # network unavailable\n\n try:\n streaming = data[\"webChannel\"][\"name\"]\n except TypeError:\n streaming = \"N/A\"\n\n try:\n poster = data[\"image\"][\"medium\"]\n except TypeError:\n poster = \"N/A\" # poster unavailable\n\n summary = re.sub(\"<.*?>\", \"\", data[\"summary\"]) # remove HTML tags\n\n count = 0\n for member in data[\"_embedded\"][\"cast\"]:\n if(count == 3): # only get first 3 cast members listed\n break\n\n try:\n castImage = member[\"person\"][\"image\"][\"medium\"]\n except TypeError:\n castImage = \"N/A\" # cast image unavailable\n\n tempCast = {\n \"name\": member[\"person\"][\"name\"],\n \"character\": member[\"character\"][\"name\"],\n \"image\": castImage\n }\n cast.append(tempCast)\n count += 1\n\n numSeasons = self.getNumSeasons()\n\n details = {\n \"id\": self.__showID,\n \"title\": title,\n \"year\": year,\n \"numSeasons\": numSeasons,\n \"imdbRating\": imdbRating,\n \"network\": network,\n \"streaming\": streaming,\n \"poster\": poster,\n \"summary\": summary,\n \"cast\": cast,\n }\n\n return details", "def show(self):\n pass", "def get_details(self):", "def committee_show(request, pk):\n committee = Committee.objects.get(pk=pk)\n\n delegates = Delegate.objects.filter(committee_id=pk)\n\n context = {\"committee\": committee, \"delegates\": delegates}\n template = \"jurycore/committee_show.html\"\n return render(request, template, context)", "def display_log(obj, title=None, show=False):\n print(obj)", "def show_venue(venue_id):\n # shows the venue page with the given venue_id\n result = 
db.session.query(Venue).filter(Venue.id == venue_id)\n result = result[0]\n\n past_shows_count = 0\n upcoming_shows_count = 0\n\n past_shows = []\n upcoming_shows = []\n\n all_shows = Shows.query.all()\n\n print(all_shows)\n\n for show in all_shows:\n if show.venue_id == result.id:\n show_time = datetime.strptime(show.start_time, '%Y-%m-%d %H:%M:%S')\n if show_time > datetime.now() :\n upcoming_shows.append(show)\n else: \n past_shows.append(show)\n \n past_shows_count = len(past_shows)\n upcoming_shows_count = len(upcoming_shows)\n \n\n # TODO: replace with real venue data from the venues table, using venue_id (DONE)\n resdata = {\n \"id\": result.id,\n \"name\": result.name,\n \"genres\": json.loads(result.genres),\n \"address\": result.address,\n \"city\": result.city,\n \"state\": result.state,\n \"phone\": result.phone,\n \"website\": result.website,\n \"facebook_link\": result.facebook_link,\n \"seeking_talent\": result.seeking_talent,\n \"seeking_description\": result.seeking_description,\n \"image_link\": result.image_link,\n \"past_shows\": past_shows,\n \"upcoming_shows\": upcoming_shows,\n \"past_shows_count\": past_shows_count,\n \"upcoming_shows_count\": upcoming_shows_count,\n }\n \n data = list(filter(lambda d: d[\"id\"] == venue_id, [resdata]))[0]\n return render_template(\"pages/show_venue.html\", venue=data)", "def do_instance_show(cs, args):\n try:\n instance = cs.instances.detail(args.instance)\n except exceptions.NotFound as e:\n msg = \"No server with an id of '%s' exists\" % args.instance\n e.message = msg\n raise\n\n _print_server_details(instance)", "def executeShow(self,\n rsrcType,\n showAdditionalParams=[],\n rsrcAdditionalParams=[]):\n\n args = [\"show\",\n \"--wavefrontHost\", util.wavefrontHostName,\n \"--apiToken\", util.wavefrontApiToken] \\\n + showAdditionalParams \\\n + [rsrcType] \\\n + rsrcAdditionalParams\n wc = wavectl.Wavectl(designForTestArgv=args)\n\n with util.StdoutCapture() as captOut:\n wc.runCmd()\n\n return captOut.str()", "def view_details_wishlist():\n try:\n curItem = wishlist_treeview.focus().strip('#')\n\n with open(\"images_url_dict.json\", \"r\") as images_dict_fo_complete:\n imgs_dict = json.load(images_dict_fo_complete)\n name = \"-\".join(curItem.lower().split())\n\n _, title, ID = imgs_dict[name]\n\n webbrowser.open_new_tab(\"https://eztv.io/shows/{}/{}/\".format(ID, title))\n except KeyError:\n print(\"Failed to use series list\")\n\n webbrowser.open_new_tab(\"https://www.imdb.com/find?ref_=nv_sr_fn&q={}&s=tt\".format(curItem))", "def get_shows(self, comparison):\n results = []\n\n for show in db.session.query(\n Show.venue_id.label('venue_id'),\n Venue.name.label('venue_name'),\n Venue.image_link.label('venue_image_link'),\n func.to_char(Show.start_time, 'YYYY-MM-DD HH24:MI:SS').label('start_time')\n ).filter(\n Show.artist_id == self.id,\n Show.venue_id == Venue.id\n ).filter(\n comparison\n ).all():\n\n results.append({\n 'venue_id': show.venue_id,\n 'venue_name': show.venue_name,\n 'venue_image_link': show.venue_image_link,\n 'start_time': show.start_time\n })\n return results", "def do_show(self, arg):\n arg_list = arg.split(\" \") if type(arg) == str else arg\n if not arg:\n print(\"** class name missing **\")\n return\n if arg_list[0] not in HBNBCommand.class_list:\n print(\"** class doesn't exist **\")\n return\n if len(arg_list) < 2:\n print(\"** instance id missing **\")\n return\n key = arg_list[0] + \".\" + arg_list[1]\n if key not in storage.all():\n print(\"** no instance found **\")\n return\n 
print(storage.all()[key])", "def handleShow(self):\n logging.info(self.total_ticket)\n for entry in self.log:\n logging.info(entry)\n logging.info(self.role)", "def list_all_shows(self):\n self.load_shows()\n print()\n for show_id in sorted(\n self.shows_data, key=lambda show_id: self.shows_data[show_id]['title']\n ):\n next_show = self.shows_data[show_id]\n if next_show['watchedEpisodes'] <= 0:\n show_sign = '-'\n elif next_show['watchedEpisodes'] < next_show['totalEpisodes']:\n show_sign = '+'\n else:\n show_sign = ' '\n\n alias = self.alias_by_title(next_show['title'])\n if not alias:\n alias = '-'\n\n print('{0}{1}({7}): {2}/{3} ({4}%), rating = {5}({6})'.format(\n show_sign,\n tr_out(next_show['title']),\n # next_show['ruTitle'],\n next_show['watchedEpisodes'], next_show['totalEpisodes'],\n 100 * (\n next_show['watchedEpisodes'] / next_show['totalEpisodes']\n if next_show['totalEpisodes'] > 0 else 0\n ),\n next_show['rating'],\n next_show['watchStatus'][0],\n alias\n ))\n print()", "def show(app, tuples, _asString=False, **options):\n\n display = app.display\n\n if not display.check(\"show\", options):\n return \"\"\n\n _browse = app._browse\n inNb = app.inNb\n asString = _browse or _asString\n\n dContext = display.distill(options)\n end = dContext.end\n start = dContext.start\n condensed = dContext.condensed\n condenseType = dContext.condenseType\n\n api = app.api\n F = api.F\n\n item = condenseType if condensed else RESULT\n\n if condensed:\n tuples = condense(api, tuples, condenseType, multiple=True)\n\n html = []\n\n for (i, tup) in tupleEnum(tuples, start, end, LIMIT_SHOW, item, inNb):\n item = F.otype.v(tup[0]) if condensed and condenseType else RESULT\n thisResult = prettyTuple(\n app,\n tup,\n seq=i,\n item=item,\n _asString=asString,\n **options,\n )\n if asString:\n html.append(thisResult)\n\n if asString:\n return \"\".join(html)", "def update_show_info(show_node):\n result_dict = {}\n try:\n # data source (tvrage)\n show_info_e_list = requests.get(\n 'http://services.tvrage.com/feeds/full_show_info.php?sid={0}'.format(show_node['id']))\n result_dict = xmltodict.parse(show_info_e_list.text)\n\n show_node['started'] = result_dict['Show'].get('started', None)\n show_node['ended'] = result_dict['Show'].get('ended', None)\n show_node['image'] = result_dict['Show'].get('image', None)\n show_node['status'] = result_dict['Show'].get('status', None)\n\n # data source (omdbapi)\n omdb_show_info = requests.get(\n 'http://www.omdbapi.com/?t={0}&y=&plot=full&r=json'.format(show_node['name']))\n dict_omdb_show_info = json.loads(omdb_show_info.text)\n if dict_omdb_show_info['Response'] == 'True':\n for key, value in dict_omdb_show_info.iteritems():\n show_node[key] = value\n show_node.push()\n\n except ValueError as e:\n logger.exception(\"Value Error\")\n return\n except Exception as e:\n logger.exception(\"Some network issue, will try again\")\n\n # Country\n show_to_country(show_node, result_dict['Show'].get('origin_country', 'unknown'))\n\n #Genres\n if result_dict['Show'].get('genres', None) is not None:\n genre_list = []\n if type(result_dict['Show']['genres']['genre']) is list:\n genre_list = result_dict['Show']['genres']['genre']\n else:\n genre_list.append(result_dict['Show']['genres']['genre'])\n\n for genre in genre_list:\n show_to_genre(show_node, genre)\n\n #Seasons\n season_list = []\n if result_dict['Show'].get('Episodelist', None) is None:\n # there are no seasons for this show\n return\n\n if type(result_dict['Show']['Episodelist']['Season']) is list:\n season_list = 
result_dict['Show']['Episodelist']['Season']\n else:\n season_list.append(result_dict['Show']['Episodelist']['Season'])\n\n for season in season_list:\n season_node = get_season_for_show(show_node, season['@no'])\n\n #Episodes\n episode_list = []\n if type(season['episode']) is list:\n episode_list = season['episode']\n else:\n episode_list.append(season['episode'])\n\n count = 1\n for episode in episode_list:\n episode_basic_info = {\n 'airdate': episode.get('airdate', None),\n 'epnum': count,\n 'screencap': episode.get('screencap', None),\n 'title': episode.get('title', None)\n }\n episode_node = get_episode_for_season_show(show_node, season_node ,episode_basic_info)\n\n # Add episode info\n try:\n omdb_episode_info = requests.get('http://www.omdbapi.com/?t={0}&Season={1}&Episode={2}'\n .format(show_node['name'],\n season_node['no'],\n episode_node['epnum']))\n\n dict_omdb_episode_info = json.loads(omdb_episode_info.text)\n\n if dict_omdb_episode_info['Response'] == 'True':\n for key, value in dict_omdb_episode_info.iteritems():\n episode_node[key] = value\n episode_node.push()\n except ValueError as e:\n logger.exception(\"Value error\")\n except Exception as e:\n logger.exception(\"network issue: wil try again\")\n success = True\n\n show_episode = Relationship(season_node, \"has\", episode_node)\n graph.create(show_episode)\n count = count + 1", "def get_shows(self, **kwargs):\n return self.get('shows.json', **kwargs)", "def do_show(self, arg):\n args = shlex.split(arg)\n if len(args) == 0:\n print(\"** class name missing **\")\n return False\n if args[0] in class_type:\n if len(args) > 1:\n key = args[0] + \".\" + args[1]\n if key in models.storage.all():\n print(models.storage.all()[key])\n else:\n print(\"** no instance found **\")\n else:\n print(\"** instance id missing **\")\n else:\n print(\"** class doesn't exist **\")", "def select_show_event(obj):\n new_text=generate_show_details_label(obj.parent.parent.shows,obj.show_id)\n print new_text\n obj.parent.parent.show_details_label.text=new_text", "def show(self, item_id):\n pass", "def showInfo(p,personDict):\n info1 = personDict['EnterpriseID'][p[0]]\n info2 = personDict['EnterpriseID'][p[1]]\n print (\"Person A:\",info1)\n print (\"Person B:\",info2)", "def get_shows(self, comparison):\n results = []\n\n for show in db.session.query(\n Show.artist_id.label('artist_id'),\n Artist.name.label('artist_name'),\n Artist.image_link.label('artist_image_link'),\n func.to_char(Show.start_time, 'YYYY-MM-DD HH24:MI:SS').label('start_time')\n ).filter(\n Show.artist_id == Artist.id,\n Show.venue_id == self.id\n ).filter(\n comparison\n ).all():\n\n results.append({\n 'artist_id': show.artist_id,\n 'artist_name': show.artist_name,\n 'artist_image_link': show.artist_image_link,\n 'start_time': show.start_time\n })\n return results", "def should_show():", "def display_single_actor(actor: dict):\n actor_name = \" \".join([n.title() for n in actor[\"name\"].split(\" \")])\n print(colored(figlet_format(actor_name, font=\"chunky\", width=220), \"magenta\"))\n # First and Last activity dates\n actor_activity_dates(actor)\n if \"known_as\" in actor:\n aka = \", \".join(list(actor['known_as'].split(',')))\n aka = f\"{chunk_long_description(aka)}\"\n aka = f\"{colored('Otherwise known as', attrs=['bold', 'underline'])}\\n{aka}\"\n print(f\"{aka}\\n\")\n if actor[\"description\"]:\n print(colored(\"Adversary description\", attrs=[\"bold\", \"underline\"]))\n print(f\"{chunk_long_description(actor['description'])}\\n\")\n # Actor capability, origin and 
type\n actor_type_and_capability(actor)\n # Motivations\n simple_list_display(\"motivations\", actor, \"Motivations\")\n # Objectives\n simple_list_display(\"objectives\", actor, \"Objectives\")\n # Capabilities\n simple_list_display(\"capabilities\", actor, \"Capabilities\")\n # Target Regions\n simple_list_display(\"target_regions\", actor, \"Targeted regions\")\n # Target Countries\n large_list_display(\"target_countries\", actor, \"Targeted countries\")\n # Target industries\n large_list_display(\"target_industries\", actor, \"Targeted industries\")\n # Kill chain\n if \"kill_chain\" in actor:\n chain = actor[\"kill_chain\"]\n print(colored(\"Tactics, Techniques and Procedures\", attrs=[\"bold\", \"underline\"]))\n for key, val in chain.items():\n if \"rich_text_\" not in key:\n if val[:3] != \"\\r\\n\\t\" and val[:3] != \"CVE\":\n val = \"\\r\\n\\t\" + val\n key = \" \".join([k.title() for k in key.split(\"_\")]).replace(\"And\", \"and\")\n print(f\"{bold(key)}: {chunk_long_description(val, 100)}\\n\")\n # eCrime Kill chain\n if \"ecrime_kill_chain\" in actor:\n ekc = actor[\"ecrime_kill_chain\"]\n print(colored(\"ECrime Tactics, Techniques and Procedures\", attrs=[\"underline\"]))\n for key, val in ekc.items():\n if \"rich_text_\" not in key and val:\n key = \" \".join([k.title() for k in key.split(\"_\")]).replace(\"And\", \"and\")\n print(f\"{bold(key)}: {val}\")", "def cli_print_record( field_list, showid=False):\n debug(\"cli_print_record(%s)\" % field_list)\n try:\n raw_record_list = api.find_records(field_list)\n except NoRecordsFound as error:\n print \"No records found for: %(field_list)s, %(error)s\" % locals() \n return False\n except InvaildQuery as error:\n print \"Not query to query database with\"\n return False\n\n # Grab all the display fields from the field_list\n display_field_list = api.get_display_fields(field_list)\n\n # Commented out, as i will assume if you have not asked for any fields,\n # then you want them all\n # Make sure that name is in the display_field_list\n #if 'name' not in display_field_list:\n # display_field_list.append('name')\n\n record_list = []\n record_length = defaultdict(int)\n for raw_record in raw_record_list:\n record = raw_record\n for k, v in raw_record.items():\n if isinstance(v, list):\n v = \",\".join(v)\n record[k] = v\n if record_length[k] < len(str(v)):\n record_length[k] = len(str(v))\n record_list.append(record)\n\n if display_field_list:\n\n simple_format = re.sub('(?P<m>\\w+)',\"%(\\g<m>)s\", \" \".join(display_field_list) )\n\n # Better formatting of the simple_format string\n display_string = \"\"\n for d in display_field_list:\n display_string += \"%%(%s)-%ds \" % (d, record_length[d])\n simple_format = display_string\n\n for record in record_list:\n try:\n print simple_format % record\n except KeyError as error:\n debug(\"cli_print_record: unable to print fields for record: %(error)s\" % locals())\n else:\n for record in record_list:\n print\n print '\\033[1m%(name)s\\033[0m' % record\n for key, value in sorted(record.items()):\n if type(value).__name__ in [ 'str', 'unicode','int','float','bool']:\n print \" %(key)s: %(value)s\" % locals()\n continue\n elif type(value).__name__ in [ 'list', 'set']:\n print \" %s: %s\" % ( key, \",\".join( value) )\n continue\n elif type(value).__name__ == 'ObjectId':\n if showid:\n print \" %(key)s: %(value)s\" % locals()\n continue\n elif type(value).__name__ == 'NoneType':\n continue\n\n else:\n raise RecordKeeperException(\"Unhandled data format '%s' <%s>\" % ( key, 
type(value).__name__))", "def show_by_sample(request, sample_id, page_num=1):\n\tsample = Sample.objects.get(id=sample_id)\n\tquery = models.AnalysisAttribute.objects.filter(sample=sample)\n\tfiltered = False\n\tif 'showvalue' in request.GET and request.GET['showvalue']:\n\t\tquery = query.filter(value__isnull=False)\t\t\n\t\tfiltered=True\n\tpaginator = Paginator(query.order_by('-date_time_last_updated'), 10)\n\tpage = paginator.page( page_num )\n\tanalyses = page.object_list\n\treturn render_to_response('analysis/show_by_sample.html', locals())", "def showPoseDetails(pose_id):\n\n pose = Pose.query.get(pose_id)\n next_poses = None\n if pose.next_poses: # pose.next_poses = dictionary of next poses {pose_id: weight, pose_id: weight}\n next_poses = {} # want to compose a dictionary {pose_id: {name: \"Down Dog\", weight: 1} ... }\n for p in pose.next_poses:\n # p_name = db.session.query(Pose.name).filter(Pose.pose_id == int(p)).first()[0]\n p_object = Pose.query.get(int(p))\n next_poses[p] = {\"name\": p_object.name, \"weight\": pose.next_poses[p], \"img_url\": p_object.img_url}\n\n prev_poses = None\n if pose.prev_pose_str:\n prev_poses = pose.prev_pose_str.split(',') # list of previous poses\n\n return render_template(\"pose-details.html\", \n pose=pose,\n next_poses=next_poses,\n prev_poses=prev_poses)", "def load_show(id):\n best = request.accept_mimetypes.best_match([\n \"application/json\", 'text/html'\n ])\n if id is None or request.data:\n failed = {\"Error\": \"The request object does not follow specifications - see documentation.\"}\n return Response(\n response=json.dumps(failed),\n status=400,\n mimetype='application/json'\n )\n elif best != \"application/json\":\n response = Response(\n response=json.dumps({\"Error\": \"This body type is not supported.\"}),\n status=406,\n mimetype='application/json'\n )\n # Get the load from datastore\n load_key = client.key(\"load\", int(id))\n load = client.get(key=load_key)\n\n # Check the load exists\n if load:\n data = dict(load)\n data[\"id\"] = load.key.id\n data[\"self\"] = request.url_root + \"loads/\" + str(load.key.id)\n if data[\"carrier\"]:\n data[\"carrier\"][\"self\"] = request.url_root + \"boats/\" + str(data[\"carrier\"][\"id\"])\n\n response = Response(\n response=json.dumps(data),\n status=200,\n mimetype='application/json'\n )\n return response\n else:\n response = Response(\n response=json.dumps({\"Error\": \"No load with this load_id exists\"}),\n status=404,\n mimetype='application/json'\n )\n return response", "def do_show(self, line):\n try:\n tokens = split(line)\n except ValueError:\n return None\n if len(tokens) < 1:\n print(\"** class name missing **\")\n else:\n objects = models.storage.all()\n cls = models.getmodel(tokens[0])\n if cls is None:\n print(\"** class doesn't exist **\")\n elif len(tokens) < 2:\n print(\"** instance id missing **\")\n elif \".\".join(tokens[:2]) not in objects:\n print(\"** no instance found **\")\n else:\n print(objects[\".\".join(tokens[:2])])", "def show(self, opc=True):\n self.ventana.show_all()\n if opc:\n #mostrar la ventana\n self.ventana.show()\n else:\n #ocultarla\n self.ventana.hide()", "def get_show_people(self, show_id):\r\n result = []\r\n for person in ShowsPeopleMapping.query.filter_by(ShowID=show_id).all():\r\n pd = {\r\n \"showID\": person.ShowID,\r\n \"personID\": person.PersonID,\r\n \"instruments\": person.Instruments,\r\n }\r\n result.append(pd)\r\n return result", "def show_tracker(tracker_info):\n if not tracker_info:\n error(\"Must provide tracker to show\")\n 
entry = \"{} | {}\"\n for t in tracker_info:\n if not tracker_exists(t):\n error(\"Cannot show data for unknown tracker '\" + t + \"'\")\n results = fetch_all(t)\n for r in results:\n print(entry.format(r[1], r[2]))", "def show_artist(artist_id):\n\n result = db.session.query(Artist).filter(Artist.id == artist_id)\n result = result[0]\n\n past_shows_count = 0\n upcoming_shows_count = 0\n\n past_shows = []\n upcoming_shows = []\n\n all_shows = Shows.query.all()\n\n print(all_shows)\n\n for show in all_shows:\n if show.artist_id == result.id:\n show_time = datetime.strptime(show.start_time, '%Y-%m-%d %H:%M:%S')\n if show_time > datetime.now() :\n upcoming_shows.append(show)\n else: \n past_shows.append(show)\n \n past_shows_count = len(past_shows)\n upcoming_shows_count = len(upcoming_shows)\n\n resdata = {\n \"id\": result.id,\n \"name\": result.name,\n \"genres\": json.loads(result.genres),\n \"city\": result.city,\n \"state\": result.state,\n \"phone\": result.phone,\n \"website\": result.website,\n \"facebook_link\": result.facebook_link,\n \"seeking_venue\": result.seeking_venue,\n \"seeking_description\": result.seeking_description,\n \"image_link\": result.image_link,\n \"past_shows\": past_shows,\n \"upcoming_shows\": upcoming_shows,\n \"past_shows_count\": past_shows_count,\n \"upcoming_shows_count\": upcoming_shows_count,\n }\n\n data = list(filter(lambda d: d[\"id\"] == artist_id, [resdata]))[0]\n return render_template(\"pages/show_artist.html\", artist=data)", "def shown(func):\n name = f\"{func.__name__}( )\"\n @wraps(func)\n def wrapped_func(*args, **kwargs):\n res = func(*args, **kwargs)\n res = show(**{name: res})\n return res\n return wrapped_func" ]
[ "0.6592299", "0.6279458", "0.62582874", "0.61725926", "0.61149275", "0.6025211", "0.60056853", "0.5981849", "0.59650505", "0.59365", "0.59159607", "0.58936185", "0.5889489", "0.5884984", "0.5873648", "0.5829819", "0.58108586", "0.57891476", "0.57777935", "0.5762831", "0.5757785", "0.5757785", "0.5757785", "0.5757785", "0.5743318", "0.57253516", "0.5720866", "0.5720866", "0.5720866", "0.5720866", "0.5720866", "0.5720866", "0.5720866", "0.5720866", "0.56889915", "0.56614375", "0.56614375", "0.56614375", "0.56407875", "0.5637489", "0.5615409", "0.56151843", "0.5613934", "0.56086737", "0.5605552", "0.5599511", "0.55882484", "0.5579726", "0.55643797", "0.55615264", "0.55589175", "0.5558582", "0.55540824", "0.55438423", "0.5538486", "0.5537348", "0.5520562", "0.55149835", "0.5512866", "0.54994875", "0.5486933", "0.5486538", "0.5478898", "0.54667157", "0.54658246", "0.5465355", "0.5461099", "0.54607725", "0.5459353", "0.5442733", "0.543044", "0.5428371", "0.54036915", "0.5397689", "0.53959286", "0.5387241", "0.53796196", "0.53760505", "0.5361307", "0.53533465", "0.5352195", "0.5350237", "0.53437114", "0.5335796", "0.533428", "0.5324484", "0.5320598", "0.53157854", "0.5305189", "0.52935195", "0.52727777", "0.52643806", "0.52595234", "0.5258444", "0.5256261", "0.5250255", "0.5248597", "0.5246493", "0.5243604", "0.5235816" ]
0.60066134
6
Displays the ratings that were scraped.
def display_ratings(ratings):
    # only attempt to display the ratings if any were found
    if ratings:
        print('\n[RATINGS]\n')
        for rating in ratings:
            print(f' {rating}', end=' ')

        # needed to get printing back to normal
        print()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_ratings(all_ratings):\n print(\"Here is the current list of all ratings:\")\n for restaurant, rating in sorted(all_ratings.items()):\n print(f'{restaurant} is rated at {rating}.')", "def display_player_ratings(player_ratings):\r\n print('\\nCLASSEMENT DES PARTICIPANTS:\\n Nom ELO Score')\r\n for i in range(0, len(player_ratings)):\r\n print(players_table.get(doc_id=player_ratings[i][0])['Nom'],\r\n players_table.get(doc_id=player_ratings[i][0])['ELO'],\r\n player_ratings[i][1])", "def get_ratings(self):\n return self.ratings", "def get_ratings(self):\n return self.ratings", "def check_ratings(self):\n\n self.browser.get('https://www.imdb.com/')\n\n for title in self.titles:\n input_bar = self.browser.find_element_by_id('navbar-query')\n input_bar.clear()\n\n input_bar.send_keys(title)\n input_bar.send_keys(Keys.RETURN)\n\n time.sleep(3)\n\n # Click on the first suggestion\n css_selector = \"div.findSection:nth-child(3) > table:nth-child(2) > tbody:nth-child(1) > tr:nth-child(1) > td:nth-child(2) > a:nth-child(1)\"\n self.browser.find_element_by_css_selector(css_selector).click()\n time.sleep(3)\n\n # Pull details that will always be available\n score = str(self.browser.find_element_by_class_name('ratingValue').text)\n score = score.split('/10')[0].replace(',', '.')\n\n time.sleep(3)\n\n summary = str(self.browser.find_element_by_class_name('summary_text').text)\n subtext = str(self.browser.find_element_by_class_name('subtext').text)\n\n # Pull details that differ between movies and series\n try:\n duration = str(self.browser.find_element_by_class_name('bp_sub_heading').text) # Only for series\n if 'episodes' not in duration:\n duration = 'Some episodes'\n except Exception:\n # bp_sub_heading won't be found on a movie page\n duration = 'movie'\n\n if subtext[0].isdigit():\n # Split up the details from the subtext\n subtext_list = subtext.split(' | ')\n else:\n # Some movies' subtext starts with 'R' / 'PG-13'\n subtext_list = subtext.split(' | ')\n del subtext_list[0]\n\n # Duration\n if duration == 'movie':\n show_type = 'Movie'\n duration = subtext_list[0]\n try:\n year = datetime.datetime.strptime(subtext_list[2].split(' (')[0], '%d %B %Y').strftime('%Y')\n except ValueError:\n year = str(subtext_list[2].split(' (')[0][-4:])\n\n else: # series\n show_type = 'Serie'\n # Retrieve last season and its release date\n season_tab = str(self.browser.find_element_by_class_name('seasons-and-year-nav').text).strip()\n\n numbers = re.findall('[0-9]+', season_tab)\n latest_season = int(numbers[0])\n latest_year = int(max(numbers, key=lambda x: int(x)))\n\n duration += ' (%d Seasons in %d), %s per episode' % (latest_season, latest_year, subtext_list[0])\n\n year = re.findall('[0-9]+', subtext_list[2])[0]\n\n # Pull some more data out from the subtext\n genres = subtext_list[1].split(', ')\n\n # Pull details that are not always available\n creds_list = []\n creds = self.browser.find_elements_by_class_name('credit_summary_item')\n for c in creds:\n temp = str(c.text)\n if '|' in temp:\n temp = temp.split('|')[0]\n\n creds_list.append(temp)\n\n self.data_dict[title] = {\n 'score': score,\n 'summary': summary,\n 'duration': duration,\n 'credits': creds_list,\n 'genres': genres,\n 'released': year,\n 'type': show_type,\n }", "def printJudgeRatings(self):\n\n try:\n judgeNotesLogger.info(\"printJudgeRatings: Printing out judge ratings from '%s'\\n\", self.notesFile)\n\n # Print Normal List First.\n for ratingTuple in self.judgedSongList:\n if ratingTuple[0][2] != \"\":\n print(\"SONG:\", 
ratingTuple[0][0], \"{\"+ratingTuple[0][1]+\"}\", \"(\"+ratingTuple[0][2]+\")\",\n \"\\nRATING:\", \"[\"+str(ratingTuple[1])+\"/10]\\n\")\n else:\n print(\"SONG:\", ratingTuple[0][0], \"{\"+ratingTuple[0][1]+\"}\",\n \"\\nRATING:\", \"[\"+str(ratingTuple[1])+\"/10]\\n\")\n\n # Print Special List Second.\n for ratingTuple in self.specialSongList:\n if ratingTuple[0][2] != \"\":\n print(\"SONG:\", ratingTuple[0][0], \"{\"+ratingTuple[0][1]+\"}\", \"(\"+ratingTuple[0][2]+\")\",\n \"\\nRATING:\", \"[\"+str(ratingTuple[1])+\"]\\n\")\n else:\n print(\"SONG:\", ratingTuple[0][0], \"{\"+ratingTuple[0][1]+\"}\",\n \"\\nRATING:\", \"[\"+str(ratingTuple[1])+\"]\\n\")\n \n except:\n judgeNotesLogger.warning(\"printJudgeRatings: {0}: {1}\".format(sys.exc_info()[0].__name__,\n str(sys.exc_info()[1])))", "def get_rating(self):\n self.rating = imdb.get_title_ratings(self.ID)['rating']", "def findRatings():\n if request.method == 'POST':\n connector = appEngine.connect()\n rating = int(request.form['rating'])\n joinTable = connector.execute(\"SELECT movie.movieName, actor.actorName, rating.rating FROM movie INNER JOIN rating ON movie.movieID=rating.movie_ID INNER JOIN movie_actor ON movie.movieID=movie_actor.movie_ID INNER JOIN actor ON movie_actor.actor_ID=actor.actorID WHERE rating.rating >= (?);\", (rating))\n result = {'data': [dict(zip(tuple(joinTable.keys()), i)) for i in joinTable.cursor]}\n return result\n return render_template('rating_search.html')", "def printRawRatings(self):\n\n try:\n judgeNotesLogger.info(\"printRawRatings: Retrieving Raw Ratings from '%s'\\n\", self.notesFile)\n sortedRatings = sorted(self.ratingsRaw.keys(), key=float)\n for rating in sortedRatings:\n print(\"[\"+str(rating)+\"/10]:\"+str(self.ratingsRaw[rating]))\n ratingSum = self.getRatingSum()\n sortedRatings = sorted(self.specialRatingsRaw.keys(), key=str.lower)\n for rating in sortedRatings:\n print(\"[\"+str(rating)+\"]:\"+str(self.specialRatingsRaw[rating]))\n print(\"TOTAL:\"+str(round(ratingSum, 1)))\n print(\"JUDGEDFILES:\"+str(self.numJudgedFiles))\n print(\"SPECIALFILES:\"+str(self.numSpecialFiles))\n print(\"TOTALFILES:\"+str(self.numTotalFiles))\n print(\"AVERAGE:\"+str(round(self.average, 2))+\"\\n\")\n\n except:\n judgeNotesLogger.warning(\"printRawRatings: {0}: {1}\".format(sys.exc_info()[0].__name__,\n str(sys.exc_info()[1])))", "def printRatingsToSongs(self):\n judgeNotesLogger.info(\"printRatingsToSongs: Printing songs for each rating parsed\")\n try:\n\n # Print out normal ratings first.\n sortedRatings = sorted(self.ratingsToSongs.keys(), key=float)\n for rating in sortedRatings:\n print(\"\") # For neater printing. Newline still occurs here\n songsInRating = self.ratingsToSongs[rating]\n print(\"[\"+str(rating)+\"/10]\")\n for song in songsInRating:\n if song[2] != \"\":\n print(\"-->\", song[0], \"{\"+song[1]+\"}\", \"(\"+song[2]+\")\")\n else:\n print(\"-->\", song[0], \"{\"+song[1]+\"}\")\n\n # Print out special ratings after.\n sortedRatings = sorted(self.specialRatingsToSongs.keys(), key=str.lower)\n for rating in sortedRatings:\n print(\"\") # For neater printing. Newline still occurs here\n songsInRating = self.specialRatingsToSongs[rating]\n print(\"[\"+str(rating)+\"]\")\n for song in songsInRating:\n if song[2] != \"\":\n print(\"-->\", song[0], \"{\"+song[1]+\"}\", \"(\"+song[2]+\")\")\n else:\n print(\"-->\", song[0], \"{\"+song[1]+\"}\")\n \n print(\"\") # For neater printing. 
Newline still occurs here\n except:\n judgeNotesLogger.warning(\"printRatingsToSongs: {0}: {1}\".format(sys.exc_info()[0].__name__,\n str(sys.exc_info()[1])))", "def review_rating(self, soup):\n logging.info('Getting hotel review rating.')\n reviews_rating = {}\n if soup.select_one('div.scores_full_layout') is None:\n logging.error('Cant get extended rating.')\n reviews_rating = {}\n else:\n for review_rating in soup.select_one('div.scores_full_layout').findAll(\n 'li', {\"class\": \"clearfix\"}):\n rating_class = review_rating.find(\"p\", {\"class\": \"review_score_name\"}).text.strip()\n rating_score = review_rating.find(\"p\", {\"class\": \"review_score_value\"}).text.strip()\n reviews_rating[rating_class] = rating_score\n\n return reviews_rating", "def enterRating():\n if request.method == 'POST':\n movieName = request.form['movieName']\n username = request.form['userName']\n rating = request.form['rating']\n comment = request.form['comment']\n post([movieName, username, rating, comment])\n return render_template('rating_enter.html')", "def print_recommendations(self):\n\n rec_vector = self.generate_recommendation()\n\n print(\"Recommendations for user {} \".format(self.username))\n\n for ranking, subreddit_name in enumerate(rec_vector, 1):\n print(\"{}.: {}\".format(ranking, subreddit_name))\n\n if ranking%10 == 0 and ranking!=0:\n check_if_move_on = True\n print(\"\\nType c and press enter for the next 10 subreddits.\\n\")\n print(\"Type q and press enter to return to main menu.\\n\")\n\n while check_if_move_on:\n choice = input()\n\n if choice == 'c':\n break\n\n elif choice == 'q':\n break\n\n else:\n print(\"Not a valid entry, please enter again.\")\n\n # break the whole thing if they want to quit\n if choice == 'q':\n break", "def all(self):\n ratings = []\n for i in range (1, self.pages()+1):\n ratings.extend(self.page(i))\n \n self._set_attrs_to_values({'ratings': ratings})\n return ratings", "def get_ratings(self):\n df = pd.read_csv(IoManager.CARD_RATINGS_FILE_PATH)\n df = IoManager.scale_ratings(df)\n df = IoManager.normalize_ratings_per_archetype(df)\n df = self.add_ratings_sum(df)\n # print(df[[\"name\", \"monogreen\", \"simic_ramp\", \"general\"]].tail(60))\n # print(df[[\"name\", \"general\"]].sort_values(ascending=False, by=\"general\").head(50))\n return df", "def ratings(self):\n session = Session.object_session(self)\n return session.query(Rating).join(Section).filter(Section.professor == self).all()", "def all_prods(request):\n products = Product.objects.all()\n stars = Product.objects.annotate(\n avg_review=Avg('productreview__rating'),\n )\n context = {\n 'products': products,\n 'stars': stars\n }\n return render(request, \"products.html\", context)", "def all_ratings(self):\n\n for u, u_ratings in iteritems(self.ur):\n for i, r in u_ratings:\n yield u, i, r", "def ratings(self, ratings):\n\n self._ratings = ratings", "def ratings(self, ratings):\n\n self._ratings = ratings", "def rating_review(catalog):\n reviews = list()\n errors = 0\n for ix, page in enumerate(catalog.iloc[:, 0], 1):\n try:\n soup_2 = fetch(page, \"\").find_all(\"div\", {\"class\": \"col-xs-16 review_container\"})\n for comment in soup_2:\n comment_text = comment.find_all(\"div\", {\"class\": \"the_review\"})[0].text.strip()\n icon = str(comment.find_all(\"div\")[0])\n if \"fresh\" in icon:\n reviews.append('1 - ' + comment_text)\n elif \"rotten\" in icon:\n reviews.append('0 - ' + comment_text)\n except:\n errors += 1\n print('\\r4/4 — {:.2%} of reviews scraped. 
Error rate: {:.2%}'.format(ix/len(catalog),\n errors/ix), end=' ')\n print('\\r{} reviews successfully scraped. Error rate: {:.2%}'.format(\n len(reviews)-errors, errors/ix), end='\\n')\n return reviews", "def get_rating(text):\n movie = text\n page = requests.get('http://www.imdb.com/find?ref_=nv_sr_fn&q=' + movie + '&s=tt')\n soup1 = BeautifulSoup(page.content, 'html.parser')\n movieid = soup1.select(\".findList tr a\")[0].get('href')\n movielink = \"http://www.imdb.com\" + movieid\n mlinkpage = requests.get(movielink)\n soup2 = BeautifulSoup(mlinkpage.content, 'html.parser')\n movierating = soup2.select(\".ratingValue span\")[0].text\n metascore = soup2.select(\".metacriticScore\")\n reviewlink = movielink + 'reviews'\n linkpage = requests.get(reviewlink)\n soup3 = BeautifulSoup(linkpage.content, 'html.parser')\n \n return soup3, movierating", "def get_reviews(review_url):\n print review_url\n html = urllib.urlopen(review_url).read()\n soup = bs4.BeautifulSoup(html, 'html.parser')\n\n rating_scores = soup.findAll(\"span\", \"ratingScore\")\n num_ratings = len(rating_scores) - 1\n\n current_reviews = soup.findAll(\"div\", \"currentVintageProfessinalReviews\")\n num_cur_reviews = str(current_reviews).count('ratingProvider')\n past_reviews = soup.findAll(\"ul\", \"pastVintagesProfessionalReviews\")\n num_past_reviews = str(past_reviews).count('ratingProvider')\n\n print 'There are {0} reviews for prior vintages of this wine.'.format(num_past_reviews)\n print 'There are {0} current reviews for this vintage.\\n'.format(num_cur_reviews)\n\n rating_provider = soup.findAll(\"span\", \"ratingProvider\")\n rating_score = soup.findAll(\"span\", \"ratingScore\")\n reviewers = re.findall('(?<![A-Z])[>]([A-Z]+(?![A-Z]))', str(rating_provider))\n ratings = re.findall('(?<![A-Z])[0-9]{2}(?![A-Z])', str(rating_score))\n\n print \"Ratings List:\", ratings\n print \"Current Reviews: \", num_cur_reviews\n\n currentreviews = []\n for j in range(num_cur_reviews):\n print \"Current Review #\"+str(j+1)+\":\", reviewers[j], ratings[j]\n currentreviews.append((reviewers[j], ratings[j]))\n print currentreviews\n\n print \"\\nPast Reviews: \", num_past_reviews\n past_review_ratings = []\n for k in range(num_cur_reviews, num_past_reviews+num_cur_reviews):\n #print \"Past Review #\"+str(k-num_cur_reviews+1)+\":\", reviewers[k], int(ratings[k])\n past_review_ratings.append(float(ratings[k]))\n if k > 30:\n break\n if num_past_reviews != 0:\n avg_past_reviews = sum(past_review_ratings)/len(past_review_ratings)\n round(avg_past_reviews, 2)\n else:\n avg_past_reviews = 0\n\n print \"Average of Past Reviews: \", avg_past_reviews\n\n return currentreviews, avg_past_reviews", "def show_movie_profile(movie_id):\n\n # movie object given a movie_id\n movie = Movie.query.filter_by(movie_id=movie_id).first()\n\n # list of all rating objects for a given movie_id ordered by user_id\n sorted_ratings = Rating.query.filter_by(movie_id=movie_id).order_by('user_id').all()\n\n return render_template(\"movie_profile.html\", movie=movie, ratings=sorted_ratings)", "def display_round_matches(player_ratings):\r\n print('\\nMATCHES DE LA RONDE: ')\r\n for i in range(0, len(player_ratings), 2):\r\n print(players_table.get(doc_id=player_ratings[i][0])['Nom'], \"(BLANCS)\",\r\n \"contre\",\r\n players_table.get(doc_id=player_ratings[i+1][0])['Nom'], \"(NOIRS)\")", "def results():\n df = mdr.elo_ratings.sort_values([\"elo\", \"matches\"], ascending=False)\n df[\"score\"] = (1 / (1 + 10**((1500 - df[\"elo\"]) / 400))) * 100\n df = df[[\"items\", 
\"score\", \"matches\"]]\n\n html_formatted = df.to_html(classes=[\"table\", \"table-dark\", \"table-hover\"], index=False, float_format=\"%.1f\")\n\n return render_template('results.html', table=html_formatted)", "def show_recommendation_pool(self, top_n=None):\n i = 0\n if top_n is None:\n top_n = self.number_of_recommendations\n\n for _, rdata in self.recommendation_pool.items():\n print(\"\\n{R.movie_id} - {R.title} - {R.genres}\".format(\n R=rdata['movie_obj']))\n\n if 'title_similarity' in rdata:\n print(\" Title Similarity: {} - ({})\".format(\n rdata['title_similarity'], rdata['movie_obj'].title))\n\n if 'genres_similarity' in rdata:\n print(\" Genres Similarity: {} - ({})\".format(\n rdata['genres_similarity'], rdata['movie_obj'].genres))\n\n if 'tags_similarity' in rdata:\n print(\" Tags Similarity: {} - ({})\".format(\n rdata['tags_similarity'], rdata['tags']))\n\n if 'final_similarity' in rdata:\n print(\" -> Final Similarity: {}\".format(\n rdata['final_similarity']))\n\n i += 1\n if top_n and i >= top_n:\n break", "def output(self):\n print \"Name:\", self.name\n print \"City:\", self.city\n print \"Country:\", self.country\n print \"Number of Reviews:\", len(self.sentiments)\n print \"Old Reviews (Stars):\", self.stars_avg\n print \"Old Reviews (%):\", self.stars_avg/5\n print \"New Rating (Stars)\", self.new_rating*5\n print \"New Rating (%):\", self.new_rating", "def movie_list():\n\n movies = Movie.query.order_by('title').join(Rating).all()\n # movies = Movie.query.options(db.joinedload('rating')).order_by('title').all()\n\n return render_template(\"movie_list.html\", movies=movies)", "def load_ratings(self):\n logging.debug(\"Loading ratings data...\")\n\n # loading ratings\n data=requests.get(self.__URL_RATINGS)\n self.__dataframe_ratings=pd.DataFrame(data.json())\n # calculate implicit and explicit ratings\n # XXX use a function to calculate implicit rating considering the video lead time\n self.__dataframe_ratings['rating_implicit'] = (self.__dataframe_ratings['video_watch_time']/100) * 0.3\n self.__dataframe_ratings['rating_explicit'] = (self.__dataframe_ratings['rating_value']) * 0.7\n\n # create a new column to put implicit or explicit rating value\n self.__dataframe_ratings['overall_rating_value'] = self.__dataframe_ratings['rating_implicit'] + self.__dataframe_ratings['rating_explicit']\n\n logging.debug(\"Ratings data loaded! 
n=%s\" % self.__dataframe_ratings.shape[0])\n\n return self.__dataframe_ratings", "def display_rating(instance, args):\r\n raw_rating = instance.rating(args)\r\n # Do string conversion here\r\n if not raw_rating:\r\n str_rating = 'N/A'\r\n else:\r\n str_rating = \"{0:.2f}\".format(raw_rating)\r\n return str_rating", "def get_ratings(soup_recipe):\n ratings = soup_recipe.find(\"span\", {\"itemprop\": \"ratingValue\"})\n if not ratings:\n ratings = None\n else:\n ratings = ratings.get_text()\n rating_count = soup_recipe.find(\"span\", {\"itemprop\": \"reviewCount\"})\n if not rating_count:\n rating_count = None\n else:\n rating_count = rating_count.get_text()\n return ratings, rating_count", "def index():\n reviews = Reviews()\n print(reviews.all())\n return render_template('review/index.html', title=\"Index\", reviews=reviews.all())", "def user_ratings(user_id):\n return _fetch_records(f\"SELECT item_id, rating_type FROM ratings WHERE user_id = {user_id}\")", "def visualize_raw_dat_ratings(self, dat):\n\n # Ratings Plots\n print('Plots for Ratings Data')\n\n # Shown as binned ratings\n bins = [0,1,2,3,4,5]\n pd.cut(dat['Average User Rating'], bins).value_counts().plot(kind = 'bar')\n plt.xlabel('Scores')\n plt.ylabel('Count')\n plt.title('Scores Histogram')\n plt.show()\n\n #Show all ratings\n plt.hist(dat['Average User Rating'].dropna())\n plt.xlabel('Scores')\n plt.ylabel('Count')\n plt.title('Scores Histogram')\n plt.show()", "def plot_raw_data(ratings):\n # do statistics.\n num_items_per_user = np.array((ratings != 0).sum(axis=0)).flatten()\n num_users_per_item = np.array((ratings != 0).sum(axis=1).T).flatten()\n sorted_num_movies_per_user = np.sort(num_items_per_user)[::-1]\n sorted_num_users_per_movie = np.sort(num_users_per_item)[::-1]\n\n # plot\n fig = plt.figure()\n ax1 = fig.add_subplot(1, 2, 1)\n ax1.plot(sorted_num_movies_per_user, color='blue')\n ax1.set_xlabel(\"users\")\n ax1.set_ylabel(\"number of ratings (sorted)\")\n ax1.grid()\n\n ax2 = fig.add_subplot(1, 2, 2)\n ax2.plot(sorted_num_users_per_movie)\n ax2.set_xlabel(\"items\")\n ax2.set_ylabel(\"number of ratings (sorted)\")\n ax2.set_xticks(np.arange(0, 2000, 300))\n ax2.grid()\n\n plt.tight_layout()\n plt.savefig(\"stat_ratings\")\n plt.show()\n # plt.close()\n return num_items_per_user, num_users_per_item", "def plot_raw_data(ratings):\n # do statistics.\n num_items_per_user = np.array((ratings != 0).sum(axis=0)).flatten()\n num_users_per_item = np.array((ratings != 0).sum(axis=1).T).flatten()\n sorted_num_movies_per_user = np.sort(num_items_per_user)[::-1]\n sorted_num_users_per_movie = np.sort(num_users_per_item)[::-1]\n\n # plot\n fig = plt.figure()\n ax1 = fig.add_subplot(1, 2, 1)\n ax1.plot(sorted_num_movies_per_user, color='blue')\n ax1.set_xlabel(\"users\")\n ax1.set_ylabel(\"number of ratings (sorted)\")\n ax1.grid()\n\n ax2 = fig.add_subplot(1, 2, 2)\n ax2.plot(sorted_num_users_per_movie)\n ax2.set_xlabel(\"items\")\n ax2.set_ylabel(\"number of ratings (sorted)\")\n ax2.set_xticks(np.arange(0, 2000, 300))\n ax2.grid()\n\n plt.tight_layout()\n plt.savefig(\"stat_ratings\")\n plt.show()\n # plt.close()\n return num_items_per_user, num_users_per_item", "def rating_list_view(request):\n\n serializer = RatingSerializer(data=request.DATA)\n if serializer.is_valid():\n try:\n rating = serializer.object\n values = {\n 'evil_value': rating.evil_value,\n 'easier_value': rating.easier_value,\n 'vague_value': rating.vague_value,\n 'brainy_value': rating.brainy_value,\n }\n\n make_rate(rating.teacher.id, **values)\n\n 
return Response(serializer.data, status=status.HTTP_201_CREATED)\n except Exception as e:\n errors = {'global': [e.message]}\n return Response(errors, status=status.HTTP_400_BAD_REQUEST)\n else:\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def get_item_rating(self, soup: BeautifulSoup) -> None:\n try:\n rating = soup.find(\"span\", class_=\"_10fy1f8\").get_text()\n except AttributeError:\n rating = None\n self.__collected_dic[\"rating\"].append(rating)", "def reviews(request):\n reviews = Review.objects.all()\n\n context = {\n 'reviews': reviews,\n }\n return render(request, 'reviews/reviews.html', context)", "def results():\n # Get data from form that came with the request\n data = flask.request.form\n #has attributes og_abstract similarity keywords num_results\n result = filter_top5_recommender([data['og_abstract']], JAS_tfidf, JAS_nmf, JAS_vecs, \n data['similarity'], keyword=data['keyword'], \n num_results=data['num_results'])\n \n # Put the result in a nice dict so we can send it as json\n \n #return flask.render_template(\"results.html\",result = result)\n\n \n return flask.render_template(\"results.html\",result = result) #, plot_url = plot_url)\n\t#return '<img src=\"data:image/png;base64,{}\">'.format(plot_url)", "def rate_movie(movie_id):\n\n user_rating = request.args.get(\"user_rating\")\n # get user id from log in email address\n user_email = session[\"logged_in_user_email\"]\n\n user = User.query.filter(User.email == user_email).one()\n\n user_id = user.user_id\n\n # Check if user rating exists in database\n # If user has rated this movie before, update value\n # Else, add user rating to database by movie id and user id\n if db.session.query(Rating.score).filter(Rating.movie_id == movie_id, Rating.user_id == user_id).all():\n # When updating a value, we need to use the key-value pair in update()\n db.session.query(Rating).filter(Rating.movie_id == movie_id, Rating.user_id == user_id).update({\"score\": user_rating})\n\n # db.session.query(Rating).filter(Rating.movie_id == movie_id, Rating.user_id == user_id).update(Rating.score == user_rating)\n db.session.commit()\n\n flash(\"You have rated this movie before! 
It has now been updated to %s.\" % (user_rating), \"warning\")\n return redirect(\"/users/%s\" % user_id)\n\n else:\n db.session.add(Rating(movie_id=movie_id, user_id=user_id, score=user_rating))\n db.session.commit()\n \n flash(\"You have rated this movie a %s.\" % (user_rating), \"info\")\n \n return redirect(\"/users/%s\" % user_id)\n\n\n # Get user rating routed correctly, as this was just test code\n # Fix label format for movie profile page\n\n return render_template(\"rate_movie.html\", user_rating=user_rating)", "def all_ratings(Y):\n bar_count_ratings(Y, 'Rating distribution for all movies')\n plt.show()", "def profile(request, slug):\n student = get_object_or_404(Student, slug=slug)\n ratings = Rating.objects.filter(student=student).order_by('timestamp')\n latest_ratings = get_latest_ratings_list(ratings)[-50:]\n average_rating = ratings.aggregate(Avg('rating'))\n studyspace_rating_breakdown = ratings.values(\n 'studyspace', 'studyspace__space_name'\n ).annotate(num_votes=Count('studyspace')).order_by('-num_votes')[:3]\n context = {\n 'login_form': LoginForm(),\n 'user_form': UserForm(),\n 'student_form': StudentForm(),\n 'student': student,\n 'ratings': ratings,\n 'latest_ratings': latest_ratings,\n 'average_rating': average_rating,\n 'studyspace_rating_breakdown': studyspace_rating_breakdown\n }\n return render(request, 'spacefinder/profile.html', context)", "def get_average_rating(self):\n count = 0\n total = 0\n ratings_length = len(self.ratings)\n if ratings_length > 0:\n for rating in self.ratings:\n count += 1\n total += rating\n average = total / count\n return average\n else:\n print(\"There does not seem to be any ratings for {book}\".format(book=self.title))", "def rating_form(movie_id):\n\n current_movie = Movie.query.filter_by(movie_id=movie_id).first()\n title = current_movie.title\n\n return render_template('rating_form.html', title=title)", "def print_alph_restaurant_ratings(restaurant_dict): \n\n for restaurant_name, rating in sorted(restaurant_dict.items()):\n # print \"{} is rated at {}.\".format(restaurant_name,\n # rating)\n\n \n restaurant_name = restaurant_dict.items[0]\n rating = restaurant_dict.items[1]\n\n print restaurant_name, rating", "def __str__(self):\n return str(self.get_rating())", "def download_imdb_ratings():\n print(\"Option not implemented\")\n sys.exit(1)\n return {}", "def satisfaction_ratings(self):\r\n return resources.SatisfactionRatings(self)", "def reviews(request):\n review = Review.objects.all()\n return render(request, 'reviews.html', {\"review\": review})", "def results():\n\n queryName = request.form['query']\n queryStars = request.form['stars']\n \n datasource = DataSource()\n listOfRestaurantNames = datasource.searchRestaurantsByNameAndMinimumStars(queryName, queryStars)\n restaurants = datasource.generateRestaurantObjects(listOfRestaurantNames[:15])\n\n return render_template('results.html', restaurants=restaurants)", "def rating_distribution( ratings, plot=True ):\n possible_ratings = np.unique( ratings )\n dist = []\n for pr in possible_ratings:\n dist.append( np.count_nonzero( ratings == pr ) )\n dist = np.array( dist )\n print( 'distribution of ratings', dist )\n if plot:\n plt.bar( possible_ratings, dist )\n plt.savefig( 'rating_distribution.pdf' )", "def plot_raw_data(ratings, pl = True):\n # do statistics.\n num_items_per_user = np.array((ratings != 0).sum(axis=0)).flatten()\n num_users_per_item = np.array((ratings != 0).sum(axis=1).T).flatten()\n sorted_num_movies_per_user = np.sort(num_items_per_user)[::-1]\n 
sorted_num_users_per_movie = np.sort(num_users_per_item)[::-1]\n\n if pl:\n # plot\n fig = plt.figure()\n ax1 = fig.add_subplot(1, 2, 1)\n ax1.plot(sorted_num_movies_per_user, color='blue')\n ax1.set_xlabel(\"users\")\n ax1.set_ylabel(\"number of ratings (sorted)\")\n ax1.grid()\n\n ax2 = fig.add_subplot(1, 2, 2)\n ax2.plot(sorted_num_users_per_movie)\n ax2.set_xlabel(\"items\")\n ax2.set_ylabel(\"number of ratings (sorted)\")\n #ax2.set_xticks(np.arange(0, 2000, 300))\n ax2.grid()\n\n plt.tight_layout()\n plt.savefig(\"../results/stat_ratings\")\n plt.show()\n # plt.close()\n return num_items_per_user, num_users_per_item", "def _star_reviewers(self, star_num, page_num):\n one_star_url = self._star_reviews_url(star_num, page_num)\n req = Request(one_star_url, headers=self.firefox)\n content = urlopen(req).read()\n return self._parse_reviewers(content)", "def item_ratings(self):\n return self.get_ratings().sum(axis=0)", "def confirm_star_rating(request):\n docId = database.child(\"Users\").child(\"Doctor\").get()\n docIdList = []\n for i in docId.each():\n docIdKey = i.key()\n docIdList.append(docIdKey)\n doctorNameList = []\n doctorId = []\n serviceRList = []\n behaviourRList = []\n accdRList = []\n for i in docIdList:\n tempDoctorName = database.child(\"Users\").child(\"Doctor\").child(i).child(\"fname\").get().val()\n doctorNameList.append(tempDoctorName)\n tempServiceR = database.child(\"Ratings\").child(\"Doctor\").child(i).child(\"serviceR\").get().val()\n serviceRList.append(tempServiceR)\n tempBehaviourR = database.child(\"Ratings\").child(\"Doctor\").child(i).child(\"behaviourR\").get().val()\n behaviourRList.append(tempBehaviourR)\n tempAccdR = database.child(\"Ratings\").child(\"Doctor\").child(i).child(\"accdR\").get().val()\n accdRList.append(tempAccdR)\n dId = i\n doctorId.append(dId)\n allDoctorName = zip(doctorNameList, doctorId)\n ratings = zip(doctorId, serviceRList, behaviourRList, accdRList)\n if star_rating_form:\n return render(request, 'addRatings/addRatings.html', {'allDoctorName': allDoctorName, 'ratings': ratings})", "def show_results(self):\n print(\"Survey results:\")\n for response in self.responses:\n print('- ' + response)", "def snippet_detail(request, snippet_id):\n snippet = get_object_or_404(Snippet, pk=snippet_id)\n return render_to_response('cab/snippet_detail.html',\n { 'object': snippet,\n 'num_ratings': snippet.rating_set.count(),\n 'rating_score': Rating.objects.score_for_snippet(snippet.id) },\n context_instance=RequestContext(request))", "def movie_page(movie_id):\n\n current_movie = Movie.query.filter_by(movie_id=movie_id).first()\n title = current_movie.title\n released = current_movie.released_at\n url = current_movie.imdb_url\n thing = current_movie.movie_id\n\n movie_rating = db.session.query(Rating.score).join(Movie).filter(\n Movie.movie_id==thing).all()\n\n return render_template('movie_page.html', current_movie=current_movie, \n title=title, released=released, url=url, movie_rating=movie_rating)", "def get_rating(self):\n self.total = sum(int(review['stars']) for review in self.reviews.values())\n if self.total > 0:\n return round(self.total / self.reviews.count(), 1)\n else:\n return self.total", "def show_recipe_results():\n if not g.user:\n flash(\"Please login to view.\",\"warning\")\n return redirect('/login')\n\n data = search_recipes(request)\n recipes = data['results']\n print(recipes)\n \n return render_template('recipes/show.html',recipes=recipes)", "def plans(request):\n results = 
Product.objects.filter(category__icontains='P')\n stars = Product.objects.annotate(\n avg_review=Avg('productreview__rating'),\n )\n context = {\n 'products': results,\n 'stars': stars\n }\n if not results:\n messages.error(request, \"No plans as of yet, that will change soon!\")\n return redirect(reverse('products'))\n else:\n return render(request, \"products.html\", context)", "def list(self, request):\n queryset = Students.objects.filter(average_rating=5.0)\n students = normalize_students(queryset)\n return Response(students)", "def find_rating():\n print(\"***** Finding Star/Rating *****\")\n while (True):\n print()\n business_object = query_business_name()\n if business_object == \"back\":\n return\n elif business_object is None:\n continue\n\n print(\"This business is rated \" + str(\n business_object['stars']) + \" stars with \" + str(\n business_object['review_count']) + \" reviews.\\n\")\n\n print_business(business_object)", "def print_rating(self, subject_id):\n\n if subject_id in self.ratings and (self.ratings[subject_id] is not None):\n # checking if \"i'm tired\" or 'review later' appear in ratings\n do_not_save = any([ rt.lower() in cfg.ratings_not_to_be_recorded\n for rt in self.ratings[subject_id]])\n\n # not saving ratings meant not to be saved!\n if do_not_save:\n self.ratings.pop(subject_id)\n else:\n print(' id: {}\\n'\n 'rating: {}\\n'\n ' notes: {}'.format(subject_id, self.ratings[subject_id],\n self.notes[subject_id]))\n else:\n print(f'rating for {subject_id} has not been recorded!')", "def netflix_print(writer, rating):\n if isinstance(rating, float):\n writer.write(('%.1f' % rating) + \"\\n\")\n elif isinstance(rating, int):\n writer.write(str(rating) + \":\\n\")\n else:\n writer.write(rating + \"\\n\")", "def with_rating(self):\n return self.annotate(\n rating=F('overall_posts_rating') * 10 + F('overall_comments_rating')\n )", "def test_assign_and_read_ratings(self):\n\n self.signup('user@example.com', 'user')\n self.login('user@example.com')\n csrf_token = self.get_csrf_token_from_response(\n self.testapp.get('/explore/%s' % self.EXP_ID))\n\n # User checks rating\n ratings = self.get_json('/explorehandler/rating/%s' % self.EXP_ID)\n self.assertEqual(ratings['user_rating'], None)\n self.assertEqual(\n ratings['overall_ratings'],\n {'1': 0, '2': 0, '3': 0, '4': 0, '5': 0})\n\n # User rates and checks rating\n self.put_json(\n '/explorehandler/rating/%s' % self.EXP_ID, {\n 'user_rating': 2\n }, csrf_token\n )\n ratings = self.get_json('/explorehandler/rating/%s' % self.EXP_ID)\n self.assertEqual(ratings['user_rating'], 2)\n self.assertEqual(\n ratings['overall_ratings'],\n {'1': 0, '2': 1, '3': 0, '4': 0, '5': 0})\n\n # User re-rates and checks rating\n self.login('user@example.com')\n self.put_json(\n '/explorehandler/rating/%s' % self.EXP_ID, {\n 'user_rating': 5\n }, csrf_token\n )\n ratings = self.get_json('/explorehandler/rating/%s' % self.EXP_ID)\n self.assertEqual(ratings['user_rating'], 5)\n self.assertEqual(\n ratings['overall_ratings'],\n {'1': 0, '2': 0, '3': 0, '4': 0, '5': 1})\n\n self.logout()", "def top_ten(request):\n if request.method == 'GET':\n movies = Movie.objects.filter(date_of_release__lte=datetime.date.today())\n movies = movies.order_by('-rating')[:10]\n serializer = MovieSerializer(movies, many=True)\n return Response(serializer.data)", "def apparel(request):\n results = Product.objects.filter(category__icontains='A')\n stars = Product.objects.annotate(\n avg_review=Avg('productreview__rating'),\n )\n context = {\n 'products': results,\n 
'stars': stars\n }\n if not results:\n messages.error(request, \"No apparel as of yet, that will change soon!\")\n return redirect(reverse('products'))\n else:\n return render(request, \"products.html\", context)", "def fetch():\n current_count = docs_count\n query = Query(feedback_db, selector = {'_id':{'$gt':0}}, fields = ['Rating', 'timestamp'])\n for doc in query()['docs']:\n if ratings.has_key(doc['timestamp']):\n pass\n else:\n ratings[doc['timestamp']] = doc['Rating']\n publish()# to publish the rating to iotf\n time.sleep(10)", "def visualize_raw_dat_rating_cnt(self, dat):\n print('Number of Reviews')\n\n plt.hist(dat['User Rating Count'].dropna())\n plt.show()\n\n print('There is a long-tail to the data, so a subset will be shown as well')\n print('~80% of the data is pictured below')\n plt.hist(dat[dat['User Rating Count'] < 500].dropna(subset = ['User Rating Count'])['User Rating Count'])\n plt.show()", "def alphabetized_restaurant_ratings(restaurant_ratings_dictionary):\n for name, rating in sorted(restaurant_ratings_dictionary.items()):\n print(f\"{name} is rated at {rating}.\")", "def rating(self):\n average = self.review.all().aggregate(Avg('rating'))['rating__avg']\n if not average:\n return 0\n return average", "def get_ratings(self):\n return Vote.objects.filter(content_type=self.get_content_type(), object_id=self.instance.pk, key=self.field.key)", "def user_page(user_id):\n\n current_user = User.query.filter_by(user_id=user_id).first()\n email = current_user.email\n zipcode = current_user.zipcode\n age = current_user.age\n thing = current_user.user_id\n\n movie_list = db.session.query(Rating.score, \n Movie.title).join(Movie).filter(Rating.user_id==thing).all()\n\n\n\n return render_template(\"user_page.html\", email=email, user_id=user_id,\n zipcode=zipcode, age=age, movie_list=movie_list)", "def all_ratings(self):\n return {\n 'average': self.average_rating(),\n 'total': self.proto.aggregateRating.ratingsCount,\n 'oneStar': self.proto.aggregateRating.oneStarRatings,\n 'twoStar': self.proto.aggregateRating.twoStarRatings,\n 'threeStar': self.proto.aggregateRating.threeStarRatings,\n 'fourStar': self.proto.aggregateRating.fourStarRatings,\n 'fiveStar': self.proto.aggregateRating.fiveStarRatings,\n }", "def extract_review_rating(soup):\r\n notes = (\"One\", \"Two\", \"Three\", \"Four\", \"Five\" )\r\n review_rating = \"None\"\r\n section = soup.find(\"div\", attrs={\"class\": \"col-sm-6 product_main\"})\r\n for n in notes:\r\n note = \"star-rating \" + n\r\n if section.find(\"p\", attrs={\"class\": note}):\r\n review_rating = n \r\n return review_rating", "def get_ratings():\n query = \"\"\"\n SELECT DISTINCT rating\n FROM film\n \"\"\"\n cursor.execute(query)\n result = cursor.fetchall()\n\n return pd.DataFrame(result, columns=['Rating'])", "def show_user_profile(user_id):\n\n user = User.query.filter_by(user_id=user_id).one()\n rating = Rating.query.filter_by(user_id=user_id).all()\n\n \n return render_template(\"user_detail.html\", user=user, rating=rating)", "def coin_rate(request, coin):\n coin_obj = get_object_or_404(Coins, symbol=coin.upper())\n ratings = Rating.objects.filter(name_coin = coin_obj)\n return render(request, 'scraper/coin_rate.html', {'ratings': ratings})", "def post(self, request, slug):\n rating = request.data.get(\"rate\", {})\n serializer = self.serializer_class(data=rating)\n serializer.is_valid(raise_exception=True)\n rating = serializer.data.get('rating')\n try:\n article = Article.objects.get(slug=slug)\n except Article.DoesNotExist:\n raise 
NotFound(\"An article with this slug does not exist\")\n\n ratings = Ratings.objects.filter(rater=request.user.profile,\n article=article).first()\n if not ratings:\n ratings = Ratings(\n article=article,\n rater=request.user.profile,\n stars=rating)\n ratings.save()\n avg = Ratings.objects.filter(\n article=article).aggregate(Avg('stars'))\n return Response({\n \"avg\": avg\n }, status=status.HTTP_201_CREATED)\n\n if ratings.counter >= 5:\n raise PermissionDenied(\n \"You are not allowed to rate this article more than 5 times.\"\n )\n\n ratings.counter += 1\n ratings.stars = rating\n ratings.save()\n avg = Ratings.objects.filter(article=article).aggregate(Avg('stars'))\n return Response({\"avg\": avg}, status=status.HTTP_201_CREATED)", "def single_project(request,project_id):\n project=Project.objects.get(id=project_id)\n # get all ratings for a specific project\n all_ratings=Rating.objects.filter(project_id=project)\n average_rates=[]\n overall_final_rating=0\n \n for rating in all_ratings:\n average_rates.append(rating.average_vote)\n \n final_rates=sum(average_rates)\n if len(all_ratings)>0:\n overall_rating=final_rates/len(all_ratings)\n overall_final_rating=overall_rating\n else:\n overall_rating=0\n overall_final_rating=overall_rating \n \n return render(request, 'single_project.html',{'project':project,'overall_final_rating':overall_final_rating})", "async def rating_req(self, ctx):\r\n data = await self.get_players()\r\n await self.bot.send_message(ctx.message.channel, \"Rating: {:d}\".format(data['964']['rating']))\r\n await self.bot.send_message(ctx.message.channel, \"Season Wins: {:d}\".format(data['964']['seasonWins']))\r\n await self.bot.send_message(ctx.message.channel, \"Season Losses: {:d}\".format(data['964']['seasonLosses']))\r\n await self.bot.send_message(ctx.message.channel, \"Ratio: {:.4f}\".format(float(data['964']['seasonWins']) / float(data['964']['seasonLosses'])))", "def home(request):\n context = get_context()\n\n context['title'] = 'Simple App'\n context['teachers'] = Teacher.objects.all().order_by('-teacher_median_rating')\n #context['tea_rait'] = Review.objects.filter()\n return render(request, 'home.html', context)", "def rate_snippet(request, snippet_id):\n snippet = get_object_or_404(Snippet, pk=snippet_id)\n \n if not Rating.objects.already_rated(request.user.id, snippet.id):\n score = request.GET.get('score')\n if score:\n rating = Rating.objects.create(snippet=snippet,\n user=request.user,\n score={ 'up': 1,\n 'down': -1 }[score])\n return HttpResponseRedirect(snippet.get_absolute_url())", "def __count_and_average_ratings(self):\n logger.info(\"Counting movie ratings...\")\n movie_ID_with_ratings_RDD = self.ratings_RDD.map(lambda x: (x[1], x[2])).groupByKey()\n movie_ID_with_avg_ratings_RDD = movie_ID_with_ratings_RDD.map(get_counts_and_averages)\n self.movies_rating_counts_RDD = movie_ID_with_avg_ratings_RDD.map(lambda x: (x[0], x[1][0]))", "def __count_and_average_ratings(self):\n logger.info(\"Counting movie ratings...\")\n movie_ID_with_ratings_RDD = self.ratings_RDD.map(lambda x: (x[1], x[2])).groupByKey()\n movie_ID_with_avg_ratings_RDD = movie_ID_with_ratings_RDD.map(get_counts_and_averages)\n self.movies_rating_counts_RDD = movie_ID_with_avg_ratings_RDD.map(lambda x: (x[0], x[1][0]))", "def get_reviews(item_id, shop_id, review_num=10) -> list:\n get_url = f\"{_shopee_base_url}/api/v2/item/get_ratings?filter=0&flag=1&itemid={item_id}&limit={review_num}&offset=0&shopid={shop_id}\"\n r = requests.get(get_url, headers=_user_agent_header, 
proxies=proxy_dict)\n ratings = r.json()['data']['ratings']\n reviews = []\n for rating in ratings:\n reviews.append({\n 'origin': 'Shopee',\n 'author': rating['author_username'],\n 'rating': rating['rating_star'],\n 'review': rating['comment'], \n 'review_likes': rating['like_count'],\n 'summary': 'Summary is very nice. Amazing!'\n })\n return reviews", "def user_detail(username):\n\n user = User.query.filter_by(username=username).one()\n if user.user_exps:\n list_ratings = [(user.user_exps[i].rating, user.user_exps[i].visit.restaurant.name) for i in range(len(user.user_exps))]\n sorted_ratings = sorted(list_ratings, reverse=True)\n highest_rated_restaurant = sorted_ratings[0][1]\n most_sim = predict_restaurant(highest_rated_restaurant)\n else:\n sorted_ratings = [('No restaurants rated yet.', '')]\n highest_rated_restaurant = 'No restaurants rated yet.'\n most_sim = 'Rate a restaurant to get a recommendation!'\n session['username'] = user.username\n\n # from sqlalchemy import create_engine\n # engine = create_engine('postgresql://meatup')\n # df_users = pandas.read_sql_query('select * from \"users\"', con=engine)\n # df_visits = pandas.read_sql_query('select * from \"visits\"', con=engine)\n # import pdb; pdb.set_trace()\n\n return render_template(\"user.html\",\n user=user,\n sorted_ratings=sorted_ratings,\n highest_rated=highest_rated_restaurant,\n most_sim=most_sim)", "def star_rating(table, record_id, splitstars=False):\n import uuid\n id = uuid.uuid4()\n row=db(db.plugin_wiki_rating.tablename==table)(db.plugin_wiki_rating.record_id==record_id).select().first()\n rating = row.rating if row else 0\n callback = URL('plugin_wiki', 'star_rate', args = [table,record_id])\n incr = 0.5 if splitstars else 1\n return TAG[''](DIV(_id='star'+str(id),_class='rating'),\n SCRIPT(\"jQuery(document).ready(function(){jQuery('%(uid)s').rating('%(callback)s',{increment:%(incr)s, maxvalue:5, curvalue:%(rating)s});});\" % dict(uid='#star'+str(id), callback=callback,incr=incr, rating=rating)))", "def scoringpage (request):\n # Define views here\n context = {}\n return render(request, 'scoringPage.html', context=context)", "def item_view_reviews(request):\n\n result = {}\n u = request.user\n\n p = Product.objects.get_by_sku(request.POST['sku'])\n if p is not None:\n # product details are not needed\n #result = p.details(u)\n\n reviews = Review.objects.filter(product=p).exclude(reviewer=u)\n result['count'] = str(reviews.count())\n result['reviews'] = [r.get_json(me=u) for r in reviews]\n else:\n result['result'] = '0'\n\n return JSONHttpResponse(result)", "def get_user_reviews(self):\n raw_review_data = imdb.get_title_user_reviews(self.ID) # Returns dictionary of dicts\n reviews_dict = raw_review_data['reviews'] # Stores the dictionary of reviews\n for dictionary in reviews_dict:\n self.user_reviews.append(dictionary['reviewText'])\n # Appends review text to list\n self._review_printer() # Calls printer to output reviews", "def _get_rating(snippet_html, category):\n attr = 'rating-container-{category}'.format(category=category)\n ratings_table_html = snippet_html.find('td', 'listingratings')\n category_html = ratings_table_html.find('div', attr)\n return int(list(list(category_html.children)[1])[0])", "def score_display():\n data = score_reader()\n for index, datum in enumerate(data):\n score_format = \"%s ...... 
%s/%s\" % (datum[0].capitalize(), datum[1], datum[2])\n print(score_format)", "def _do_action_import_ratings(self):\n self._run_express_job(\n \"org.kiji.tutorial.load.MovieRatingsImporter\",\n options=\"--ratings ml-100k/u.data\"\n )\n self._scan_table(\"users\")", "def display_current_match(i, player_ratings):\r\n print('MATCH', int(i/2+1))\r\n print(players_table.get(doc_id=player_ratings[i][0])['Nom'], \"(BLANCS)\",\r\n \"contre\",\r\n players_table.get(doc_id=player_ratings[i+1][0])['Nom'], \"(NOIRS)\")", "def show_scores(self):\n for text in self.score_text:\n text.draw()" ]
[ "0.6716487", "0.66836923", "0.6439657", "0.6439657", "0.64141905", "0.6388619", "0.6368018", "0.635137", "0.62618804", "0.6109449", "0.61091423", "0.609001", "0.6076961", "0.60705274", "0.6063568", "0.60551393", "0.60547274", "0.60209185", "0.60183233", "0.60183233", "0.6004603", "0.59918994", "0.591316", "0.5913026", "0.5902307", "0.58399844", "0.5830568", "0.5813931", "0.57905143", "0.57540613", "0.57414126", "0.5732822", "0.573244", "0.56994766", "0.56816614", "0.5657897", "0.5657897", "0.56474924", "0.5644632", "0.56122047", "0.55755335", "0.5575521", "0.5573808", "0.55714804", "0.5557286", "0.55454326", "0.55384624", "0.5536655", "0.55272865", "0.55135477", "0.55066293", "0.54989684", "0.54876584", "0.5479076", "0.5451145", "0.54493016", "0.5448127", "0.5432448", "0.54279983", "0.541206", "0.53965926", "0.5390753", "0.53890806", "0.53883564", "0.5379227", "0.53776217", "0.53725404", "0.53710115", "0.53636265", "0.5355215", "0.5352241", "0.53518593", "0.5351033", "0.53429556", "0.53261036", "0.5317412", "0.5313924", "0.53029746", "0.52880144", "0.5279648", "0.5276843", "0.52732724", "0.52724063", "0.5266079", "0.5262339", "0.5256622", "0.5251465", "0.52448064", "0.52448064", "0.5234547", "0.52310383", "0.5228549", "0.5227393", "0.5203834", "0.52029806", "0.5194178", "0.5192466", "0.5191067", "0.5180347", "0.5172417" ]
0.74975514
0
Displays the given section in the proper format.
def display_section(title, category, category_comments): # only attempt to print these if either of them were found if category or category_comments: print(f'\n[{title.upper()}]') print(f' {category}\n') for comment in category_comments: # print(f' * {comment}') wrapped = textwrap.dedent(comment).strip() print(textwrap.fill(wrapped, initial_indent=' * ', subsequent_indent=' ', width=110))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_section(self, s):\n section = s.upper()\n\n self.print_newline()\n self.print_newline()\n self._write('%s\\n' % section)\n self._write('%s\\n' % ('-' * len(section)))\n self.print_newline()", "def print_section(section_name, width=120):\n section_name = ' ' + section_name + ' '\n print('{:=^{ }}'.format(section_name, width))", "def format_sections(self, sections: SectionDict) -> str:", "def printsection(section):\n print('===')\n for key in section.keys():\n print(\"Key: %s\" % key)\n for item in section[key]:\n print(' %s' % item)", "def display_section(name):\n assert all((GENERAL, TRAINING, DETECTION, EVALUATION))\n section_frame = pd.DataFrame(eval(name)).T.fillna('-')\n section_frame['flags'] = section_frame.index.values\n section_frame['flags'] = section_frame['flags'].apply(lambda c: f'--{c}')\n section_frame = section_frame.reset_index(drop=True).set_index('flags')\n print(f'\\n{name.title()}\\n')\n print(\n section_frame[\n [\n column_name\n for column_name in ('help', 'required', 'default')\n if column_name in section_frame.columns\n ]\n ].to_markdown()\n )", "def display_sections(course_name, sections):\n num_sections = len(sections)\n _print('%s has %d sections so far' % (course_name, num_sections))\n for i, section in enumerate(sections, 1):\n _print('%d - Download %s videos' % (i, section.name))\n _print('%d - Download them all' % (num_sections + 1))", "def print_section(msg):\n print(('#' * 40) + '\\n# ' + msg + '\\n' + ('#' * 40))", "def section(sect_name, width=TERMINAL_CHARS):\n\n true_length = width - len(sect_name) - 1\n print(\"\\n_%s\" % (sect_name.upper() + \"_\" * true_length))", "def print_section( secName, dictName ):\n\n\t# Number of entries in IA dictionary.\n\tmaxitem = len(dictName)\n\n\toutFile.write('\\n\\\\begin{enumerate}\\n')\n\n\tfor m in xrange(0,maxitem):\n\n\t\toutFile.write('\\\\item ' + dictName[m])\n\n\t\tbeans = [bean for bean in secName if int(bean[1]) == (m+1)]\n\t\t\n \t\tif beans[0][-1] == 'N/A':\n \t\t\toutFile.write('\\n\\t\\\\begin{description}[font=\\\\normalfont]\\n')\n \t\t\toutFile.write('\\t\\t\\\\item[N/A]\\n')\n \t\t\toutFile.write('\\t\\\\end{description}\\n\\n')\n\t\telse:\n\t\t\tl = len(beans)\n\t\t\tif beans[0][2].startswith('AY'):\n\t\t\t\toutFile.write('\\t\\\\begin{description}[leftmargin=1.75cm, font=\\\\normalfont]\\n')\n\t\t\t\tbeans[0][2] = string.replace(beans[0][2],'AY','\\\\textsc{ay}')\n\t\t\telse:\n\t\t\t\toutFile.write('\\t\\\\begin{description}[font=\\\\normalfont]\\n')\n\t\t\tfor i in xrange(0,l):\n\t\t\t\tif beans[i][2].startswith('AY'):\n\t\t\t\t\tbeans[i][2] = string.replace(beans[i][2],'AY','\\\\textsc{ay}')\n\t\t\t\toutFile.write('\\t\\t\\\\item[\\\\small ' + beans[i][2] + '] ' + beans[i][3] + '\\n')\n\n\t\t\t\tif len(beans[i]) == 5:\n\t\t\t\t\toutFile.write('\\n\\n\\t\\t' + beans[i][4] + '\\n')\n\n\t\t\toutFile.write('\\t\\\\end{description}\\n\\n')\n\n\toutFile.write('\\\\end{enumerate}\\n')", "def print_sub_section(self, s, level=0):\n section = s.capitalize()\n\n self.print_newline()\n self._write('%s+ %s\\n' % ('-' * level, section))\n self.print_newline()", "def render_section(self, name):\n s = self.sections[name]\n return self.render_disassembly(s.data(), s.addr)", "def print_sections(type_sections):\n indent = \" \"\n for key, sec in type_sections.items():\n PrintHelper.print(sec['title'], PrintHelper.OKBLUE+PrintHelper.BOLD)\n if sec['intro']:\n PrintHelper.print(indent + str(sec['intro']), PrintHelper.GRAY)\n PrintHelper.print(indent + str(list(sec['data_types'])), PrintHelper.OKBLUE)", 
"def section_header(text):\n\n print \"---- %s ----\" % text", "def latex_subsection(section):\n string = '\\\\subsubsection*{' + section['heading'] + '}\\n'\n string += section['value'] + '\\n'\n return string", "def print_section_header(title, color):\n\tblock = \"#\" * (len(title) + 2)\n\tprint(color + Style.BRIGHT + block)\n\tprint(\"#\", title)\n\tprint(block + \"\\n\" + Style.RESET_ALL)", "def visit_section(self, node):\n self.section_level += 1\n self.body.append(self.starttag(node, \"section\"))", "def print_section_header(title, COLOR):\n\tblock = \"#\" * (len(title) + 2)\n\tprint(COLOR + Style.BRIGHT + block)\n\tprint(\"#\", title)\n\tprint(block + \"\\n\" + Style.RESET_ALL)", "def print_section_header(title, color):\n block = \"#\" * (len(title) + 2)\n print(\"\\n\" + color + Style.BRIGHT + block)\n print(\"#\", title)\n print(block + \"\\n\" + Style.RESET_ALL)", "def output(self, maxLength:int, minLength:int =0) -> str:\n self._contents = \"[\" + str(self._sectionNumber) + \"] \" + self._sectionName\n return(super().output(minLength, maxLength))", "def _get_section(registry, section, title, hdg_level1=\"#\", hdg_level2=\"=\",\n output_dir=None):\n file_per_topic = output_dir is not None\n lines = [title, hdg_level1 * len(title), \"\"]\n if file_per_topic:\n lines.extend([\".. toctree::\", \" :maxdepth: 1\", \"\"])\n\n topics = sorted(registry.get_topics_for_section(section))\n for topic in topics:\n help = registry.get_detail(topic)\n heading, text = help.split(\"\\n\", 1)\n if not text.startswith(hdg_level2):\n underline = hdg_level2 * len(heading)\n help = \"%s\\n%s\\n\\n%s\\n\\n\" % (heading, underline, text)\n else:\n help = \"%s\\n%s\\n\\n\" % (heading, text)\n if file_per_topic:\n topic_id = _dump_text(output_dir, topic, help)\n lines.append(\" %s\" % topic_id)\n else:\n lines.append(help)\n\n return \"\\n\" + \"\\n\".join(lines) + \"\\n\"", "def section(self, label):\n return self.text(label, bold=True)", "def do_section(parser, token, template='parts/section.html', end='endsection'):\n bits = token.split_contents()[1:]\n if len(bits) is 0:\n title, attrs = '', {}\n elif len(bits) is 1:\n title, attrs = bits[0], {}\n elif len(bits) % 2 is 0:\n raise template.TemplateSyntaxError(\"Your attributes don't match up: %s\" % ', '.join(bits[1:]))\n else:\n title = bits[0]\n attrs = dict(zip(bits[1::2], bits[2::2]))\n nodelist = parser.parse((end,))\n parser.delete_first_token()\n return SectionNode(template, title, attrs, nodelist)", "def display_menu(self, title: str, subtitle: str = \"\\n\", question: dict = None):\n self.clean()\n print(f\"{title}\")\n print(f\"{subtitle}\\n\")\n for key, value in question.items():\n print(f\"\\t{key} - {value[1]}\")\n print(\"\\n\" * 2)", "def section():\n # type: () -> None\n idx = get_arg(\"idx\", False)\n href = get_arg(\"href\", False)\n category = get_arg(\"category\")\n offset = get_arg(\"offset\", 0)\n if idx:\n # Section menu\n url = locs.get_search_url(idx, page=offset)\n data = locs.get_json(url)\n paginate(data.get(\"pagination\"), category, idx)\n for data in data.get(\"facets\", [{}])[0].get(\"filters\"):\n title = data.get(\"title\").title()\n count = data.get(\"count\")\n add_menu_item(section,\n \"{} [{} items]\".format(title, count),\n {\"href\": data.get(\"on\"), \"category\": title})\n if href:\n # Playable items\n data = locs.get_json(href)\n parse_search_results(data, category)\n xbmcplugin.setContent(plugin.handle, \"videos\")\n xbmcplugin.setPluginCategory(plugin.handle, category)\n 
xbmcplugin.endOfDirectory(plugin.handle)", "def sub_section(title, lines, pad_len=4, sep='-'):\n pad = pad_len * \" \"\n msg = \"{pad}{}\\n{pad}{}\\n\".format(title, sep * len(title), pad=pad)\n msg += pad + (\"\\n\" + pad).join(lines) + \"\\n\"\n\n return msg", "def display_student(s_info):\n print('')\n print('Your information:')\n print(f'{s_info.student_id} - {s_info.first_name} {s_info.last_name}')", "def get_course_about_section(course, section_key):\r\n\r\n # Many of these are stored as html files instead of some semantic\r\n # markup. This can change without effecting this interface when we find a\r\n # good format for defining so many snippets of text/html.\r\n\r\n # TODO: Remove number, instructors from this list\r\n if section_key in ['short_description', 'description', 'key_dates', 'video',\r\n 'course_staff_short', 'course_staff_extended',\r\n 'requirements', 'syllabus', 'textbook', 'faq', 'more_info',\r\n 'number', 'instructors', 'overview',\r\n 'effort', 'end_date', 'prerequisites', 'ocw_links']:\r\n\r\n try:\r\n\r\n request = get_request_for_thread()\r\n\r\n loc = course.location.replace(category='about', name=section_key)\r\n\r\n # Use an empty cache\r\n field_data_cache = FieldDataCache([], course.id, request.user)\r\n about_module = get_module(\r\n request.user,\r\n request,\r\n loc,\r\n field_data_cache,\r\n course.id,\r\n not_found_ok=True,\r\n wrap_xmodule_display=False,\r\n static_asset_path=course.static_asset_path\r\n )\r\n\r\n html = ''\r\n\r\n if about_module is not None:\r\n try:\r\n html = about_module.render('student_view').content\r\n except Exception: # pylint: disable=broad-except\r\n html = render_to_string('courseware/error-message.html', None)\r\n log.exception(\r\n u\"Error rendering course={course}, section_key={section_key}\".format(\r\n course=course, section_key=section_key\r\n ))\r\n return html\r\n\r\n except ItemNotFoundError:\r\n log.warning(\r\n u\"Missing about section {key} in course {url}\".format(key=section_key, url=course.location.to_deprecated_string())\r\n )\r\n return None\r\n elif section_key == \"title\":\r\n return course.display_name_with_default\r\n elif section_key == \"university\":\r\n return course.display_org_with_default\r\n elif section_key == \"number\":\r\n return course.display_number_with_default\r\n\r\n raise KeyError(\"Invalid about key \" + str(section_key))", "def section(data):\n if len(data['index']) == 2 and data['index'][1][0].isdigit():\n element = {}\n element['is_section'] = True\n element['section_id'] = '-'.join(data['index'])\n if u\"§§ \" == data['title'][:3]:\n element['is_section_span'] = True\n else:\n element['is_section_span'] = False\n match = SECTION_TITLE_REGEX.match(data['title'])\n element['label'] = match.group(1)\n element['sub_label'] = match.group(2)\n return element", "def test_writeSection(self):\n output = StringIO()\n self.builder._writeSection(\n output, \"Features\",\n [(3, \"Great stuff.\"),\n (17, \"Very long line which goes on and on and on, seemingly \"\n \"without end until suddenly without warning it does end.\")])\n self.assertEquals(\n output.getvalue(),\n \"Features\\n\"\n \"--------\\n\"\n \" - Great stuff. (#3)\\n\"\n \" - Very long line which goes on and on and on, seemingly without end\\n\"\n \" until suddenly without warning it does end. 
(#17)\\n\"\n \"\\n\")", "def display(self):\n print \"\\n\\n***********************\\n\"\n print \"Info about group %s, name=%s, path=%s\" % (self.sdef['id'], \n self.name, self.path)\n print \"sdef=\"\n pp.pprint(self.sdef)\n print \"expanded_def=\"\n pp.pprint (self.expanded_def)\n print \"includes=\"\n pp.pprint (self.includes)\n print \"parent_attributes=\"\n pp.pprint (self.parent_attributes)\n print \"attributes=\"\n pp.pprint (self.attributes)\n print \"mstats=\"\n pp.pprint (self.mstats)", "def _write_section_start(section_name, fobj):\n\n fobj.write(string.capwords(section_name, '_') + '\\n')", "def display(self):\r\n\t\ts = self.options['space']\r\n\t\tv = self.level\r\n\t\tp = self.options['sep']\r\n\t\tt = self.options['tab']\r\n\t\tb = self.options['bullet']\r\n\t\tprint(v*t+b+s+self.abbrev+s+p+s+self.text)", "def which(self, section):\n\n self.program.state.last_viewed = section\n ids = getattr(self.program.state, section)\n return ''.join([\n self._render(i+1, s) for i, s in enumerate(ids)\n ])", "def section(self):\n return SECTION_NAME_TO_SECTION[self.section_name]", "def get_course_info_section(request, course, section_key):\r\n usage_key = course.id.make_usage_key('course_info', section_key)\r\n\r\n # Use an empty cache\r\n field_data_cache = FieldDataCache([], course.id, request.user)\r\n info_module = get_module(\r\n request.user,\r\n request,\r\n usage_key,\r\n field_data_cache,\r\n course.id,\r\n wrap_xmodule_display=False,\r\n static_asset_path=course.static_asset_path\r\n )\r\n\r\n html = ''\r\n\r\n if info_module is not None:\r\n try:\r\n html = info_module.render('student_view').content\r\n except Exception: # pylint: disable=broad-except\r\n html = render_to_string('courseware/error-message.html', None)\r\n log.exception(\r\n u\"Error rendering course={course}, section_key={section_key}\".format(\r\n course=course, section_key=section_key\r\n ))\r\n\r\n return html", "def _show(self, indent = 0):\n print(\" \"*indent, \"Name:\", self.name)\n print(\" \"*indent, \"Description:\", self.description)", "def print_config_main_sections(self):\n try:\n print(\"Main Sections in config file : \", self.sections_in_config)\n except:\n print(\"Invalid Config File.\")", "def show(self, package='', name='', uid='', params={}):\n return self.__post('show-nat-section', package, name, uid, params)", "def display_simple(self):\n print(\"\") \n print(\"Date: {}\".format(self.date))\n print(\" Task name: {}\".format(self.task_name))\n print(\" Time spent: {} minutes\".format(self.time_spent))\n print(\" Notes: {}\".format(self.notes))\n print(\" Task number: {}\".format(self.task_number))\n print(\"\")", "def __repr__(self):\n (sections, section_titles) = self._get_summary_struct()\n return _toolkit_repr_print(self, sections, section_titles, width=30)", "def subsection_handler(request, usage_key_string):\r\n if 'text/html' in request.META.get('HTTP_ACCEPT', 'text/html'):\r\n usage_key = UsageKey.from_string(usage_key_string)\r\n try:\r\n course, item, lms_link = _get_item_in_course(request, usage_key)\r\n except ItemNotFoundError:\r\n return HttpResponseBadRequest()\r\n\r\n preview_link = get_lms_link_for_item(usage_key, preview=True)\r\n\r\n # make sure that location references a 'sequential', otherwise return\r\n # BadRequest\r\n if item.location.category != 'sequential':\r\n return HttpResponseBadRequest()\r\n\r\n parent = get_parent_xblock(item)\r\n\r\n # remove all metadata from the generic dictionary that is presented in a\r\n # more normalized UI. 
We only want to display the XBlocks fields, not\r\n # the fields from any mixins that have been added\r\n fields = getattr(item, 'unmixed_class', item.__class__).fields\r\n\r\n policy_metadata = dict(\r\n (field.name, field.read_from(item))\r\n for field\r\n in fields.values()\r\n if field.name not in ['display_name', 'start', 'due', 'format'] and field.scope == Scope.settings\r\n )\r\n\r\n can_view_live = False\r\n subsection_units = item.get_children()\r\n for unit in subsection_units:\r\n state = compute_publish_state(unit)\r\n if state in (PublishState.public, PublishState.draft):\r\n can_view_live = True\r\n break\r\n\r\n return render_to_response(\r\n 'edit_subsection.html',\r\n {\r\n 'subsection': item,\r\n 'context_course': course,\r\n 'new_unit_category': 'vertical',\r\n 'lms_link': lms_link,\r\n 'preview_link': preview_link,\r\n 'course_graders': json.dumps(CourseGradingModel.fetch(usage_key.course_key).graders),\r\n 'parent_item': parent,\r\n 'locator': usage_key,\r\n 'policy_metadata': policy_metadata,\r\n 'subsection_units': subsection_units,\r\n 'can_view_live': can_view_live\r\n }\r\n )\r\n else:\r\n return HttpResponseBadRequest(\"Only supports html requests\")", "def __repr__(self):\n out = \"\"\n for section_name, section_data in sorted(self.header.items()):\n if section_name== 'Error':\n continue\n out += '\\n'.join(['='*80, \" \"*20 + section_name, '='*80]) + '\\n'\n for key, val in sorted(section_data.items()):\n out += ' - {0} : {1}\\n'.format(key, val)\n out += '\\n'\n return out", "def show_config() -> None:\n with _config_lock:\n config_util.show_config(\n _section_descriptions, cast(Dict[str, ConfigOption], _config_options)\n )", "def show_info(self): \n color= Fore.WHITE\n print(f\"\"\" {color} \nNombre: {self.name} \nRuta: {self.route }\nFecha de salida: {self.departure_date}\"\"\")\n print(\"<\"*8, \">\"*8)\n print(\"El precio por habitacion es:\")\n for key, value in self.prize.items():\n color_value= (Fore.GREEN + str(value))\n color_key= Fore.WHITE + \"Habitacion\" + \" \" + key\n print(f\"\"\" {color_key} : {color_value}$ \"\"\")\n \n print(Fore.WHITE + \"<\"*8, \">\"*8)\n for floor, info in self.floors_info.items():\n piso=(Fore.WHITE + floor)\n print(f\" {piso}:{info} \")\n \n \n print(\"<\"*8, \">\"*8)\n print(\"Capacidad por tipo de habitacion: \")\n for key, value in self.room_capacity.items():\n print(f\"Habitacion {key}: {value} personas \",\"\\t\")\n return \"\"", "def __repr__(self):\n\n (sections, section_titles) = self._get_summary_struct()\n return _tkutl._toolkit_repr_print(self, sections, section_titles, width=30)", "def sections(self):\n raise NotImplementedError()", "def __repr__(self):\n\n (sections, section_titles) = self._get_summary_struct()\n\n return _toolkit_repr_print(self, sections, section_titles, width=30)", "def get_section_view(items, sections, last_modified):\n from django.template.loader import get_template\n from django.template import Context\n tSection = get_template('app/pinboard/section.html')\n #tItem = get_template('app/pinboard/pinboarditem.html')\n tItem = get_template('app/discussboard/discussitem.html')\n content = ''\n unknown = _(u'Beiträge')\n section = '--START--'\n pinitems = []\n comment_counts = get_visible_comment_count_by_item_containers(items)\n for i in items :\n if last_modified < i.last_modified:\n last_modified = i.last_modified\n if section != i.section :\n if section != unknown :\n if section != '--START--' and pinitems != [] :\n cSection = Context ( { 'section': section, 'pinitems': pinitems } )\n 
content += tSection.render ( cSection)\n if i.section in sections :\n section = i.section\n else :\n section = unknown\n pinitems = []\n if i.item.url_more != '':\n more_items = show_link(i.item.url_more, _(u'Weitere Infos ...'), i.item.url_more_extern,\n url_class='navLink')\n else:\n more_items = ''\n item_section = Context ({\n 'name' : i.item.name,\n 'title' : i.item.title,\n 'text' : i.item.text,\n 'user_name' : i.item.string_1,\n 'email' : i.item.string_2,\n 'date' : i.get_last_modified(),\n 'image_url' : i.item.image_url,\n 'image_url_url': i.item.image_url_url,\n 'image_extern' : i.item.image_extern,\n 'last_modified': i.get_last_modified(),\n 'more_infos' : more_items,\n 'comments' : comment_counts[i.item.id]\n })\n pinitems.append(tItem.render(item_section))\n if section != '--START--' and pinitems != []:\n cSection = Context ( { 'section': section, 'pinitems': pinitems } )\n content += tSection.render ( cSection)\n return content, last_modified", "def show(self,mode=0,level=0,ident=''):\n if self.locked : l='L'\n else : l=' '\n tmp= '%sd%-3d %s %-6s %-30s Vendor: %-10s %-10s Size: %10s' % \\\n (ident,self.idx,l,self.name,self.guid.strip()[-29:],self.vendor,self.model,printsz(self.size))\n if level>0:\n tmp+='\\n'+ident+' Paths:'\n for p in self.paths.values() : tmp+='\\n'+p.show(mode,level-1,ident+' ')\n tmp+='\\n'+ident+' Partitions:'\n for p in self.partitions.values() : tmp+='\\n'+p.show(mode,level-1,ident+' ')\n tmp+='\\n'\n return tmp", "def fancy_section_type(section):\n return {\n 'LEC': 'Lecture',\n 'LAB': 'Laboratory',\n 'TUT': 'Tutorial',\n 'PRA': 'Practicum',\n 'COR': 'Correspondence',\n 'SEM': 'Seminar',\n 'ONL': 'Online',\n }.get(section, section)", "def parse_section(self, root, fmt):\n return self.parse_tag(root, fmt)", "def add_section(self, section_name: str) -> None:\n pass", "def add_section(self, section_name: str) -> None:\n pass", "def write_section(self, fhandle, sect):\n fhandle.write(\"[%s]\\n\" % sect)\n for opt in sorted(self.file_parser.options(sect)):\n fhandle.write('{0} = {1}\\n'.format(opt, self.file_parser.get(sect, opt)))", "def display(self):\n\n print('\\n')\n for key, val in self.option.items():\n print(key, val, '\\n') # make it more confortable to read\n self.get_choice() # launch automaticly the choice method after display", "def display(self):\r\n\t\ts = self.options['space']\r\n\t\tv = self.level\r\n\t\tt = self.options['tab']\r\n\t\tp = self.options['sep']\r\n\t\tb = self.options['bullet']\r\n\t\tprint(v*t+b+s+self.text)\r\n\t\tfor each_item in self.items:\r\n\t\t\teach_item.display()", "def about(display=True):\n\n ABOUT_TEXT = \"\"\"\nPre-release version %s (%s) of Topographica; an updated\nversion may be available from topographica.org.\n\nThis program is free, open-source software available under the BSD\nlicense (http://www.opensource.org/licenses/bsd-license.php).\n\"\"\"%(release,version)\n if display:\n print ABOUT_TEXT\n else:\n return ABOUT_TEXT", "def _get_section_scores_html(self):\n ctx_data = {'section_scores': self.student_section_scores}\n\n html = loader.render_django_template(\n 'templates/xblock_jupyter_graded/section_scores.html',\n ctx_data\n )\n\n return html", "def display(self, assignment):\r\n # Subclasses can print in a prettier way, or display with a GUI\r\n print(assignment)", "def show(self):\r\n display(self.grid_part)", "def _write_section(self, outfile, header, rep, inline_vals=True, rebasings=None):\r\n def rebase(txt):\r\n for rebase_from, rebase_to in rebasings:\r\n if rebase_to is None:\r\n if 
rebase_from in txt:\r\n return None\r\n else:\r\n txt = txt.replace(rebase_from, rebase_to)\r\n return txt\r\n\r\n rebasings = rebasings or []\r\n items = []\r\n for k, vals in rep.iteritems():\r\n for v in vals:\r\n item = rebase('%s -> %s%s' % (k, '' if inline_vals else '\\n', v))\r\n if item:\r\n items.append(item)\r\n items.sort()\r\n outfile.write(header + ':\\n')\r\n outfile.write('%d items\\n' % len(items))\r\n for item in items:\r\n outfile.write(item)\r\n outfile.write('\\n')", "def print_object_details(obj: object) -> None:\n print_section(obj, 'Type', print_type)\n print_section(obj, 'Documentation', print_documentation)\n print_section(obj, 'Attributes', print_attributes)\n print_section(obj, 'Methods', print_methods)\n print_section_delimiter()", "def __getitem__(self, section):\n #first translate from CFN into HOT terminology if necessary\n if section not in self.SECTIONS:\n section = HOTemplate20130523._translate(\n section, self._CFN_TO_HOT_SECTIONS,\n _('\"%s\" is not a valid template section'))\n\n if section not in self.SECTIONS:\n raise KeyError(_('\"%s\" is not a valid template section') % section)\n if section in self.SECTIONS_NO_DIRECT_ACCESS:\n raise KeyError(\n _('Section %s can not be accessed directly.') % section)\n\n if section == self.MAPPINGS:\n return {}\n\n if section == self.DESCRIPTION:\n default = 'No description'\n else:\n default = {}\n\n # if a section is None (empty yaml section) return {}\n # to be consistent with an empty json section.\n the_section = self.t.get(section) or default\n\n # In some cases (e.g. parameters), also translate each entry of\n # a section into CFN format (case, naming, etc) so the rest of the\n # engine can cope with it.\n # This is a shortcut for now and might be changed in the future.\n if section == self.RESOURCES:\n return self._translate_resources(the_section)\n\n if section == self.OUTPUTS:\n return self._translate_outputs(the_section)\n\n return the_section", "def _section_write(self, sname, stitle, selname):\n if not self.args.old_galaxy:\n this_sect = etree.SubElement(self.inputs, 'section', name=sname, title=stitle, expanded='False')\n when_yes = None\n else:\n this_sect = etree.SubElement(self.inputs, 'conditional', name=sname)\n this_sect_sel = etree.SubElement(this_sect, 'param', name=selname, type='select',\n label=stitle)\n opt_yes = etree.SubElement(this_sect_sel, 'option', value='yes')\n opt_yes.text = 'yes'\n opt_no = etree.SubElement(this_sect_sel, 'option', value='no', selected='true')\n opt_no.text = 'no'\n when_yes = etree.SubElement(this_sect, 'when', value='yes')\n return this_sect, when_yes", "def __section_style(self):\n font = FontStyle()\n font.set(face=FONT_SERIF, size=10, italic=0, bold=0)\n para = ParagraphStyle()\n para.set_font(font)\n para.set(first_indent=-1.5, lmargin=1.5)\n para.set_top_margin(0.5)\n para.set_bottom_margin(0.25) \n para.set_description(_('The style used for each section.'))\n self.default_style.add_paragraph_style(\"PLC-Section\", para)", "def generate_section_report(self):\n summary_line = [\"No Students in Section\",0,0,0,0,0,0,0.0]\n day_type_list, dates_list = \\\n load_daytypes_lists(self.start_date, self.total_days,\n self.section, self.total_days)\n self.num_schooldays = len(dates_list)\n if len(self.students):\n summary_line[0] = \"Averages\"\n for i in xrange(0,self.total_days):\n if (((day_type_list[i][0] & \\\n SchoolDB.models.StudentAttendanceRecord.school_day)) or\n ((day_type_list[i][1] & \\\n SchoolDB.models.StudentAttendanceRecord.school_day))):\n #skip 
non school days\n self.keys_list.append(\"\")\n self.table_data.append(\n self._generate_section_report_day(\n dates_list[i], summary_line)[0])\n if (len(self.table_data)):\n for i in range(1,8):\n summary_line[i] = \\\n round((float(summary_line[i])/ len(self.table_data)),1)\n self.table_data.append(summary_line)\n self.table_descriptor = \\\n [('date','string','Date'),\n ('m_en', 'number', 'Male En'),\n ('f_en', 'number', 'Female En'),\n ('m_morn', 'number', 'Male Morn'),\n ('f_morn', 'number', 'Female Morn'),\n ('m_aft', 'number', 'Male Aft'),\n ('f_aft', 'number', 'Female Aft'),\n ('percent', 'number', '% Present')]", "def showOnlineHelp(self, section=None, subsection=None, **data):\n template_specification = self.fill_overlay_attributes(None, \"TVB\", \"Online-Help\", \"help/online_help\", \"help\")\n\n # Add URL of the help page\n template_specification[\"helpURL\"] = self.config.get_help_url(section, subsection)\n\n return self.fill_default_attributes(template_specification)", "def display_menu():\n print(\"\"\"\\nChoose option:\n (1) List statistics\n (2) Display 3 cities with longest names\n (3) Display county's name with the largest number of communities\n (4) Display locations, that belong to more than one category\n (5) Advanced search\n (0) Exit program\"\"\")", "def _populate_section(self, algo_group, result_template):\n if algo_group.module == CONNECTIVITY_MODULE:\n result_template[KEY_SECTION] = 'connectivity'\n result_template[KEY_SUB_SECTION] = 'connectivity'\n result_template[KEY_SUBMENU_LIST] = self.connectivity_submenu\n elif algo_group.group_category.display:\n ### Visualizers on the Burst Page\n result_template[KEY_SECTION] = 'burst'\n result_template[KEY_SUB_SECTION] = 'view_' + algo_group.subsection_name\n\n elif algo_group.group_category.rawinput:\n ### Upload algorithms\n result_template[KEY_SECTION] = 'project'\n result_template[KEY_SUB_SECTION] = 'data'\n elif 'RAW_DATA' in algo_group.group_category.defaultdatastate:\n ### Creators\n result_template[KEY_SECTION] = 'stimulus'\n result_template[KEY_SUB_SECTION] = 'stimulus'\n else:\n ### Analyzers\n result_template[KEY_SECTION] = algo_group.group_category.displayname.lower()\n result_template[KEY_SUB_SECTION] = algo_group.subsection_name\n result_template[KEY_SUBMENU_LIST] = self.analyze_adapters", "def _create_section(section: str, description: str) -> None:\n assert section not in _section_descriptions, (\n 'Cannot define section \"%s\" twice.' 
% section\n )\n _section_descriptions[section] = description", "def printinfo(assign, question):\n print(\"Last Name: Bell\")\n print (\"First Name: Daniel\")\n print(\"Student ID: 282911\")\n print(\"Course: CPSC 231\")\n print(\"Tutorial Section: T02\")\n print(\"Assignment: %d\" %assign)\n print(\"Question: %s\" %question)\n print(\"\")", "def display(self):\n statement = f\"\"\"\n ------\n By {self.prescribed_by.name.upper()}\n ------\n Patient Detail!\n Name: {self.prescribed_to.name.capitalize()}\n Age: {self.prescribed_to.age}\n Gender: {self.prescribed_to.gender}\n Prescribed Medicines!\"\"\"\n print(statement)\n self.display_cure()", "def _section_course_info(course_key, access):\r\n course = get_course_by_id(course_key, depth=None)\r\n\r\n section_data = {\r\n 'section_key': 'course_info',\r\n 'section_display_name': _('Course Info'),\r\n 'access': access,\r\n 'course_id': course_key,\r\n 'course_display_name': course.display_name,\r\n 'enrollment_count': CourseEnrollment.num_enrolled_in(course_key),\r\n 'has_started': course.has_started(),\r\n 'has_ended': course.has_ended(),\r\n 'list_instructor_tasks_url': reverse('list_instructor_tasks', kwargs={'course_id': course_key.to_deprecated_string()}),\r\n }\r\n\r\n try:\r\n advance = lambda memo, (letter, score): \"{}: {}, \".format(letter, score) + memo\r\n section_data['grade_cutoffs'] = reduce(advance, course.grade_cutoffs.items(), \"\")[:-2]\r\n except Exception:\r\n section_data['grade_cutoffs'] = \"Not Available\"\r\n # section_data['offline_grades'] = offline_grades_available(course_key)\r\n\r\n try:\r\n section_data['course_errors'] = [(escape(a), '') for (a, _unused) in modulestore().get_course_errors(course.id)]\r\n except Exception:\r\n section_data['course_errors'] = [('Error fetching errors', '')]\r\n\r\n return section_data", "def print_header():\n print('------------------------------------')\n print(' Lesson04')\n print(' Kata Fourteen Assignment')\n print('------------------------------------\\n')", "def _display_name_for_ora_block(self, block):\n unit = modulestore().get_item(block.parent)\n section = modulestore().get_item(unit.parent)\n\n return \"{section}: {unit}\".format(\n section=section.display_name,\n unit=unit.display_name\n )", "def sections(self):\n raise NotImplementedError()", "def display_entry(self, entry):\n border = '-' * 50\n print(border)\n print('Employee: {}'.format(entry.employee_name))\n print('Task Name: {}'.format(entry.task_name))\n print(\"Date: {}\".format(entry.date))\n print(\"Time Spent: {}\".format(entry.time_spent))\n if entry.notes != '':\n print(\"Notes:\\n{}\\n{}\".format('----------', entry.notes))\n print(border)", "def show_contents(self):\n print(self.filename, 'loaded')\n\n table = [['group', 'parameter']]\n for group in self.file:\n table.append([group, self.dict[group]])\n display(HTML(tabulate.tabulate(table, tablefmt='html')))\n\n print('Call directly as an attribute or call (parameter) or (group, parameter) to retrieve data')\n print('Use .show_info(group) to show parameter shapes')", "def print_content(self):\n if self.piece!=None:\n print('%s : %s %s' % (self.name, self.piece.color, self.piece.piece_type))\n else:\n print('%s : empty' % (self.name))", "def display(self):\n while (True):\n self.print()\n choice = self.get_choice()\n if (choice == len(self.options)):\n break\n else:\n self.options[choice].function()", "def _output_section_write(self):\n if not self.args.old_galaxy:\n self.output_sect = etree.SubElement(self.inputs, 'section', name='output_opt', 
title='Additional Output Parameters', expanded='False')\n else:\n self.output_sect = etree.SubElement(self.inputs, 'conditional', name='output_opt')\n self.output_sect_sel = etree.SubElement(self.output_sect, 'param', name='output_opt_sel', type='select',\n label='Additional output parameters?')\n self.opt_yes = etree.SubElement(self.output_sect_sel, 'option', value='yes')\n self.opt_yes.text = 'yes'\n self.opt_no = etree.SubElement(self.output_sect_sel, 'option', value='no', selected='true')\n self.opt_no.text = 'no'\n self.when_yes = etree.SubElement(self.output_sect, 'when', value='yes')", "def print_overview_slide():\r\n print '<div id=\"overview\" class=\"step\" ' \\\r\n ' data-x=\"3000\" data-y=\"1500\" data-scale=\"10\">'\r\n print '</div>'", "def wrap_up_section(section_type, section, output_handle):\n if section_type != 'LKG':\n return\n read_1, read_2 = get_paired_reads(section)\n output_handle.write('\\t'.join([read_1, read_2]) + '\\n')", "def _writeSection(self, sectionName, options):\n return True", "def displayScene(self):\n sceneprint = \"\"\n sceneprint += \" \"*40 + Back.LIGHTRED_EX + Fore.LIGHTCYAN_EX + Style.BRIGHT + \"M A N D A L O R I A N\\n\" + RESET\n sceneprint += Fore.LIGHTBLUE_EX +\"SCORE : \" +\\\n str(self.__score) + \" \"*30 +\"TIME : \" + str(self.__remaining_time) + \" \"*30 +\\\n \"LIVES:\" + str(self.__lives)+\"\\n\"+ RESET\n if self.__start >= self.__fullwidth - self.__width:\n self.__start = self.__fullwidth - self.__width\n for i in range(0, self.__height):\n for j in range(self.__start, self.__start + self.__width):\n sceneprint += str(self.__matrix[i][j])\n sceneprint += '\\n'\n \n if self.__start + sc_span < sc_full - 5:\n self.__start = self.__start + 1\n if self.__score < 420420420:\n self.__score += 1\n pass\n\n return sceneprint", "def print_help(self):\r\n\r\n print (\"\"\"Show data values for assignment.\r\n\r\nUsage:\r\n cat <request or table path>\r\n cat --id <assignment_id> #Where assignment_id provided by 'vers <table path>' command\r\n\r\nFormatting flags:\r\n\r\n -c or --comments - Show comments on/off\r\n -nc or --no-comments\r\n\r\n -ph or --horizontal - Print table horizontally\r\n -pa or --vertical - Print table vertically\r\n (If no '--horizontal' or '--vertical' flag is given, the layout of table is determined automatically:\r\n vertical layout if table has only 1 row and more than 3 columns, horizontal otherwise)\r\n\r\n -b or --borders - Switch show borders on of off\r\n -nb or --no-borders\r\n\r\n -h or --header - Show header on/off\r\n -nh or --no-header\r\n\r\n -t or --time - Show time\r\n -nt or --no-time\r\n\r\nExamples:\r\n > cat /test/test_vars/test_table #print latest data for test_table\r\n > cat /test/test_vars/test_table::subtest #print latest data in subtest variation\r\n > cat /test/test_vars/test_table:::2012-08 #print data latest for august 2012\r\n\r\nSee also 'dump' command which is 'cat' formatted to save data to files. 'help dump'\r\n\r\n \"\"\")", "def get_course_syllabus_section(course, section_key):\r\n\r\n # Many of these are stored as html files instead of some semantic\r\n # markup. 
This can change without effecting this interface when we find a\r\n # good format for defining so many snippets of text/html.\r\n\r\n if section_key in ['syllabus', 'guest_syllabus']:\r\n try:\r\n filesys = course.system.resources_fs\r\n # first look for a run-specific version\r\n dirs = [path(\"syllabus\") / course.url_name, path(\"syllabus\")]\r\n filepath = find_file(filesys, dirs, section_key + \".html\")\r\n with filesys.open(filepath) as html_file:\r\n return replace_static_urls(\r\n html_file.read().decode('utf-8'),\r\n getattr(course, 'data_dir', None),\r\n course_id=course.id,\r\n static_asset_path=course.static_asset_path,\r\n )\r\n except ResourceNotFoundError:\r\n log.exception(\r\n u\"Missing syllabus section {key} in course {url}\".format(key=section_key, url=course.location.to_deprecated_string())\r\n )\r\n return \"! Syllabus missing !\"\r\n\r\n raise KeyError(\"Invalid about key \" + str(section_key))", "def henhouseDisplayMenu () :\r\n print('1.Predict egg production')\r\n print('2.Display needs')\r\n print('0.Exit henhouse management')\r\n print()\r\n print('Please choose an option from the above menu')", "def display(self):\r\n print(self.title, 'written by', self.author)", "def do_show(self, line):\n\t\tif isinstance(self.cl, Book):\n\t\t\tprint(\"Contacts in the current book\\n\")\n\t\t\tself.cl.list_contacts()\n\t\telse:\n\t\t\tprint(\"To see contacts you need to open or create book\")", "def load_sections():\n pass", "def section_4_8():\n pass", "def display(items):\n\n # LOC, COMMENT, ...\n # (same as keys of TYPE_OF_LINE, but better to only rely on items here)\n what = next(iter(items))[1]\n\n # Headers\n print(bcolors.BOLD\n +(\"{:<30}\"+\":{:>10}\"*len(what)).format(\"path\", *what)\n +bcolors.ENDC)\n\n # Lines\n for k,v in items:\n print((bcolors.OKGREEN if v[\"LOC\"] == 0\n else bcolors.FAIL if v[\"COMMENTS\"] == 0\n else bcolors.WARNING if v[\"COMMENTS\"]/v[\"LOC\"] < 0.2\n else bcolors.OKGREEN )\n +(\"{:<30}\"+\":{:>10}\"*len(v)).format(k, *v.values())\n + bcolors.ENDC)", "def show_man_page(self):\n print(Gstr_synopsis)", "def usage(self):\n\n # header\n self.usage_header()\n\n print _(\"\"\"Screen: %(screen)s\nDescription: %(description)s\n\nUsage: %(app_name)s %(screen)s [options]\"\"\") % {\n 'app_name': constants.App.NAME,\n 'screen': self.name,\n 'description': self.description,\n }\n # any additional info in between (see other classes for reference)\n self._usage_options_example()\n\n #footer\n self.usage_footer()", "def get_section(line: str) -> str:\n if len(line) < 2:\n raise Exception(\"Error: Section line can't be shorter than 2\")\n return line[1:len(line) - 1]", "def __getitem__(self, section_id):", "def go_to_section(self, section_title, subsection_title):\r\n\r\n # For test stability, disable JQuery animations (opening / closing menus)\r\n self.browser.execute_script(\"jQuery.fx.off = true;\")\r\n\r\n # Get the section by index\r\n try:\r\n sec_index = self._section_titles().index(section_title)\r\n except ValueError:\r\n self.warning(\"Could not find section '{0}'\".format(section_title))\r\n return\r\n\r\n # Click the section to ensure it's open (no harm in clicking twice if it's already open)\r\n # Add one to convert from list index to CSS index\r\n section_css = 'nav>div.chapter:nth-of-type({0})>h3>a'.format(sec_index + 1)\r\n self.q(css=section_css).first.click()\r\n\r\n # Get the subsection by index\r\n try:\r\n subsec_index = self._subsection_titles(sec_index + 1).index(subsection_title)\r\n except ValueError:\r\n msg = \"Could not find 
subsection '{0}' in section '{1}'\".format(subsection_title, section_title)\r\n self.warning(msg)\r\n return\r\n\r\n # Convert list indices (start at zero) to CSS indices (start at 1)\r\n subsection_css = \"nav>div.chapter:nth-of-type({0})>ul>li:nth-of-type({1})>a\".format(\r\n sec_index + 1, subsec_index + 1\r\n )\r\n\r\n # Click the subsection and ensure that the page finishes reloading\r\n self.q(css=subsection_css).first.click()\r\n self._on_section_promise(section_title, subsection_title).fulfill()", "def display(self):\n print(\"{}, {}\".format(self.label, self.params))", "def get_hierarchy_section_header(self, path):\n section_index = len(path) - 1\n section = path[section_index][0]\n html = f\"Section {section.roman_numeral}: {section.title.capitalize()}\"\n\n return html" ]
[ "0.7210139", "0.71652883", "0.7011607", "0.6947766", "0.6940115", "0.6729903", "0.6606709", "0.6531379", "0.6423387", "0.6411612", "0.64110327", "0.6390477", "0.6239066", "0.6006855", "0.59656584", "0.5911256", "0.5908014", "0.5903914", "0.58947486", "0.5894047", "0.58920234", "0.5869818", "0.57344604", "0.5726867", "0.57255656", "0.5708535", "0.5699384", "0.5670143", "0.5630285", "0.5627663", "0.5625759", "0.559505", "0.5592301", "0.5581311", "0.5575115", "0.5552795", "0.55483776", "0.5537225", "0.55326134", "0.5517509", "0.5494914", "0.54686546", "0.5447233", "0.54468703", "0.54436976", "0.5435018", "0.5428676", "0.54218894", "0.5401391", "0.54004973", "0.53984964", "0.5395698", "0.5395698", "0.53810894", "0.53757113", "0.53616697", "0.535539", "0.534934", "0.53384316", "0.5329452", "0.5319898", "0.53178054", "0.5302483", "0.52933097", "0.52911204", "0.5290761", "0.52820367", "0.52743083", "0.5263124", "0.52625203", "0.52588785", "0.525453", "0.5252174", "0.524457", "0.5238815", "0.5232797", "0.52300656", "0.5229912", "0.5228175", "0.52204496", "0.52120924", "0.521088", "0.5210552", "0.5194999", "0.51892143", "0.5187697", "0.5187447", "0.51867265", "0.51863277", "0.5183351", "0.51795113", "0.5172278", "0.51715", "0.51623166", "0.51526904", "0.51443917", "0.5143678", "0.5143225", "0.5141306", "0.5140054" ]
0.6881852
5
Scraping handler. Initiates scraping of the different sections required by the script.
def scrape_movie(url): soup = get_soup(url) if soup: # scrape all of the sections soup_sections = soup.find('section', {'class': 'article listo content-advisories-index'}) # scrape for the specific sections required soup_certificates = soup_sections.find('section', {'id': 'certificates'}) soup_nudity = soup_sections.find('section', {'id': 'advisory-nudity'}) soup_profanity = soup_sections.find('section', {'id': 'advisory-profanity'}) # further scrape the sections above ratings = parse_certificates(soup_certificates) nudity, nudity_comments = parse_section(soup_nudity) profanity, profanity_comments = parse_section(soup_profanity) # here is where we actually format and show the results display_ratings(ratings) display_section('nudity', nudity, nudity_comments) display_section('profanity', profanity, profanity_comments) else: display_error()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __call__(self):\r\n self.init_data = td.import_data(self.__module__)\r\n self.page1() # GET navigation (requests 101-153)\r\n\r\n grinder.sleep(20)\r\n self.page2() # GET case (requests 201-252)\r\n\r\n grinder.sleep(20)\r\n self.page3() # GET view (requests 301-365)\r\n\r\n grinder.sleep(20)\r\n self.page4() # POST view (requests 401-452)\r", "def start_requests(self):\n self.spider = Base_Spider(LpCfg)\n self.first_url = 'https://www.liepin.com/zhaopin/' \\\n '?industries=&dqs=010&salary=15%2440' \\\n '&jobKind=2&pubTime=3&compkind=&compscale=' \\\n '&industryType=&searchType=1&clean_condition=' \\\n '&isAnalysis=&init=1&sortFlag=15&flushckid=1' \\\n '&fromSearchBtn=2&headckid=0b5a9690a5cb1d82&key=Python'\n urls = []\n s = self.spider.get_content(self.first_url)\n self.cookies = self.spider.session.cookies.get_dict()\n del s\n self.spider.headers.update({'Cookie': self.cookies})\n for page in range(1,5):\n url = self.first_url + '&curPage=%d'%page\n urls.append(url)\n for url in urls:\n print url\n yield scrapy.Request(url=url,\n callback=self.parse,\n headers=self.spider.headers,\n cookies=self.cookies\n )", "def start_requests(self):\n\n base_url = \"https://www.f3motorauctions.com.au/search_results.aspx?sitekey=F3A&make=All%20Makes&model=All%20Models&keyword=&fromyear%20=From%20Any&toyear=To%20Any&body=All%20Body%20Types\"\n self.init_data()\n yield scrapy.Request(\n url = base_url, callback = self.parse_all_cars_within_page)", "def scrape(self):\n pass", "def _scrape(self):", "def start(self):\n print \"starting to crawler qsbk's page(Enter Q or q to quit)\"\n print\n self.enable = True\n self.load_page()\n # a variabel to control counts\n nowpage = 0\n while self.enable:\n if len(self.stories) > 0:\n # get a page stories\n page_stories = self.stories[0]\n nowpage += 1\n del self.stories[0]\n # print stories\n self.print_one_story(page_stories, nowpage)", "def crawler(self):\n\n\t\tfor page in range(self.first_page, self.last_page+1):\n\t\t\tprint(\"\\nCrawling Page \" + str(page))\n\t\t\tpage_url = self.site_url + \"?page=\" + str(page) +\\\n\t\t\t \"&index=prod_all_products_term_optimization\"\n\t\t\t\n\t\t\tself.scrape_features(page_url)", "def main():\n download_insert_title_basics()\n download_insert_title_principals()\n download_insert_name_basics()\n download_insert_title_ratings()\n scrap_keywords()\n create_and_insert_soup()\n return", "def deal_with_sections(self):\n self.data_sections = []\n self.create_parser_sections(self.soup)", "def start_requests(self):\n NUM_PAGES = 74\n warnings.warn(\n 'ECACCSpider: Num pages is hard-coded!'\n )\n \n URL_TEMPLATE = \"https://www.phe-culturecollections.org.uk/products/celllines/generalcell/browse.jsp?a2z=All&d-49653-p={}\"\n urls = [\n URL_TEMPLATE.format(i) \n for i in range(1, NUM_PAGES+1)\n ]\n for url in urls:\n yield scrapy.Request(\n url=url,\n callback=self.parse_catalog_page\n )", "def setUp(self):\n self.scraped = SimpleContent(Selector.from_text(SIMPLE_HTML))", "def start_requests(self):\r\n try:\r\n\r\n for url in self.start_urls:\r\n yield scrapy.Request(url,\r\n callback=self.navigate_to)\r\n except Exception as err:\r\n logger.error(f'TekDefenceScraper : start_requests : {err}')\r\n raise err", "def start(self):\n self.start_spider()\n self.start_ranker()\n\n concurrent.futures.wait(self.spider_thread_futures) # wait for spiders to finish\n self.logger.info(\"Done crawling\")\n self.ranker.done_crawling.set()\n\n self.ranker.print_ranks()", "def __init__(self, script=None, **kwargs):\n super().__init__()\n\n # 
The splash lua script. Provide a custom lua script to fit your use case.\n if script:\n self.LUA_SOURCE = script\n else:\n self.LUA_SOURCE = get_data(\n 'transistor',\n 'scrapers/scripts/basic_splash.lua').decode('utf-8')\n\n # after calling super().__init__(), call self.start_http_session()\n\n # ------------------ kwargs ---------------- #\n # Set these as needed in your subclass with keywords or hardcoded.\n self.baseurl = kwargs.pop('baseurl', None)\n self.searchurl = kwargs.pop('searchurl', None)\n self.crawlera_user = kwargs.pop('crawlera_user', None)\n self.name = kwargs.pop('name', None)\n self.referrer = kwargs.pop('referrer', None)\n self.user_agent = kwargs.pop('user_agent',\n \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) \"\n \"AppleWebKit/537.36 (KHTML, like Gecko) \"\n \"Chrome/73.0.3683.86 Safari/537.36\")\n self.max_retries = kwargs.pop('max_retries', 5)\n self.http_session_timeout = kwargs.pop('http_session_timeout', (3.05, 10.05))\n self.splash_args = kwargs.pop('splash_args', None)\n self.splash_wait = kwargs.pop('splash_wait', 3.0)\n self.js_source = kwargs.pop('js_source', None)\n\n # ----- kwargs only used for testing setup ----- #\n self._test_true = kwargs.get('_test_true', False)\n self._test_page_text = kwargs.get('_test_page_text', None)\n self._test_status_code = kwargs.get('_test_status_code', None)\n self._test_url = kwargs.get('_test_url', None)\n self._test_soup_config = kwargs.get('_test_soup_config', None)\n # ----- end kwargs for testing setup ----- #\n\n # ------ flags for internal use --------- #\n # For example, if a public method on your scraper returns\n # None undesirably, switch the self._result flag to False.\n # Then, you can just delete scrape results if flagged False.\n self._result = True\n # ------- /end internal use flags -------- #\n\n # Whether we already have a valid HTTP session with the remote server\n self.http_session_valid = False\n\n # ssl._create_default_https_context = ssl._create_unverified_context\n self._crawlera_ca = get_data('transistor',\n 'scrapers/certs/crawlera-ca.crt').decode('utf-8')\n\n ssl.create_default_context(cadata=self._crawlera_ca)\n\n self.browser = SplashBrowser(\n soup_config={'features': 'lxml'},\n requests_adapters={'http://': HTTPAdapter(max_retries=self.max_retries)})\n\n self.cookies = dict_from_cookiejar(self.browser.session.cookies)\n\n # set the splash basic authorization\n self.auth = basic_auth_header(\n username=os.environ.get('SPLASH_USERNAME', 'user'),\n password=os.environ.get('SPLASH_PASSWORD', 'userpass'))\n self.browser.session.headers.update({'Authorization': self.auth})", "def _start(self):\n\n\t\tr = self.send_request(self._src_url)\n\n\t\tsoup = BeautifulSoup(r.content, \"html.parser\")\n\n\t\tchap = ChapterInfo.from_soup(soup)\n\n\t\tself._title = chap.title\n\n\t\twith tempfile.TemporaryDirectory() as temp_dir:\n\t\t\timage_paths = self._download_images(chap.image_urls, temp_dir)\n\n\t\t\tnum_pages = self._create_pdf(image_paths)\n\n\t\t\tself._percent_saved = num_pages / len(chap.image_urls)", "def run(self):\n\n for url in self.urls:\n try:\n # Use requests to retrieve web page data\n print(url)\n response = session.get(url, ) # allow_redirects=True)\n\n if response.status_code != 200:\n print('Failed to retrieve page, URL: {0}, error: {1}\\n'.format(url, response.status_code))\n return\n\n # Get web page data from HTML response\n content = get_json_data(response.text)\n\n # Compile data into dictionary to be used for reporting\n summary_data = generate_report(content)\n\n # 
Generate/print report\n print_report(summary_data)\n\n except Exception as error:\n print('Scraper failed to run for URL {0}, error: {1}, {2}\\n'.format(\n url, type(error).__name__, error\n ))\n\n # time.sleep(1) # for load concerns", "def start_requests(self):\n # Spider settings are not available during __init__, so finalizing here\n settings_output_dir = self.settings.get(\"OUTPUT_DIR\")\n output_root = os.path.join(settings_output_dir, \"epaipm\")\n self.output_dir = new_output_dir(output_root)\n\n yield Request(\"https://www.epa.gov/airmarkets/\"\n \"national-electric-energy-data-system-needs-v6\")", "def setup(self):\n\n setup_data = get_setup_data()\n\n self.base_url = setup_data['base_url']\n browser = setup_data['browser'].lower()\n if browser == 'chrome':\n self.driver = webdriver.Chrome()\n elif browser == 'firefox':\n self.driver = webdriver.Firefox()\n elif browser == 'phantomjs':\n self.driver = webdriver.PhantomJS()\n else:\n error = (\n '{0:s} is not a supported browser. Supported browsers: '\n 'Chrome, Firefox, PhantomJS'.format(browser)\n )\n\n self.log.error(error)\n raise ValueError(error)\n self.log.info(\n 'Setup data:\\nURL: {0}\\nBrowser: {1}'.format(\n self.base_url, browser\n )\n )\n self.log.info('Load main page')\n self.driver.get(self.base_url)", "async def init(self):\n self.base_templates = {}\n self.preparing_task = None\n self.app = aioweb.Application()\n self.runner = aioweb.AppRunner(self.app)", "def main():\n\n from scrapy.crawler import CrawlerProcess\n from scrapy.utils.project import get_project_settings\n\n process = CrawlerProcess(get_project_settings())\n process.crawl(NCBIGeoSpider)\n process.start()", "async def initialiser_crawler(self) -> Dict[str, List[req.Response]]:\n web_pages = {}\n with ThreadPoolExecutor(max_workers=NUM_WORKERS) as exe:\n try:\n loop = asyncio.get_event_loop()\n tasks = [\n loop.run_in_executor(exe, self.collect_webpages, keyword)\n for keyword in self.keywords \n ]\n for res in await asyncio.gather(*tasks):\n web_pages.update(res)\n except KeyboardInterrupt:\n loop.close()\n raise KeyboardInterrupt\n return web_pages", "def boot():\n\t\tcreate_project_url_dir(Spider.project_name)\n\t\tcreate_url_data(Spider.project_name, Spider.base_url)\n\t\tSpider.queue = file_to_set(Spider.queue_file)\n\t\tSpider.crawled = file_to_set(Spider.crawled_file)", "def crawl(self):\n try:\n self.crawl_pages()\n self.crawl_posts()\n self.crawl_comments()\n except Exception as exception:\n self.handle_request_limit(exception)", "async def setup(self):\n load_base_templates()\n uris = URI.gather()\n for uri, resource in uris.items():\n methods = resource.methods\n if \"get\" not in methods:\n methods[\"get\"] = None\n\n for method in methods.keys():\n self.app.add_routes([\n getattr(aioweb, method)(uri, resource.process)\n ])\n self.app.add_routes([aioweb.get(\"/hello\", hello)])\n\n # TMP code\n max_age = 3600 * 24 * 365 # 1 year\n setup(self.app, PonyStorage(max_age=max_age))\n self.preparing_task = asyncio.create_task(self.prepare_web())", "def setup_page(self):\n raise NotImplementedError", "def set_pages(self, selenium: fixture) -> None:\n self.API: RecurringRidesAPI = RecurringRidesAPI()\n self.details: Details = Details(selenium)\n self.rides: Rides = Rides(selenium)", "def scrape_main() -> None:\n\n logger.info(\"Starting scrape\")\n search_info = construct_scrape_regex_patterns(grab_scrape_info())\n links = run_scrape(\n url=search_info['url'],\n seasons_regex=search_info['seasons'],\n episodes_regex=search_info['episodes']\n )\n if 
links:\n logger.debug(\"Writing urls to file\")\n with open('urls.txt', 'w') as f:\n for link in links:\n f.write(link + '\\n')\n else:\n logger.warning(\"No links available\")", "def setup_page(self):\r\n raise NotImplementedError", "def start_requests(self):\n url = self.start_urls[0]\n yield scrapy.Request(url=url, callback=self.parse)", "def start_requests(self):\n # This predefined list of URLs is chosen to include all types of\n # inquiries possible in the Austrian parliament in order to provide a\n # suitable testing surface for new functions.\n # urls = [\"https://www.parlament.gv.at/PAKT/VHG/XXV/JPR/JPR_00019/index.shtml\", \"https://www.parlament.gv.at/PAKT/VHG/XXV/JPR/JPR_00016/index.shtml\", \"https://www.parlament.gv.at/PAKT/VHG/XXV/J/J_06954/index.shtml\", \"https://www.parlament.gv.at/PAKT/VHG/XXV/M/M_00178/index.shtml\", \"https://www.parlament.gv.at/PAKT/VHG/XXV/JEU/JEU_00003/index.shtml\", \"https://www.parlament.gv.at/PAKT/VHG/XXV/J/J_06758/index.shtml\", \"https://www.parlament.gv.at/PAKT/VHG/BR/J-BR/J-BR_03089/index.shtml\",\n # \"https://www.parlament.gv.at/PAKT/VHG/BR/J-BR/J-BR_03091/index.shtml\", \"http://www.parlament.gv.at/PAKT/VHG/BR/J-BR/J-BR_01155/index.shtml\", \"http://www.parlament.gv.at/PAKT/VHG/XX/J/J_06110/index.shtml\", \"http://www.parlament.gv.at/PAKT/VHG/XX/J/J_06651/index.shtml\", \"http://www.parlament.gv.at/PAKT/VHG/XX/J/J_04024/index.shtml\", \"http://www.parlament.gv.at/PAKT/VHG/XX/J/J_04025/index.shtml\", \"https://www.parlament.gv.at/PAKT/VHG/XX/M/M_00178/index.shtml\"]\n urls = [] if not self.url_override else [self.url_override]\n\n if self.LLP and not self.url_override:\n for i in self.LLP:\n for nrbr in ['NR', 'BR']:\n roman_numeral = roman.toRoman(i)\n options = self.URLOPTIONS.copy()\n options['GP'] = roman_numeral\n options['NRBR'] = nrbr\n url_options = urlencode(options)\n url_llp = \"{}?{}\".format(self.BASE_URL, url_options)\n rss = feedparser.parse(url_llp)\n\n self.logger.info(\"GP {}: {} inquiries from {}\".format(\n roman_numeral, len(rss['entries']), nrbr)\n )\n urls = urls + [entry['link'] for entry in rss['entries']]\n self.TOTAL_COUNTER = len(urls)\n for url in urls:\n yield self.make_requests_from_url(url)", "def parse_main(self, response):\n\n for i in response.xpath('//div[contains(@class,\"products-list__item\")]'):\n item = {\n \"VENDORID\": 1055,\n \"VENDOR\": 'JC SALES',\n \"ITEMNO\": i.xpath('.//span[contains(text(),\"Item No:\")]/text()').get().replace('Item No:', '').strip(),\n \"DESCRIPTION\": i.xpath('.//div[contains(@class,\"product-card__name\")]//a/text()').get(),\n \"IMAGE_URL\": i.xpath('.//div[contains(@class,\"product-card__image\")]//img[1]/@src').get(),\n \"PAGE_TITLE\": response.css('title::text').get(),\n \"PAGE_URL\": response.request.url\n }\n yield Request(response.urljoin(i.xpath('.//a[contains(@class,\"image__body\")]/@href').get()),\n self.parse_details, meta={'item': item})\n\n next_page = response.xpath('//a[text()=\">\"]/@href').get()\n if next_page is not None:\n next_page = response.urljoin(next_page)\n yield scrapy.Request(next_page, callback=self.parse_main)", "def init_page_elements(self):\n pass", "def __call__(self):\n self.page1() # GET supercars.do (requests 101-111)\n\n grinder.sleep(2117)\n self.page2() # GET cars.do (requests 201-202)\n\n grinder.sleep(1867)\n self.page3() # GET car.do (request 301)\n\n grinder.sleep(4351)\n self.page4() # GET enquire.do (requests 401-402)\n\n grinder.sleep(16341)\n self.page5() # POST enquire.do (request 501)\n\n grinder.sleep(1309)\n self.page6() 
# GET supercars.do (request 601)\n\n grinder.sleep(669)\n self.page7() # GET cars.do (requests 701-702)\n\n grinder.sleep(1260)\n self.page8() # GET car.do (request 801)\n\n grinder.sleep(837)\n self.page9() # GET car.do (request 901)\n\n grinder.sleep(1108)\n self.page10() # GET search.do (request 1001)\n\n grinder.sleep(3146)\n self.page11() # POST search.do (requests 1101-1102)\n\n grinder.sleep(2822)\n self.page12() # POST search.do (request 1201)\n\n grinder.sleep(1333)\n self.page13() # GET sell.do (request 1301)\n\n grinder.sleep(17417)\n self.page14() # POST sell.do (request 1401)\n\n grinder.sleep(6680)\n self.page15() # GET insurance.do (request 1501)\n\n grinder.sleep(600)\n self.page16() # GET about.do (requests 1601-1602)\n\n grinder.sleep(584)\n self.page17() # GET supercars.do (request 1701)\n\n grinder.sleep(1049)\n self.page18() # GET cars.do (requests 1801-1802)\n\n grinder.sleep(2901)\n self.page19() # GET car.do (request 1901)\n\n grinder.sleep(1441)\n self.page20() # GET car.do (request 2001)\n\n grinder.sleep(791)\n self.page21() # GET supercars.do (request 2101)\n\n grinder.sleep(1365)\n self.page22() # GET cars.do (request 2201)\n\n grinder.sleep(1067)\n self.page23() # GET supercars.do (request 2301)\n\n grinder.sleep(1284)\n self.page24() # GET cars.do (request 2401)\n\n grinder.sleep(879)\n self.page25() # GET supercars.do (request 2501)\n\n grinder.sleep(1066)\n self.page26() # GET cars.do (request 2601)\n\n grinder.sleep(974)\n self.page27() # GET supercars.do (request 2701)", "def set_up(self):\n for section_name, section_body in self.sections.items():\n scenario = Scenario(section_name, section_body)\n self.scenarios.append(scenario)", "def start(self):\n\t\tself.app.printflush('Sitemap: ' + self.sitemap_url)\n\t\tself.getUrlsList()\n\t\tself.app.printflush('Fetched: ' + str(self.fetched_count))\n\t\tself.app.printflush('Processes: ' + str(self.processes))\n\t\tself.CheckURLs()\n\t\tself.printReport()", "def perform_scraping(current_session):\r\n\r\n # List storing all relevant decision information\r\n final_data_fetch = []\r\n pagination_index = global_constants['NUMBER_PAGE_TO_SCRAPE_FIRST']\r\n while pagination_index < global_constants['NUMBER_PAGE_TO_SCRAPE_LAST']:\r\n print(\"Page:\", pagination_index, \" Collected records:\", len(final_data_fetch))\r\n\r\n # Get relevant admit-reject page based on pagination value\r\n result = current_session.get(global_constants['ALL_RESULTS_URL'] + str(pagination_index),\r\n headers=dict(referer=global_constants['ALL_RESULTS_URL']))\r\n tree = lxml_html.fromstring(result.content)\r\n\r\n # Get Nodes containing individual decisions for each page (approx. 20 per page)\r\n decision_buckets = tree.xpath('//*[@class=\"row\"]/div[@class=\"col-sm-6\"]/div[@class=\"panel panel-warning\"]/div[@class=\"panel-body\"]')\r\n\r\n # If decision buckets are empty, captcha page has been encountered\r\n if len(decision_buckets) == 0:\r\n print(\"Captcha Time\")\r\n time.sleep(120)\r\n continue\r\n\r\n for individual_decision_bucket in decision_buckets:\r\n\r\n current_admit_status = ((individual_decision_bucket.xpath('./div[1]/div[2]/label'))[0]).text.strip()\r\n\r\n # Fetch results only if ADMIT or REJECT\r\n if current_admit_status.lower() == 'admit' or current_admit_status.lower() == 'reject':\r\n\r\n # Get relevant information from html page returned in response\r\n current_bucket_university_course = ((individual_decision_bucket.xpath('./div[1]/div[1]/h4/small'))[0]).text.replace(\"\\n\",\"\").strip()\r\n current_gre = 
get_gre_or_toefl(((((individual_decision_bucket.xpath('./div[2]/div[1]'))[0]).getchildren())[1]).tail)\r\n current_toefl = get_gre_or_toefl(((((individual_decision_bucket.xpath('./div[2]/div[2]'))[0]).getchildren())[1]).tail)\r\n current_gpa = get_gpa(((((individual_decision_bucket.xpath('./div[2]/div[3]'))[0]).getchildren())[1]).tail)\r\n current_workex = get_workex_months(((((individual_decision_bucket.xpath('./div[2]/div[4]'))[0]).getchildren())[1]).tail)\r\n\r\n current_university, current_course = split_bucket_university_course(current_bucket_university_course.lower())\r\n # Append decision information to final bucket only if minimum criteria met\r\n if current_university is not None and filter_criteria_met(current_gre, current_gpa, current_toefl):\r\n\r\n # Get UG College from profile of user\r\n profile_page_path = ((individual_decision_bucket.xpath('./div[1]/div[1]/h4/a'))[0]).attrib['href']\r\n profile_result = current_session.get(global_constants['HOME_PAGE'] + profile_page_path,\r\n headers=dict(referer=global_constants['PAST_RESULTS_URL']))\r\n profile_tree = lxml_html.fromstring(profile_result.content)\r\n ug_details_bucket = (profile_tree.xpath('//div[@class=\"col-sm-12 card\"][1]'))\r\n if len(ug_details_bucket) >= 1:\r\n ug_details_bucket = ug_details_bucket[0]\r\n current_ug_course = ((ug_details_bucket.xpath('./div[1]/div[7]/p[1]/b[1]'))[0]).text.replace(\"\\n\", \"\").strip()\r\n current_ug_college = ((ug_details_bucket.xpath('./div[1]/div[7]/p[2]'))[0]).text.replace(\"\\n\", \"\").strip()\r\n\r\n final_data_fetch.append([current_course, current_university, current_gpa, current_gre, current_toefl,\r\n current_workex, current_ug_course, current_ug_college, current_admit_status])\r\n\r\n # Add sleep time to allow for web scraping in an undetected manner\r\n sleep_delay = random.choice([0, 1, 2, 3])\r\n time.sleep(sleep_delay)\r\n pagination_index += 1\r\n\r\n # Export final_data to excel sheet\r\n export_to_file(final_data_fetch)", "def crawl(self) -> Dict[str, List[req.Response]]:\n loop = asyncio.get_event_loop()\n try:\n web_pages = loop.run_until_complete(\n asyncio.ensure_future(self.initialiser_crawler())\n )\n except KeyboardInterrupt:\n loop.close()\n raise KeyboardInterrupt\n return web_pages", "def general_scraper(section_url):\n\n prefix = \"http://mesva.univaq.it\"\n\n request = []\n news = []\n\n for i, url in enumerate(section_url):\n request.append(requests.get(url))\n news_division = BeautifulSoup(request[i].text, \"html.parser\").find(class_=\"view-content\")\n\n discab_news = news_division.find_all(\"div\", recursive=False)[0:5]\n\n for single_news in discab_news:\n news.append({\n 'description': '',\n 'title': single_news.a.string,\n 'link': prefix + single_news.a['href']\n })\n\n return news", "def __init__(self, url, epRange):\n self.driver = webdriver.PhantomJS()\n self.downloads = OrderedDict() # sort episodes in ascending order\n self.pbar = \"\" # Download progress bar\n self.Main(url, epRange)", "def page_data():\n return scrape()", "def parse(self, response):\n page_source = self.upwork_controller.get_source_home()\n\n # Hand-off between Selenium and Scrapy happens here\n sel = Selector(text=page_source)\n # Extract data\n sections = sel.xpath(\"//section/div\")\n\n for section in sections:\n selector = Selector(text=section.get())\n jobtitle = selector.xpath(\"//div/div/div/h4/a/text()\")\n jobdescription = selector.xpath(\"//div/div/div/div/div/div/div/span/span/text()\")\n hourlypay = selector.xpath(\"//div/div/div/div/small/span/strong/text()\")\n 
proposals = selector.xpath(\"//div/div/div/div/div/span/small/strong/text()\")\n country = selector.xpath(\"//div/div/div/div/small/span/span/span/span/strong[@class='text-muted client-location ng-binding']/text()\")\n\n job = Job(jobtitle=jobtitle.get(),\n jobdescription=jobdescription.get(),\n hourlypay=hourlypay.get(),\n proposals=proposals.get(),\n country=country.get())\n job.serialize()\n yield job.dict()", "def crawl(self):\n\n # create helper process and setup IPC\n self.socket.listen(1)\n help_out_fd = open(self.helper_outfile, \"w\")\n with subprocess.Popen(\"./crawl_helper.py\", stdout=help_out_fd, stderr=subprocess.STDOUT) as proc:\n self.helper_pid = proc.pid\n try:\n conn, _ = self.socket.accept()\n # create initial params for crawler helper and send them\n new_urls = set()\n setup_params = {\"start_urls\": self.start_urls, \"allowed_domains\": [self.domain],\n \"cookies\": self.cookies, \"user_agent\": self.config[\"user_agent\"]}\n ipc_operations.send_object(conn, setup_params)\n\n # loop: receive a response object, then send new URLs to crawl. Catch & handle problems.\n while True:\n try:\n proc.wait(timeout=0.001)\n break\n except subprocess.TimeoutExpired:\n response = ipc_operations.receive_object(conn)\n if not response: # socket is dead / closed\n break\n new_urls = self.process_response(response)\n ipc_operations.send_object(conn, new_urls)\n except socket.timeout:\n util.printit(\"Unix socket connection to scrapy crawler unexpectedly broke. \" +\n \"Quitting crawling of %s\" % self.base_url, color=util.RED)\n break\n finally:\n # ensure connection is closed and helper process killed in any case\n conn.close()\n proc.kill()\n\n # after the actual crawling, extract all the gathered cookies from Selenium\n if self.config[\"use_selenium\"].lower() == \"true\":\n selenium_cookies = self.driver.get_cookies()\n for cookie in selenium_cookies:\n if not any(cookie[\"name\"] == c[\"name\"] and cookie[\"path\"] == c[\"path\"] and\n cookie[\"domain\"] == c[\"domain\"] for c in self.found_cookies):\n parsed_cookie = {}\n for key in (\"name\", \"path\", \"domain\", \"httpOnly\", \"secure\"):\n parsed_cookie[key] = cookie[key]\n self.found_cookies.append(parsed_cookie)\n\n help_out_fd.close()\n return self.create_results()", "def start_requests(self):\n for url in self.start_urls:\n yield scrapy.Request(url, callback=self.parse)", "def start_requests(self):\n yield scrapy.Request(url=self.start_urls[0])", "async def scrape_and_post(self):\n # Scrape latest challenge posts\n challenges = self.scraper.scrape()\n await self._update_rooms(challenges)", "def start_requests(self):\n for url in self.start_urls:\n yield SplashRequest(\n url=url,\n callback=self.parse,\n method=\"GET\",\n endpoint=\"execute\",\n args={\"wait\": 15.0, \"lua_source\": self.lua_script},\n )", "def set_up(self, web_driver):\n self.driver = web_driver\n self.wait = WebDriverWait(self.driver, 60)\n\n self.google_page = GoogleSearchPage(self.driver, locator, conf, message)\n self.flipkart_page = FlipkartPage(self.driver, locator, message)", "def dm_setup(self):\n dispatcher.connect(\n self.dequeue_next_page_requests,\n signal=signals.spider_idle\n )\n self._was_setup_called = True", "def start_requests(self):\r\n yield Request(url=MooreSpider.start_url,\r\n callback=self.parse_directory_list,\r\n method=\"GET\")", "def scrape(site=''):\n scraper.scrape(get_site_config(site))", "def start_requests(self):\n\n yield SeleniumRequest(\n url='https://www.iayt.org/search/newsearch.asp',\n 
callback=self.get_iframe_document_src,\n wait_time=5\n )", "def main(self):\n\n\t\tparsed_href = self.__load_webpage()\n\n\t\tsmart_attr_to_drive_list_map = self.__get_smart_attr_headers_params(parsed_href)\n\t\tdrives_by_manufacturer = self.__get_drives_grouped_by_mfg(smart_attr_to_drive_list_map)\n\t\tdrive_attributes, mfg_reports_same_attrs = self.__manufacturer_reported_params(smart_attr_to_drive_list_map)\n\t\t\n\t\treturn smart_attr_to_drive_list_map, drives_by_manufacturer, drive_attributes, mfg_reports_same_attrs", "def _subpage_crawler(self):\n while True:\n try:\n family = self.product_families.pop()\n except IndexError:\n break\n\n with closing(urlopen(self.base_url + family)) as product_list_page:\n product_list_soup = BeautifulSoup(product_list_page, 'html.parser')\n product_list = product_list_soup.find_all(self.product_list_tag[\"name\"], self.product_list_tag[\"attrs\"])\n for product in product_list:\n product_url = product.find(self.product_url_tag[\"name\"]).get('href')\n self.product_links[product_url] = family\n # print(product_url)", "def _process_html(self) -> None:\n\n row_xpath = f\"{self.y}_radGridOpinions_ctl00\"\n more_rows = self.html.xpath(f\"//tr[contains(@id, '{row_xpath}')]\")\n for row in more_rows:\n self.cases.append(\n {\n \"date\": row.xpath(\".//td[3]\")[0].text_content(),\n \"name\": row.xpath(\".//td[4]\")[0].text_content(),\n \"url\": row.xpath(\".//td[2]/a\")[0].attrib[\"href\"],\n \"docket\": row.xpath(\".//td[2]/a\")[0].attrib[\"title\"],\n \"status\": \"Published\"\n if row.xpath(\".//td[5]\")[0].text_content() == \"pub\"\n else \"Unpublished\",\n }\n )\n\n del self.data[f\"{self.x}$btnSearch\"]\n rad_script = f\"{self.x}${self.x}$radGridOpinionsPanel|{self.x}$radGridOpinions$ctl00$ctl03$ctl01$ctl10\"\n\n # switch to search mode for pagination\n self.data[f\"{self.x}$searchMode\"] = \"search\"\n self.data[\"__ASYNCPOST\"] = \"true\"\n self.data[\"RadAJAXControlID\"] = f\"{self.y}_radAjaxManager1\"\n self.data[\"ctl00$RadScriptManager1\"] = rad_script\n\n last = self.html.xpath(\n \"//div[@class='rgWrap rgNumPart']/a/span/text()\"\n )[-1]\n\n page_content = None\n current_xp = \"//a[@class='rgCurrentPage']/span/text()\"\n while last != (current_page := self.html.xpath(current_xp)[0]):\n self._update_pagination_data(page_content, current_page)\n page_content = (\n self.request[\"session\"]\n .post(self.url, headers=self.headers, data=self.data)\n .text\n )\n self.html = fromstring(page_content)\n rows = self.html.xpath(f\"//tr[contains(@id, '{row_xpath}')]\")\n for row in rows:\n self.cases.append(\n {\n \"date\": row.xpath(\".//td[3]\")[0].text_content(),\n \"name\": row.xpath(\".//td[4]\")[0].text_content(),\n \"url\": row.xpath(\".//td[2]/a\")[0].attrib[\"href\"],\n \"docket\": row.xpath(\".//td[2]/a\")[0].attrib[\"title\"],\n \"status\": \"Published\"\n if row.xpath(\".//td[5]\")[0].text_content() == \"pub\"\n else \"Unpublished\",\n }\n )", "def parse_all(self):\n\n # Generates a list of apartment urls\n self.parse_apartment_urls()\n\n # Parses each apartment url and stores it in apartment_data\n for apartment_url in self.apartment_urls:\n self.parse_single_page(apartment_url)", "def open_spider(self, spider):\n pass", "def start_requests(self):\n try:\n # query = \"select website from company where status is null ORDER BY id asc limit 10000\"\n query = \"select website from company2 where web_addr = 'Web Address';\"\n self.cursor.execute(query)\n numrows = self.cursor.rowcount\n for x in range(0, numrows):\n row = 
self.cursor.fetchone()\n detailed_pag_url = str(row[0])\n yield scrapy.Request(url=detailed_pag_url, callback=self.parse)\n except MySQLdb.Error as e:\n print(\"Database connection Error\", e)", "def start_requests(self):\n yield Request(self.base_url, \n callback=self.parse_urls)", "def setup_simulation(self, **kwargs):\n\n self.distance = self.config[\"site\"][\"distance\"]\n self.num_substations = self.config[\"num_substations\"]\n\n self.initialize_substructure_production()\n self.initialize_installation_vessel()", "def init_job_page(self, base_url):\n self.driver.get(base_url)\n self.driver.implicitly_wait(100)", "def scrape(self):\n\n self.url = self.headline.url\n\n # Should raise exception...\n if not self.parsing_template:\n return None, None, None, None, None\n\n try:\n response = self.download()\n self.source = response.text\n except:\n return None, None, None, None, None\n\n soup = BeautifulSoup(response.content, \"html.parser\")\n\n if soup:\n return self.parse(soup)\n else:\n return None, None, None, None, None", "def start_requests(self):\n # Load sitemap JSON - generate queues\n if self.args.sitemap_json is None:\n yield Request('https://repo1.maven.org/maven2/', callback=self.spider.parse_page, meta=dict())\n return\n\n for req in self.gen_links(self.args.sitemap_json):\n yield req", "def scrape(self):\n\n self.jobs_load()\n self.new_jobs = []\n\n for bot in self.bot_squad:\n self.new_jobs += bot.scrape_all_pages()\n\n self.statistics(self.new_jobs)\n print('SCRAPE COMPLETE. NOTE: Resulting job list still in RAM')\n print('We observed %d new jobs' % len(self.new_jobs))", "def crawl(self):\n self.get('http://code.google.com/p/webscraping/')\n self.get('http://code.google.com/p/sitescraper/')\n QTimer.singleShot(5000, self.app.quit)", "def performPostModuleDiscoveryTasks(self):\n\n print(\"!\"*80)\n print(\"You are being watched!!!\")\n print(\"!\"*80)\n\n self.webWidget = slicer.qSlicerWebWidget()\n slicer.util.mainWindow().statusBar().addWidget(self.webWidget)\n\n self.logic = AnalyticsLogic()\n self.logic.watchAndReport(self.reportToGoogle)", "def setup(self):\n while True:\n try:\n self.driver.get(self.ROOT_URL)\n self._navigate_by_xpath()\n return\n except StaleElementReferenceException:\n continue\n except WebDriverException as err:\n logging.error(repr(err))\n continue", "def webScraper(self):\n try:\n self.covid_df = pd.read_csv(self.COVID_URL)\n except:\n sys.exit('COVID data is unavailable at source.')\n \n latest_date = self.covid_df['date'].max()\n earliest_date = self.covid_df['date'].min()\n self.covid_df = self.covid_df[self.covid_df['date'] == self.date.strftime('%Y-%m-%d')]\n \n if self.covid_df.empty:\n exit_string = 'Requested date not available. 
Latest date available is ' + latest_date + ' while earliest is ' + earliest_date\n sys.exit(exit_string)\n else:\n self.covid_df = self.covid_df[self.covid_df['location'] != 'World']\n \n try:\n self.countries_centroids = pd.read_html(self.CENTROIDS_URL, header=0, index_col='country')[0]\n except:\n sys.exit('Central coordinates data for countries unavailable from Google developers.')\n \n try:\n self.geo_data = requests.get(self.GEOJSON_URL).json()\n except:\n sys.exit('GeoJSON data unavailable to draw country polygons.')", "def _subpage_crawler(self):\n\t\twhile True:\n\t\t\ttry:\n\t\t\t\tfamily = self.product_families.pop()\n\t\t\texcept IndexError:\n\t\t\t\tbreak\n\n\t\t\twith closing(urlopen(self.base_url + family)) as product_list_page:\n\t\t\t\tproduct_list_soup = BeautifulSoup(product_list_page, 'html.parser')\n\t\t\t\tproduct_list = product_list_soup.find('ul', class_='list-produits').find_all('li')\n\t\t\t\tfor product in product_list:\n\t\t\t\t\tproduct_url = product.find('a').get('href')\n\t\t\t\t\tself.product_links[product_url] = family", "def start_requests(self):\n\n with open(os.path.join(os.path.dirname(__file__), \"../resources/mapemall_categories.csv\")) as categories:\n for category in csv.DictReader(categories):\n category_text=category[\"category\"]\n url=str(MapemallCrawlerSpider.start_urls[0])+category_text\n # The meta is used to send our search text into the parser as metadata\n yield scrapy.Request(url, callback = self.parse, meta = {\"category_text\": category_text})", "def mor_prepare_data():\n prices, locations, areas, links = [], [], [], []\n for i in range(START_PAGE, SEARCHING_DEPTH+1):\n handler = requests.get(main_url, params={\"page\": str(i)})\n soup = bs4.BeautifulSoup(handler.text, 'lxml')\n heads = soup.find_all(\"header\")\n once = True\n for head in heads:\n if head.find(\"meta\", {\"itemprop\": \"category\"}) and once:\n\n raw_price = head.find(\"meta\", {\"itemprop\": \"price\"})\n price = int(float(raw_price[\"content\"]) if raw_price else \"\")\n\n raw_loc_list = head.find(\"h2\",\n {\"class\": \"single-result__title\"}).getText().strip().split(\n \", \")\n found = False\n for loc in raw_loc_list:\n if location_mapper[CITY].get(loc.lower(), 0):\n location = location_mapper[CITY][loc.lower()]\n\n found = True\n break\n if not found:\n location = \"\"\n if DEBUG_MODE:\n print(raw_loc_list)\n\n raw_area = head.find(\"p\", {\n \"class\": \"single-result__price single-result__price--currency\"}).getText().strip().split()\n if price and location:\n square_price = raw_area[0] if len(raw_area) == 2 else \"\".join(\n (raw_area[0], raw_area[1]))\n\n area = int(price / float(square_price.replace(\",\", \".\")))\n link_url = head.find('a')['href']\n\n if location and area and link_url:\n prices.append(price) if price < PRICE_UPPER_LIMIT else prices.append(\n PRICE_UPPER_LIMIT)\n locations.append(location)\n areas.append(area) if area < AREA_UPPER_LIMIT else areas.append(\n AREA_UPPER_LIMIT)\n links.append(link_url)\n\n return prices, locations, areas, links", "def generate_webpages(self):\n if self.add_to_existing:\n self.add_existing_data()\n self.make_home_pages()\n self.make_1d_histogram_pages()\n self.make_corner_pages()\n self.make_config_pages()\n if self.make_comparison:\n self.make_comparison_pages()\n if self.make_interactive:\n self.make_interactive_pages()\n if self.publication:\n self.make_publication_pages()\n if self.gwdata is not None:\n self.make_detector_pages()\n self.make_error_page()\n self.make_version_page()\n if self.notes is not 
None:\n self.make_notes_page()\n self.make_downloads_page()\n self.generate_specific_javascript()", "def initialize_process():\n\n settings = Settings({'BOT_NAME': 'warnnoticebot',\n 'LOG_LEVEL': 'INFO',\n 'ITEM_PIPELINES': {'modules.pipelines.PerStateJsonlinesExportPipeline': 300},\n 'USER_AGENT': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36', # This is my actual user agent when using a browser\n 'COOKIES_ENABLED': False,\n 'ROBOTSTXT_OBEY': True,\n 'DOWNLOAD_DELAY': 5.0,\n 'DEFAULT_REQUEST_HEADERS': {\n 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'Accept-Language': 'en',\n 'Upgrade-Insecure-Requests': 1}\n })\n \n process = CrawlerProcess(settings) \n\n return process", "def start(self):\n if not self._all_page_num:\n self._set_all_page_num()\n LOGGER.debug('overall pages: {}'.format(self._all_page_num))\n with tqdm(total=self._all_page_num) as progress_bar:\n while self.has_post:\n self._get_html()\n self._parse()\n # 10 posts per page.\n progress_bar.update(1)\n progress_bar.set_description(\n 'Page #{}'.format(self._current_page))\n sleep(4)\n self._dump()", "def main():\n articles = []\n\n # Get main page and get links to all article pages\n \n br = mechanize.Browser()\n br.set_cookiejar(cookielib.LWPCookieJar())\n br.set_handle_equiv(True)\n br.set_handle_gzip(True)\n br.set_handle_redirect(True)\n br.set_handle_referer(True)\n br.set_handle_robots(False)\n br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)\n br.addheaders = [('User-agent', 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1')] \n\n # Want debugging messages?\n## br.set_debug_http(True)\n## br.set_debug_redirects(True)\n## br.set_debug_responses(True)\n\n\n \n request = br.open(BASE_URL)\n data = request.get_data()\n \n links = scrape_links(BASE_URL, data)\n\n\n # Scrape articles in linked pages\n for link in links[0:]:\n data = br.follow_link(link).get_data()\n articles.append(scrape_articles(data, link.url))\n br.back()\n\n # Output is the list of titles and URLs for each article found\n print(\"Article Network\\n\"\n \"---------------\")\n for article in articles:\n print('Url: ', article[0])\n print('Title: ', article[1])\n print('\\r\\n\\r\\n')", "def load_sections():\n pass", "def eighth_page_execution(self):\n self.errors_and_correct_input_values_helper()\n self.ninth_page.wait_for_page()", "def __init__(self):\n\n self.base_url = 'https://apartments.jsmliving.com/'\n self.base_url_apartments = self.base_url + \"apartments/\"\n self.start_url = self.base_url_apartments + \"?availability=37\"\n self.apartment_urls = []\n self.apartment_data = []", "def __call__(self):\n self.page1() # GET web (request 101)\n\n grinder.sleep(1000)\n self.page2() # GET web (request 201)\n\n grinder.sleep(1000)\n self.page3() # GET web (request 301)\n\n grinder.sleep(1000)\n self.page4() # GET web (request 401)\n\n grinder.sleep(1000)\n self.page5() # GET web (request 501)\n\n grinder.sleep(1000)\n self.page6() # GET web (request 601)\n\n grinder.sleep(1000)\n self.page7() # GET web (request 701)\n\n grinder.sleep(1000)\n self.page8() # GET web (request 801)\n\n grinder.sleep(1000)\n self.page9() # GET web (request 901)\n\n grinder.sleep(1000)\n self.page10() # GET web (request 1001)\n\n grinder.sleep(1000)\n self.page11() # GET web (request 1101)\n\n grinder.sleep(1000)\n self.page12() # GET web (request 1201)\n\n grinder.sleep(1000)\n self.page13() # GET 
web (request 1301)\n\n grinder.sleep(1000)\n self.page14() # GET web (request 1401)\n\n grinder.sleep(1000)\n self.page15() # GET web (request 1501)\n\n grinder.sleep(1000)\n self.page16() # GET web (request 1601)\n\n grinder.sleep(1000)\n self.page17() # GET web (request 1701)\n\n grinder.sleep(1000)\n self.page18() # GET web (request 1801)\n\n grinder.sleep(1000)\n self.page19() # GET web (request 1901)\n\n grinder.sleep(1000)\n self.page20() # GET web (request 2001)\n\n grinder.sleep(1000)\n self.page21() # GET web (request 2101)\n\n grinder.sleep(1000)\n self.page22() # GET web (request 2201)\n\n grinder.sleep(1000)\n self.page23() # GET web (request 2301)\n\n grinder.sleep(1000)\n self.page24() # GET web (request 2401)\n\n grinder.sleep(1000)\n self.page25() # GET web (request 2501)\n\n grinder.sleep(1000)\n self.page26() # GET web (request 2601)\n\n grinder.sleep(1000)\n self.page27() # GET web (request 2701)\n\n grinder.sleep(1000)\n self.page28() # GET web (request 2801)\n\n grinder.sleep(1000)\n self.page29() # GET web (request 2901)\n\n grinder.sleep(1000)\n self.page30() # GET web (request 3001)\n\n grinder.sleep(1000)\n self.page31() # GET web (request 3101)\n\n# grinder.sleep(1000)\n# self.page32() # POST downloads (request 3201)\n\n# grinder.sleep(1000)\n# self.page33() # GET goog-malware-shavar_s_10501-10520.10501.10502-10520: (request 3301)\n\n grinder.sleep(1000)\n self.page34() # GET web (request 3401)\n\n grinder.sleep(1000)\n self.page35() # GET web (request 3501)\n# self.page36() # GET goog-malware-shavar_a_9606-9610.9606-9609.9610: (request 3601)\n\n# grinder.sleep(1000)\n# self.page37() # GET goog-phish-shavar_s_36981-36985.36981-36985.: (request 3701)\n\n# grinder.sleep(1000)\n# self.page38() # GET goog-phish-shavar_s_36986-36990.36986-36987.36988-36990: (request 3801)\n\n# grinder.sleep(1000)\n# self.page39() # GET goog-phish-shavar_a_46491-46500.46491-46499.46500: (request 3901)\n\n grinder.sleep(1000)\n self.page40() # GET web (request 4001)\n\n grinder.sleep(1000)\n self.page41() # GET web (request 4101)\n\n grinder.sleep(1000)\n self.page42() # GET web (request 4201)\n\n grinder.sleep(1000)\n self.page43() # GET web (request 4301)\n\n grinder.sleep(1000)\n self.page44() # GET web (request 4401)\n\n grinder.sleep(1000)\n self.page45() # GET web (request 4501)\n\n grinder.sleep(1000)\n self.page46() # GET web (request 4601)\n\n grinder.sleep(1000)\n self.page47() # GET web (request 4701)\n\n grinder.sleep(1000)\n self.page48() # GET web (request 4801)\n\n grinder.sleep(1000)\n self.page49() # GET web (request 4901)\n\n grinder.sleep(1000)\n self.page50() # GET web (request 5001)\n\n grinder.sleep(1000)\n self.page51() # GET web (request 5101)\n\n grinder.sleep(1000)\n self.page52() # GET web (request 5201)\n\n grinder.sleep(1000)\n self.page53() # GET web (request 5301)", "def open_spider(self,Spider):\n pass", "def setup_script(self, *args, **kwargs):\n pass", "def start_extracting(self, response):\n input_file = open('input.txt', 'r')\n start_url = []\n\n # Check for command line input\n try:\n start_url = [self.url]\n except AttributeError:\n # If no CL argument, read links from txt file\n start_url = [link for link in input_file]\n\n # Generating request for every url\n for url in start_url:\n yield scrapy.Request(url=url\n , meta={'dont_merge_cookies': False}\n , callback=self.parse\n )", "def parse(self, response):\n\n\t\t### close spider if exception\n\t\tif 'Bandwidth exceeded' in response.body:\n\t\t\traise 
CloseSpider('bandwidth_exceeded')\n\n\t\tlog_scrap.debug(u\"\\n>>> NEW PARSING >>>\\n\" )\n\t\tlog_scrap.info(\"--- GenericSpider.parse ...\" )\n\n\t\tlog_scrap.info(\"\\n--- GenericSpider.parse /response : \\n%s\" , response)\n\t\tlog_scrap.info(\"\\n--- GenericSpider.parse /response : \\n%s \\n\" , response.__dict__.keys() )\n\n\t\t# for k, v in response.__dict__.iteritems() :\n\t\t# \tlog_scrap.info(\"\\n--- [k] {} : [v] {} : \".format(k,v))\n\t\t# print response._body\n\t\tstart_url = response.meta[\"start_url\"]\n\t\tlog_scrap.info(\"--- GenericSpider.parse / start_url : %s\", start_url )\n\n\n\t\t### - - - - - - - - - - - - - - - - - - - - - - - ###\n\t\t### start request with API crawler\n\t\t### - - - - - - - - - - - - - - - - - - - - - - - ###\n\t\t# if self.spider_config_flat[\"parse_api\"] == True :\n\t\tif self.parse_api == True :\n\n\t\t\tlog_scrap.info(\"\\n--- GenericSpider.parse / starting request on API endpoint... \" )\n\t\t\tjsonresponse = json.loads(response.body_as_unicode())\n\t\t\t# log_scrap.info(\"--- GenericSpider.parse / jsonresponse : \\n%s\", jsonresponse )\n\t\t\tlog_scrap.info(\"--- GenericSpider.parse / jsonresponse received...\" )\n\n\t\t\traw_items_list = get_dictvalue_from_xpath(jsonresponse, self.item_xpath)\n\t\t\t# raw_items_list = jsonresponse[self.item_xpath]\n\t\t\tlog_scrap.info(\"--- GenericSpider.parse / raw_items_list[0] : \\n%s\\n...\", pformat(raw_items_list[0]) )\n\n\t\t\t### - - - - - - - - - - ###\n\t\t\t### PARSING PAGE - API\n\t\t\t### start parsing page : loop through data items in page in response\n\t\t\tif len(raw_items_list) != 0 :\n\n\t\t\t\tlog_scrap.info(\"--- GenericSpider. / START LOOPING raw_items_list WITH API ...\" )\n\n\t\t\t\t# while self.there_is_more_items_to_scrap_dict[start_url] :\n\n\t\t\t\tfor raw_data in raw_items_list :\n\n\t\t\t\t\tself.item_count += 1\n\n\t\t\t\t\t### check if can continue depending on item_count\n\t\t\t\t\tif self.settings_limit_items == 0 or self.item_count <= self.settings_limit_items :\n\n\t\t\t\t\t\tprint()\n\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - spider_page_url : {} >>>\".format(self.spider_page_url) )\n\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - current start_url : {} >>>\".format(start_url) )\n\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - API - item n°{} >>> \\n\".format(self.item_count) )\n\n\t\t\t\t\t\t### instantiate Item to fill from datamodel --> cf items.py\n\t\t\t\t\t\titemclass \t= create_item_class( 'GenericItemClass', fields_list = self.dm_item_related )\n\t\t\t\t\t\titem \t\t= itemclass()\n\n\t\t\t\t\t\t### add global info to item : i.e. core fields in dm_core_item_related list\n\t\t\t\t\t\titem[ 'spider_id' ]\t= self.spider_id\n\t\t\t\t\t\titem[ 'added_by' ]\t= self.user_id\n\t\t\t\t\t\titem[ 'added_at' ]\t= time.time()\t\t# timestamp\n\t\t\t\t\t\titem[ 'link_src' ]\t= response._url\n\n\t\t\t\t\t\titem[ 'page_n' ]\t\t= self.page_count\n\t\t\t\t\t\titem[ 'item_n' ]\t\t= self.item_count\n\n\t\t\t\t\t\t### extract data and feed it to the Item instance based on spider_config_flat\n\t\t\t\t\t\titem = self.fill_item_from_results_page(raw_data, item, is_api_rest=True, item_n=self.item_count)\n\n\n\t\t\t\t\t\t### - - - - - - - - - - ###\n\t\t\t\t\t\t### FOLLOW LINK - API\n\t\t\t\t\t\t### if need to follow to extract all data\n\t\t\t\t\t\tif self.spider_config_flat[\"parse_follow\"] == True :\n\n\t\t\t\t\t\t\tlog_scrap.debug(u\">>> FOLLOW LINK - API - item n°{} / page n°{} >>>>>> \\n\".format(self.item_count, self.page_count) )\n\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. 
/ self.follow_xpath : %s\", self.follow_xpath )\n\n\t\t\t\t\t\t\t# follow_link_raw = raw_data[ self.follow_xpath ]\n\t\t\t\t\t\t\tfollow_link_raw = get_dictvalue_from_xpath(raw_data, self.follow_xpath)\n\t\t\t\t\t\t\tlog_scrap.info(\" --> follow_link RAW ({}) : {} \".format(type(follow_link_raw),follow_link_raw) )\n\n\t\t\t\t\t\t\turl_follow = \"\"\n\t\t\t\t\t\t\tif self.api_follow_root != \"\" :\n\t\t\t\t\t\t\t\t\turl_follow = self.api_follow_root\n\t\t\t\t\t\t\telse :\n\t\t\t\t\t\t\t\turl_follow = self.page_url\n\n\t\t\t\t\t\t\t# complete follow link if needed\n\t\t\t\t\t\t\tfollow_link = self.clean_link(follow_link_raw, url_root=url_follow)\n\t\t\t\t\t\t\tlog_scrap.info(\" --> follow_link CLEAN : %s \", follow_link )\n\n\t\t\t\t\t\t\t# store follow_link\n\t\t\t\t\t\t\titem[ 'link_data' ]\t= follow_link\n\t\t\t\t\t\t\turl \t\t\t\t= item['link_data']\n\n\t\t\t\t\t\t\tfollow_is_api = self.follow_is_api\n\n\t\t\t\t\t\t\ttry :\n\t\t\t\t\t\t\t\tyield scrapy.Request(url, callback=self.parse_detailed_page, meta={ 'item': item, 'start_url' : start_url, 'item_n' : self.item_count , 'parse_api' : follow_is_api })\n\n\t\t\t\t\t\t\texcept :\n\t\t\t\t\t\t\t\tyield item\n\n\t\t\t\t\t\t### if no follow link\n\t\t\t\t\t\telse :\n\t\t\t\t\t\t\t### item completion is finished - yield and so spark pipeline for item (store in db for instance)\n\t\t\t\t\t\t\tyield item\n\n\t\t\t\t\t\t# log_scrap.info(\" --> item : \\n %s \\n\", pformat(item) )\n\t\t\t\t\t\tlog_scrap.debug(u\" --> item ...\" )\n\n\t\t\t\t\telse :\n\t\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / OUT OF LIMIT_ITEMS - items count : {} - LIMIT_ITEMS : {}\".format(self.item_count, self.LIMIT_ITEMS) )\n\t\t\t\t\t\t# self.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\t\t\t# log_scrap.warning(u\"--- GenericSpider. / OUT OF LIMIT_ITEMS - items count : {} - LIMIT_ITEMS : {}\".format(self.item_count, self.LIMIT_ITEMS) )\n\t\t\t\t\t\t# raise CloseSpider('OUT OF LIMIT_ITEMS')\n\n\t\t\t\telse :\n\t\t\t\t\t# self.there_is_more_items_to_scrap = False\n\t\t\t\t\t# self.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. 
/ OUT OF TEST_LIMIT - items count : {} - LIMIT_ITEMS : {} / except -> break\".format(self.item_count, self.LIMIT_ITEMS) )\n\t\t\t\t\t# raise CloseSpider('OUT OF ITEMS')\n\n\t\t\t### - - - - - - - - - - - - ###\n\t\t\t### NEXT PAGE - API\n\t\t\tif self.test_limit == None or self.page_count < self.test_limit :\n\n\t\t\t\tif self.page_count < self.settings_limit_pages or self.settings_limit_pages == 0 :\n\n\t\t\t\t\tlog_scrap.info(\"--- GenericSpider.parse (API) >>> PAGE n°{} DONE -> NEXT PAGE >>> \\n\".format(self.page_count) )\n\n\t\t\t\t\t### get and go to next page\n\t\t\t\t\tself.page_count += 1\n\n\t\t\t\t\turl_next = \"\"\n\t\t\t\t\tif self.api_pagination_root != \"\" :\n\t\t\t\t\t\turl_next = self.api_pagination_root\n\t\t\t\t\telse :\n\t\t\t\t\t\turl_next = self.page_url\n\n\t\t\t\t\tlog_scrap.debug(u\">>> NEXT PAGE - spider_name : '%s' >>>\" %(self.spider_name) )\n\t\t\t\t\tlog_scrap.debug(u\">>> NEXT PAGE - spider_page_url : {} >>>\".format(self.spider_page_url) )\n\t\t\t\t\tlog_scrap.debug(u\">>> NEXT PAGE - current start_url : {} >>>\".format(start_url) )\n\t\t\t\t\tnext_page = url_next + str(self.page_count)\n\t\t\t\t\tlog_scrap.info(\"--- GenericSpider.parse >>> NEXT PAGE II : %s\", next_page )\n\n\t\t\t\t\tyield response.follow(next_page, callback=self.parse, meta={'start_url': start_url} )\n\n\t\t\t\telse :\n\t\t\t\t\t# self.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / OUT OF TEST_LIMIT - page n°{} - limit : {} - test_limit : {} \".format(self.page_count, self.settings_limit_pages, self.test_limit) )\n\t\t\t\t\t# raise CloseSpider('OUT OF TEST_LIMIT')\n\n\t\t\telse :\n\t\t\t\t# self.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / OUT OF TEST_LIMIT - items count : {} - LIMIT_ITEMS : {} \".format(self.item_count, self.LIMIT_ITEMS) )\n\t\t\t\t# raise CloseSpider('OUT OF TEST_LIMIT')\n\n\n\t\t### - - - - - - - - - - - - - - - - - - - - - - - ###\n\t\t### start requests with pure Scrapy requests\n\t\t### - - - - - - - - - - - - - - - - - - - - - - - ###\n\t\telif self.spider_config_flat[\"parse_reactive\"] == False :\n\t\t# elif self.parse_reactive == False :\n \n\t\t\tlog_scrap.info(\"\\n--- GenericSpider.parse / starting requests with Scrapy... \" )\n\t\t\t# self.parse_scrapy(response)\n\n\t\t\t### find items list\n\t\t\tlog_scrap.info(\"--- GenericSpider.parse / self.item_xpath : %s\", self.item_xpath )\n\t\t\traw_items_list = response.xpath(self.item_xpath)\n\t\t\tlog_scrap.info(\"--- GenericSpider.parse / len(raw_items_list) : %d \", len(raw_items_list) )\n\n\n\t\t\t### - - - - - - - - - - - ###\n\t\t\t### PARSING PAGE - SCRAPY\n\t\t\t### start parsing page : loop through data items in page in response\n\t\t\tif len(raw_items_list) != 0 :\n\n\t\t\t\tlog_scrap.info(\"--- GenericSpider. 
/ START LOOPING raw_items_list WITH SCRAPY ...\" )\n\n\t\t\t\tfor raw_data in raw_items_list :\n\n\t\t\t\t\tself.item_count += 1\n\n\t\t\t\t\t### check if can continue depending on item_count\n\t\t\t\t\tif self.settings_limit_items == 0 or self.item_count <= self.settings_limit_items :\n\n\t\t\t\t\t\tprint()\n\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - spider_page_url : {} >>>\".format(self.spider_page_url) )\n\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - current start_url : {} >>>\".format(start_url) )\n\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - Scrapy - item n°{} / page n°{} >>> \\n\".format(self.item_count, self.page_count) )\n\n\t\t\t\t\t\t# print \">>> raw_data : \\n\", raw_data.extract()\n\n\t\t\t\t\t\t### instantiate Item to fill from datamodel --> cf items.py\n\t\t\t\t\t\titemclass \t= create_item_class( 'GenericItemClass', fields_list = self.dm_item_related )\n\t\t\t\t\t\titem \t\t= itemclass()\n\n\t\t\t\t\t\t### add global info to item : i.e. core fields in dm_core_item_related list\n\t\t\t\t\t\titem[ 'spider_id' ]\t\t= self.spider_id\n\t\t\t\t\t\titem[ 'added_by' ]\t\t= self.user_id\n\t\t\t\t\t\titem[ 'added_at' ]\t\t= time.time()\t\t# timestamp\n\t\t\t\t\t\titem[ 'link_src' ]\t\t= response._url\n\n\t\t\t\t\t\titem[ 'page_n' ]\t\t= self.page_count\n\t\t\t\t\t\titem[ 'item_n' ]\t\t= self.item_count\n\n\t\t\t\t\t\t### extract data and feed it to the Item instance based on spider_config_flat\n\t\t\t\t\t\titem = self.fill_item_from_results_page(raw_data, item, item_n=self.item_count)\n\n\n\t\t\t\t\t\t### - - - - - - - - - - - ###\n\t\t\t\t\t\t### FOLLOW LINK - SCRAPY\n\t\t\t\t\t\t### if need to follow to extract all data\n\t\t\t\t\t\tif self.spider_config_flat[\"parse_follow\"] == True :\n\n\t\t\t\t\t\t\tlog_scrap.debug(u\">>> FOLLOW LINK - SCRAPY - item n°{} / page n°{} >>>>>> \\n\".format(self.item_count, self.page_count) )\n\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / self.follow_xpath : %s\", self.follow_xpath )\n\n\t\t\t\t\t\t\tfollow_link \t= raw_data.xpath( self.follow_xpath ).extract_first()\n\t\t\t\t\t\t\tlog_scrap.info(\" --> follow_link RAW ({}) : {} \".format(type(follow_link),follow_link) )\n\n\t\t\t\t\t\t\turl_follow = \"\"\n\t\t\t\t\t\t\tif self.api_follow_root != \"\" :\n\t\t\t\t\t\t\t\turl_follow = self.api_follow_root\n\n\t\t\t\t\t\t\t# complete follow link if needed\n\t\t\t\t\t\t\tfollow_link = self.clean_link(follow_link, url_root=url_follow)\n\t\t\t\t\t\t\t# log_scrap.info(\" --> follow_link CLEAN : %s \", follow_link )\n\t\t\t\t\t\t\tlog_scrap.info(\" --> follow_link CLEAN ({}) : {} \".format(type(follow_link),follow_link) )\n\n\t\t\t\t\t\t\t# store follow_link\n\t\t\t\t\t\t\titem[ 'link_data' ]\t= follow_link\n\t\t\t\t\t\t\turl \t\t\t\t= item['link_data']\n\n\t\t\t\t\t\t\ttry :\n\t\t\t\t\t\t\t\tlog_scrap.warning(u\">>> FOLLOWING LINK --> url : {} \".format(url) )\n\t\t\t\t\t\t\t\t# yield Request(url, callback=self.parse_detailed_page, meta={ 'item': item, 'start_url' : start_url } )\n\t\t\t\t\t\t\t\tyield scrapy.Request(url, callback=self.parse_detailed_page, meta={ 'item': item, 'start_url' : start_url , 'item_n' : self.item_count , 'parse_api' : False} )\n\t\t\t\t\t\t\t\t# log_scrap.warning(u\">>> FOLLOWING LINK --> url : {} / WORKED !!! \".format(url) )\n\n\t\t\t\t\t\t\texcept :\n\t\t\t\t\t\t\t\tlog_scrap.warning(u\">>> FOLLOW LINK - NOT WORKING : {} \".format(url) )\n\t\t\t\t\t\t\t\tyield item\n\n\t\t\t\t\t\t### if no follow link\n\t\t\t\t\t\telse :\n\n\t\t\t\t\t\t\tlog_scrap.warning(u\">>> NO FOLLOW LINK ... 
\" )\n\t\t\t\t\t\t\t### item completion is finished - yield and so spark pipeline for item (store in db for instance)\n\t\t\t\t\t\t\t# log_scrap.info(\">>> GenericSpider.parse - item.items() : \\n %s\", item.items() )\n\t\t\t\t\t\t\t# log_scrap.info(\">>> GenericSpider.parse - item.keys() : \\n %s\", item.items() )\n\t\t\t\t\t\t\tyield item\n\n\t\t\t\t\t\t\t# print (\"\\n>>> NEXT ITEM \" + \">>> >>> \"*10, \"\\n\")\n\n\t\t\t\t\t\t# log_scrap.info(\" --> item : \\n %s \\n\", pformat(item) )\n\t\t\t\t\t\tlog_scrap.debug(u\" --> item ...\" )\n\n\t\t\t\t\telse :\n\t\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / OUT OF LIMIT_ITEMS - items count : {} - LIMIT_ITEMS : {}\".format(self.item_count, self.LIMIT_ITEMS) )\n\t\t\t\t\t\t# raise CloseSpider('OUT OF LIMIT_ITEMS')\n\n\t\t\telse :\n\t\t\t\t# self.there_is_more_items_to_scrap = False\n\t\t\t\t# self.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / OUT OF TEST_LIMIT - items count : {} - LIMIT_ITEMS : {} / except -> break\".format(self.item_count, self.LIMIT_ITEMS) )\n\t\t\t\t# raise CloseSpider('OUT OF ITEMS')\n\n\t\t\t### - - - - - - - - - - ###\n\t\t\t### NEXT PAGE - SCRAPY\n\t\t\t### check if there is a test_limit\n\t\t\tif self.test_limit == None or self.page_count < self.test_limit :\n\n\t\t\t\tif self.page_count < self.settings_limit_pages or self.settings_limit_pages == 0 :\n\n\t\t\t\t\tlog_scrap.info(\"--- GenericSpider.parse (Scrapy) >>> PAGE n°{} DONE -> NEXT PAGE >>> \\n\".format(self.page_count) )\n\n\t\t\t\t\t### get and go to next page\n\t\t\t\t\tis_next_page, next_page = self.get_next_page(response, start_url)\n\n\t\t\t\t\tif is_next_page :\n\n\t\t\t\t\t\tself.page_count += 1\n\n\t\t\t\t\t\turl_next = \"\"\n\t\t\t\t\t\tif self.api_pagination_root != \"\" :\n\t\t\t\t\t\t\turl_next = self.api_pagination_root\n\n\t\t\t\t\t\tlog_scrap.debug(u\">>> NEXT PAGE - spider_name : '%s' >>>\" %(self.spider_name) )\n\t\t\t\t\t\tlog_scrap.debug(u\">>> NEXT PAGE - spider_page_url : {} >>>\".format(self.spider_page_url) )\n\t\t\t\t\t\tlog_scrap.debug(u\">>> NEXT PAGE - current start_url : {} >>>\".format(start_url) )\n\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider.parse >>> NEXT PAGE I : %s\", next_page )\n\t\t\t\t\t\tnext_page = self.clean_link(next_page, url_root=url_next)\n\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider.parse >>> NEXT PAGE II : %s\", next_page )\n\n\t\t\t\t\t\tyield response.follow(next_page, callback=self.parse, meta={'start_url': start_url} )\n\n\t\t\t\t\telse :\n\t\t\t\t\t\t# self.there_is_more_items_to_scrap = False\n\t\t\t\t\t\t# self.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / NO MORE PAGE TO SCRAP - pages count : {} \".format(self.page_count) )\n\t\t\t\t\t\t# raise CloseSpider('NO MORE PAGE TO SCRAP')\n\n\t\t\t\telse :\n\t\t\t\t\t# self.there_is_more_items_to_scrap = False\n\t\t\t\t\t# self.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / OUT OF TEST_LIMIT - page n°{} - limit : {} - test_limit : {} / except -> break\".format(self.page_count, self.settings_limit_pages, self.test_limit) )\n\t\t\t\t\t# raise CloseSpider('OUT OF TEST_LIMIT')\n\n\t\t\telse :\n\t\t\t\t# self.there_is_more_items_to_scrap = False\n\t\t\t\t# self.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. 
/ OUT OF TEST_LIMIT - items count : {} - LIMIT_ITEMS : {} / except -> break\".format(self.item_count, self.LIMIT_ITEMS) )\n\t\t\t\t# raise CloseSpider('OUT OF TEST_LIMIT')\n\n\n\t\t### - - - - - - - - - - - - - - - - - - - - - - - ###\n\t\t### start requests with Selenium\n\t\t### - - - - - - - - - - - - - - - - - - - - - - - ###\n\t\telse :\n\t\t\t### initiate selenium browser\n\t\t\t### cf : https://github.com/voliveirajr/seleniumcrawler/blob/master/seleniumcrawler/spiders/seleniumcrawler_spider.py\n\t\t\tlog_scrap.info(\"\\n--- GenericSpider.parse / starting Selenium driver... \" )\n\n\t\t\t# retrieve exec path for chromedriver from settings_scrapy.py\n\t\t\t### GET APP MODE FROM ENV VARS\n\t\t\tapp_mode \t\t\t\t\t\t= os.environ.get('APP_MODE', 'default')\n\t\t\tlog_scrap.debug(u\"--- GenericSpider.parse / APP_MODE : %s\", app_mode)\n\t\t\tchromedriver_path \t= CHROMEDRIVER_PATH_LIST[ app_mode ]\n\t\t\tlog_scrap.debug(u\"--- GenericSpider.parse / chromedriver_path : %s\", chromedriver_path)\n\n\t\t\t### specify executable path to launch webdriver-->\n\t\t\t# cf : where chromedriver was installed when `brew install chromedriver`\n\t\t\tself.driver = webdriver.Chrome(executable_path=chromedriver_path, chrome_options=options_selenium)\n\t\t\t# self.driver = webdriver.Chrome(chrome_options=options_selenium)\n\t\t\t# self.driver = webdriver.Firefox()\n\t\t\t# self.driver = webdriver.Chrome()\n\t\t\t# self.driver = webdriver.PhantomJS() ### deprecated\n\n\t\t\t### setup waiting times\n\t\t\t# self.driver.set_page_load_timeout(60)\n\t\t\tself.wait_driver\t= WebDriverWait(self.driver, self.delay_driver)\n\t\t\tself.wait_page \t\t= WebDriverWait(self.driver, self.delay_new_page)\n\t\t\tself.driver.implicitly_wait(self.delay_implicit)\n\t\t\tlog_scrap.debug(u\"--- GenericSpider. / self.delay_driver : %s\", self.delay_driver )\n\t\t\tlog_scrap.debug(u\"--- GenericSpider. / self.delay_new_page : %s\", self.delay_new_page )\n\t\t\tlog_scrap.debug(u\"--- GenericSpider. / self.delay_implicit : %s\", self.delay_implicit )\n\n\n\t\t\t### start parsing with selenium\n\t\t\tlog_scrap.debug(u\"--- GenericSpider. / response._url : %s\", response._url )\n\t\t\ttry :\n\t\t\t\tself.driver.get(response._url)\n\n\t\t\t\t### try scroll_down if needed in config\n\t\t\t\tif self.spider_config_flat['scroll_down'] : \n\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / scroll_down is TRUE ... \" )\n\t\t\t\t\t# log_scrap.debug(u\"--- GenericsSpider. / scroll_down - self.spider_config_flat : \\n%s\", pformat(self.spider_config_flat) )\n\n\t\t\t\t\tscroll_pause_time = self.spider_config_flat[\"scroll_pause_time\"]\n\t\t\t\t\tmax_loops \t\t\t\t= self.spider_config_flat[\"scroll_loops\"]\n\t\t\t\t\tself.driver = scroll_down(self.driver, scroll_pause_time, max_loops)\n\t\t\t\t\t# scroll_down(self.driver, scroll_pause_time, max_loops)\n\t\t\t\tlog_scrap.info(\"--- GenericSpider. / url '{}' is loaded ... 
\".format( response._url ))\n\t\t\t\n\t\t\texcept :\n\t\t\t\t# self.there_is_more_items_to_scrap = False\n\t\t\t\tself.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\tself.driver.close()\n\t\t\t\tlog_scrap.info(\"--- GenericSpider / driver is shut\" )\n\t\t\t\traise CloseSpider('DRIVER NOT RESPONDING')\n\n\n\t\t\t### clean original xpath from strings\n\t\t\tstrings_to_clean = [\n\t\t\t\t'/@src',\n\t\t\t\t'/@href',\n\t\t\t\t'/text()',\n\t\t\t\t'/@*[name()=\"xlink:href\"]',\n\t\t\t\t'/@datetime'\n\t\t\t]\n\n\t\t\t# while self.there_is_more_items_to_scrap :\n\t\t\twhile self.there_is_more_items_to_scrap_dict[start_url] :\n\n\t\t\t\t# log_scrap.debug(u\"--- GenericSpider. / while loop continues : %s\", self.there_is_more_items_to_scrap )\n\t\t\t\tlog_scrap.debug(u\"--- GenericSpider. / while loop continues : %s\", self.there_is_more_items_to_scrap_dict[start_url] )\n\n\t\t\t\ttry :\n\n\t\t\t\t\t### wait / debug page content\n\t\t\t\t\tpage_source_code = self.driver.page_source.encode(\"utf-8\")\n\t\t\t\t\t# log_scrap.debug(u\"--- GenericSpider. / page_source_code : \\n %s \", page_source_code )\n\t\t\t\t\ttime.sleep(self.delay_new_page)\n\n\t\t\t\t\t### start parsing page :\n\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / self.item_xpath : %s\", self.item_xpath )\n\t\t\t\t\traw_items_list \t= self.driver.find_elements_by_xpath(self.item_xpath)\n\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / raw_items_list length : %s\", len(raw_items_list) )\n\t\t\t\t\t# log_scrap.info(\"--- GenericSpider. / raw_items_list[0].text : \\n%s\", raw_items_list[0].text )\n\n\t\t\t\t\t# current_item_index = 0\n\n\t\t\t\t\t### - - - - - - - - - - - - ###\n\t\t\t\t\t### PARSING PAGE - SELENIUM\n\t\t\t\t\t# loop through data items in page in response\n\t\t\t\t\tif len(raw_items_list) != 0 :\n\n\t\t\t\t\t\t# log_scrap.info(\"--- GenericSpider. / START PARSING WITH SELENIUM ...\\n\" )\n\n\t\t\t\t\t\tfor raw_data in raw_items_list :\n\n\t\t\t\t\t\t\tprint()\n\t\t\t\t\t\t\tlog_scrap.debug(u\"--- GenericSpider. / START LOOPING raw_items_list WITH SELENIUM ...\" )\n\n\t\t\t\t\t\t\t### add +1 to items count\n\t\t\t\t\t\t\tself.item_count += 1\n\n\t\t\t\t\t\t\t# log_scrap.debug(u\"--- GenericSpider. / VARIABLES - spider_name : {} - item n°{} - there_is_more_items_to_scrap_dict[start_url] : {} \".format(str(self.spider_name), self.item_count, self.there_is_more_items_to_scrap_dict[start_url]) )\n\t\t\t\t\t\t\t# log_scrap.debug(u\"--- GenericSpider. / VARIABLES - spider_name : {} - item n°{} \".format(self.spider_name, self.item_count) )\n\t\t\t\t\t\t\t# log_scrap.debug(u\"--- GenericSpider. / VARIABLES - item n°{} \".format(self.item_count) )\n\t\t\t\t\t\t\t# log_scrap.debug(u\"--- GenericSpider. 
/ VARIABLES - spider_name : '%s' - item n°%s \" %(self.spider_name, self.item_count) )\n\n\t\t\t\t\t\t\t### check if can continue depending on item_count\n\t\t\t\t\t\t\tif self.settings_limit_items == 0 or self.item_count <= self.settings_limit_items :\n\n\t\t\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - spider_name : '%s' >>>\" %(self.spider_name) )\n\t\t\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - spider_page_url : {} >>>\".format(self.spider_page_url) )\n\t\t\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - current start_url : {} >>>\".format(start_url) )\n\t\t\t\t\t\t\t\tlog_scrap.debug(u\">>> NEW ITEM - Selenium - item n°{} / page n°{} >>> \\n\".format(self.item_count, self.page_count) )\n\n\t\t\t\t\t\t\t\t### instantiate Item to fill from datamodel --> cf items.py\n\t\t\t\t\t\t\t\titemclass \t= create_item_class( 'GenericItemClass', fields_list = self.dm_item_related )\n\t\t\t\t\t\t\t\titem \t\t= itemclass()\n\n\t\t\t\t\t\t\t\t### add global info to item : i.e. core fields in dm_core_item_related list\n\t\t\t\t\t\t\t\titem[ 'spider_id' ]\t\t= self.spider_id\n\t\t\t\t\t\t\t\titem[ 'added_by' ]\t\t= self.user_id\n\t\t\t\t\t\t\t\titem[ 'added_at' ]\t\t= time.time()\t\t# timestamp\n\t\t\t\t\t\t\t\titem[ 'link_src' ]\t\t= response._url\n\n\t\t\t\t\t\t\t\titem[ 'page_n' ]\t\t= self.page_count\n\t\t\t\t\t\t\t\titem[ 'item_n' ]\t\t= self.item_count\n\n\t\t\t\t\t\t\t\t### extract data and feed it to the Item instance based on spider_config_flat\n\t\t\t\t\t\t\t\titem = self.fill_item_from_results_page(raw_data, item, is_reactive=True, strings_to_clean=strings_to_clean, item_n=self.item_count )\n\n\t\t\t\t\t\t\t\t### - - - - - - - - - - ###\n\t\t\t\t\t\t\t\t### FOLLOW LINK - SELENIUM\n\t\t\t\t\t\t\t\t### find follow link to open detailed item view\n\t\t\t\t\t\t\t\tif self.spider_config_flat[\"parse_follow\"] == True :\n\n\t\t\t\t\t\t\t\t\tlog_scrap.debug(u\">>> FOLLOW LINK - SELENIUM - item n°{} / page n°{} >>>>>> \\n\".format(self.item_count, self.page_count) )\n\t\t\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / self.follow_xpath : %s\", self.follow_xpath )\n\n\t\t\t\t\t\t\t\t\t### follow link with Scrapy\n\t\t\t\t\t\t\t\t\ttry :\n\t\t\t\t\t\t\t\t\t\tlog_scrap.debug(u\"--- GenericSpider. / follow link with Scrapy ...\" )\n\n\t\t\t\t\t\t\t\t\t\t# log_scrap.debug(u\"--- GenericSpider. 
/ get href of follow_link ...\" )\n\t\t\t\t\t\t\t\t\t\tfollow_link_xpath \t= clean_xpath_for_reactive(self.follow_xpath, strings_to_clean)\n\t\t\t\t\t\t\t\t\t\tlog_scrap.info(\" --> follow_link_xpath : %s \", follow_link_xpath )\n\n\t\t\t\t\t\t\t\t\t\tfollow_link\t\t\t= raw_data.find_element_by_xpath( follow_link_xpath ).get_attribute('href')\n\t\t\t\t\t\t\t\t\t\tlog_scrap.info(\" --> follow_link RAW : %s \", follow_link )\n\n\t\t\t\t\t\t\t\t\t\turl_follow = \"\"\n\t\t\t\t\t\t\t\t\t\tif self.api_follow_root != \"\" :\n\t\t\t\t\t\t\t\t\t\t\t\turl_follow = self.api_follow_root\n\n\t\t\t\t\t\t\t\t\t\t# complete follow link if needed\n\t\t\t\t\t\t\t\t\t\tfollow_link = self.clean_link(follow_link, url_root=url_follow)\n\t\t\t\t\t\t\t\t\t\tlog_scrap.info(\" --> follow_link CLEAN ({}) : {}\".format(type(follow_link), follow_link ) )\n\n\t\t\t\t\t\t\t\t\t\t# store follow_link\n\t\t\t\t\t\t\t\t\t\titem[ 'link_data' ]\t= follow_link\n\t\t\t\t\t\t\t\t\t\turl\t\t\t= item['link_data']\n\n\t\t\t\t\t\t\t\t\t\ttry :\n\t\t\t\t\t\t\t\t\t\t\tlog_scrap.warning(u\">>> FOLLOWING LINK --> url : {} \".format(url) )\n\t\t\t\t\t\t\t\t\t\t\tyield scrapy.Request(url, callback=self.parse_detailed_page, meta={'item': item, 'start_url' : start_url , 'item_n' : self.item_count , 'parse_api' : False})\n\n\t\t\t\t\t\t\t\t\t\texcept :\n\t\t\t\t\t\t\t\t\t\t\tlog_scrap.warning(u\">>> FOLLOW LINK - NOT WORKING : {} \".format(url) )\n\t\t\t\t\t\t\t\t\t\t\tyield item\n\n\n\t\t\t\t\t\t\t\t\t### follow link with Selenium\n\t\t\t\t\t\t\t\t\t### FIND A WEBSITE TEST FOR REACTIVE DETAILLED PAGES\n\t\t\t\t\t\t\t\t\texcept :\n\t\t\t\t\t\t\t\t\t\tlog_scrap.debug(u\"--- GenericSpider. / follow link with Selenium ...\" )\n\n\t\t\t\t\t\t\t\t\t\tfollow_link_xpath \t= clean_xpath_for_reactive(self.follow_xpath, strings_to_clean)\n\t\t\t\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / self.follow_link_xpath : %s\", self.follow_link_xpath )\n\t\t\t\t\t\t\t\t\t\tfollow_link \t\t= raw_data.find_element_by_xpath( follow_link_xpath )\n\n\t\t\t\t\t\t\t\t\t\t### open link in new tab ?\n\t\t\t\t\t\t\t\t\t\tfollow_link.click()\n\n\t\t\t\t\t\t\t\t\t\t### get data and save data\n\t\t\t\t\t\t\t\t\t\ttry :\n\t\t\t\t\t\t\t\t\t\t\tlog_scrap.debug(u\"--- GenericSpider. / get data and save data ...\" )\n\t\t\t\t\t\t\t\t\t\t\titem = self.fill_item_from_results_page(raw_data, item, is_reactive=True, strings_to_clean=strings_to_clean, item_n=self.item_count )\n\n\t\t\t\t\t\t\t\t\t\t\t### back to previous page and scrap from where it left\n\t\t\t\t\t\t\t\t\t\t\t### cf : https://selenium-python.readthedocs.io/navigating.html#navigation-history-and-location\n\t\t\t\t\t\t\t\t\t\t\tself.driver.back()\n\n\t\t\t\t\t\t\t\t\t\t\tyield item\n\n\t\t\t\t\t\t\t\t\t\texcept :\n\t\t\t\t\t\t\t\t\t\t\tyield item\n\n\t\t\t\t\t\t\t\t### if no follow link\n\t\t\t\t\t\t\t\telse :\n\t\t\t\t\t\t\t\t\tyield item\n\n\t\t\t\t\t\t\t\t# log_scrap.info(\" --> item : \\n %s \\n\", pformat(item) )\n\t\t\t\t\t\t\t\tlog_scrap.debug(u\" --> item ...\" )\n\n\t\t\t\t\t\t\telse :\n\t\t\t\t\t\t\t\tself.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. 
/ OUT OF LIMIT_ITEMS - items count : {} - LIMIT_ITEMS : {} / except -> break\".format(self.item_count, self.LIMIT_ITEMS) )\n\t\t\t\t\t\t\t\tself.driver.close()\n\t\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider / driver is shut\" )\n\t\t\t\t\t\t\t\traise CloseSpider('OUT OF LIMIT_ITEMS')\n\t\t\t\t\t\t\t\tbreak\n\n\t\t\t\t\telse :\n\t\t\t\t\t\tself.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / OUT OF ITEMS - page n°{} - limit : {} - test_limit : {} / except -> break\".format(self.page_count, self.settings_limit_pages, self.test_limit) )\n\t\t\t\t\t\tself.driver.close()\n\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider / driver is shut\" )\n\t\t\t\t\t\t# raise CloseSpider('OUT OF TEST_LIMIT')\n\t\t\t\t\t\tbreak\n\n\t\t\t\t\t### - - - - - - - - - - - - ###\n\t\t\t\t\t### NEXT PAGE - SELENIUM\n\t\t\t\t\tif self.test_limit == None or self.page_count < self.test_limit :\n\n\t\t\t\t\t\tif self.there_is_more_items_to_scrap_dict[start_url] :\n\n\t\t\t\t\t\t\tif self.page_count < self.settings_limit_pages or self.settings_limit_pages == 0 :\n\n\t\t\t\t\t\t\t\tprint ()\n\t\t\t\t\t\t\t\tlog_scrap.debug(u\">>> NEXT PAGE - spider_name : '%s' >>>\" %(self.spider_name) )\n\t\t\t\t\t\t\t\tlog_scrap.info(\" --- GenericSpider.parse (Selenium) >>> PAGE n°{} DONE -> NEXT PAGE >>> \\n\".format(self.page_count) )\n\n\t\t\t\t\t\t\t\t### add +1 to parsed pages\n\t\t\t\t\t\t\t\tself.page_count += 1\n\n\t\t\t\t\t\t\t\tlog_scrap.debug(u\">>> NEXT PAGE - spider_page_url : {} >>>\".format(self.spider_page_url) )\n\t\t\t\t\t\t\t\tlog_scrap.debug(u\">>> NEXT PAGE - current start_url : {} >>>\".format(start_url) )\n\n\t\t\t\t\t\t\t\t### find next page btn in current view\n\t\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / self.next_page : %s\", self.next_page )\n\t\t\t\t\t\t\t\tnext_page_xpath = clean_xpath_for_reactive(self.next_page, strings_to_clean)\n\t\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / next_page_xpath : %s\", next_page_xpath )\n\t\t\t\t\t\t\t\t# next_page \t= re.sub(\"|\".join(strings_to_clean), \"\", next_page )\n\n\t\t\t\t\t\t\t\t# try :\n\t\t\t\t\t\t\t\t# element_present = EC.presence_of_element_located((By.XPATH, next_page_xpath ))\n\t\t\t\t\t\t\t\t# log_scrap.info(\"--- GenericSpider. / next_page present : %s\", element_present )\n\t\t\t\t\t\t\t\t# self.wait.until(element_present)\n\t\t\t\t\t\t\t\t# next_page = self.wait.until( EC.element_to_be_clickable(element_present) )\n\t\t\t\t\t\t\t\t# next_page \t\t= self.driver.find_element_by_xpath( next_page_xpath )\n\t\t\t\t\t\t\t\tnext_page \t\t= self.driver.find_element(By.XPATH, next_page_xpath )\n\n\t\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / next_page : %s\", next_page )\n\t\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / next_page.text : %s\", next_page.text )\n\n\t\t\t\t\t\t\t\t# except TimeoutException:\n\t\t\t\t\t\t\t\t# except :\n\t\t\t\t\t\t\t\t# \tlog_scrap.error(\"--- GenericSpider. 
/ Timed out waiting for page to load\")\n\n\t\t\t\t\t\t\t\t### click next button and wait for ajax calls to complete (post and get)\n\t\t\t\t\t\t\t\t### cf : http://www.obeythetestinggoat.com/how-to-get-selenium-to-wait-for-page-load-after-a-click.html\n\n\t\t\t\t\t\t\t\t# def wait_for(condition_function):\n\t\t\t\t\t\t\t\t# \t\tstart_time = time.time()\n\t\t\t\t\t\t\t\t# \twhile time.time() < start_time + 3:\n\t\t\t\t\t\t\t\t# \t\tif condition_function():\n\t\t\t\t\t\t\t\t# \t\t\treturn True\n\t\t\t\t\t\t\t\t# \t\telse:\n\t\t\t\t\t\t\t\t# \t\t\ttime.sleep(0.1)\n\t\t\t\t\t\t\t\t# \traise Exception ('Timeout waiting for {}'.format(condition_function.__name__) )\n\n\t\t\t\t\t\t\t\t# def link_has_gone_stale():\n\t\t\t\t\t\t\t\t# \t\ttry:\n\t\t\t\t\t\t\t\t# \t\t# poll the link with an arbitrary call\n\t\t\t\t\t\t\t\t# \t\tnext_page.find_elements_by_xpath(self.item_xpath)\n\t\t\t\t\t\t\t\t# \t\treturn False\n\t\t\t\t\t\t\t\t# \texcept StaleElementReferenceException :\n\t\t\t\t\t\t\t\t# \t\treturn True\n\n\t\t\t\t\t\t\t\tlog_scrap.debug(u\"--- ... ---\")\n\t\t\t\t\t\t\t\ttry :\n\t\t\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / next_page.click() \" )\n\t\t\t\t\t\t\t\t\tnext_page.click()\n\t\t\t\t\t\t\t\texcept :\n\t\t\t\t\t\t\t\t\t# log_scrap.info(\"--- GenericSpider. / next_page.send_keys( \\ n )\" )\n\t\t\t\t\t\t\t\t\t# next_page.send_keys(\"\\n\")\n\t\t\t\t\t\t\t\t\t# added this step for compatibility of scrolling to the view\n\t\t\t\t\t\t\t\t\tlog_scrap.error(\"--- GenericSpider. / ALTERNATIVE next_page.click() \" )\n\t\t\t\t\t\t\t\t\t# self.driver.execute_script(\"return arguments[0].scrollIntoView();\", next_page)\n\t\t\t\t\t\t\t\t\t# next_page.click()\n\t\t\t\t\t\t\t\t\tself.driver.execute_script(\"arguments[0].click();\", next_page)\n\n\t\t\t\t\t\t\t\t### wait after click\n\t\t\t\t\t\t\t\ttry :\n\t\t\t\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider. / wait for ajax to finish... \" )\n\t\t\t\t\t\t\t\t\t# wait_for(link_has_gone_stale)\n\t\t\t\t\t\t\t\t\tself.wait_page.until(lambda driver: self.driver.execute_script('return jQuery.active') == 0)\n\t\t\t\t\t\t\t\t\tself.wait_page.until(lambda driver: self.driver.execute_script('return document.readyState') == 'complete')\n\t\t\t\t\t\t\t\t\t# time.sleep(self.delay_implicit)\n\t\t\t\t\t\t\t\t\ttime.sleep(self.delay_new_page)\n\t\t\t\t\t\t\t\texcept :\n\t\t\t\t\t\t\t\t\tlog_scrap.error(\"--- GenericSpider. / !!! FAIL / wait for ajax to finish... \" )\n\n\t\t\t\t\t\t\telse :\n\t\t\t\t\t\t\t\t# self.there_is_more_items_to_scrap = False\n\t\t\t\t\t\t\t\tself.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / OUT OF PAGES TO SCRAP - page n°{} / except -> break\".format(self.page_count) )\n\t\t\t\t\t\t\t\tself.driver.close()\n\t\t\t\t\t\t\t\traise CloseSpider('OUT OF PAGES TO SCRAP')\n\t\t\t\t\t\t\t\tbreak\n\n\t\t\t\t\telse :\n\t\t\t\t\t\t# self.there_is_more_items_to_scrap = False\n\t\t\t\t\t\tself.there_is_more_items_to_scrap_dict[start_url] = False\n\t\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. / OUT OF TEST_LIMIT - page n°{} - limit : {} - test_limit : {} / except -> break\".format(self.page_count, self.settings_limit_pages, self.test_limit) )\n\t\t\t\t\t\tself.driver.close()\n\t\t\t\t\t\tlog_scrap.info(\"--- GenericSpider / driver is shut\" )\n\t\t\t\t\t\t# raise CloseSpider('OUT OF TEST_LIMIT')\n\t\t\t\t\t\tbreak\n\n\t\t\t\texcept :\n\t\t\t\t\tlog_scrap.warning(u\"--- GenericSpider. 
/ NO MORE ITEMS TO SCRAP - item_count : {} - LIMIT_ITEMS : {} / except -> break\".format(self.item_count, self.LIMIT_ITEMS) )\n\t\t\t\t\tself.driver.close()\n\t\t\t\t\tlog_scrap.info(\"--- GenericSpider / driver is shut\" )\n\t\t\t\t\traise CloseSpider('NO MORE ITEMS TO SCRAP')\n\t\t\t\t\tbreak", "async def main():\n #launching the browser in headless mode\n browser = await launch({'headless': True})\n page = await browser.newPage()\n #removing the timeout\n page.setDefaultNavigationTimeout(0)\n #adding the stealth mode to be undetected\n await stealth(page)\n global userAgent\n userAgent = await page.evaluate('navigator.userAgent')\n #capture the request response of every request and save the ones we want\n page.on('response', lambda response: asyncio.ensure_future(interceptResponse(response)))\n await page.goto('https://www.tiktok.com/trending/?lang=en')\n await page.waitFor(3000)\n await browser.close()", "async def main():\n #launching the browser in headless mode\n browser = await launch({'headless': True})\n page = await browser.newPage()\n #removing the timeout\n page.setDefaultNavigationTimeout(40000)\n #adding the stealth mode to be undetected\n await stealth(page)\n global userAgent\n userAgent = await page.evaluate('navigator.userAgent')\n #capture the url of every request and save the ones we want\n page.on('request', lambda request: checkUrl(request.url,browser))\n await page.goto('https://www.tiktok.com/trending/?lang=en')\n await page.waitFor(2000)\n #scroll down to trigger the second request to get trending video data\n await page.evaluate(\"\"\"{window.scrollBy(0, document.body.scrollHeight);}\"\"\")\n await page.waitFor(2000)\n await browser.close()", "def __init__(self, base_url, start_urls, config, helper_outfile, verbose):\n\n # setup class variables\n self.base_url = base_url\n self.config = config\n self.helper_outfile = helper_outfile\n self.verbose = verbose\n self.found_urls = set()\n self.crawled_urls = {}\n self.crawled_paths = {}\n self.param_infos = {}\n self.helper_pid = None\n self.found_cookies = []\n self.comments = {}\n self.redirects = {}\n self.driver = None\n\n # figure out domain\n parsed_url = urllib.parse.urlparse(base_url)\n self.domain = parsed_url.hostname\n self.port = parsed_url.port\n if not self.port:\n self.port = 80 if parsed_url.scheme == \"http\" else 443\n self.protocol_prefix = \"%s://\" % parsed_url.scheme\n\n # compile exclude path regexes from config\n self.exclude_paths = []\n if self.config.get(\"exclude_paths\", \"\"):\n exclude_paths_str = util.parse_as_csv(self.config.get(\"exclude_paths\", \"\"))\n for path_str in exclude_paths_str:\n self.exclude_paths.append(re.compile(path_str))\n\n # parse cookies from config\n self.cookies = {}\n for key_val_pair in self.config[\"cookie_str\"].split(\";\"):\n if not key_val_pair:\n continue\n if \"=\" not in key_val_pair:\n self.cookies[key_val_pair.strip()] = \"\"\n else:\n key, val = key_val_pair.strip().split(\"=\")\n self.cookies[key.strip()] = val.strip()\n\n # setup start urls\n self.start_urls = set([base_url])\n for url in start_urls:\n # skip paths that are excluded from crawling\n if self.exclude_paths and url.count(\"/\") > 2:\n check_str = \"/\" + \"/\".join(url.split(\"/\")[3:])\n if any(re_path.match(check_str) for re_path in self.exclude_paths):\n continue\n self.start_urls.add(url)\n self.start_urls = list(self.start_urls)\n\n # create unix socket for IPC with crawler helper\n if os.path.exists(UNIX_SOCK_ADDR):\n os.remove(UNIX_SOCK_ADDR)\n self.socket = 
socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n self.socket.bind(UNIX_SOCK_ADDR)\n\n # setup selenium if it is configured to be used\n if config[\"use_selenium\"].lower() == \"true\":\n import logging\n logging.getLogger(\"seleniumwire\").setLevel(logging.ERROR)\n from seleniumwire import webdriver\n from selenium.webdriver.chrome.options import Options\n chrome_options = Options()\n chrome_options.add_argument(\"--headless\")\n chrome_options.add_argument(\"--user-agent=%s\" % self.config[\"user_agent\"])\n\n # on Linux running Selenium as root requires '--no-sandbox' option\n if os.geteuid() == 0 and sys.platform.startswith(\"linux\"):\n chrome_options.add_argument(\"--no-sandbox\")\n self.driver = webdriver.Chrome(options=chrome_options)\n\n # disallow downloads via Selenium (see https://stackoverflow.com/a/47366981)\n self.driver.command_executor._commands[\"send_command\"] = (\"POST\", \"/session/$sessionId/chromium/send_command\")\n params = {\"cmd\": \"Page.setDownloadBehavior\", \"params\": {\"behavior\": \"disallow\", \"downloadPath\": \"\"}}\n command_result = self.driver.execute(\"send_command\", params)\n\n # add cookies\n self.driver.get(self.base_url) # initial request required to add cookies\n self.driver.delete_all_cookies()\n for key, val in self.cookies.items():\n self.driver.add_cookie({\"name\": key, \"value\": val, \"domain\": self.domain})", "def setup_early(self):\n\n # create all helper/manager components first\n self.create_allcomponents()\n\n # set up config stuff\n self.setup_confighelper()\n\n # settings\n self.add_earlydefault_settings()\n self.add_settings_early()\n self.add_latesettings_aliases()\n\n # other stuff\n self.add_loggers()\n self.add_routes()\n self.add_navnodes()\n\n # site addons\n self.add_addons()\n\n # we add fallback loggers at END, after user-site added loggers\n self.add_fallback_loggers()\n\n # now update site state (log manager should catch this)\n self.set_statelabel(mconst.DEF_SITESTATE_INITIALIZE_END)", "def run(self):\n\n try:\n # Get the content from this page\n if self.verbose:\n print \"Getting page content for '%s'\" % self.url.strip()\n \n content = getPageContent(self.url)\n\n # Verify that this is not binary data\n if content is not None and isHTML(content):\n\n\n # Extract basic data about this result\n content = content.lower()\n title, keywords, description = parseMetaDataFromContent(content)\n headers = parseHeaderInformationFromContent(content)\n\n # Add this result data\n self.resultDictionary['title'] = title\n self.resultDictionary['keywords'] = keywords\n self.resultDictionary['description'] = description\n self.resultDictionary['content'] = content\n self.resultDictionary['headers'] = headers\n\n # Run the extensions\n for extension in self.extensions:\n extension.run(self.resultDictionary)\n\n\n except URLError:\n\n # Skip this URL, and register it as an error on the cache\n if self.verbose:\n print(\"Error accessing '%s', %s\" % (self.url.strip(), str(sys.exc_info()[1]).strip()))", "def setUp(self):\r\n super(TestAnswerDistributions, self).setUp()\r\n\r\n self.homework = self.add_graded_section_to_course('homework')\r\n self.add_dropdown_to_section(self.homework.location, 'p1', 1)\r\n self.add_dropdown_to_section(self.homework.location, 'p2', 1)\r\n self.add_dropdown_to_section(self.homework.location, 'p3', 1)\r\n self.refresh_course()", "def parse(self, response):\n # single page\n product_urls = response.css('.product-title > a::attr(href)').getall()\n for product_url in product_urls:\n yield 
scrapy.Request(response.urljoin(product_url), self.parse_product)\n categorie_urls = response.css('.elementor-button ::attr(href)').getall()\n for categorie_url in categorie_urls:\n yield scrapy.Request(response.urljoin(categorie_url))", "def __init__(self):\n self.site = pywikibot.Site(u'commons', u'commons')\n self.generator = self.getGenerator()", "def run_scrapping():\n date = datetime.now().strftime(\"%Y-%m-%d\")\n size = 100\n r = list(range(size))\n random.shuffle(r)\n for i in r:\n scrap_page(url_page.format(i), date)\n print(str(i) + \" / \" + str(size))", "def process_cmd():\n web_scraper = SainsburyWebscraper()\n logger.info(\"Sainsbury web scraper initialized and loaded data from SainsburyWebscraper\")\n\n json_data = web_scraper.get_product_data()\n logger.info(\"Found %s products with the following data:\" % len(json_data[\"results\"]))\n print json.dumps(json_data, indent=4, sort_keys=True)", "def prepare_work(self):\n self.driver.get(self.BaseUrl)\n self.driver.add_cookie(cookie)\n self.driver.refresh()\n self.base_handle = self.driver.current_window_handle", "def main():\n create_home()\n create_about()\n create_banner_list()\n\n for banner_id in appdata.banner_info:\n banner.create_banner_page(banner_id)", "def first_page_execution(self):\n self.errors_and_correct_input_values_helper(wrong_pattern_error=True)\n self.utility_page.click_next_button()\n self.utility_page.click_next_button()\n self.second_page.wait_for_page()", "def main(u, o):\n click.echo(f\"Web crawling on {u} started successfully...\")\n\n comment_regex = re.compile('<!--(.*?-->)')\n\n with requests.Session() as session:\n resp = session.get(u)\n soup = BeautifulSoup(resp.text, 'lxml')\n #TODO: search for hidden attributes, may be useful\n comments = soup.find_all(text=comment_regex)\n print(comments)", "def scrape_page(driver, **kwargs):\n # wait ~1 second for elements to be dynamically rendered\n time.sleep(1.2)\n start = time.time()\n containers = [\n \"section#top-card div.content\", # job content\n \"div.job-salary-container\", # job salary\n \"ul.company-growth-stats.stats-list\", # company stats\n \"div.insights-card.applicants-skills\", # applicants skills\n \"div.applicants-locations-list\" # applicants locations\n ]\n for container in containers:\n try:\n WebDriverWait(driver, .25).until(\n EC.presence_of_element_located(\n (By.CSS_SELECTOR, container)\n )\n )\n except Exception as e:\n print(\"timeout error waiting for container to load or element\" \\\n \" not found: {}\".format(container))\n print(e)\n applicant_info = {\n \"num_applicants\" : num_applicants(driver),\n \"skills\" : applicants_skills(driver),\n \"education\" : applicants_education(driver),\n \"locations\" : applicants_locations(driver)\n }\n job_info = {\n \"job_id\" : job_id(driver),\n \"salary_estimates\" : salary_data(driver),\n \"company_info\" : company_data(driver)\n }\n job_info.update(job_data(driver))\n data = {\n \"applicant_info\" : applicant_info,\n \"job_info\" : job_info,\n \"post_info\" : post_data(driver),\n \"search_info\" : kwargs\n }\n print(\"scraped page in {} seconds\\n\".format(time.time()-start))\n # try:\n # print(\"data:\\n\\n{}\\n\".format(data))\n # except Exception as e:\n # print(\"data could not be printed to console\\n\")\n return data", "def setUp(self):\n self.driver = webdriver.PhantomJS()\n self.driver.get(self.get_server_url())", "async def main():\n #launching the browser in headless mode\n browser = await launch({'headless': True})\n page = await browser.newPage()\n #removing the 
timeout\n page.setDefaultNavigationTimeout(100000)\n #adding the stealth mode to be undetected\n await stealth(page)\n global userAgent\n userAgent = await page.evaluate('navigator.userAgent')\n #capture the response of every request and save the ones we want\n page.on('response', lambda response: asyncio.ensure_future(interceptResponse(response)))\n await page.goto(urlChallenge)\n await page.waitFor(1000)\n #scroll down to trigger the requests to get video data\n for _ in range(5):\n await page.evaluate(\"\"\"{window.scrollBy(0, document.body.scrollHeight);}\"\"\")\n await page.waitFor(1000)\n await page.waitFor(3000)\n await browser.close()", "def scrape(self, page_src):\n soup = bs(page_src, 'html.parser')\n self._data = soup.text.split()", "def go(self):\n self.analyse_folder(BASE)\n self.analyse_folder(JS_FOLDER)" ]
[ "0.6909366", "0.66830665", "0.6554945", "0.65500784", "0.64894116", "0.6398545", "0.63733447", "0.6339496", "0.6224981", "0.6167547", "0.6128595", "0.6099338", "0.6079297", "0.602946", "0.59992015", "0.59699357", "0.596249", "0.5953066", "0.586132", "0.5859717", "0.58555573", "0.581796", "0.5787649", "0.578258", "0.5780477", "0.5769773", "0.57616085", "0.5759113", "0.5749785", "0.5712765", "0.5711735", "0.5703042", "0.5682373", "0.56608117", "0.56562674", "0.56344515", "0.56201184", "0.56127554", "0.5607843", "0.5601687", "0.5601471", "0.55803585", "0.55789256", "0.5568733", "0.5563192", "0.5562257", "0.5544852", "0.55258375", "0.55250466", "0.5522485", "0.5521178", "0.55186874", "0.55153716", "0.55007654", "0.5491684", "0.5490677", "0.5484771", "0.5484043", "0.5482745", "0.5482287", "0.5465271", "0.5463989", "0.5462178", "0.5448102", "0.5446791", "0.5443814", "0.54389834", "0.5426555", "0.54055476", "0.5400623", "0.53952163", "0.53906435", "0.53902704", "0.5378672", "0.5372392", "0.53694546", "0.5365088", "0.5346599", "0.5342322", "0.5333034", "0.53270245", "0.5325215", "0.5314577", "0.5314315", "0.5313967", "0.5312377", "0.531035", "0.5309552", "0.53037244", "0.52988464", "0.52983147", "0.52977747", "0.5290075", "0.5289555", "0.52858186", "0.5283192", "0.5278924", "0.52765405", "0.52760386", "0.52740145", "0.5273545" ]
0.0
-1
Scrapes the plot from the provided URL.
def get_plot(url):
    soup = get_soup(url.rsplit('/', 1)[0])
    if soup:
        # scrape the plot section
        plot_div = soup.find('div', {'id': 'titleStoryLine'})
        # fixes bug where no plot is found
        try:
            plot_class = plot_div.find('span', {'itemprop': 'description'})
            plot = plot_class.text.strip()
            return ' '.join(plot.split())
        except AttributeError:
            return 'The plot was not available.'
    else:
        display_error()
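A minimal usage sketch for the function above, assuming get_soup returns a parsed page object (or None on failure) and display_error reports the problem — both helpers are defined elsewhere in the source, and the title URL here is purely illustrative:

# hypothetical call; get_plot strips the last path segment before fetching
plot = get_plot('https://www.imdb.com/title/tt0111161/plotsummary')
if plot:
    print(plot)  # whitespace-normalized plot text, or the fallback message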
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def scrape_page(session, url):\n logging.info('Scraping %s', url)\n images = await get_url_images(session, url)\n await save_url_images(images)", "def scrape_url(url):\n html = requests.get(url).text\n return scrape_html(html)", "def fn_GetMoviePlot(self, details):\n\n # If the custom url was not actually defined and we had no cached\n # data, then there is nothing to do.\n #\n if details is None:\n return\n\n dom = parseString(details)\n d = dom.firstChild\n self.plot = get_child_data(d, \"plot\", self.plot)\n dom.unlink()", "def joblib_read_img_url(url):\n\n from matplotlib.image import imread\n fd = urlopen(url, timeout=10)\n return imread(io.BytesIO(fd.read()))", "def get_data(self, url):\n # Initialize the button that needs to be pressed to get download the data\n button = None\n # While this button is of type 'None' we reload the browser\n while button is None:\n try:\n # Navigate to the URL\n self.go_to_url(url)\n # Sleep the code by the defined time plus a random number of seconds between 0s and 2s. This should\n # reduce the likelihood that Google detects us as a scraper\n time.sleep(self.sleep + 2 * np.random.rand(1))\n # Try to find the button and click it\n line_chart = self.browser.find_element_by_css_selector(\n \"widget[type='fe_line_chart']\")\n button = line_chart.find_element_by_css_selector(\n '.widget-actions-item.export')\n button.click()\n except exceptions.NoSuchElementException:\n # If the button cannot be found, try again (load page, ...)\n pass\n # After downloading, wait again to allow the file to be downloaded\n time.sleep(self.sleep)\n # Load the data from the csv-file as pandas.DataFrame object\n data = pd.read_csv(self.filename, skiprows=1)\n # Set date as index:\n if 'Day' in data.columns:\n data.Day = pd.to_datetime(data.Day)\n data = data.set_index(\"Day\")\n frequency = 'Daily'\n elif 'Week' in data.columns:\n data.Week = pd.to_datetime(data.Week)\n data = data.set_index(\"Week\")\n frequency = 'Weekly'\n else:\n data.Month = pd.to_datetime(data.Month)\n data = data.set_index(\"Month\")\n frequency = 'Monthly'\n # Sleep again\n time.sleep(self.sleep)\n # Delete the file\n while os.path.exists(self.filename):\n try:\n os.remove(self.filename)\n except:\n pass\n return data, frequency", "def do_get(self, url):\n self.driver.get(url)", "def get(self, url):\n self.browser.get(url)", "def webScraper(self):\n try:\n self.covid_df = pd.read_csv(self.COVID_URL)\n except:\n sys.exit('COVID data is unavailable at source.')\n \n latest_date = self.covid_df['date'].max()\n earliest_date = self.covid_df['date'].min()\n self.covid_df = self.covid_df[self.covid_df['date'] == self.date.strftime('%Y-%m-%d')]\n \n if self.covid_df.empty:\n exit_string = 'Requested date not available. 
Latest date available is ' + latest_date + ' while earliest is ' + earliest_date\n sys.exit(exit_string)\n else:\n self.covid_df = self.covid_df[self.covid_df['location'] != 'World']\n \n try:\n self.countries_centroids = pd.read_html(self.CENTROIDS_URL, header=0, index_col='country')[0]\n except:\n sys.exit('Central coordinates data for countries unavailable from Google developers.')\n \n try:\n self.geo_data = requests.get(self.GEOJSON_URL).json()\n except:\n sys.exit('GeoJSON data unavailable to draw country polygons.')", "def parse(self, url):\n pass", "def scrape(url):\n logger.debug('[SCRAPER]\\t Loading url: %s', url)\n try:\n html_page = urlopen(url).read()\n except (http.client.IncompleteRead, urllib.error.URLError):\n logger.warning(\"[SCRAPER]\\t Could not read the page for url: %s\", url)\n return ''\n logger.debug('[SCRAPER]\\t Parsing with BS')\n soup = BeautifulSoup(html_page, 'html5lib')\n data = soup.findAll('p')\n data = [p.get_text().replace('\\n', '').replace('\\t','') for p in data]\n\n if not data:\n logger.warning('[SCRAPER]\\t No data found for url: %s', url)\n else:\n logger.debug('[SCRAPER]\\t [%s]: \\n %s', url, data)\n\n return ' '.join(data) if data else ''", "def run(self, url=''):\n if url:\n webbrowser.open(url)", "def get_plot(session_id, test_name):\n return Plot.get_plot(session_id, test_name)", "def load(self, url):\n pass", "def load(self, url):\n pass", "def get(self, url: str):\n\n self.driver.get(url)", "def plot_race(url):\n #hey, thanks again for these functions!\n idrace = id_from_url(url)\n xml = get_poll_lxml(idrace) \n colors = plot_colors(xml)\n\n if len(colors) == 0:\n return\n \n #really, you shouldn't have\n result = race_result(url)\n \n poll_plot(idrace)\n plt.xlabel(\"Date\")\n plt.ylabel(\"Polling Percentage\")\n for r in result:\n plt.axhline(result[r], color=colors[_strip(r)], alpha=0.6, ls='--')", "def loadComponentFromURL( cUrl, tProperties=() ):\n StarDesktop = getDesktop()\n oDocument = StarDesktop.loadComponentFromURL( cUrl, \"_blank\", 0, tProperties )\n return oDocument", "def loadComponentFromURL( cUrl, tProperties=() ):\n StarDesktop = getDesktop()\n oDocument = StarDesktop.loadComponentFromURL( cUrl, \"_blank\", 0, tProperties )\n return oDocument", "def GetDataFromURL(self, url):\n try:\n deftimeout = socket.getdefaulttimeout()\n socket.setdefaulttimeout(1)\n try:\n logging.debug('Slide fetching data from %s' % url)\n u = urllib.urlopen(url)\n data = u.read()\n return data\n except:\n logging.exception('Uh oh!')\n return None\n finally:\n socket.setdefaulttimeout(deftimeout)", "def from_url(self) -> PngImagePlugin.PngImageFile:\n response = requests.get(self.url)\n img = Image.open(BytesIO(response.content))\n\n return img", "def fetch_song_data(url):\r\n response = requests.get(url)\r\n return response.text", "def open_url(self, url: str):\n self.driver.get(url)", "def load_data(url: str):\n\n page = requests.get(url=url)\n soup = BeautifulSoup(page.content, 'html.parser')\n return soup", "def run(self):\n\n for url in self.urls:\n try:\n # Use requests to retrieve web page data\n print(url)\n response = session.get(url, ) # allow_redirects=True)\n\n if response.status_code != 200:\n print('Failed to retrieve page, URL: {0}, error: {1}\\n'.format(url, response.status_code))\n return\n\n # Get web page data from HTML response\n content = get_json_data(response.text)\n\n # Compile data into dictionary to be used for reporting\n summary_data = generate_report(content)\n\n # Generate/print report\n 
print_report(summary_data)\n\n except Exception as error:\n print('Scraper failed to run for URL {0}, error: {1}, {2}\\n'.format(\n url, type(error).__name__, error\n ))\n\n # time.sleep(1) # for load concerns", "def scrape_url(url):\n r = requests.get(url)\n url_list = get_urls(r.text)\n email_list = get_email_addresses(r.text)\n phone_list = get_phone_numbers(r.text)\n\n print_list('Urls', url_list)\n print_list('Emails', email_list)\n print_list('Phone Numbers', phone_list)", "def scrape(self):\n try:\n self.result = urlfetch.fetch(self.url)\n except DownloadError:\n self.result = urlfetch.fetch(self.url) \n if ((self.result.status_code == 200) and\n (self.result.content_was_truncated == 0)):\n self.soup = BeautifulSoup(self.result.content)\n else:\n logging.critical(\"Bad Status Code: \", self.result.status_code, self.url)\n sys.exit(1)", "def open_url(self, url):\n\n self.driver.get(url)", "def crawl(self, url):\n return None", "def fetch(self, url, listener, useCache = True): #$NON-NLS-1$\r", "def scrape_url(url):\n ydl_opts = {\n \"ignoreerrors\": True, # Skip private and unavaliable videos\n }\n\n ydl = youtube_dl.YoutubeDL(ydl_opts)\n\n with ydl:\n result_ydl = ydl.extract_info(\n url,\n download=False # No download needed, only the info\n )\n\n logger.debug('Url scraped {}', url)\n if 'entries' in result_ydl:\n # It's a playlist or a list of videos\n return result_ydl['entries']\n # Just a video\n return [result_ydl]", "def get_data(stream_url, count, is_graph):\n data = requests.get(stream_url + '/Data' + '?count=' + str(count) + '&Startindex=1970-01-01T00:00:00Z', headers=headers)\n Values = []\n for event in data.json():\n Values.append(event['Value'])\n if is_graph:\n terminalplot.plot(range(len(Values)), Values)\n else:\n print(Values)\n input(\"Press any key to continue...\")\n return", "def _setContentFromUrl(self, url):\n urlgrabber = UrlGrabber(url)\n self._htmlContent = urlgrabber.get()", "def scrape(self, link):\n url = urljoin(self.base_url, '/scrape/')\n res = _analysis(url, self.auth, url=link)\n return res.json()", "def retrieve_episode_html(url):\n response = requests.get(url)\n return response.content", "def fetchUrl(self, url):\n self.driver.get(url)\n html = self.driver.page_source\n return html", "def scrape(self):\n pass", "def access_study(self, url):\n\n doc = requests.get(url)\n self.num_requests += 1\n if doc.status_code != 200:\n with open('runReport.txt', 'a') as report:\n report.write('\\n')\n report.write(str(doc.status_code) + \" for \" + url)\n return None\n else:\n soup = BeautifulSoup(doc.content, features=\"lxml\")\n self.num_hits += 1\n return self.get_study_data(soup, url)", "def spidy_1(url):\r\n\tbr = Browser()\r\n\t# browser basic setup (for simulate a real web browser)<----todo para simular un navegador :D\r\n\tbr.set_handle_equiv(True) # cuando tratar HTML http-equiv headers como HTTP headers\r\n\tbr.set_handle_redirect(True) # para los redirect loops\r\n\tbr.set_handle_referer(True) # para annadir un referer al objeto request\r\n\tbr.set_handle_robots(False) # ignorar robots.txt\r\n\tbr.set_debug_http(False) # bueno para la fase de development\r\n\tbr.set_debug_responses(False) # mas debuggeo\r\n\tbr.set_debug_redirects(False) # mas aun\r\n\tbr.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time = 1) # puede usarse: br.set_handle_refresh(False)\r\n\t# para simular Firefox desde Fedora :)\r\n\tbr.addheaders = [('User-agent', 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 
Firefox/3.0.1')]\r\n\tbr.open(url)\r\n\tfor link in br.links():\r\n\t\tprint link.text,link.url", "async def play_url(self, url: str):\n await self._pytheos.api.browse.play_url(self.id, url)", "def start(self):\n self.get(self.url)", "def view(self, metric: str, plot_type: PlotType):\n plots = self._create_plot(metric, plot_type)\n for plot in plots:\n plot.show()\n # plot.clf()", "def main(url):\n\n return excel_parser(\n url,\n extract_product_from_data\n )", "def open_web_browser(url: str):\n Popen(web_browser + [url], stdout=DEVNULL, stderr=DEVNULL)", "def soup_explore(url_or_file, session=None):\n soup = ph.get_soup(url_or_file, session)\n if not soup:\n ph.logger.error('No soup found for {}'.format(url_or_file))\n else:\n print('\\nExplore the \"soup\" object\\n\\n')\n embed()\n return soup", "def read_from_server(url_base=\"http://10.200.102.18/\", url_dir=\"G179-dataset/\"):\n\n all_images = urllib2.urlopen(url_base + url_dir).read()\n\n parser = ImagesHTMLParser()\n parser.feed(all_images)\n data = parser.data\n imgs = []\n\n print(\"Found %d images!\" % len(data))\n print(\"Started Download!\")\n i = 1\n\n for d in data:\n print(\"\\rProgress: %d/%d \" % (i, len(data)), end='')\n dl_img = urllib2.urlopen(url_base + url_dir + d).read()\n asd = cStringIO.StringIO(dl_img)\n img = Image.open(asd)\n imgs.append(np.array(img))\n i = i + 1\n\n return imgs", "def get_url_soup(url):\n r = requests.get(url)\n if r.status_code != 200:\n raise Exception(\"Paper request failed '%s'\" % url)\n return get_soup(r.content)", "def loadUrl(self, url):\n cmdId = self.executeCommand(Command.GET, {'url': url})\n return cmdId", "def request_page(self, url, action=None):\n if url.startswith(self.url):\n self.driver.get(url)\n else:\n self.driver.get(self.url + url)\n self.default_wait.until(EC.invisibility_of_element_located((By.XPATH, \"//div[@class='loading-bar']\")))\n if action:\n action(self.driver)\n return self.driver.page_source", "def getVotacion(self, url):", "def _by_url(session: Session, url: URL) -> UnifiedDataset:\n r = session.get(str(url))\n if r.status_code == 404:\n raise NotFound(str(url))\n data = response.successful(r).json()\n return _from_json(url, data)", "def read_url(url):\n response = requests.get(url)\n return response.text", "def call_feed(url: str) -> dict:\n\n if not url:\n return {}\n feed = feedparser.parse(url)\n return feed", "def Open(self, url):\n fp = self._opener.open(url)\n response = fp.read()\n fp.close()\n return response", "def __fetch_from_url(url: str) -> Any:\n song_information: Any = None\n try:\n # Send the request and load the returned contents.\n req = request.Request(url, headers={\n 'User-Agent': Config.Config.get_user_agent()\n })\n response = request.urlopen(req)\n contents: str = response.read().decode('utf-8')\n except (HTTPError, TimeoutError) as ex:\n Logger.Logger.log_error(str(ex))\n Logger.Logger.log_error('Request failed for URL: ' + url)\n return\n # Parse the response from the endpoint as a JSON encoded string\n data: Any = json.loads(contents)\n # Check if response contains at least one result, otherwise return \"None\".\n if data['resultCount'] > 0:\n song_information = data\n return song_information", "def download_pil_image(self, url):\r\n return Image.open(urlopen(url))", "def fetch_url(self, url: str):\n log.debug(f\"Fetching {url}\")\n answer = self.session.get(url, timeout=self.timeout)\n answer.raise_for_status()\n\n # returning raw answer object, because due to redirects we may need to\n # double check answer.url to proceed\n 
return answer", "def main(url):\n print(f\"Running main with URL = {url}...\")\n imagehits(downloaddata(url))", "def _urlopen(url):\n headers = config.get(\"extra_headers\",{})\n headers['User-Agent'] = config.user_agent\n\n type, host, selector = split_type_host(url)\n\n if type.lower() == \"https\":\n conn = ProxyHTTPSConnection(host, url=url)\n else:\n conn = ProxyHTTPConnection(host, url=url)\n\n conn.request(\"GET\", selector, headers=headers)\n return conn.getresponse()", "def scrape_page(self, url: str) -> str:\n\n ctx = ssl.create_default_context()\n ctx.check_hostname = False\n ctx.verify_mode = ssl.CERT_NONE\n\n headers = {\n 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.3',\n 'Content': 'Type: application/json',\n 'Referer': 'https://places.nbnco.net.au/'\n }\n\n req = Request(url=url, headers=headers)\n\n return urlopen(req, context=ctx)", "def fetch_url_feed(self, url, **args):\n return self.fetch(\"/url\", url=url, **args)", "def request(self, url):\r\n\r\n req = self.get(url)\r\n soup = BeautifulSoup(req.content, \"lxml\")\r\n return soup", "def Load(self, url, **kwargs):\n opts = ''\n self.url = url\n if not 'wcsunits' in kwargs:\n kwargs['wcsunits'] = 'degrees'\n if not 'scaleclipping' in kwargs:\n kwargs['scaleclipping'] = 'zscale'\n if len(kwargs) > 0:\n opts = json.dumps(kwargs)+','\n fmt = dict(url=self.url, kw=opts, wid=self.wid, suffix=self.suffix)\n command = \"JS9.Load('{url}',{kw}{{display:'{wid}{suffix}'}});\".format(**fmt)\n try:\n hdulist = fits.open(url)\n temp_wcs = wcs.WCS(hdulist[0].header)\n self.header = hdulist[0].header\n self.wcs_world2pix = temp_wcs.wcs_world2pix\n self.wcs_pix2world = temp_wcs.wcs_pix2world\n hdulist.close()\n except:\n pass\n get_ipython().run_cell_magic('javascript', '', command)", "def get(self, url):\n\t\ttry:\n\t\t\tassert(type(url)) == str\n\t\t\tself.driver.get(url)\n\t\t\t# sleep(1) # Even tho driver.get is blocking, it returns as soon as DOM loads, without waiting for JS to run and update the DOM with the new elements\n\t\t\t# wait(self.driver, 10).until( EC.visibility_of_element_located() ) # Not sure how to wait here efficiently\n\t\t\tsleep(5) # A little long, but without a conditional variable to tell us when the page is ready us when to go our only choice is to nap\n\t\t\tself.bsource = bs( self.viewSource(), \"lxml\" ) # Update internal BeautifulSoup source with new javascript-encriched code (\"lxml\" is faster that \"html.parser\")\n\t\texcept Exception as e:\n\t\t\tprint(\"[*] Unable to GET page {}\\n{}\".format(url, e))\n\t\t\treturn -1", "def fetch_url(session, url):\n return session.get(url).text", "def go(self, url):\n self.driver.get(url)", "async def fetch_page(self, url: str) -> PageRaw:\n\n raise NotImplementedError()", "def open_url(url):\n\tglobal books\n\tglobal count_books\n\tglobal titles\n\t#global word_count\n\ttry:\n\t\t#open url\n\t\tresponse = re.urlopen(url)\n\t\t#get data\n\t\tcontent = response.read().decode('utf8')\n\t\t#close connection\n\t\tresponse.close()\n\t\t\n\texcept(er.URLError):\n\t\t#if url is not functional\n\t\tcontent = \"\"\n\t\tprint(\"The URL is not functional : \",url)\n\t\treturn None\n\t\t# #remove the url from the books dictionary\n\t\t# for key,val in books.items():\n\t\t# \tif val == url:\n\t\t# \t\tdel books[key]\n\t\t# \t\t#pop the last\n\t\t# \t\ttitles.pop()\n\t\t# \t\tbreak\n\t\t# #update count for number of books\n\t\t# count_books = len(books)\n\t\t# return\n\treturn content", "async def 
parse_url(self, url: str, delay: int = 0) -> BeautifulSoup:\n if url != self.driver.current_url:\n self.driver.get(url)\n return BeautifulSoup(self.driver.page_source, 'lxml')", "def open_url(self):\n QDesktopServices.openUrl(self.url)", "def plot_page( stat_type ) :\r\n logger.debug( f\"stat_type={stat_type}\" )\r\n param = request.args[\"param\"]\r\n\r\n script = server_document( url=f'http://localhost:5006/{stat_type_2_plot_route[stat_type]}',\r\n arguments={'param' : param ,\r\n 'stat_type' : stat_type ,\r\n 'session_id' : session[ session_info.session_id_key ] } )\r\n\r\n return render_template('plot_page.html',\r\n script=script ,\r\n param=param ,\r\n stat_type=param_stats.StatTypes[stat_type].value )", "def open(webpage_url):\n\twith youtube_dl.YoutubeDL(dict(forceurl=True)) as ydl:\n\t\tr = ydl.extract_info(webpage_url, download=False)\n\t\tmedia_url = r['formats'][-1]['url']\n\twebbrowser.open('googlechromes://' + media_url[8:] )", "def crawl(self):\n self.get('http://code.google.com/p/webscraping/')\n self.get('http://code.google.com/p/sitescraper/')\n QTimer.singleShot(5000, self.app.quit)", "def read_url(url):\n return requests.get(url).text", "def fromurl(cls, url: str):\n return cls.parse_obj(requests.get(url).json())", "def get_ss(url):\n while True:\n try:\n headers = headers_pool[random.randint(0, len(headers_pool)-1)]\n r = requests.get(url, headers=headers, timeout=10)\n except Exception:\n time.sleep(random.randint(2, 10))\n continue\n break\n\n if r.status_code != requests.codes.ok:\n print('return status code {} is not OK.'.format(r.status_code))\n return None\n r.encoding = r.apparent_encoding\n soup = BeautifulSoup(r.text, \"lxml\")\n row_heads = soup.body.find_all('td', text='日本樱花')\n rows = map(lambda x: x.find_parent(\"tr\"), row_heads)\n values = map(lambda x: x.find_all('td'), rows)\n\n proxy_list = []\n for v in values:\n c = map(lambda x: x.text, v)\n lc = list(c)\n proxy = 'ss://{}:{}@{}:{}'.format(lc[4],lc[3],lc[1],lc[2])\n # proxy = ss://rc4-md5:password@ip:port\n proxy_list.append(proxy)\n return proxy_list", "async def _get_spot_feed(self):\n self._logger.debug(\"Polling Spot API\")\n async with aiohttp.ClientSession() as session:\n response = await session.request(\n method=\"GET\",\n url=self.spot_url,\n params=self.params\n )\n json_resp = await response.json()\n _response = json_resp.get(\"response\")\n\n if \"errors\" in _response:\n self._logger.error(\"Error from Spot API: '%s'\", _response)\n else:\n await self.handle_response(_response)", "def get_soup(url: str):\n response = requests.get(url)\n\n return BeautifulSoup(response.content, \"html.parser\")", "def scrape_jpl_images():\n browser = init_browser()\n\n url = \"https://www.jpl.nasa.gov/spaceimages/?search=featured&category=Mars#submit\"\n browser.visit(url)\n time.sleep(3)\n\n # Scrape page into Soup\n html = browser.html\n soup = bs(html, \"html.parser\")\n url_base = \"https://www.jpl.nasa.gov\"\n result = soup.find('a', class_='fancybox')\n featured_image_url = url_base+result[\"data-fancybox-href\"]\n\n # Close the browser after scraping\n browser.quit()\n return featured_image_url", "def report(self, url):\n\n print(self.get(url))", "def get_song_html(self, url):\n request = urllib.request.Request(url)\n request.add_header(\"Authorization\", \"Bearer \" + self.client_access_token)\n request.add_header(\"User-Agent\",\n \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36'\")\n\n page = urllib.request.urlopen(request)\n html = 
BeautifulSoup(page, \"lxml\")\n print(\"Scraped: \" + url)\n return html", "def get_results(self, url=None, job_id=None) -> 'ImageCollection':\n\n pgraph = RESTProcessgraph(pg_id=None, connection=self.connection)\n\n graph = {\n 'process_id': 'get_results',\n }\n\n if url:\n graph[\"url\"] = url\n if job_id:\n graph[\"job_id\"] = job_id\n\n pgraph.graph = graph\n\n return pgraph", "def scrape_tracks():\n r = request.json\n for i in range(len(r)):\n try:\n url = req.get(r[i]).text\n soup = BeautifulSoup(url, \"html.parser\")\n soup = json.loads(unquote(soup.find(id='resource').text))\n r[i] = soup['preview_url']\n except:\n continue\n return jsonify(r), 200", "def browse( self ):\n webbrowser.open(self.url())", "async def _fetch(self, session, url, proxy=None, raw=False, which_site=False):\n print(url)\n result = None\n site = None\n if 'hare' in url: # {'Unknown': -1, 'Pixnet': 0, 'Hares': 1}\n site = self._websites['Hares']\n elif 'pixnet' in url:\n site = self._websites['Pixnet']\n else:\n site = self._websites['Unknown']\n\n count = 1\n while count <= 2:\n soup = ''\n status = 0\n try:\n async with session.get(url, proxy=proxy) as response:\n source_code = await response.text('utf-8')\n status = response.status\n soup = source_code if raw else BeautifulSoup(source_code, 'lxml')\n except Exception as e:\n print('Connection error: ' + str(e))\n soup = None\n finally:\n result = (url, soup, status, site) if which_site else (url, soup, status)\n if status != 0:\n return result\n if 'searcharticle' not in url:\n count += 1\n result = (url, soup, status, site) if which_site else (url, soup, status)\n return result", "def open_url(name):\n url = localReadConfig.get_webServer(name)\n browser = open_browser()\n browser.get(url)\n return browser", "def _stream_from_url(self):\n if self.songObj is None:\n self._extract_data()\n else:\n self._extract_songObj()\n\n logger.debug(self.title)\n\n # Now search the song locally\n if not self.dont_cache_search:\n match = search_locally(self.title)\n if match:\n # Update the URL cache. 
This is necessary for the old songs.\n update_URL_cache(self.title, self.URL)\n # Change the value to local path\n self.stream_url = match[1]\n else:\n self._dw()\n else:\n logger.info(\"Searching locally disabled.\")\n if self.stream_url == \"\":\n self._get_youtube_data_url()\n\n direct_to_play(self.stream_url, self.show_lyrics, self.title)", "def refresh(self, url, args, cancellationSignal):\n pass", "def scrape_BI(url):\n response = requests.get(url)\n soup = BeautifulSoup(response.text)\n companies = soup.find_all('h3', class_='slide-title')\n #names = []\n driver = init_driver()\n for company in companies[:]:\n name = company.getText().strip()\n # if \" \" in name:\n # name.replace(' ','+')\n html_code = load_google(driver, name)\n #name, address = scrape_google(html_code)\n url = scrape_google(html_code)\n print(name,url)\n #names.append(name)\n driver.quit()\n #print(names)", "def set_url(self, url):\n qgs_logger = QgsApplication.messageLog()\n qgs_logger.logMessage('DialogMetadata: url {}'.format(url), tag=configuration.LOGGER_TAG, level=Qgis.Info)\n self.dlg.webview.load(QUrl(url))\n self.dlg.webview.show()", "def querySlidesFor(pieName, url):\n url = \"http://\" + url + \"/wp-admin/admin-ajax.php?action=dds_api&pie_name=\" + pieName\n jsonString = str(urllib2.urlopen(url).read().decode(\"utf-8\"))\n print \"Slides for \" + pieName + \": \" + jsonString\n return jsonString", "async def fetch(self, session, url):\n async with session.get(url) as response:\n if response.status != 200:\n response.raise_for_status()\n response = await response.text()\n return json.loads(response)", "async def _grab_connection(self, url):\n ...", "def get_fred_data(url):\n pass", "async def get_one_page_soup_object(url):\n async with aiohttp.ClientSession() as session:\n async with session.get(url) as response:\n return await response.text()", "def _scrape(self):", "def import_webscrape_data(scrape_link):\n cur = cursor(scrape_link)\n coords = cur.execute(\"SELECT name, long, lat FROM reactors_coordinates\")\n return coords", "def plot(self, *path, **attrs):\n # Check for expected number of arguments\n if len(path) == 1:\n # Check it's a short url we know of\n if path[0] in self.urls:\n mimetype = 'image/png' # We'll just assume they're all png for the moment\n url = self.urls[path[0]] # Get the full URL from the dictionary\n data = urllib2.urlopen(url).read() # Read the data from this url\n return mimetype, data # Return the data and mimetype\n\n # Something went wrong, return an error condition\n return None, None", "def get_content(url):\n img=requests.get(url).content\n return img", "def _extract_html(self, url):\n self.response = requests.get(url, timeout=5)\n self.html = BeautifulSoup(self.response.content, \"lxml\") if self.response.ok else None\n # return self.html", "def dl_url(url):\n g.browse_mode = \"normal\"\n yt_url(url)\n\n if len(g.model.songs) == 1:\n download(\"download\", \"1\")\n\n if g.command_line:\n sys.exit()" ]
[ "0.59756243", "0.59229475", "0.5871458", "0.5760079", "0.56895536", "0.5675282", "0.56477475", "0.55973434", "0.5542551", "0.55007184", "0.54510456", "0.54468447", "0.54364306", "0.54364306", "0.54296917", "0.5414543", "0.53710335", "0.53710335", "0.5366538", "0.5339143", "0.5326717", "0.5325852", "0.5311769", "0.5304213", "0.53011084", "0.5289374", "0.5289186", "0.5284501", "0.526057", "0.52544653", "0.5242962", "0.5239749", "0.52040696", "0.51871264", "0.5168625", "0.5152333", "0.51297915", "0.51293796", "0.51118684", "0.50997704", "0.50948256", "0.5093622", "0.5085319", "0.5073904", "0.50736785", "0.50661755", "0.5065279", "0.506277", "0.5059238", "0.50500596", "0.50455046", "0.50446486", "0.5040949", "0.5040278", "0.50389844", "0.50264597", "0.5024166", "0.502255", "0.50125134", "0.50112534", "0.50041896", "0.50022376", "0.49974734", "0.49931315", "0.49902266", "0.4989733", "0.49787754", "0.49770328", "0.49671564", "0.4963408", "0.4962927", "0.49610907", "0.49547583", "0.49465063", "0.4943634", "0.49430177", "0.493535", "0.49352202", "0.49319276", "0.49305326", "0.49221638", "0.49181253", "0.4916234", "0.49156317", "0.49153906", "0.49129823", "0.4912629", "0.49087706", "0.49026084", "0.49004316", "0.48976222", "0.48881313", "0.4880184", "0.4879642", "0.48792696", "0.487474", "0.48660588", "0.48651493", "0.48606074", "0.48576897" ]
0.6619968
0
Cleans up the given comments.
def cleanup_comments(comments):
    clean_comments = []
    if comments:
        for comment in comments:
            cleaned_up = sub(r'\n\n {8}\n {8}\n {12}\n {16}\n {16}\n {12}\nEdit', '', comment)
            clean_comments.append(cleaned_up)
    return clean_comments
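A minimal sketch of calling the function above, assuming sub was imported via "from re import sub" in the function's module; the sample input reproduces the whitespace-and-"Edit" residue the pattern is written to strip:

from re import sub  # assumed import; cleanup_comments calls sub() unqualified

junk = ('\n\n' + ' ' * 8 + '\n' + ' ' * 8 + '\n' + ' ' * 12 +
        '\n' + ' ' * 16 + '\n' + ' ' * 16 + '\n' + ' ' * 12 + '\nEdit')
print(cleanup_comments(['Great movie!' + junk, 'Loved it.']))
# -> ['Great movie!', 'Loved it.']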
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clean_comments(self):\n new_lines = list()\n for line in self.lines:\n if ((not line.startswith(\"//\")) & (not line.isspace()) &\n (not line.startswith(\"/*\") & (not line.startswith(\"*/\")))):\n line = Parser.strip_line(line)\n new_lines.append(line)\n self.lines = new_lines", "def del_comm(self, blocks=False):\n logging.debug('Delete comments from text')\n if not(self.check()):\n raise GcodeError(\"Invalid g-codes\")\n temp = []\n comment = re.compile(';\\ .*')\n for line in self.blocks:\n n = comment.search(line)\n if n:\n line = line[:n.span()[0]]\n line = line.strip()\n if line != \"\":\n temp.append(line)\n if blocks:\n return temp\n return \"\\n\".join(temp)", "def remove_comments(ls):\r\n for i in range(len(ls)):\r\n ls[i] = re.sub(r'//.*', '', ls[i])\r\n\r\n return ls", "def __clean_line_comments(self):\n self.lines = [l for l in self.lines if not l.startswith(\"//\") and len(l) != 0]", "def __clean_line_comments(self):\n self.lines = [l for l in self.lines if not l.startswith(\"//\") and len(l) != 0]", "def correct_tokenization(self, comments):\n\t\tself.yap(\"Joining orphaned lines of punctuation...\")\n\t\tcorrected = []\n\t\tfor line in comments:\n\t\t\tif all([w in punct for w in line]):\n\t\t\t\tcorrected[-1] = corrected[-1] + line if corrected else \"\"\n\t\t\telse:\n\t\t\t\tcorrected.append(line)\n\t\t#combine punctuation sequences into a single token\n\t\tself.yap(\"Joining punctuation sequences... \")\n\t\tcorrected = [self.joinPunctuationSequence(c) for c in corrected]\n\t\treturn corrected", "def _removeComments(code):\r\n # remove all occurance streamed comments (/*COMMENT */) from string\r\n text = re.sub(re.compile('/\\*.*?\\*/', re.DOTALL), '', code)\r\n # remove all occurance singleline comments (//COMMENT\\n ) from string\r\n return re.sub(re.compile('//.*?\\n'), '', text)", "def strip_comments(tokens):\n prev_typ = None\n prev_end_col = 0\n for typ, tok, (start_row, start_col), (end_row, end_col), line in tokens:\n if typ in (tokenize.NL, tokenize.NEWLINE):\n if prev_typ in (tokenize.NL, tokenize.NEWLINE):\n start_col = 0\n else:\n start_col = prev_end_col\n end_col = start_col + 1\n elif typ == tokenize.COMMENT and start_row > 2:\n continue\n prev_typ = typ\n prev_end_col = end_col\n yield typ, tok, (start_row, start_col), (end_row, end_col), line", "def comments(self, comments):\n\n self.container['comments'] = comments", "def deleteComments(self: Self, event: Event = None) -> None:\n #@+<< deleteComments docstring >>\n #@+node:ekr.20171123135625.37: *3* << deleteComments docstring >>\n #@@pagewidth 50\n #@-<< deleteComments docstring >>\n c, p, u, w = self, self.p, self.undoer, self.frame.body.wrapper\n #\n # \"Before\" snapshot.\n bunch = u.beforeChangeBody(p)\n #\n # Initial data.\n head, lines, tail, oldSel, oldYview = self.getBodyLines()\n if not lines:\n g.warning('no text selected')\n return\n # The default language in effect at p.\n language = c.frame.body.colorizer.scanLanguageDirectives(p)\n if c.hasAmbiguousLanguage(p):\n language = c.getLanguageAtCursor(p, language)\n d1, d2, d3 = g.set_delims_from_language(language)\n #\n # Calculate the result.\n changed, result = False, []\n if d1:\n # Remove the single-line comment delim in front of each line\n d1b = d1 + ' '\n n1, n1b = len(d1), len(d1b)\n for s in lines:\n i = g.skip_ws(s, 0)\n if g.match(s, i, d1b):\n result.append(s[:i] + s[i + n1b :])\n changed = True\n elif g.match(s, i, d1):\n result.append(s[:i] + s[i + n1 :])\n changed = True\n else:\n result.append(s)\n else:\n # Remove the 
block comment delimiters from each line.\n n2, n3 = len(d2), len(d3)\n for s in lines:\n i = g.skip_ws(s, 0)\n j = s.find(d3, i + n2)\n if g.match(s, i, d2) and j > -1:\n first = i + n2\n if g.match(s, first, ' '):\n first += 1\n last = j\n if g.match(s, last - 1, ' '):\n last -= 1\n result.append(s[:i] + s[first:last] + s[j + n3 :])\n changed = True\n else:\n result.append(s)\n if not changed:\n return\n #\n # Set p.b and w's text first.\n middle = ''.join(result)\n p.b = head + middle + tail # Sets dirty and changed bits.\n w.setAllText(head + middle + tail)\n #\n # Set the selection range and scroll position.\n i = len(head)\n j = ins = max(i, len(head) + len(middle) - 1)\n w.setSelectionRange(i, j, insert=ins)\n w.setYScrollPosition(oldYview)\n #\n # \"after\" snapshot.\n u.afterChangeBody(p, 'Indent Region', bunch)", "def comments(self, comments):\n if comments is not None and len(comments) > 1000:\n raise ValueError(\"Invalid value for `comments`, length must be less than or equal to `1000`\") # noqa: E501\n\n self._comments = comments", "def comments(self, comments):\n\n self._comments = comments", "def comments(self, comments):\n\n self._comments = comments", "def comments(self, comments):\n\n self._comments = comments", "def comments(self, comments):\n\n self._comments = comments", "def delete_comments(redditor):\n\n for index, comment in enumerate(redditor.comments.new(limit=None)):\n print(\"Deleting comment {}\".format(index))\n comment.edit(\"-\")\n comment.delete()", "def remove_comments(css):\n log.debug(\"Removing all Comments.\")\n iemac, preserve = False, False\n comment_start = css.find(\"/*\")\n while comment_start >= 0: # Preserve comments that look like `/*!...*/`.\n # Slicing is used to make sure we dont get an IndexError.\n preserve = css[comment_start + 2:comment_start + 3] == \"!\"\n comment_end = css.find(\"*/\", comment_start + 2)\n if comment_end < 0:\n if not preserve:\n css = css[:comment_start]\n break\n elif comment_end >= (comment_start + 2):\n if css[comment_end - 1] == \"\\\\\":\n # This is an IE Mac-specific comment; leave this one and the\n # following one alone.\n comment_start = comment_end + 2\n iemac = True\n elif iemac:\n comment_start = comment_end + 2\n iemac = False\n elif not preserve:\n css = css[:comment_start] + css[comment_end + 2:]\n else:\n comment_start = comment_end + 2\n comment_start = css.find(\"/*\", comment_start)\n return css", "def resolve_empty_comments(tree: dict, empty_comments: List[str]):\n empty_comments_dict = {}\n for id in empty_comments:\n empty_comments_dict[id] = tree[\"comments\"][id]\n tree[\"comments\"].pop(id)\n\n for id, comment in tree[\"comments\"].items():\n parent_id = comment[\"parent_id\"]\n while parent_id in empty_comments:\n parent_id = empty_comments_dict[parent_id][\"parent_id\"][3:]\n comment[\"parent_id\"] = (parent_id if parent_id in tree[\"comments\"]\n else tree[\"id\"])\n\n for i, reply_id in enumerate(comment[\"replies\"]):\n if reply_id in empty_comments:\n del comment[\"replies\"][i]\n\n return tree", "def DropComment(text):\n grp = re.compile(r'/\\*[^/]*\\*/').split(text)\n result = string.join(grp);\n grp = re.compile(r'//.*').split(result);\n result = string.join(grp);\n #result = string.join(result.split('\\n')) #remove the line break\n return(' '+result);", "def comment(self, *comments):\n for comment in comments:\n self._p('[*]', comment)", "def check_comments():\n\n # Get the id of the group track\n try:\n group_track = soundcloud.get('/me/tracks')[config.post_track_id]\n except 
HTTPError as e:\n if e.response.status_code == 404:\n logging.critical('Cannot find a track with id %d. Please, fix post_track_id in config.py', config.post_track_id)\n sys.exit(1)\n else:\n raise\n\n # Get the comment list for the group track\n comments = soundcloud.get('/tracks/%d/comments' % group_track.id)\n if not comments:\n logging.info('Nothing found...')\n return\n \n # Process each comment and delete it\n for comment in reversed(comments): \n logging.info('Processing a comment by user %d (%s): %s', comment.user_id, comment.user['username'], comment.body)\n response = None\n \n # Try to process the comment\n try:\n response = process_comment(comment)\n except HTTPError as e:\n if e.response.status_code == 429:\n logging.exception('Failed to repost track: too many requests:')\n return\n elif e.response.status_code // 100 == 4:\n logging.exception('Failed to process comment due to a client request error:')\n else:\n raise\n except Exception as e: # Program crash\n logging.exception('Failed to process comment:')\n else:\n if response:\n logging.info('The comment would have this response: %s', response) \n else:\n logging.info('Comment processed successfully')\n \n # Delete the processed comment\n try:\n soundcloud.delete('/tracks/' + str(group_track.id) + '/comments/' + str(comment.id))\n except HTTPError as e:\n if e.response.status_code == 404:\n logging.warning('Comment already deleted')\n else:\n raise\n\n if config.use_advanced_description and should_update_description:\n update_description()", "def comment_remover(text):\n\n def replacer(match):\n s = match.group(0)\n if s.startswith(\"/\"):\n return \"\"\n else:\n return s\n\n pattern = re.compile(\n r'//.*?$|/\\*.*?\\*/|\\'(?:\\\\.|[^\\\\\\'])*\\'|\"(?:\\\\.|[^\\\\\"])*\"',\n re.DOTALL | re.MULTILINE,\n )\n return re.sub(pattern, replacer, text)", "def remove_comments(ctx, files):\n # CD into Salt's repo root directory\n ctx.cd(CODE_DIR)\n\n # Unfortunately invoke does not support nargs.\n # We migth have been passed --files=\"foo.py bar.py\"\n # Turn that into a list of paths\n _files = []\n for path in files:\n if not path:\n continue\n _files.extend(path.split())\n if not _files:\n utils.exit_invoke(0)\n\n _files = [\n pathlib.Path(fname).resolve() for fname in _files if fname.endswith(\".py\")\n ]\n\n fixes = 0\n exitcode = 0\n comments_regex = re.compile(r\"^# ([I|i])mports? 
.*(([L|l])ibs?)?\\n\", re.MULTILINE)\n for path in _files:\n contents = path.read_text()\n fixed = comments_regex.sub(\"\", contents)\n if fixed == contents:\n continue\n fixes += 1\n exitcode = 1\n path.write_text(fixed)\n if exitcode:\n utils.error(\"Fixed {} files\", fixes)\n utils.exit_invoke(exitcode)", "def remove_c_style_comments(fd):\n ret = []\n comment_state = False\n for line in fd:\n while True:\n # seems we have nothing left\n if len(line) < 2:\n break\n # we're still inside a comment\n if comment_state:\n idx = line.find(\"*/\")\n if idx > -1:\n line = line[idx + 2:]\n comment_state = False\n continue\n # comment doesn't seem to end on this line\n break\n # we're not inside any comment\n else:\n idx = line.find(\"/*\")\n if idx > -1:\n line = line[idx + 2:]\n comment_state = True\n continue\n if \"//\" in line:\n line = line.split(\"//\", 1)[0]\n # only now we can actually do our job\n line = line.strip()\n if len(line) > 0:\n ret.append(line)\n break\n return ret", "def run_standardize_comments():\n df = pd.read_csv('politics_past_30_months_comments_cleaned.csv')\n df = df.drop(['Unnamed: 0'], axis=1)\n\n standardized_df = standardize_comments(df, 'body')\n print(standardized_df.head())\n print()\n print('original length:', len(df))\n print('standardized length:', len(standardized_df))\n print('removed', len(df) - len(standardized_df), 'comments')\n\n # THIS MIGHT BRING BACK THE UTF-8 ENCODING EMOJIS. MIGHT HAVE TO WRITE TO CSV IN ASCII\n standardized_df.to_csv('politics_past_30_months_comments_cleaned_standardized.csv')", "def issues_comments_undelete(self, mar, request):\n return self.aux_delete_comment(mar, request, False)", "def generic_run_standardize_comments(raw_input_file, clean_output_file):\n df = pd.read_csv(raw_input_file)\n df = df.drop(['Unnamed: 0'], axis=1)\n\n standardized_df = standardize_comments(df, 'body')\n print(standardized_df.head())\n print()\n print('original length:', len(df))\n print('standardized length:', len(standardized_df))\n print('removed', len(df) - len(standardized_df), 'comments')\n\n # THIS MIGHT BRING BACK THE UTF-8 ENCODING EMOJIS. 
MIGHT HAVE TO WRITE TO CSV IN ASCII\n standardized_df.to_csv(clean_output_file)", "def filter_comments(asm_utf):\n comments = []\n # removes nones\n a = filter(lambda x: x != None, asm_utf)\n # splits on comment token\n comments = [re.split(\";\", line) for line in a]\n # takes only those that have a comment token\n comments = list(filter(lambda x: len(x) > 1, comments))\n # strips the whitespace from those tokens\n comments = [line[1].strip() for line in comments]\n # removes the singleton chars\n comments = list(filter(lambda x: len(x) > 1, comments))\n # regex to remove section markers and extraneous tabs\n # left over by poor reading of files\n comments = [re.sub('([-=].*[-=]|\\t)', '', line) for line in comments]\n comments = list(filter(lambda x: x != '', comments))\n return comments", "def remove_comments(code):\n state = ReadState.NORMAL\n escape = False\n result = ''\n i = 0\n while i < (len(code)):\n c = code[i]\n if state == ReadState.NORMAL:\n if c == '\"':\n state = ReadState.STRING\n escape = False\n if i + 1 < len(code):\n if c + code[i + 1] == '//':\n state = ReadState.SINGLE_COMMENT\n i += 2\n continue\n if c + code[i + 1] == '/*':\n state = ReadState.MULTI_COMMENT\n i += 2\n continue\n result += c\n elif state == ReadState.STRING:\n if escape:\n escape = False\n else:\n if c == '\"':\n state = ReadState.NORMAL\n if c == '\\\\':\n escape = True\n result += c\n elif state == ReadState.SINGLE_COMMENT:\n if c == '\\n':\n state = ReadState.NORMAL\n result += c\n elif state == ReadState.MULTI_COMMENT:\n if i + 1 < len(code):\n if c + code[i + 1] == '*/':\n state = ReadState.NORMAL\n i += 1\n i += 1\n return result", "def process_comments(session, comments):\n for c in tqdm(comments, desc=\"Injecting comments into DB\"):\n db_comment = session.query(Comment).get(c['id'])\n if db_comment:\n db_comment.update(session, **c)\n else:\n Comment.create(session, **c)", "def removeHtmlComments(self, text):\n sb = []\n start = text.find(u'<!--')\n last = 0\n while start != -1:\n end = text.find(u'-->', start)\n if end == -1:\n break\n end += 3 \n \n spaceStart = max(0, start-1)\n spaceEnd = end\n while text[spaceStart] == u' ' and spaceStart > 0:\n spaceStart -= 1\n while text[spaceEnd] == u' ':\n spaceEnd += 1\n \n if text[spaceStart] == u'\\n' and text[spaceEnd] == u'\\n':\n sb.append(text[last:spaceStart])\n sb.append(u'\\n')\n last = spaceEnd+1\n else:\n sb.append(text[last:spaceStart+1])\n last = spaceEnd\n \n start = text.find(u'<!--', end)\n sb.append(text[last:])\n return u''.join(sb)", "def strip_comments(source):\n\n tokens = (\n 'PERCENT',\n 'BEGINCOMMENT', 'ENDCOMMENT',\n 'BACKSLASH',\n 'CHAR',\n 'BEGINVERBATIM', 'ENDVERBATIM',\n 'BEGINLISTING', 'ENDLISTING',\n 'NEWLINE',\n 'ESCPCT',\n )\n states = (\n ('linecomment', 'exclusive'),\n ('commentenv', 'exclusive'),\n ('verbatim', 'exclusive'),\n ('listing', 'exclusive'),\n )\n\n #Deal with escaped backslashes, so we don't think they're escaping %.\n def t_BACKSLASH(t):\n r\"\\\\\\\\\"\n return t\n\n #One-line comments\n def t_PERCENT(t):\n r\"\\%\"\n t.lexer.begin(\"linecomment\")\n return None\n\n #Escaped percent signs\n def t_ESCPCT(t):\n r\"\\\\\\%\"\n return t\n\n #Comment environment, as defined by verbatim package\n def t_BEGINCOMMENT(t):\n r\"\\\\begin\\s*{\\s*comment\\s*}\"\n t.lexer.begin(\"commentenv\")\n return None\n\n #Verbatim environment (different treatment of comments within)\n def t_BEGINVERBATIM(t):\n r\"\\\\begin\\s*{\\s*verbatim\\s*}\"\n t.lexer.begin(\"verbatim\")\n return t\n\n #Listings environment 
(different treatment of comments within)\n def t_BEGINLISTING(t):\n r\"\\\\begin\\s*{\\s*lstlisting\\s*}\"\n t.lexer.begin(\"listing\")\n return t\n\n #Any other character in initial state we leave alone\n def t_CHAR(t):\n r\".\"\n return t\n\n def t_NEWLINE(t):\n r\"\\n\"\n return t\n\n #End comment environment\n def t_commentenv_ENDCOMMENT(t):\n r\"\\\\end\\s*{\\s*comment\\s*}\"\n #Anything after \\end{comment} on a line is ignored!\n t.lexer.begin('linecomment')\n return None\n\n #Ignore comments of comment environment\n def t_commentenv_CHAR(t):\n r\".\"\n return None\n\n def t_commentenv_NEWLINE(t):\n r\"\\n\"\n return None\n\n #End of verbatim environment\n def t_verbatim_ENDVERBATIM(t):\n r\"\\\\end\\s*{\\s*verbatim\\s*}\"\n t.lexer.begin('INITIAL')\n return t\n\n #End of listing environment\n def t_listing_ENDLISTING(t):\n r\"\\\\end\\s*{\\s*lstlisting\\s*}\"\n t.lexer.begin('INITIAL')\n return t\n\n #Leave contents of verbatim/listing environment alone\n def t_verbatim_listing_CHAR(t):\n r\".\"\n return t\n\n def t_verbatim_listing_NEWLINE(t):\n r\"\\n\"\n return t\n\n\n #End a % comment when we get to a new line\n def t_linecomment_ENDCOMMENT(t):\n r\"\\n\"\n t.lexer.begin(\"INITIAL\")\n #Newline at the end of a line comment is stripped.\n return None\n\n #Ignore anything after a % on a line\n def t_linecomment_CHAR(t):\n r\".\"\n return None\n\n #Print errors\n def t_ANY_error(t):\n print(t.value, file=sys.stderr)\n\n lexer = ply.lex.lex()\n lexer.input(source)\n return u\"\".join([tok.value for tok in lexer])", "def __remove_c_comments(self, line):\n new_chars = []\n i = 0\n while i < len(line):\n blocks = self.__ingest_c_block_comments(line, i)\n if blocks > 0:\n i += blocks\n continue\n\n whitespace = self.__ingest_whitespace(line, i)\n if whitespace > 0:\n new_chars.append(' ')\n i += whitespace\n continue\n\n comm_start = self.__ingest_c_comment_start(line, i)\n if comm_start == -1:\n new_chars.append(' ')\n break\n elif comm_start > 0:\n new_chars.append(' ')\n i += comm_start\n\n if blocks + whitespace + comm_start == 0:\n new_chars.append(line[i])\n i += 1\n\n new_line = ''.join(new_chars)\n return new_line", "def clean(apps, schema_editor):\n for campaign in apps.get_model(\"peacecorps\", \"Campaign\").objects.all():\n campaign.description = clean_description(campaign.description)\n campaign.description = re.sub(r\"(?<!\\\\)\\n\", r\"\\\\n\",\n campaign.description)\n campaign.save()\n\n for project in apps.get_model(\"peacecorps\", \"Project\").objects.all():\n project.description = clean_description(project.description)\n project.description = re.sub(r\"(?<!\\\\)\\n\", r\"\\\\n\", project.description)\n project.save()", "def dedent(comment):\n commentLines = comment.split('\\n')\n if len(commentLines) < 2:\n cleaned = list(map(str.lstrip, commentLines))\n else:\n spc = 0\n for char in commentLines[1]:\n if char in string.whitespace:\n spc = spc + 1\n else:\n break\n #now check other lines\n cleaned = []\n for line in commentLines:\n for i in range(min(len(line),spc)):\n if line[0] in string.whitespace:\n line = line[1:]\n cleaned.append(line)\n return '\\n'.join(cleaned)", "def purify_comments(csv_file, keep_stops=False, POS=False, lemmatize=False, popular=0):\r\n\r\n df = pd.read_csv(csv_file)\r\n df = df.loc[df[\"author\"] != \"[deleted]\"] # trim out comments whose authors have deleted their accounts\r\n df = df.loc[df[\"score\"] != \"score\"] # this is an error in the code when building new csv_files from dask\r\n\r\n # extracts only the popular comments\r\n if 
popular > 0:\r\n df = df.loc[pd.to_numeric(df[\"score\"]) > popular]\r\n\r\n comments = df[\"body\"]\r\n del df # no need for this anymore, and it'll merely eat up memory\r\n\r\n nlp = en_core_web_sm.load()\r\n\r\n revised_comments = []\r\n for comment in comments.astype('unicode').values:\r\n comment = comment[1:] # remove the initial 'b' bytes-representation character\r\n comment = comment.encode(\"utf-8-sig\").decode(\"utf-8-sig\") # get rid of BOM character\r\n comment = comment.lower().replace(r\"\\n\", r\"\").replace(r'\"', r'')\r\n\r\n tokens = nlp(comment)\r\n\r\n # actual specification section\r\n for sent in tokens.sents:\r\n\r\n if POS: # conversion of comments to tokens/lemmas-POS tags\r\n if lemmatize:\r\n if keep_stops:\r\n revised_tokens = [\"{}-{}\".format(token.lemma_, token.tag_) for token in sent\r\n if not token.is_punct]\r\n else:\r\n revised_tokens = [\"{}-{}\".format(token.lemma_, token.tag_) for token in sent\r\n if not token.is_stop and not token.is_punct\r\n and not token.orth_ == \"n't\" and not token.orth_ == \"'s\"]\r\n else:\r\n if keep_stops:\r\n revised_tokens = [\"{}-{}\".format(token.orth_, token.tag_) for token in sent\r\n if not token.is_punct]\r\n else:\r\n revised_tokens = [\"{}-{}\".format(token.orth_, token.tag_) for token in sent\r\n if not token.is_stop and not token.is_punct\r\n and not token.orth_ == \"n't\" and not token.orth_ == \"'s\"]\r\n\r\n elif lemmatize: # just lemmatization\r\n if keep_stops:\r\n revised_tokens = [token.lemma_ for token in sent\r\n if not token.is_punct]\r\n else:\r\n revised_tokens = [token.lemma_ for token in sent\r\n if not token.is_stop and not token.is_punct\r\n and not token.orth_ == \"n't\" and not token.orth_ == \"'s\"]\r\n\r\n else: # nothing but removal of stop words (or not)\r\n if keep_stops:\r\n revised_tokens = [token.orth_ for token in sent\r\n if not token.is_punct]\r\n else:\r\n revised_tokens = [token.orth_ for token in sent\r\n if not token.is_stop and not token.is_punct\r\n and not token.orth_ == \"n't\" and not token.orth_ == \"'s\"]\r\n\r\n revised_comments.append(\" \".join(revised_tokens))\r\n\r\n return pd.Series(revised_comments)", "def tidy(self):\n \n replies = self.bot.user.me().comments.new(\n limit=100)\n \n for reply in replies:\n \n if reply.score < 0:\n \n with open(\"deleted.csv\", \"a\", encoding = \"UTF-8\") as removed:\n \n deleted = clevercsv.writer(removed)\n \n if removed.tell() == 0:\n deleted.writerow(\n [\"Comment\", \n \"Parent\", \n \"Thread\", \n \"Subreddit\", \n \"Time\", \n \"Score\"])\n \n deleted.writerow(\n [f\"{reply.body}\", \n f\"{reply.parent().body}\", \n f\"{reply.submission.title}\", \n f\"{reply.subreddit}\", \n f\"{pendulum.from_timestamp(reply.created_utc)}\", \n f\"{reply.score}\"])\n\n reply.delete()", "def clean_code(ls):\r\n ls = remove_white_space(ls)\r\n ls = remove_comments(ls)\r\n ls = remove_empty_lines(ls)\r\n\r\n return ls", "def delete_comment(self, uid: str):\n pass", "def handle(self, *args, **options):\r\n self.handle_free_comments()\r\n self.handle_comments()", "def _clean_message(comment):\n message = comment['message']\n # Remove comments with linked persons (they mostly contain only emojis)\n if 'message_tags' in comment:\n for tag in comment['message_tags']:\n if 'type' in tag and tag['type'] == 'user':\n message = message.replace(tag['name'], '')\n # Remove links\n message = re.sub(r'http\\S+', '', message)\n return message.strip()", "def clean(c):", "def comment_cleaner(text):\n text = re.sub(\"[^\\w\\s]\", \"\", text)\n text = \" 
\".join([x.lower() for x in text.split(' ') if x.lower() in corpus and x.lower() not in stopwords and len(x) > 1])\n if text == '':\n return np.nan\n return text", "def remove_comments(s):\n return \"\\n\".join(l for l in s.strip().split(\"\\n\") if not l.strip().startswith(\"#\"))", "def parseComments(data):\n global comments\n reviewBegins = '<div style=\"margin-left:0.5em;\">'\n reviewEnds = '<div style=\"padding-top: 10px; clear: both; width: 100%;\">'\n stars_line = 'margin-right:5px;'\n stars = re.compile('\\d+.\\d+ out of 5 stars')\n header_line = '<span style=\"vertical-align:middle;\"'\n helpful_line ='people found the following review helpful'\n helpful = re.compile('\\d+ of \\d+ people found the following review helpful')\n reviewText = '<span class=\"h3color tiny\">' # Actual review\n\n boundaries = commentsStartStopLineNmbr(data)\n for i in range(boundaries[0], boundaries[1] + 1):\n if reviewBegins in data[i]:\n curcomment = Comment()\n while reviewEnds not in data[i]:\n # Parse stars\n if stars_line in data[i]:\n stars_found = re.search(stars, data[i])\n if stars_found != None:\n curcomment.stars = stars_found.group()\n # Parse header\n elif header_line in data[i]:\n line = data[i]\n begin = line.find('<b>') + 3\n end = line.find('</b>')\n curcomment.header = line[begin : end]\n # Parse helpfulness\n elif helpful_line in data[i]:\n helpful_found = data[i].replace(\",\", \"\")\n helpful_found = re.search(helpful, helpful_found)\n if helpful_found != None:\n curcomment.helpful = helpful_found.group()\n # Parse body text\n elif reviewText in data[i]:\n i += 3\n if '<span class=\"small\"' in data[i]: # Yep, dirty trick :(\n i += 3\n data[i] = stripHtmlTags(data[i])\n curcomment.comment = re.sub(\"\\s+\", \" \", data[i])\n i += 1\n comments.append(curcomment.getonelinecomment())\n #comments.append(curcomment.__repr__())", "def _RemoveStaleComments(content: str) -> str:\n for match in STALE_GROUP_COMMENT_REGEX.findall(content):\n content = content.replace(match, '')\n\n return content", "def cleaning_pipeline(self, tree: dict):\n tree = self.remove_redundant(tree)\n tree[\"selftext\"] = self.clean_text(tree[\"selftext\"])\n tree[\"title\"] = self.clean_text(tree[\"title\"])\n\n empty_comments = []\n for id, comment in tree[\"comments\"].items():\n if \"body\" in comment and \"parent_id\" in comment:\n comment[\"body\"] = self.clean_text(comment[\"body\"])\n comment[\"parent_id\"] = comment[\"parent_id\"][3:]\n else:\n empty_comments.append(id)\n print(\"Skipping empty comment : \", id, tree[\"comments\"][id])\n\n tree = self.resolve_empty_comments(tree, empty_comments)\n\n tree = self.correct_replies(tree)\n\n return tree", "def remove_comments_and_docstrings(source):\n io_obj = StringIO(source)\n out = \"\"\n prev_toktype = tokenize.INDENT\n last_lineno = -1\n last_col = 0\n for tok in tokenize.generate_tokens(io_obj.readline):\n token_type = tok[0]\n token_string = tok[1]\n start_line, start_col = tok[2]\n end_line, end_col = tok[3]\n ltext = tok[4]\n # The following two conditionals preserve indentation.\n # This is necessary because we're not using tokenize.untokenize()\n # (because it spits out code with copious amounts of oddly-placed\n # whitespace).\n if start_line > last_lineno:\n last_col = 0\n if start_col > last_col:\n out += (\" \" * (start_col - last_col))\n # Remove comments:\n if token_type == tokenize.COMMENT:\n pass\n # This series of conditionals removes docstrings:\n elif token_type == tokenize.STRING:\n if prev_toktype != tokenize.INDENT:\n # This is likely a 
docstring; double-check we're not inside an operator:\n if prev_toktype != tokenize.NEWLINE:\n # Note regarding NEWLINE vs NL: The tokenize module\n # differentiates between newlines that start a new statement\n # and newlines inside of operators such as parens, brackes,\n # and curly braces. Newlines inside of operators are\n # NEWLINE and newlines that start new code are NL.\n # Catch whole-module docstrings:\n if start_col > 0:\n # Unlabelled indentation means we're inside an operator\n out += token_string\n # Note regarding the INDENT token: The tokenize module does\n # not label indentation inside of an operator (parens,\n # brackets, and curly braces) as actual indentation.\n # For example:\n # def foo():\n # \"The spaces before this docstring are tokenize.INDENT\"\n # test = [\n # \"The spaces before this string do not get a token\"\n # ]\n\n else:\n out += token_string\n prev_toktype = token_type\n last_col = end_col\n last_lineno = end_line\n out = '\\n'.join([line for line in out.splitlines() if line.strip()])\n return out", "def handle_free_comments(self):\r\n comments = FreeComment.objects.all()\r\n for c in comments:\r\n new = FreeThreadedComment(\r\n content_type = c.content_type,\r\n object_id = c.object_id,\r\n comment = c.comment,\r\n name = c.person_name,\r\n website = '',\r\n email = '',\r\n date_submitted = c.submit_date,\r\n date_modified = c.submit_date,\r\n date_approved = c.submit_date,\r\n is_public = c.is_public,\r\n ip_address = c.ip_address,\r\n is_approved = c.approved\r\n )\r\n new.save()", "def delete_comments(self, pk):\n try:\n comments = Comment.objects.all()\n # filter to only contain comments for specified article (article_id)\n comments = list(filter(lambda x: x.article_id == pk, comments))\n\n #loop through and delete all comments\n for comment in comments:\n comment.delete()\n\n except Comment.DoesNotExist:\n pass", "def _cleanup(self):\n os.system(\"rm -r %s/*\" %(self._snippet_index_dir))\n os.system(\"rm %s/*\" %(self._para_dir))\n os.system(\"rm %s/*\" %(self._temp_dir))\n os.system(\"rm %s/*\" %(self._snippet_result_dir))", "def make_unparsed_comments(self, replacements):\n if not hasattr(self, 'cleaned_html'):\n self.cleaned_html = self.clean_html()\n \n self.basic_comments = self.cleaned_html\n for d in self.replacements:\n if d['regex']:\n self.basic_comments = re.sub(d['pattern'], d['replacement'], self.basic_comments)\n else:\n self.basic_comments = self.basic_comments.replace(d['pattern'], d['replacement'])\n return self.basic_comments", "def test_remove_single_line_comments_noannotation():\n\n\tinput_ = \"\"\"line1\n\t\t\t\tline2 \n\t\t\t\t//comment\n\t\t\t\tline3 \"\"\"\n\n\texpect = \"\"\"line1\n\t\t\t\tline2 \n\t\t\t\t\n\t\t\t\tline3 \"\"\"\n\n\tassert aunit.remove_single_line_comments(input_) == expect", "def strip_comments(string, comment_symbols=frozenset(('#', '//'))): # pragma: no cover\n lines = string.splitlines()\n for k in range(len(lines)):\n for symbol in comment_symbols:\n lines[k] = strip_comment_line_with_symbol(lines[k], start=symbol)\n return '\\n'.join(lines)", "def make_parsed_comments(self):\n if not hasattr(self, 'separated_comments'):\n self.separated_comments = self.separate_comments()\n \n # build comments list of dictionaries, one dictionary for each article\n self.comments = []\n for self.separated_comment in self.separated_comments:\n try:\n comment_data = self.get_comment_data(self.separated_comment)\n self.comments.append(comment_data)\n except Exception as e:\n pass\n return self.comments", "def 
uncollapse_special_comments(self, wrapped_by_id):\n\n if self.uncollapse_all:\n dont_collapse = set(wrapped_by_id.keys())\n elif self.comment:\n dont_collapse = set([self.comment._id])\n parent_id = self.comment.parent_id\n while parent_id:\n dont_collapse.add(parent_id)\n if parent_id in wrapped_by_id:\n parent_id = wrapped_by_id[parent_id].parent_id\n else:\n parent_id = None\n elif self.children:\n dont_collapse = set(self.children)\n else:\n dont_collapse = set()\n\n # we only care about preventing collapse of wrapped comments\n dont_collapse &= set(wrapped_by_id.keys())\n\n maybe_collapse = set(wrapped_by_id.keys()) - dont_collapse\n\n for comment_id in maybe_collapse:\n comment = wrapped_by_id[comment_id]\n if comment.distinguished and comment.distinguished != \"no\":\n dont_collapse.add(comment_id)\n\n maybe_collapse -= dont_collapse\n\n # ensure all ancestors of dont_collapse comments are not collapsed\n if maybe_collapse:\n for comment_id in sorted(dont_collapse):\n # sort comments so we start with the most root level comments\n comment = wrapped_by_id[comment_id]\n parent_id = comment.parent_id\n\n counter = 0\n while (parent_id and\n parent_id not in dont_collapse and\n parent_id in wrapped_by_id and\n counter < g.max_comment_parent_walk):\n dont_collapse.add(parent_id)\n counter += 1\n\n comment = wrapped_by_id[parent_id]\n parent_id = comment.parent_id\n\n for comment_id in dont_collapse:\n comment = wrapped_by_id[comment_id]\n if comment.collapsed:\n comment.collapsed = False\n comment.hidden = False", "def comment_remover(string):\n def replacer(match):\n s = match.group(0)\n if s.startswith('/'):\n return \" \" # note: a space and not an empty string\n else:\n return s\n pattern = re.compile(\n r'//.*?$|/\\*.*?\\*/|\\'(?:\\\\.|[^\\\\\\'])*\\'|\"(?:\\\\.|[^\\\\\"])*\"',\n re.DOTALL | re.MULTILINE\n )\n return re.sub(pattern, replacer, string)", "def get_comments(self,comments):\n all_comments = []\n for comment in comments:\n try :\n all_comments.append({\n 'comment':comment['data']['body'],\n 'score':comment['data']['score']\n })\n except: pass\n return all_comments", "def strip_comments(string, comment_symbols=frozenset(('#', '//'))):\n lines = string.splitlines()\n for k in range(len(lines)):\n for symbol in comment_symbols:\n lines[k] = strip_comment_line_with_symbol(lines[k], start=symbol)\n return '\\n'.join(lines)", "def handle_comments(self):\r\n comments = Comment.objects.all()\r\n for c in comments:\r\n new = ThreadedComment(\r\n content_type = c.content_type,\r\n object_id = c.object_id,\r\n comment = c.comment,\r\n user = c.user,\r\n date_submitted = c.submit_date,\r\n date_modified = c.submit_date,\r\n date_approved = c.submit_date,\r\n is_public = c.is_public,\r\n ip_address = c.ip_address,\r\n is_approved = not c.is_removed\r\n )\r\n new.save()", "def unMarkupCommentsAndStrings(self, content):\n\n def replaceMarkups(match):\n groupdict = match.groupdict()\n if groupdict[\"str\"] is not None:\n return self.strings[int(match.group(\"str\"))]\n elif groupdict[\"comment\"] is not None:\n return self.comments[int(match.group(\"comment\"))]\n else:\n assert False\n\n unMarkedup = markups.sub(replaceMarkups, content)\n\n return unMarkedup", "def test_print_comments():\n flat_comments, tree_comments = get_comments_from_submission_id('jrjn70')\n print(len(flat_comments))\n print(len(tree_comments))\n\n print('flat comments')\n for c in flat_comments[0:5]:\n comment_instance = REDDIT.comment(c)\n print(comment_instance.body)\n\n print()\n print('tree comments')\n for c 
in tree_comments[0:5]:\n comment_instance = REDDIT.comment(c)\n print(comment_instance.body)", "def sanitize_comment(comment):\n\n if hasattr(settings, \"BLEACH_ALLOWED_TAGS\"):\n allowed_tags = settings.BLEACH_ALLOWED_TAGS\n else:\n allowed_tags = bleach.sanitizer.ALLOWED_TAGS\n\n return bleach.clean(comment, tags=allowed_tags, strip=True)", "def clean_comment(pair):\n pair = [remove_newlines(i) for i in pair]\n pair = [i.strip() for i in pair]\n # Remove colons\n pair[0] = pair[0].replace(':', '')\n # Remove excess whitespace\n whitespace_regex = re.compile('\\s\\s+')\n pair[1] = whitespace_regex.sub(' ', pair[1])\n return pair", "def calc_comments(self):\n for comment in self.pull_request.get_comments():\n self._users.add(comment.user.login)\n lowercase_body = comment.body.lower()\n if \"protm\" in lowercase_body:\n self.num_protm += 1\n self.num_comments += 1\n if comment.body is not None:\n self.len_comments += len(comment.body)\n for reaction in comment.get_reactions():\n self._users.add(reaction.user.login)\n self.comment_reactions += 1", "def parse_comments(submission):\n comments = []\n submission.replace_more_comments()\n for c in praw.helpers.flatten_tree(submission.comments):\n comment_dict = c.__dict__\n\n # NOTE: author is a special case (and must be present)\n author = c.author.name if hasattr(c.author, \"name\") else None\n if not author:\n continue\n\n comment = {\n \"submission_id\": submission.id,\n \"author\": author\n }\n del comment_dict[\"author\"] # no longer needed\n for k in _model_columns(Comment):\n if k in comment_dict:\n comment[k] = comment_dict[k]\n comments.append(comment)\n\n return comments", "def remove_comments(html):\n return re.sub(r\"<!--.*?-->\", \" \", html)", "def remove_paired_comments(self, program):\n regions = re.compile(\"(/\\*|\\*/)\").split(program)\n depth = 0\n output = []\n for i in range(len(regions)):\n region = regions[i]\n if region==\"/*\":\n depth+=1\n elif region==\"*/\":\n depth-=1\n else:\n #print(\" \"*depth, region.replace(\"\\n\", \"_\"))\n if depth==0:\n output.append(region)\n return \"\".join(output)", "def test_issue_delete_comment(self):\n pass", "def clean_inp(self):\n self.E_str = \"clean_inp\"\n\n # First remove any comment lines\n new_ltxt = []\n for line_num, line in enumerate(self.file_ltxt):\n edit_line, comment = gen_parse.rm_comment_from_line(line)\n edit_line = edit_line.rstrip()\n if edit_line:\n new_ltxt.append(edit_line)\n self.file_ltxt = new_ltxt[:]\n\n # Get line nums for error messages -before the inp cleaning\n self.line_nums = list(range(1, len(self.file_ltxt)+1))\n for line_num in self.line_nums:\n self.file_ltxt_orig[line_num] = self.file_ltxt[line_num - 1]\n self.line_num = 0\n\n self.clean_open_close_brace()", "def test_remove_single_line_comments_annotation():\n\n\tinput_ = \"\"\"line1\n\t\t\t\tline2 \n\t\t\t\t//comment\n\t\t\t\t//@Test //comment\n\t\t\t\t//comment\n\t\t\t\tline3 \"\"\"\n\n\texpect = \"\"\"line1\n\t\t\t\tline2 \n\t\t\t\t\n\t\t\t\t//@Test //comment\n\t\t\t\t\n\t\t\t\tline3 \"\"\"\n\n\tassert aunit.remove_single_line_comments(input_) == expect", "def filter_comments(\n content, filter_multiline_comment=True, keep_content_size=False\n):\n def _replace_comment(element):\n \"\"\"Replace matched *element* in content.\"\"\"\n replacement = \"\"\n matched = element.group()\n\n # Ensure that only the comment part is being replaced\n if not matched.startswith(\"/\"):\n replacement += matched[0]\n matched = matched[1:]\n\n count = matched.count(\"\\n\")\n\n # Add empty spaces with the size 
of the content if the size\n # must be kept.\n if keep_content_size:\n _buffer = len(matched) - count\n replacement += \" \" * _buffer + \"\\n\" * count\n\n # Otherwise simply keep the number of lines\n else:\n replacement += \"\\n\" * count\n\n return replacement\n\n content = _ONE_LINE_COMMENT_PATTERN.sub(_replace_comment, content)\n\n if filter_multiline_comment:\n content = _MULTI_LINES_COMMENT_PATTERN.sub(_replace_comment, content)\n\n return content", "def remove_html_comments(html): # Grunt uses comments to as build arguments, bad practice but still.\n log.debug(\"\"\"Removing all unnecessary HTML comments; Keep all containing:\n 'build:', 'endbuild', '<!--[if]>', '<![endif]-->' for Grunt/Grymt, IE.\"\"\")\n return re.compile('<!-- [^(build|endbuild)].*? -->', re.I).sub('', html)", "def clean_comment_id_database():\n\t\n\twith _conn.cursor() as cur:\n\t\tcur.execute('DELETE FROM comment_ids WHERE timestamp < (NOW() - INTERVAL \\'2 DAYS\\')')\n\t_conn.commit()", "def clean_docs(c):\n c.run(f\"rm -fr {DOCS_BUILD_DIR}\")", "def _parse_comments(reader):\n regex = r'\\s*(#|\\/{2}).*$'\n regex_inline = r'(:?(?:\\s)*([A-Za-z\\d\\.{}]*)|((?<=\\\").*\\\"),?)(?:\\s)*(((#|(\\/{2})).*)|)$'\n\n pipe = []\n for line in reader:\n if re.search(regex, line):\n if re.search(r'^' + regex, line, re.IGNORECASE): continue\n elif re.search(regex_inline, line):\n pipe.append(re.sub(regex_inline, r'\\1', line))\n else:\n pipe.append(line)\n return \"\\n\".join(pipe)", "def parse_comments_html(advertise: Dict[str, Any]) -> Optional[List[str]]:\n if \"comments_html\" in advertise.keys():\n\n filtred_comments: str = advertise[\"comments_html\"][200::]\n\n tmp: List[str] = re.split(\"[ \\n\\t]{2,}\", filtred_comments)\n if '' in tmp:\n tmp.remove('')\n\n # Breaking comments\n master: List[List[str]] = []\n tmp_vec: List[str] = []\n for line in tmp:\n\n if re.search(\"de \\d{4,}\", line): # matches 'de 2018' that signals the end of comment\n master.append(tmp_vec)\n tmp_vec = []\n else:\n tmp_vec.append(line)\n\n # Cleaning comments\n for comment in master:\n if \"...\" in comment:\n comment.remove(\"...\")\n if \"O usuário contratou o serviço em\" in comment:\n comment.remove(\"O usuário contratou o serviço em\")\n\n return [\" \".join(m) for m in master]", "def _dump_comment(comment: List[str]) -> List[str]:\n return [\"/**\"] + comment + [\"*/\"]", "def clean_up(self, graph):\n # Delete albums associated with place\n if len(self.albums) != 0:\n for album in self.albums:\n album.clean_up()\n album.delete(graph)", "def split_comment(cls, code):\r\n if '#' not in code: return code\r\n #: Remove comments only (leave quoted strings as they are)\r\n subf = lambda m: '' if m.group(0)[0]=='#' else m.group(0)\r\n return re.sub(cls.re_pytokens, subf, code)", "def clean(c):\n clean_docker(c)\n clean_repo(c)", "def bugs_from_comments(comments):\n retval = []\n m = re.search(r\"\\bb(?:ug(?:s)?)?\\s*((?:\\d+[, ]*)+)\", comments, re.I)\n if m:\n for m in re.findall(\"\\d+\", m.group(1)):\n retval.append(int(m))\n return retval", "def process_comment(self, data):\r\n if not self.is_suppress:\r\n return [data]", "def update_comments_in_parent(reference_doctype, reference_name, _comments):\n\tif (\n\t\tnot reference_doctype\n\t\tor not reference_name\n\t\tor frappe.db.get_value(\"DocType\", reference_doctype, \"issingle\")\n\t\tor is_virtual_doctype(reference_doctype)\n\t):\n\t\treturn\n\n\ttry:\n\t\t# use sql, so that we do not mess with the timestamp\n\t\tfrappe.db.sql(\n\t\t\tf\"\"\"update `tab{reference_doctype}` set 
`_comments`=%s where name=%s\"\"\", # nosec\n\t\t\t(json.dumps(_comments[-100:]), reference_name),\n\t\t)\n\n\texcept Exception as e:\n\t\tif frappe.db.is_column_missing(e) and getattr(frappe.local, \"request\", None):\n\t\t\tpass\n\t\telif frappe.db.is_data_too_long(e):\n\t\t\traise frappe.DataTooLongException\n\t\telse:\n\t\t\traise\n\telse:\n\t\tif frappe.flags.in_patch:\n\t\t\treturn\n\n\t\t# Clear route cache\n\t\tif route := frappe.get_cached_value(reference_doctype, reference_name, \"route\"):\n\t\t\tclear_cache(route)", "def tearDown(self):\n self.comment.post.delete()\n self.user.delete()\n\n print('Deleting comment...')\n self.comment.delete()\n print('Deleted comment.')\n print('Deleting test_file...')\n if os.path.exists('test_file.txt'):\n os.remove('test_file.txt')\n print('Deleted file')\n print()\n print()", "def check_comments(self, args):\n\n for submission in args.comments:\n if any(char.isalpha() for char in submission[1]) \\\n or self._illegal_chars.search(submission[1]) != None:\n raise ValueError", "def extract_comments(comments_file, output_filename=direc+\"/comments.txt\"):\r\n if not os.path.exists(output_filename.split(\"/\")[0]):\r\n os.makedirs(output_filename.split(\"/\")[0])\r\n\r\n print(\"Extracting comments from \" + comments_file + \"...\")\r\n comments_dict = {}\r\n with open(output_filename, \"w\", encoding=encoding) as f:\r\n current = 0\r\n for event, child in iterparse(comments_file, events=('start', 'end')):\r\n if current > SAMPLE_SIZE:\r\n break\r\n elif len(child.attrib) > 0 and event == \"start\":\r\n if child.attrib['PostId'] not in comments_dict:\r\n comments_dict[child.attrib['PostId']] = []\r\n comments_dict[child.attrib['PostId']].append(child.attrib['Id'])\r\n clean_comment = clean_markdown(child.attrib['Text'])\r\n line = child.attrib['Id'] + \"\\t\" + child.attrib['PostId'] + \"\\t\" + clean_comment + \"\\t\" + child.attrib['Score'] + \"\\n\"\r\n f.write(line)\r\n\r\n current += 1\r\n print_progress(current, SAMPLE_SIZE)\r\n print(\"\\nFinished extracting comments from \" + comments_file + \".\\n\")\r\n return comments_dict", "def _strip_comments(file_contents):\n lines_without_comments = []\n for line in file_contents:\n comment_position = line.find(COMMENT_INDICATOR)\n if comment_position != -1:\n lines_without_comments.append(line[:comment_position])\n else:\n lines_without_comments.append(line)\n return lines_without_comments", "def _clean_data(self, docs: []):\n print('Cleaning data...')\n preprocessed_data = []\n for doc in docs:\n if len(doc) <= constants.MIN_DOC_LENGTH:\n continue\n\n temp_doc = self._remove_urls(doc)\n temp_doc = self._remove_special_chars(temp_doc)\n temp_doc = self._transform_to_lowercase(temp_doc)\n temp_doc = self._remove_stopwords(temp_doc)\n\n preprocessed_data.append(temp_doc)\n\n return preprocessed_data", "def return_filtered_comments(submission):\n submission.comment_sort = COMMENT_SORT_BY\n submission.comment_limit = COMMENT_LIMIT\n filtered_comments = []\n for top_level_comment in submission.comments:\n if isinstance(top_level_comment, praw.models.MoreComments):\n continue\n # Here you can fetch data off the comment.\n comment = top_level_comment.body\n\n # ensure that the comment does not contain any words in blacklist\n # and also it is less than COMMENT_MAX_WORDS\n fail_test = 0\n lcomment = comment.lower()\n for badword in blacklist:\n if badword not in lcomment and len(comment) < COMMENT_MAX_WORDS:\n pass\n else:\n fail_test += 1\n if not fail_test:\n 
filtered_comments.append(replace_words(comment).capitalize())\n\n return filtered_comments", "def _parse_comment(i, doc):\n\n if doc[i].strip() != \"/**\":\n raise ParseFailure(i, \"Expected beginning of block comment\")\n\n e = i + 1\n while e < len(doc) and doc[e].strip() != \"*/\":\n e += 1\n\n return e + 1, [x.rstrip() for x in doc[i + 1: e]]", "def clean_comment(line):\n if line.startswith(\"#!\"):\n line = line[2:]\n else:\n line = line[1:]\n if line.startswith(\" \"):\n line = line[1:]\n if not line.endswith('\\n'):\n line += '\\n'\n return line", "def delete_comment(request, comment_id):\n raise NotImplementedError", "def clean_tinymce(input):\n result = input\n result = result.replace(u'<#document-fragment>', u'') # A strange bug that the NIVE client experiences but that we can't reproduce.\n result = result.replace(u'&lt;#document-fragment&gt;', u'') # A strange bug that the NIVE client experiences but that we can't reproduce.\n result = html_comments.sub(u'', result)\n return result", "def comment_content(c):\n content = str(c)[4:-3]\n return content.strip()", "def clear_all_file_breaks(self, filename):\n filename = self.canonic(filename)\n if filename not in self.breaks:\n return 'There are no breakpoints in %s' % filename\n for line in self.breaks[filename]:\n blist = Breakpoint.bplist[filename, line]\n for bp in blist:\n bp.deleteMe()\n del self.breaks[filename]\n return None", "def clear(self):\n self.feedback.clear()\n self.ignored_feedback.clear()\n self.suppressions.clear()\n self.suppressed_labels.clear()\n self.hiddens.clear()\n self._tool_data.clear()\n self.group = None\n self.groups.clear()\n self.group_names.clear()\n self.hooks.clear()\n self.submission = None\n self.result = None\n self.resolves.clear()\n self.format = Formatter()\n self.clear_overridden_feedback()", "def clean(text):\n lines = text.split('\\n')\n\n indx = range(len(lines))\n indx.reverse()\n for i in indx:\n temp = lines[i].strip()\n if temp == '' or temp.startswith('#'):\n del lines[i]\n else:\n lines[i] = temp\n\n return lines", "def decomment(string):\n pattern = r\"//.*|/\\*[\\s\\S]*?\\*/|(\\\"(\\\\.|[^\\\"])*\\\"|'(\\\\.|[^\\'])*')\"\n regex = re.compile(pattern)\n return regex.sub(lambda m: m.group(1), string)", "def set_comments(self, id, comments):\n logging.debug(f\"\"\"__set_comments {comments} for id {id}\"\"\")\n sql = f\"\"\"update {self.schemaRepo}.tablediff\n set comments = '{comments}' where id = {id}\"\"\"\n conn = self.connect(self.cxRepo)\n with conn:\n with conn.cursor() as curs:\n try:\n curs.execute(sql)\n except conn.DatabaseError as exc:\n error, = exc.args\n logging.error(f\"\"\"error executing {sql} : {error}\"\"\")" ]
[ "0.6856925", "0.64137363", "0.6254925", "0.6239691", "0.6239691", "0.62174577", "0.6139259", "0.5952024", "0.5854225", "0.5823274", "0.5761137", "0.5758094", "0.5758094", "0.5758094", "0.5758094", "0.5737992", "0.573623", "0.5734576", "0.5660582", "0.5650429", "0.56369406", "0.5579267", "0.555296", "0.55428225", "0.55419505", "0.55317813", "0.548162", "0.54573303", "0.544922", "0.54212207", "0.54129094", "0.5409146", "0.53908306", "0.538066", "0.5374327", "0.5326311", "0.5321923", "0.52880377", "0.5283441", "0.5281908", "0.5278282", "0.5275003", "0.52534354", "0.5249661", "0.5237412", "0.5235667", "0.52125597", "0.51908416", "0.51851064", "0.517332", "0.51663744", "0.5153387", "0.51094407", "0.5100672", "0.5092509", "0.50899136", "0.50781906", "0.5067474", "0.50623834", "0.5053816", "0.5050659", "0.50453013", "0.50432855", "0.5035868", "0.5033816", "0.50336874", "0.50276524", "0.502517", "0.50215375", "0.50097734", "0.50004727", "0.49876022", "0.49699566", "0.49648577", "0.49614653", "0.49515688", "0.49494383", "0.4946525", "0.4941896", "0.49356055", "0.49250302", "0.49229506", "0.4911633", "0.48917302", "0.48854655", "0.48657396", "0.48561904", "0.48537531", "0.48350522", "0.48350394", "0.4834142", "0.4820395", "0.48132116", "0.48122", "0.48121762", "0.4810026", "0.48053756", "0.47820306", "0.47803563", "0.47802964" ]
0.7856755
0
Parses the certificates specific to the United States.
def parse_certificates(soup):
    # removes the first item because it is not needed
    rating_tags = soup.find_all('a')[1:]
    rating_codes = [code.string for code in rating_tags]
    mpaa = []
    if rating_codes:
        for rating in rating_codes:
            # sorry international folks, only interested in the US ratings
            if rating.startswith('United States'):
                mpaa.append(rating)
    return mpaa
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_country_states(self):\n pass", "def test_addr_country_good_values(self):\n for input_val, output_val in self.known_values:\n self.line._parse_addr_country(input_val)\n self.assertEqual(output_val, self.line.addr_country)", "def test_county_limit_by_state__valid_arg(self):\n response_01 = self.client.get(self.url, {\"state\": \"01\"})\n self.assertEqual(response_01.status_code, 200)\n self.assertEqual(\n \"Autauga County\", response_01.data[\"data\"][0][\"county\"]\n )\n response_AL = self.client.get(self.url, {\"state\": \"AL\"})\n self.assertTrue(response_01.data[\"data\"] == response_AL.data[\"data\"])\n response_DC = self.client.get(self.url, {\"state\": \"DC\"})\n self.assertEqual(len(response_DC.data[\"data\"]), 1)\n response_VA = self.client.get(self.url, {\"state\": \"VA\"})\n self.assertEqual(len(response_VA.data[\"data\"]), 1)\n self.assertEqual(\n \"Accomack County\", response_VA.data[\"data\"][0][\"county\"]\n )", "def findCountryCode(self):\n RecordsWithCountry = []\n for state in pycountry.subdivisions:\n #print(state.name)\n for record in self.Records: \n if state.name == record.state:\n #print(state.country, record.state)\n r = RecordCountry(date=record.date,\n country=state.country.alpha_3,\n impressions=record.impressions,\n CTR=record.CTR)\n self.Records.remove(record)\n RecordsWithCountry.append(r)\n for record in self.Records: \n r = RecordCountry(date=record.date,\n country=\"XXX\",\n impressions=record.impressions,\n CTR=record.CTR)\n RecordsWithCountry.append(r)\n self.Records = RecordsWithCountry", "def read_and_load_email_domains():\n\twith open(\"world_universities_and_domains.json\") as json_file:\n\t\traw_json_text = json_file.read()\n\n\traw_universities_json = json.loads(raw_json_text)\n\tuniversity_lookup = {}\n\tfor university in raw_universities_json:\n\t\t# print(university)\n\t\t# input()\n\t\tfor domain in university.get(\"domains\"):\n\n\t\t\tuniversity_summary = {}\n\n\t\t\tif university.get(\"name\"):\n\t\t\t\tuniversity_summary[\"name\"] = university[\"name\"]\n\t\t\tif university.get(\"country\"):\n\t\t\t\tuniversity_summary[\"country\"] = university[\"country\"]\n\t\t\tif university.get(\"alpha_two_code\"):\n\t\t\t\tuniversity_summary[\"alpha_two_code\"] = university[\"alpha_two_code\"]\n\t\t\tif university.get(\"state-province\"):\n\t\t\t\tuniversity_summary[\"state-province\"] = university[\"state-province\"]\n\n\t\t\tuniversity_lookup[domain] = university_summary\n\n\treturn(university_lookup)", "def parsePemList(self, s):\r\n x509List = []\r\n bList = dePemList(s, \"CERTIFICATE\")\r\n for b in bList:\r\n x509 = X509()\r\n x509.parseBinary(b)\r\n x509List.append(x509)\r\n self.x509List = x509List", "def parse(self, s):\r\n\r\n bytes = dePem(s, \"CERTIFICATE\")\r\n self.parseBinary(bytes)\r\n return self", "def seperate_City_Data(data, us_state_abbrev):\n assert data is not None\n dictionary = dict(data)\n keys = dictionary.keys()\n tmp = list(keys)\n values = dictionary.values()\n res = []\n for elem in keys:\n state = elem[1].strip()\n city = elem[0].strip()\n# print(city)\n if state in us_state_abbrev:\n res.append(city)\n return res, list(values)", "def fill_cites(self):\n response = requests.get(\"https://restcountries.eu/rest/v2/all\")\n json_content = json.loads(response.text)\n i = 0\n for t in json_content:\n currency = t[\"currencies\"][0][\"code\"]\n pop = t[\"population\"]\n state_name = t[\"name\"]\n self.cities_from_api[t[\"capital\"].lower()] = [str(state_name), str(currency), str(pop)]", "def new_X509( # pylint: 
disable=invalid-name\n country_name: str = \"US\",\n state_or_province_name: str = \"New York\",\n locality: str = \"New York\",\n organization_name: str = \"mitm\",\n organization_unit_name: str = \"mitm\",\n common_name: str = \"mitm\",\n serial_number: Optional[int] = None,\n time_not_before: int = 0, # 0 means now.\n time_not_after: int = 1 * (365 * 24 * 60 * 60), # 1 year.\n) -> OpenSSL.crypto.X509:\n\n cert = OpenSSL.crypto.X509()\n cert.get_subject().C = country_name\n cert.get_subject().ST = state_or_province_name\n cert.get_subject().L = locality\n cert.get_subject().O = organization_name\n cert.get_subject().OU = organization_unit_name\n cert.get_subject().CN = common_name\n cert.set_serial_number(serial_number or random.randint(0, 2**64 - 1))\n cert.set_version(2)\n cert.gmtime_adj_notBefore(time_not_before)\n cert.gmtime_adj_notAfter(time_not_after)\n cert.set_issuer(cert.get_subject())\n return cert", "def _get_countries():\n print('-c, -C [country]\\\n \\n [country]=\\\n \\n AR\\t: Argentina\\\n \\n AT\\t: Austria\\\n \\n BR\\t: Brazil\\\n \\n BY\\t: Belarus\\\n \\n CA\\t: Canda\\\n \\n DE\\t: Germany\\\n \\n FR\\t: France\\\n \\n GB\\t: Great Britain\\\n \\n GH\\t: Ghana\\\n \\n HU\\t: Hungary\\\n \\n ID\\t: Indonesia\\\n \\n IL\\t: Israel\\\n \\n JP\\t: Japan\\\n \\n KR\\t: Korea\\\n \\n MA\\t: Morocco\\\n \\n MY\\t: Malaysia\\\n \\n NL\\t: Netherlands\\\n \\n NO\\t: Norway\\\n \\n OM\\t: Oman\\\n \\n PK\\t: Pakistan\\\n \\n RU\\t: Russia\\\n \\n SA\\t: Saudi Arabia\\\n \\n TH\\t: Thailand\\\n \\n TW\\t: Taiwan\\\n \\n UA\\t: Ukraine\\\n \\n US\\t: United States\\\n \\n UY\\t: Uruguay\\\n \\n VE\\t: Venezuela\\\n \\n VN\\t: Vietnam\\\n \\n .....\\n common usage: opengate -c JP')", "def test_valid_country_format(self, cred, country):\n resp = requests.get(verify_url.format('json', cred[0], cred[1],\n 'TestApp', test_number), params={'country': country})\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'application/json'\n assert resp.json()['status'] == '0'\n request_id = resp.json()['request_id']\n # terminate verification process\n assert 'Workflow terminated' in \\\n terminate_workflow(cred[0], cred[1], request_id).json()['error_text']", "def initialize(self):\r\n state_name = self.state\r\n\r\n state_name = state_name.lower()\r\n\r\n response = requests.get(\"https://cdn-api.co-vin.in/api/v2/admin/location/states\") \r\n\r\n if response.ok:\r\n\r\n df = pd.DataFrame(json.loads(response.text)[\"states\"]) \r\n\r\n state = process.extractOne(state_name, df[\"state_name\"].tolist()) # fuzzy match to get best state match \r\n\r\n self.state_id = df.loc[df.state_name == state[0],[\"state_id\"]].values[0][0] \r\n self.load_districts()", "def parse_citystate(self):\n \n index = self.index\n \n if self.words[index]['tag'] != Vocabulary.NAME:\n return None, None, 0, 0\n \n if self.words[index]['word'] == 'mt':\n city = \"mountain\"\n else:\n city = self.words[index]['word']\n start = index\n \n index += 1\n if index == self.length:\n return None, None, 0, 0\n \n if self.words[index]['word'] == ',':\n index += 1\n if index == self.length:\n return None, None, 0, 0\n elif self.words[index]['tag'] == Vocabulary.NAME: \n # Hack\n state, n = self.state_hack(index)\n if n > 0:\n index += n\n return city, state, index - start + 1, index\n \n #if self.words[index]['word'] == 'medical doctor':\n #return city, \"ISO3166-2:US-MD\", index - start + 1, index\n try:\n state = self._state_dict[self.words[index]['word']]\n return city, state, index - start + 1, index\n 
except:\n city += ' ' + self.words[index]['word']\n index += 1\n if index == self.length:\n return None, None, 0, 0\n \n if self.words[index]['word'] == ',':\n index += 1\n if index == self.length:\n return None, None, 0, 0\n\n # Hack\n state, n = self.state_hack(index)\n if n > 0:\n index += n\n if index == self.length: index -= 1 # Hack\n return city, state, index - start + 1, index\n \n if self.words[index]['tag'] not in [Vocabulary.NAME, Vocabulary.ACRONYM]:\n return None, None, 0, 0\n \n try:\n state = self._state_dict[self.words[index]['word']]\n return city, state, index - start + 1, index\n except: \n return None, None, 0, 0", "def _derive_country_IE(place):\n derived = []\n if _COUNTY_REGEX.search(place.name):\n stripped = _COUNTY_REGEX.sub(\"\", place.name.lower())\n derived += [\"co \" + stripped, \"county \" + stripped]\n\n #\n # Alternative name cases that aren't as straightforward as the above.\n #\n try:\n derived += {\n \"loch garman\": [\"co wexford\"],\n \"uíbh fhailí\": [\"co offaly\"],\n \"maigh eo\": [\"co mayo\"],\n \"an iarmhí\": [\"co westmeath\"],\n }[place.name.lower()]\n except KeyError:\n pass\n\n return [DerivedName(text, \"en\") for text in derived]", "def parse_filename(self, filename:str) -> bool:\n with open(filename, 'r') as xfh:\n data = xfh.read()\n\n xml = etree.fromstring(data)\n for node in xml.xpath('.//a:Detail', namespaces=self.NSMAP):\n cert = Certificates(node)\n self.certificate_records.append(cert)\n self.station_records.setdefault(cert.name, []).append(cert)\n\n return len(self.certificate_records) > 0", "def seperate_City_State_Data(data, us_state_abbrev):\n assert data is not None\n dictionary = dict(data)\n keys = dictionary.keys()\n tmp = list(keys)\n v = list(dictionary.values())\n values = []\n res = []\n for i in range(len(keys)):\n state = tmp[i][1].strip()\n city = tmp[i][0].strip()\n# print(city)\n if state in us_state_abbrev:\n res.append((state, city))\n values.append(v[i])\n return res, list(values)", "def parse_usa(text: str, state: str) -> tuple:\n pattern = re.compile(\n r'\\\"statistic-module--statistic--QKc9M\\\">.*?'\n r'\\\"statistic-module--title--MZHLl\\\">(.*?)<.*?'\n r'\\\"statistic-module--value--2qXQD.*?\\\">(.*?)<'\n )\n result = pattern.findall(text)\n final_result = [state.capitalize(), -1, -1, -1, -1]\n for i, res in enumerate(result):\n n = res[1].replace(',', '')\n if not n.isdigit():\n continue\n if res[0] == 'Total cases':\n final_result[1] = int(n)\n elif res[0] == 'Recovered':\n final_result[3] = int(n)\n elif res[0] == 'Deaths' or res[0] == 'Total deaths':\n final_result[4] = int(n)\n final_result = tuple(final_result)\n return final_result", "def test_addr_city_good_values(self):\n for input_val, output_val in self.known_values:\n self.line._parse_addr_city(input_val)\n self.assertEqual(output_val, self.line.addr_city)", "def europe_central_asia_countries():\r\n europe_central_asia_data = []\r\n years = []\r\n medians = []\r\n lst = []\r\n for idx in range(1960, 2016):\r\n years.append(idx)\r\n for idx in europe_central_asia:\r\n europe_central_asia_data.append(life_expectancy_graph(idx))\r\n y_idx = 0\r\n for idx in europe_central_asia_data:\r\n if idx != None and idx != {}:\r\n if (list(idx.keys())[y_idx]) == years[y_idx]:\r\n lst.append((list(idx.values())[y_idx]))\r\n lst = sorted(lst)\r\n medians.append(median(lst))\r\n return medians", "def get_covid_stats_by_county(state, county):\n url = \"https://corona.lmao.ninja/v2/jhucsse/counties/\" + county\n response = requests.get(url)\n data = 
response.json()\n counties = []\n for res in data:\n if res[\"province\"] == state:\n county1 = res[\"county\"]\n updatedAt = res[\"updatedAt\"]\n stats = res[\"stats\"]\n confirmed = stats[\"confirmed\"]\n deaths = stats[\"deaths\"]\n recovered = stats[\"recovered\"]\n counties.append(\n CountyStats(state, county1, updatedAt, confirmed, deaths, recovered)\n )\n # return CountyStats(state,county,updatedAt,confirmed,deaths,recovered)\n return counties", "def clean_embargoed_countries(self):\r\n embargoed_countries = self.cleaned_data[\"embargoed_countries\"]\r\n if not embargoed_countries:\r\n return ''\r\n\r\n error_countries = []\r\n\r\n for country in embargoed_countries.split(','):\r\n country = country.strip().upper()\r\n if not self._is_valid_code(country):\r\n error_countries.append(country)\r\n\r\n if error_countries:\r\n msg = 'COULD NOT PARSE COUNTRY CODE(S) FOR: {0}'.format(error_countries)\r\n msg += ' Please check the list of country codes and verify your entries.'\r\n raise forms.ValidationError(msg)\r\n\r\n return embargoed_countries", "def get_countries():\n call = build_call('attr', 'country')\n return request_data(call)", "def audit_city(osmfile):\r\n suburb_list_wrong = defaultdict(set)\r\n city_file = open(osmfile, encoding=\"utf8\")\r\n \r\n for event, elem in ET.iterparse(city_file, events=(\"start\",)):\r\n \r\n if elem.tag == \"node\" or elem.tag == \"way\":\r\n \r\n for tag in elem.iter(\"tag\"):\r\n \r\n if tag.attrib['k'] == 'addr:city':\r\n \r\n city = tag.attrib['v']\r\n # province = re.sub(\" \", \"\", tag.attrib['v'].strip())\r\n if city not in expected_suburb:\r\n \r\n suburb_list_wrong[city].add(city)\r\n \r\n city_file.close()\r\n return suburb_list_wrong", "def fetch_domain_certs(domain):\n url = BASE_URL.format(domain)\n result = requests.get(url)\n if result.status_code != 200:\n result.raise_for_status()\n return result.json()", "def _derive_country_MX(place):\n lname = place.name.lower()\n derived = []\n match = _PARENTHETICAL.search(lname)\n if match:\n derived.append(_PARENTHETICAL.sub(\"\", lname).strip())\n derived.append(match.group(1).strip())\n\n if _MX_COLONIA.search(place.name):\n derived.append(_MX_COLONIA.sub(\"col\", lname))\n\n if _MX_DELEG.search(place.name):\n derived.append(_MX_DELEG.sub(\"delegación\", lname))\n derived.append(_MX_DELEG.sub(\"del\", lname))\n derived.append(_MX_DELEG.sub(\"deleg\", lname))\n\n if _MX_CIUDAD.search(place.name):\n derived.append(_MX_CIUDAD.sub(\"cd\", lname))\n\n alternative_names = _MX_SUPPORT[\"alternative_names\"][\"es\"]\n try:\n derived += alternative_names[lname]\n except KeyError:\n pass\n\n return [DerivedName(text, \"es\") for text in derived]", "def east_asia_pacific_countries():\r\n east_asia_pacific_data = []\r\n years = []\r\n medians = []\r\n lst = []\r\n for idx in range(1960, 2016):\r\n years.append(idx)\r\n for idx in east_asia_pacific:\r\n east_asia_pacific_data.append(life_expectancy_graph(idx))\r\n y_idx = 0\r\n for idx in east_asia_pacific_data:\r\n if idx != None and idx != {}:\r\n if (list(idx.keys())[y_idx]) == years[y_idx]:\r\n lst.append((list(idx.values())[y_idx]))\r\n lst = sorted(lst)\r\n medians.append(median(lst))\r\n return medians", "def parse_china(text: str) -> list:\n pattern = re.compile(\n r'\\{\\\"provinceName\\\":\\\".*?\\\",'\n r'\\\"provinceShortName\\\":\\\"(.*?)\\\".*?'\n r'\\\"confirmedCount\\\":(.*?),.*?'\n r'\\\"suspectedCount\":(.*?),'\n r'\\\"curedCount\\\":(.*?),'\n r'\\\"deadCount\\\":(.*?),'\n )\n result = pattern.findall(text)\n for i, res in 
enumerate(result):\n res = list(res)\n res[0] = to_pinyin(res[0]).capitalize()\n result[i] = tuple(res)\n return result", "def test_parser():\n test_list = [\n \"ACIBOE\",\n \"AZOHEC\",\n \"BADJAU\",\n \"ACOLIP\",\n \"QAGWIG\",\n \"GOCBAD\",\n \"BUVYIB01\",\n \"GIRNIH\",\n \"FURVEU\",\n \"GAHJUW\",\n ]\n\n expected = {\n \"ACIBOE\": {\"Zn\": [np.nan]},\n \"AZOHEC\": {\"Zn\": [2]},\n \"BADJAU\": {\"Sc\": [np.nan]},\n \"ACOLIP\": {\"Zn\": [2]},\n \"QAGWIG\": {\"Fe\": [2]},\n \"GOCBAD\": {\"Cu\": [2]},\n \"BUVYIB01\": {\"Fe\": [2]},\n \"GIRNIH\": {\"Cd\": [2]},\n \"FURVEU\": {\"Fe\": [2]},\n \"GAHJUW\": {\"Fe\": [0]},\n }\n\n getoxstates = GetOxStatesCSD(test_list)\n result = getoxstates.run_parsing()\n\n assert expected == result", "def get_domains_from_csr(csr_file):\n logging.info(\"Parsing CSR...\")\n proc = subprocess.Popen([\"openssl\", \"req\", \"-in\", csr_file, \"-noout\", \"-text\"],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n out, err = proc.communicate()\n if proc.returncode != 0:\n raise IOError(\"Error loading {0}: {1}\".format(csr, err))\n return ACMEClient._parse_domains_from_openssl_output(out.decode('utf8'))", "def json_parsing():\n with open('countries.json') as f:\n countries = json.load(f)\n\n return countries", "def populate(self):\n\n NUM_COUNTRIES = 2 # random.randint(1, 4)\n\n # find a suitable hex\n with Timer(\"Creating initial data\", debug=self.debug):\n\n for i in range(NUM_COUNTRIES):\n country, provinces, pops = create_country(self, self.map)\n country.determine_tax_policy()\n self.countries.append(country)", "def get_domains_from_cert(cert_file):\n proc = subprocess.Popen([\"openssl\", \"x509\", \"-in\", cert_file, \"-noout\", \"-text\"],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n out, err = proc.communicate()\n if proc.returncode != 0:\n raise IOError(\"Error loading {0}: {1}\".format(cert_file, err))\n return ACMEClient._parse_domains_from_openssl_output(out.decode('utf8'))", "def __build_county_list(self):\n for entry in self._data.values():\n if entry['type'] == 40:\n self._counties[entry['county']] = entry['name']", "def _parse_certificate(cls, response):\n links = _parse_header_links(response)\n try:\n cert_chain_uri = links[u'up'][u'url']\n except KeyError:\n cert_chain_uri = None\n return (\n response.content()\n .addCallback(\n lambda body: messages.CertificateResource(\n uri=cls._maybe_location(response),\n cert_chain_uri=cert_chain_uri,\n body=body))\n )", "def all_capital_city(state):\n\n\tfor state_key in states:\n\t\tif state_key.lower() == state.lower():\n\t\t\tif capital_cities.get(states[state_key]):\n\t\t\t\treturn [capital_cities[states[state_key]], state_key]\n\treturn None", "def all_state(cap):\n\n\tfor abbrev, capital in capital_cities.items():\n\t\tif cap.lower() == capital.lower():\n\t\t\tfor state, abbreviation in states.items():\n\t\t\t\tif abbreviation.lower() == abbrev.lower():\n\t\t\t\t\treturn [capital, state]\n\treturn None", "def loadState(fileid):\n dinf = {}\n root = etree.Element(\"state\")\n text = None\n statename = \"\"\n statefile = \"\"\n # TODO: put this in a global variable, and make a function to populate it from the DTD.\n tags = [\"name\",\"start\",\"scue\",\"end\",\"ecue\",\"aspects\",\"update\"]\n for tag in tags:\n dinf[tag] = [\"\",False]\n dinf['cities'] = {}\n dinf['m'] = {}\n dinf['m']['events'] = {}\n dinf['aspects'] = {}\n if not idExists(fileid):\n status.push(0,\"new state created... 
'%s'\" % fileid)\n return dinf\n statefile = fileid\n fn = os.path.join(config['realmdir'],\"%s.xml\" % fileid)\n status.push(0,\"loading state from XML... '%s'\" % fn)\n try:\n with codecs.open(fn,'rU','utf-8') as f:\n tree = etree.parse(f)\n f.close()\n root = tree.getroot()\n except IOError as e:\n print \"c: Could not open configuration file: %s\" % e\n\n ir = 0\n for i in range(len(root)):\n if root[i].tag is not None:\n if root[i].tag == \"city\":\n if len(root[i]) > 0:\n node = \"\"\n node = root[i].find(\"file\")\n if node.text:\n node = node.text.strip()\n node = common.validateFileid(node)\n dinf['cities'][node] = {}\n for j in root[i]:\n if j.tag and j.text and j.tag != \"file\":\n dinf['cities'][node][j.tag] = [j.text.strip(),False]\n if config['debug'] > 3: printPretty(dinf['cities'][node])\n else:\n if config['debug'] > 0:\n print \"Invalid city tag:\"\n for c in root[i]:\n print c.tag + ': ' + c.text,\n else: # no relat length\n if config['debug'] > 0: print \"Empty city tag.\"\n elif root[i].tag == \"events\":\n if len(root[i]) > 0:\n nodes = root[i]\n for node in nodes:\n k = str(len(dinf['m']['events']))\n dinf['m']['events'][k] = {}\n for j in node:\n if j.tag and j.text:\n dinf['m']['events'][k][j.tag] = [j.text.strip(),False]\n else:\n if config['debug'] > 0:\n print \"Invalid milestone tag:\"\n for c in node:\n print c.tag + ': ' + c.text,\n if config['debug'] > 3: printPretty(dinf['m']['events'])\n else: # no relat length\n if config['debug'] > 0: print \"Empty milestone tag.\"\n\n elif root[i].tag == \"aspects\":\n if len(root[i]) > 0:\n nodes = root[i]\n for node in nodes:\n k = str(len(dinf['aspects']))\n dinf['aspects'][k] = {}\n if node.tag and node.text:\n dinf['aspects'][k] = [node.text.strip(),False]\n else:\n if config['debug'] > 0:\n print \"Invalid aspects tag:\"\n print node.tag + ': ' + node.text,\n else: # no aspects length\n if config['debug'] > 0: print \"Empty aspects tag.\"\n\n elif root[i].text is not None:\n dinf[root[i].tag] = [root[i].text.strip(), False]\n if config['debug'] > 2: print str(i) + \" \",\n statename = dinf.get(\"name\",\"\")\n if len(statename) > 1: pushLoc(statefile,statename)\n return dinf", "def parse_country(text: str) -> list:\n pattern = re.compile(\n r',\\\"provinceName\\\":\\\".*?\\\",.*?'\n r'\\\"confirmedCount\\\":(.*?),.*?'\n r'\\\"suspectedCount\":(.*?),'\n r'\\\"curedCount\\\":(.*?),'\n r'\\\"deadCount\\\":(.*?),.*?'\n r'\\\"countryFullName\\\":\\\"(.*?)\\\",'\n )\n result = pattern.findall(text)\n for i, res in enumerate(result):\n res = list(res)\n n = res.pop()\n n = re.sub(r'\\xa0', ' ', n)\n res.insert(0, n)\n result[i] = tuple(res)\n return result", "def get_countries_dict():\r\n competitions_page_soup = BeautifulSoup(requests.get(\r\n SOCCER_URL + \"/competitions/\").text, 'html.parser')\r\n\r\n competitions_page_soup = competitions_page_soup.find('ul', class_='areas')\r\n countries_soup = competitions_page_soup.find_all('div', class_=\"row\")\r\n\r\n countries_dict = {}\r\n for country_soup in countries_soup:\r\n countries_dict[country_soup.a.text.strip().title()] = SOCCER_URL + country_soup.a[\"href\"]\r\n\r\n return countries_dict", "def test_get_all_certificates(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.get(\n '/api/v1/certificates', content_type='application/json',\n headers=self.get_token())\n result = json.loads(response.data.decode())\n 
self.assertEqual(result['message'],\n 'Certificates retrieved successfully')\n assert response.status_code == 200", "def handle_cacert(self):\n\n file = open(\"./certs/cacert.p7b\", \"r\")\n ca_certs = file.read()\n\n self.set_est_rsp_header(len(ca_certs))\n\n self.wfile.write(ca_certs.encode('utf-8'))", "def getStateAbbreviations():\n state_abbrev = {\n \"01\": \"AL\",\n \"02\": \"AK\",\n \"04\": \"AZ\",\n \"05\": \"AR\",\n \"06\": \"CA\",\n \"08\": \"CO\",\n \"09\": \"CT\",\n \"10\": \"DE\",\n \"11\": \"DC\",\n \"12\": \"FL\",\n \"13\": \"GA\",\n \"15\": \"HI\",\n \"16\": \"ID\",\n \"17\": \"IL\",\n \"18\": \"IN\",\n \"19\": \"IA\",\n \"20\": \"KS\",\n \"21\": \"KY\",\n \"22\": \"LA\",\n \"23\": \"ME\",\n \"24\": \"MD\",\n \"25\": \"MA\",\n \"26\": \"MI\",\n \"27\": \"MN\",\n \"28\": \"MS\",\n \"29\": \"MO\",\n \"30\": \"MT\",\n \"31\": \"NE\",\n \"32\": \"NV\",\n \"33\": \"NH\",\n \"34\": \"NJ\",\n \"35\": \"NM\",\n \"36\": \"NY\",\n \"37\": \"NC\",\n \"38\": \"ND\",\n \"39\": \"OH\",\n \"40\": \"OK\",\n \"41\": \"OR\",\n \"42\": \"PA\",\n \"44\": \"RI\",\n \"45\": \"SC\",\n \"46\": \"SD\",\n \"47\": \"TN\",\n \"48\": \"TX\",\n \"49\": \"UT\",\n \"50\": \"VT\",\n \"51\": \"VA\",\n \"53\": \"WA\",\n \"54\": \"WV\",\n \"55\": \"WI\",\n \"56\": \"WY\",\n \"72\": \"PR\"\n }\n return state_abbrev", "def parse_campus(department_data, campus):\n\n def extract_data(department_link):\n \"\"\"\n Extracts all course information from a UW Department\n\n @params:\n\n 'department_link': The url to the UW Department to get course\n information from\n\n Returns\n\n A list of lists. Each nested list represents one course section with the\n following values (in this order):\n\n 'Campus', 'Department Name', 'Course Number', 'Course Name', 'Credits',\n 'Areas of Knowledge', 'Quarters Offered', 'Offered with', \n 'Prerequisites', 'Co-Requisites', 'Description'\n \"\"\"\n # Update the progress bar\n if show_progress:\n progress_bar.update()\n\n # Regular expressions for searching course descriptions stored in local variables\n # for better performance\n local_course_re = course_re\n local_course_name_re = course_name_re\n local_credits_re = credits_re\n local_credits_num_re = credits_num_re\n local_offered_jointly_re = offered_jointly_re\n local_CAMPUSES = CAMPUSES\n\n # Methods used in extracting data from course descriptions found in the local scope\n # are stored in local variables for better performance\n local_complete_description = complete_description\n local_get_offered = get_offered\n local_get_requisites = get_requisites\n\n # All the courses in the department\n courses = []\n dep_file = department_link.get('href')\n\n # If the user entered a dict as the 'campuses' parameter, departments\n # are checked here\n try:\n # The String in the conditional is the abbreviated Department Name i.e EE\n # for Electrical Engineering\n if normalize('NFKD', department_link.text).rsplit('(', 1) \\\n [-1].replace(' ', '')[:-1] not in campuses[campus]:\n return None\n except TypeError:\n pass\n \n # The only links that are used for finding departments are those\n # of the format [a-z]+.html\n if '/' not in dep_file and dep_file.endswith('.html') \\\n and dep_file not in parsed_departments:\n parsed_departments.add(dep_file)\n department = BeautifulSoup(requests.get( \\\n f'{local_CAMPUSES[campus]}{dep_file}').text, features='lxml')\n for course in department.find_all('a'):\n course_ID = course.get('name') \n if course_ID:\n course_ID = course_ID.upper()\n course_title = course.find('b').text\n # The Course 
Description\n description = course.get_text().replace(course_title, '', 1) \n instructors = course.find('i')\n if instructors:\n description = description.replace(str(instructors.get_text()), '', 1)\n del instructors\n course_text = local_complete_description( \\\n description.rsplit('View course details in MyPlan', 1)[0])\n # Course Number i.e 351\n course_number = re.sub(local_course_re, '', course_ID)\n match_name = re.search(local_course_name_re, course_title)\n match_credit_num = re.search(local_credits_num_re, course_title)\n match_credit_types = re.findall(local_credits_re, course_title)\n # Jointly offered course with the given course\n if 'jointly with' in course_text: \n offered_jointly = course_text.rsplit('jointly with ', 1)[-1].rsplit(';', 1)[0] \n offered_jointly = ','.join(re.findall( \\\n local_offered_jointly_re, offered_jointly)).replace(' ', '') \n else:\n offered_jointly = ''\n courses.append(\n # Campus, Department Name and Course Number\n [campus, course_ID[:-3], course_number, \n # Course Name\n match_name.group(0).split(course_number, 1)[-1].strip() \\\n if match_name else '',\n # Number of credits for the course\n match_credit_num.group(0)[1:-1] \\\n if match_credit_num else '', \n # Course Credit Types (I&S, DIV, NW, VLPA, QSR, C)\n ','.join([list(filter(('').__ne__, x))[0] for x in match_credit_types]) \\\n if match_credit_types else '', \n local_get_offered(course_text),\n offered_jointly, local_get_requisites(course_text, 'Prerequisite:'), \n local_get_requisites(course_text, 'Co-requisite'), course_text]\n )\n return courses\n\n # In the course catalog website, several department links appear multiple times\n # To prevent parsing the same department more than once, parsed departments\n # are tracked in 'parsed_departments'\n parsed_departments = set()\n local_extract_data = extract_data\n department_data = BeautifulSoup(requests.get(department_data).text, features='lxml')\n\n campus_catalog = []\n # Extract data from department websites in parallel to reduce idle time\n with cf.ThreadPoolExecutor() as executor:\n results = [executor.submit(local_extract_data, department_link) \n for department_link in department_data.find_all('a')]\n for result in cf.as_completed(results):\n dptmnt = result.result()\n if dptmnt:\n campus_catalog.append(dptmnt)\n\n # DataFrame with all courses in the campus\n return pd.DataFrame(\n [course for department in campus_catalog for course in department], \n columns=COLUMN_NAMES\n )", "def country(alpha_2_code: str) -> None:", "def GetValidHostsForCert(cert):\r\n if 'subjectAltName' in cert:\r\n return [x[1] for x in cert['subjectAltName'] if x[0].lower() == 'dns']\r\n else:\r\n return [x[0][1] for x in cert['subject']\r\n if x[0][0].lower() == 'commonname']", "def test_addr_country_bad_values(self):\n for val in self.bad_values:\n self.assertRaises(line_format_errors.FieldParseError,\n lambda: self.line._parse_addr_country(val))", "def getCountry(soup):\n title_details = self.getAdditionalDetails(soup)\n pattern = r'country_of_origin.*?>(.*?)<'\n country = re.findall(pattern, str(title_details))\n return country", "def test_city_country(self):\n dublin_ireland = city_country('dublin', 'ireland')\n self.assertEqual(dublin_ireland, 'Dublin, Ireland')", "def loadCity(fileid):\n dinf = {}\n root = etree.Element(\"city\")\n text = None\n statename = \"\"\n statefile = \"\"\n cityname = \"\"\n dinf['m'] = {}\n dinf['m']['events'] = {}\n # TODO: put this in a global variable, and make a function to populate it from the DTD.\n tags = 
[\"name\",\"state\",\"statefile\",\"start\",\"scue\",\"end\",\"ecue\",\"place\",\"aspects\"]\n for tag in tags:\n dinf[tag] = [\"\",False]\n dinf['aspects'] = {}\n if not dinf.get(\"places\"): dinf['places'] = {}\n if not idExists(fileid):\n status.push(0,\"new city created... '%s'\" % fileid)\n return dinf\n fn = os.path.join(config['realmdir'],\"%s.xml\" % fileid)\n status.push(0,\"loading city from XML... '%s'\" % fn)\n try:\n with codecs.open(fn,'rU','utf-8') as f:\n tree = etree.parse(f)\n f.close()\n root = tree.getroot()\n except IOError as e:\n print \"c: Could not open configuration file: %s\" % e\n\n ir = 0\n for i in range(len(root)):\n if root[i].tag is not None:\n if root[i].tag == \"place\":\n if len(root[i]) > 0:\n node = \"\"\n node = root[i].find(\"file\")\n if node.text:\n node = node.text.strip()\n node = common.validateFileid(node)\n dinf['places'][node] = {}\n for j in root[i]:\n if j.tag and j.text and j.tag != \"file\":\n dinf['places'][node][j.tag] = [j.text.strip(),False]\n if config['debug'] > 3: print dinf['places'][node]\n else:\n if config['debug'] > 0:\n print \"Invalid place tag:\"\n for c in root[i]:\n print c.tag + ': ' + c.text,\n else: # no relat length\n if config['debug'] > 0: print \"Empty place tag.\"\n elif root[i].tag == \"events\":\n if len(root[i]) > 0:\n nodes = root[i]\n for node in nodes:\n k = str(len(dinf['m']['events']))\n dinf['m']['events'][k] = {}\n for j in node:\n if j.tag and j.text:\n dinf['m']['events'][k][j.tag] = [j.text.strip(),False]\n else:\n if config['debug'] > 0:\n print \"Invalid milestone tag:\"\n for c in node:\n print c.tag + ': ' + c.text,\n if config['debug'] > 3: printPretty(dinf['m']['events'])\n else: # no relat length\n if config['debug'] > 0: print \"Empty milestone tag.\"\n elif root[i].tag == \"aspects\":\n if len(root[i]) > 0:\n nodes = root[i]\n for node in nodes:\n k = str(len(dinf['aspects']))\n dinf['aspects'][k] = {}\n if node.tag and node.text:\n dinf['aspects'][k] = [node.text.strip(),False]\n else:\n if config['debug'] > 0:\n print \"Invalid aspects tag:\"\n print node.tag + ': ' + node.text,\n else: # no aspects length\n if config['debug'] > 0: print \"Empty aspects tag.\"\n elif root[i].text is not None:\n if root[i].tag == \"statefile\":\n statefile = root[i].text.strip()\n statefile = common.validateFileid(statefile)\n if statefile is None: statefile = \"\"\n elif root[i].tag == \"state\":\n statename = root[i].text.strip()\n elif root[i].tag == \"name\":\n cityname = root[i].text.strip()\n dinf[root[i].tag] = [root[i].text.strip(), False]\n if config['debug'] > 2: print str(i) + \" \",\n if len(statefile) > 0: pushLoc(statefile,statename,fileid,cityname)\n return dinf", "def loads(s):\n return Offices(\n **{\n room: set(Gepettist(*m) for m in members)\n for room, members in loads(s).items()\n }\n )", "def load_verify_locations(self, cafile: Optional[Any] = ..., dummy: Optional[Any] = ...):\n ...", "def test_city_country(self):\n\t\tformatted_address = city_country('santiago', 'chile')\n\t\tself.assertEqual(formatted_address, 'Santiago, Chile')", "def seperate_Loc_Data(data, us_state_abbrev):\n assert data is not None\n dictionary = dict(data)\n keys = dictionary.keys()\n tmp = list(keys)\n values = dictionary.values()\n res = []\n for elem in keys:\n state = elem[1].strip()\n if state in us_state_abbrev:\n res.append(us_state_abbrev[state])\n return res, list(values)", "def all_citites(state_id):\n state = storage.get(\"State\", state_id)\n citites = []\n if state is not None:\n for city in 
state.cities:\n citites.append(city.to_dict())\n return jsonify(citites)\n abort(404)", "def offices_ldap():\n conn = Connection(\"ldap.laas.fr\", auto_bind=True)\n conn.search(\n \"dc=laas,dc=fr\",\n \"(laas-mainGroup=gepetto)\",\n attributes=[\"sn\", \"givenName\", \"roomNumber\", \"st\"],\n )\n offices = Offices()\n for entry in conn.entries:\n room, gn, sn, st = (\n str(entry.roomNumber),\n str(entry.givenName),\n str(entry.sn),\n str(entry.st),\n )\n if (\n st not in [\"JAMAIS\", \"NON-PERTINENT\"]\n and date(*(int(i) for i in reversed(st.split(\"/\")))) < date.today()\n ):\n continue # filter out alumni\n if room == \"[]\":\n continue # filter out the Sans-Bureaux-Fixes\n offices[room].add(Gepettist(sn, gn))\n return offices", "def load_districts(self):\r\n\r\n response = requests.get(\"https://cdn-api.co-vin.in/api/v2/admin/location/districts/{}\".format(self.state_id))\r\n\r\n if response.ok:\r\n\r\n df = pd.DataFrame(json.loads(response.text)[\"districts\"])\r\n self.districts_df = df", "def USCode(self, short):\n states = {\n 'AK': 'Alaska',\n 'AL': 'Alabama',\n 'AR': 'Arkansas',\n 'AS': 'American Samoa',\n 'AZ': 'Arizona',\n 'CA': 'California',\n 'CO': 'Colorado',\n 'CT': 'Connecticut',\n 'DC': 'District of Columbia',\n 'DE': 'Delaware',\n 'FL': 'Florida',\n 'GA': 'Georgia',\n 'GU': 'Guam',\n 'HI': 'Hawaii',\n 'IA': 'Iowa',\n 'ID': 'Idaho',\n 'IL': 'Illinois',\n 'IN': 'Indiana',\n 'KS': 'Kansas',\n 'KY': 'Kentucky',\n 'LA': 'Louisiana',\n 'MA': 'Massachusetts',\n 'MD': 'Maryland',\n 'ME': 'Maine',\n 'MI': 'Michigan',\n 'MN': 'Minnesota',\n 'MO': 'Missouri',\n 'MP': 'Northern Mariana Islands',\n 'MS': 'Mississippi',\n 'MT': 'Montana',\n 'NA': 'National',\n 'NC': 'North Carolina',\n 'ND': 'North Dakota',\n 'NE': 'Nebraska',\n 'NH': 'New Hampshire',\n 'NJ': 'New Jersey',\n 'NM': 'New Mexico',\n 'NV': 'Nevada',\n 'NY': 'New York',\n 'OH': 'Ohio',\n 'OK': 'Oklahoma',\n 'OR': 'Oregon',\n 'PA': 'Pennsylvania',\n 'PR': 'Puerto Rico',\n 'RI': 'Rhode Island',\n 'SC': 'South Carolina',\n 'SD': 'South Dakota',\n 'TN': 'Tennessee',\n 'TX': 'Texas',\n 'UT': 'Utah',\n 'VA': 'Virginia',\n 'VI': 'Virgin Islands',\n 'VT': 'Vermont',\n 'WA': 'Washington',\n 'WI': 'Wisconsin',\n 'WV': 'West Virginia',\n 'WY': 'Wyoming'\n }\n return states.get(short)", "def clean_countries(event_db):\n event_db[\"country_edb\"] = event_db[\"country_edb\"].apply(_clean_country_str)\n event_db = my_utils.split_strings_at_comma_and_distribute_to_new_rows(event_db, 'country_edb')\n return event_db", "def parse_postalUS(self):\n \n index = self.index\n \n # US Postal Code\n if len(self.words[index]['word']) != 5 or not self.words[index]['word'].isdigit():\n return None, 0\n postal = self.words[index]['word']\n \n if index + 1 < self.length:\n if self.words[index+1]['word'] == '-':\n index += 2\n if index == self.length:\n return None, 0\n if len(self.words[index]['word']) == 4 and self.words[index]['word'].isdigit():\n postal += '-' + self.words[index]['word']\n return postal, 3\n else:\n return postal, 1\n \n return postal, 1", "def test_get_countries(self):\n pass", "def vi_fetch_voting_info_from_scb(self):\r\n\r\n # map counties and county codes using 'GET' request\r\n response = requests.get(self.url)\r\n if response.status_code == requests.codes.ok:\r\n try:\r\n data = json.loads(response.text)\r\n for code, county in zip(data['variables'][0]['values'], \\\r\n data['variables'][0]['valueTexts']):\r\n self.counties_codes[code] = county\r\n # self.vi_log_msg(self.counties_codes)\r\n except Exception as e:\r\n 
self.vi_log_msg('CRITICAL_ERROR : ', str(e), '. Exiting!!')\r\n sys.exit()\r\n else:\r\n self.vi_log_msg('\"GET\" request failed. Received error code:', \\\r\n response.status_code)\r\n\r\n # 'POST' required query (json query) to SCB url & get relevant info\r\n # json_query must state response format as json\r\n response = requests.post(self.url, json=self.json_query)\r\n if response.status_code == requests.codes.ok:\r\n try:\r\n json_response_obj = json.loads(codecs.encode(response.text, \\\r\n 'utf-8'))\r\n\r\n # json_response_obj['data'] is in below format\r\n # {\"key\":[county_code, voting_year],\"values\":\\\r\n # [voting_percentage]}\r\n # Eg: {\"key\":[\"01L\",\"1973\"],\"values\":[\"90.0\"]}\r\n for voting_data in json_response_obj['data']:\r\n county_code = voting_data['key'][0]\r\n voting_year = int(voting_data['key'][1])\r\n voting_percentage = voting_data['values'][0]\r\n # voting_percentage not available\r\n if voting_percentage == '..':\r\n voting_percentage = 0.0\r\n else:\r\n voting_percentage = float(voting_percentage)\r\n # get county name from county code\r\n county_name = self.counties_codes[county_code]\r\n \r\n # map voting information with voting year & county, i.e,\r\n # voting_information[voting_year][county] = \\\r\n # voting_percentage\r\n # Eg: voting_information[1973]['Stockholm county council'] \\\r\n # = 90.0\r\n if voting_year not in self.voting_information.keys():\r\n self.voting_information[voting_year] = {}\r\n self.voting_information[voting_year][county_name] = \\\r\n voting_percentage\r\n except Exception as e:\r\n self.vi_log_msg('CRITICAL_ERROR : ', str(e), '. Exiting!!')\r\n sys.exit()\r\n else:\r\n self.vi_log_msg('\"POST\" request failed. Received error code:', \\\r\n response.status_code)", "def parse_url(self, url: str):\n time.sleep(0.1)\n resp = requests.get(url, timeout=5).content.decode('windows-1250')\n selector = Selector(text=resp)\n name_addresses = []\n if not self.is_right_page(selector):\n return []\n\n company = self.parse_business_name(selector)\n name_addresses += self.parse_management_body(selector)\n name_addresses += self.parse_partners(selector)\n\n ret = []\n for name_address in name_addresses:\n name_address = [re.sub(r'[\",;]', '', n).strip() for n in name_address]\n print(\"Found name: \", name_address)\n is_russian = self.RUSSIA in name_address[1]\n ret.append([re.sub(r'[\",;]', '', company).strip()] + name_address + [is_russian])\n return ret", "def test_country_name_in_countries(self):\n\t\tcountry_code = get_country_code('Andorra')\n\t\tself.assertEqual(country_code, 'ad')", "def states(self):\n from geoid.core import names\n from geoid.censusnames import geo_names, stusab\n\n states = {}\n\n for state_no, stusab in stusab.items():\n states[stusab] = {\n 'name': geo_names[(state_no,0)],\n 'stusab': stusab,\n 'number' : state_no\n }\n\n states['US'] = {\n 'name': 'United States',\n 'stusab': 'US',\n 'number' : 0\n }\n\n return states", "def find_state(self, state):\n state = state.strip().upper()\n cursor = self.households.find({\"address.state\":state})\n results = [Household.from_dict(dct) for dct in cursor]\n\n cursor = self.businesses.find({\"address.state\":state})\n results += [Business.from_dict(dct) for dct in cursor]\n\n return results", "def alerts_county_zone(self: SimpleNWS) -> List[Dict[str, Any]]:\n return self._alerts_county_zone", "def parse_china(self, pyName=None):\n self.status = {}\n\n extractMethod = lambda d: {\n k: v for k,\n v in d.items() if k in [\n \"quName\",\n \"stateDetailed\",\n \"tem1\",\n 
\"tem2\",\n \"windState\"]}\n if pyName in [\"xisha\", \"nansha\", \"diaoyudao\"]:\n for city in self.root.findall(\"city\"):\n if city.get(\"pyName\") == pyName:\n self.status[\"cityname\"] = city.get(\"cityname\")\n self.status[\"stateDetailed\"] = city.get(\"stateDetailed\")\n self.status[\"temLow\"] = city.get(\"tem2\")\n self.status[\"temHigh\"] = city.get(\"tem1\")\n self.status[\"windState\"] = city.get(\"windState\")\n\n break\n\n return self.status\n else:\n for city in self.root.findall(\"city\"):\n self.status[city.get(\"cityname\")] = extractMethod(city.attrib)\n\n return self.status", "def test_valid_country():\n assert valid_country(\"Democratic Republic of Lungary\") is True\n assert valid_country(\"Kraznoviklandstan\") is True\n assert valid_country(\"kraznoviklandstan\") is True\n assert valid_country(\"KRAZNOVIKLANDSTAN\") is True\n\n assert valid_country(\"Democratic_Republic982759\") is False\n assert valid_country(\"Kraznoviklandsta\") is False\n assert valid_country(\"Principalities of Fryed\") is False\n assert valid_country(\"FRY\") is False", "def test_single_word_salisbury(self):\n result = location.lookup_location('Salisbury GB')\n\n self.assertEqual(result['country'], 'GB')", "def parse_postalCA(self):\n \n index = self.index\n \n if len(self.words[index]['word']) != 3:\n return None, 0\n postal = self.words[index]['word']\n index += 1\n if index == self.length:\n return None, 0\n \n if len(self.words[index]['word']) != 3:\n return None, 0\n postal += self.words[index]['word']\n \n return postal, 2", "def process_city(state, city, locations=Locations, perror=None, do_exit=None):\n c=ConfigParser.ConfigParser()\n c.read(locations)\n l=c.options('US_%s' % state)\n d = {}\n condition_station = None\n zone = None\n for x in l:\n # info: city condition-station zone radar-code\n info = string.split(c.get('US_%s' % state, x))\n if city == string.lower(info[0]):\n if verbose:\n print 'info:', info\n if info[1] != '-'*len(info[1]):\n condition_station = info[1]\n\n if info[2] != '-'*len(info[2]):\n zone = string.upper(info[2])\n zone = zone[3:]\n\n return (condition_station, zone)\n\n if perror:\n dp_io.eprintf(\"Don't know this state/city: %s/%s\\n\",\n self.state,\n self.city)\n if do_exit:\n sys.exit(1)\n \n return None", "def test_get_country_by_geo_location(self):\n pass", "def parse(self, response):\n\n j = response.body\n cs = json.loads(j) \n for v in cs:\n cid = v['country_id']\n url = \"http://api.qaym.com/0.1/countries/\"+cid+\"/cities/key=\"+key\n yield scrapy.Request(url, callback=self.parse_city)", "def parse_province(self, pyName=None):\n self.status = {}\n if pyName:\n for city in self.root.findall(\"city\"):\n if city.get(\"pyName\") == pyName:\n self.status[\"cityname\"] = city.get(\"cityname\")\n self.status[\"stateDetailed\"] = city.get(\"stateDetailed\")\n self.status[\"temHigh\"] = city.get(\"tem2\")\n self.status[\"temLow\"] = city.get(\"tem1\")\n self.status[\"temNow\"] = city.get(\"temNow\")\n self.status[\"windState\"] = city.get(\"windState\")\n self.status[\"humidity\"] = city.get(\"humidity\")\n self.status[\"time\"] = city.get(\"time\")\n\n break\n\n return self.status\n else:\n extractMethod = lambda d: {\n k: v for k, v in d.items() if k in [\n \"stateDetailed\", \"tem1\", \"tem2\", \"windState\"]}\n for city in self.root.findall(\"city\"):\n self.status[city.get(\"cityname\")] = extractMethod(city.attrib)\n\n return self.status", "def _parse_father_days_per_country(year, date_to_countries,filename=fathers_days_countries):\n year = 
str(year)\n with open(filename,'r') as f:\n\n for line in f:\n line = line.strip()\n if not line or line.startswith('#'):\n continue\n elif line.startswith('*'):\n\n line = re.sub(r'\\band\\b','',line)\n\n countries = list(map(lambda s: s.strip('* ').strip(),line.split(',')))\n else:\n date,day = line.split(':')\n\n if date == year:\n day = day.strip()\n date_to_countries[day].extend(countries)", "def _derive_country_JP(place):\n derived = []\n if _JP_FU_SUFFIX.search(place.asciiname):\n bare = _JP_FU_SUFFIX.sub(\"\", place.asciiname)\n derived += [bare, bare + \" prefecture\", bare + \" pref\"]\n elif _JP_KEN_SUFFIX.search(place.asciiname):\n bare = _JP_KEN_SUFFIX.sub(\"\", place.asciiname)\n derived += [bare, bare + \" prefecture\", bare + \" pref\",\n bare + \"-ken\", bare + \" ken\"]\n elif _JP_SHI_SUFFIX.search(place.name):\n bare = _JP_SHI_SUFFIX.sub(\"\", place.name)\n derived += [bare, bare + \"-city\", bare + \" city\"]\n elif _JP_KU_SUFFIX.search(place.name):\n bare = _JP_KU_SUFFIX.sub(\"\", place.name)\n derived += [bare, bare + \"-ku\", bare + \" ku\", bare + \" ward\"]\n\n en_names = [DerivedName(text.lower(), \"en\") for text in derived]\n _LOGGER.debug(\"derive_country_JP: en_names: %r\", en_names)\n\n if _JA_JP_SHI_SUFFIX.search(place.name):\n bare = _JA_JP_SHI_SUFFIX.sub(\"\", place.name)\n ja_names = [DerivedName(bare, \"ja\")]\n else:\n ja_names = []\n return en_names + ja_names", "def _read_dns_(dns, cnt):\r\n \r\n dn_names = None\r\n dn_ids = None\r\n dn_iaps = [None]*10\r\n \r\n for dn in dns.DN:\r\n if dn.ref == 'Name':\r\n dn_names = dn.value\r\n if dn.ref == 'DNId':\r\n dn_ids = dn.value\r\n if dn.ref == 'IAP':\r\n dn_iaps[0] = dn.value\r\n if dn.ref == 'IAP2':\r\n dn_iaps[1] = dn.value\r\n if dn.ref == 'IAP3':\r\n dn_iaps[2] = dn.value\r\n if dn.ref == 'IAP4':\r\n dn_iaps[3] = dn.value\r\n if dn.ref == 'IAP5':\r\n dn_iaps[4] = dn.value\r\n if dn.ref == 'IAP6':\r\n dn_iaps[5] = dn.value\r\n if dn.ref == 'IAP7':\r\n dn_iaps[6] = dn.value\r\n if dn.ref == 'IAP8':\r\n dn_iaps[7] = dn.value\r\n if dn.ref == 'IAP9':\r\n dn_iaps[8] = dn.value\r\n if dn.ref == 'IAP10':\r\n dn_iaps[9] = dn.value\r\n \r\n logger.info('Parsed DN names: %s' % dn_names)\r\n logger.info('Parsed DN ids: %s' % dn_ids)\r\n logger.info('Parsed DN iaps: %s' % dn_iaps)\r\n \r\n for i in range(len(dn_names)):\r\n mydn = Dn()\r\n mydn.set_id(dn_ids[i])\r\n mydn.set_name(dn_names[i])\r\n myiaps = [None]*10\r\n for j in range(10):\r\n myiaps[j] = dn_iaps[j][i]\r\n mydn.set_iaps(myiaps)\r\n cnt.add_dn(mydn)\r\n return cnt", "def allLocales(self):\n return util.parseLocales(urlopen(self.all_url).read())", "def get_county_boundaries(self):\n\n county_boundaries_gdf = self.census_boundaries.get_boundaries_gdf(\n \"Colorado\", \"county\"\n )\n\n county_boundaries_gdf = county_boundaries_gdf.set_index(\"GEOID\")\n county_boundaries_gdf[\"STATENAME\"] = county_boundaries_gdf[\"STATEFP\"].apply(\n lambda fip: self.census_boundaries.state_names.get(fip)\n )\n\n return county_boundaries_gdf", "async def test_get_location_data(self):\n for city_name in ['dublin', 'London', 'Copenhagen']:\n response = await self.http_client.fetch(request=HTTPRequest(\n url=self.get_url(path=\"/location-data/{}\".format(city_name)),\n method='GET'\n ))\n self.assertEqual(response.code, HTTPStatus.OK)\n self.check_city_response(response, city_name.lower())", "def load_country_code_data():\n name_conversion = {\n 'East Timor': 'Timor-Leste',\n 'Republic of the Congo': 'Congo (Kinshasa)',\n 'Ivory Coast': 'Cote d\\'Ivoire',\n 
'Macedonia': 'North Macedonia',\n 'Myanmar': 'Burma',\n 'Republic of Serbia': 'Serbia',\n 'Taiwan': 'Taiwan*',\n 'The Bahamas': 'Bahamas',\n 'United Republic of Tanzania': 'Tanzania',\n 'United States of America': 'US'\n }\n\n shapefile = os.path.join('data', 'ne_110m_admin_0_countries.shp')\n\n gdf = gpd.read_file(shapefile)[['ADMIN', 'ADM0_A3', 'geometry']]\n gdf.columns = ['country', 'country_code', 'geometry']\n\n gdf.loc[gdf['country'].isin(name_conversion.keys()), 'country'] = gdf['country'].map(name_conversion)\n\n return gdf", "def read_county_from_db(state_name, county_name):\n # Connect to database\n conn = sqlite3.connect('./db/incarceration.db')\n\n # Query the database\n data = pd.read_sql_query(f\"\"\"SELECT *\n FROM incarceration\n WHERE county_name = '{county_name}'\n AND state = '{state_name}';\n \"\"\", conn)\n\n # Close connection\n conn.close()\n\n return data", "def get_state_centers(state_i):\n url = 'https://optn.transplant.hrsa.gov/data/view-data-reports/center-data/'\n options = Options()\n options.add_argument(\"--headless\")\n\n browser = webdriver.Firefox(firefox_options=options)\n browser.get(url)\n assert \"Center Data\" in browser.title\n\n # Select state, click\n select_state = Select(browser.find_element_by_id(\"selectArea\"))\n select_state.select_by_index(state_i)\n selected = select_state.first_selected_option\n state = selected.text\n browser.find_element_by_id('imgSubmit').click()\n\n # Get center options; If no center data available, close browser\n try:\n choose_center = Select(browser.find_element_by_id('slice2'))\n except NoSuchElementException:\n if verbose:\n print('[-] Data not available for %s' % state)\n browser.close()\n return None, None\n except UnexpectedAlertPresentException:\n if verbose:\n print('[-] Data not available for %s' % state)\n alert = browser.switch_to.alert\n alert.accept()\n browser.close()\n return None, None\n\n # Iterate through each hospital for that state (skip 'All Centers')\n centers = []\n for center_i in range(1, len(choose_center.options)):\n choose_center.select_by_index(center_i)\n selected = choose_center.first_selected_option\n centers.append(selected.text)\n\n # Close main browser when done\n browser.close()\n\n # Return ('state', [centers])\n if verbose:\n print('[+] Gathered centers for %s' % state)\n return (state, centers)", "def usgs_parse(dataframe_list, args):\n\n for df in dataframe_list:\n # add columns at national and state level that only exist at the county level\n if 'state_cd' not in df:\n df['state_cd'] = '00'\n if 'state_name' not in df:\n df['state_name'] = 'None'\n if 'county_cd' not in df:\n df['county_cd'] = '000'\n if 'county_nm' not in df:\n df['county_nm'] = 'None'\n if 'year' not in df:\n df['year'] = args[\"year\"]\n # concat data frame list based on geography and then parse data\n df = pd.concat(dataframe_list, sort=True)\n df_n = df[df['geo'] == 'national']\n df_sc = df[df['geo'] != 'national']\n # drop columns that are all NAs\n df_n = df_n.dropna(axis=1, how='all')\n df_sc = df_sc.dropna(axis=1, how='all')\n # melt state and county level data frame\n df_sc = pd.melt(df_sc, id_vars=[\"geo\", \"state_cd\", \"state_name\", \"county_cd\", \"county_nm\", \"year\"],\n var_name=\"Description\", value_name=\"FlowAmount\")\n # merge national and state/county dataframes\n df = pd.concat([df_n, df_sc], sort=True)\n # drop any rows associated with commercial data (because only exists for 3 states)\n df = df[~df['Description'].str.lower().str.contains('commercial')]\n # drop rows that don't 
have a record and strip values that have extra symbols\n df['FlowAmount'] = df['FlowAmount'].str.strip()\n df[\"FlowAmount\"] = df['FlowAmount'].str.replace(\"a\", \"\", regex=True)\n df[\"FlowAmount\"] = df['FlowAmount'].str.replace(\"c\", \"\", regex=True)\n df = df[df['FlowAmount'] != '-']\n df = df[df['FlowAmount'] != '']\n # create fips codes by combining columns\n df['Location'] = df['state_cd'] + df['county_cd']\n # drop unused columns\n df = df.drop(columns=['county_cd', 'county_nm', 'geo', 'state_cd', 'state_name'])\n # create new columns based on description\n df['Unit'] = df['Description'].str.rsplit(',').str[-1]\n # create flow name column\n df['FlowName'] = pd.np.where(df.Description.str.contains(\"fresh\"), \"fresh\",\n pd.np.where(df.Description.str.contains(\"saline\"), \"saline\",\n pd.np.where(df.Description.str.contains(\"wastewater\"), \"wastewater\", \"total\")))\n # create flow name column\n df['Compartment'] = pd.np.where(df.Description.str.contains(\"ground\"), \"ground\",\n pd.np.where(df.Description.str.contains(\"Ground\"), \"ground\",\n pd.np.where(df.Description.str.contains(\"surface\"), \"surface\",\n pd.np.where(df.Description.str.contains(\"Surface\"), \"surface\",\n pd.np.where(df.Description.str.contains(\"consumptive\"), \"air\",\n pd.np.where(df.Description.str.contains(\"total\"), \"total\", \"total\"))))))\n # drop rows of data that are not water use/day. also drop \"in\" in unit column\n df['Unit'] = df['Unit'].str.strip()\n df[\"Unit\"] = df['Unit'].str.replace(\"in \", \"\", regex=True)\n df[\"Unit\"] = df['Unit'].str.replace(\"In \", \"\", regex=True)\n df = df[~df['Unit'].isin([\"millions\", \"gallons/person/day\", \"thousands\", \"thousand acres\", \"gigawatt-hours\"])]\n df = df[~df['Unit'].str.contains(\"number of\")]\n df.loc[df['Unit'].isin(['Mgal/', 'Mgal']), 'Unit'] = 'Mgal/d'\n df = df.reset_index(drop=True)\n # assign activities to produced or consumed by, using functions defined below\n activities = df['Description'].apply(activity)\n activities.columns = ['ActivityProducedBy', 'ActivityConsumedBy']\n df = df.join(activities)\n # rename year column\n df = df.rename(columns={\"year\": \"Year\"})\n # add location system based on year of data\n if args['year'] >= '2019':\n df['LocationSystem'] = 'FIPS_2019'\n elif '2015' <= args['year'] < '2019':\n df['LocationSystem'] = 'FIPS_2015'\n elif '2013' <= args['year'] < '2015':\n df['LocationSystem'] = 'FIPS_2013'\n elif '2010' <= args['year'] < '2013':\n df['LocationSystem'] = 'FIPS_2010'\n # hardcode column information\n df['Class'] = 'Water'\n df['SourceName'] = 'USGS_NWIS_WU'\n # Assign data quality scores\n df.loc[df['ActivityConsumedBy'].isin(['Public Supply', 'Public supply']), 'DataReliability'] = '2'\n df.loc[df['ActivityConsumedBy'].isin(['Aquaculture', 'Livestock', 'Total Thermoelectric Power',\n 'Thermoelectric power', 'Thermoelectric Power Once-through cooling',\n 'Thermoelectric Power Closed-loop cooling',\n 'Wastewater Treatment']), 'DataReliability'] = '3'\n df.loc[df['ActivityConsumedBy'].isin(['Domestic', 'Self-supplied domestic', 'Industrial', 'Self-supplied industrial',\n 'Irrigation, Crop', 'Irrigation, Golf Courses', 'Irrigation, Total',\n 'Irrigation', 'Mining']), 'DataReliability'] = '4'\n df.loc[df['ActivityConsumedBy'].isin(['Total withdrawals', 'Total Groundwater',\n 'Total Surface water']), 'DataReliability'] = '5'\n df.loc[df['ActivityProducedBy'].isin(['Public Supply']), 'DataReliability'] = '2'\n df.loc[df['ActivityProducedBy'].isin(['Aquaculture', 'Livestock', 
'Total Thermoelectric Power',\n 'Thermoelectric Power Once-through cooling',\n 'Thermoelectric Power Closed-loop cooling',\n 'Wastewater Treatment']), 'DataReliability'] = '3'\n df.loc[df['ActivityProducedBy'].isin(['Domestic', 'Industrial', 'Irrigation, Crop', 'Irrigation, Golf Courses',\n 'Irrigation, Total', 'Mining']), 'DataReliability'] = '4'\n # remove commas from activity names\n df['ActivityConsumedBy'] = df['ActivityConsumedBy'].str.replace(\", \", \" \", regex=True)\n df['ActivityProducedBy'] = df['ActivityProducedBy'].str.replace(\", \", \" \", regex=True)\n\n # standardize usgs activity names\n df = standardize_usgs_nwis_names(df)\n\n return df", "def extract_certs_from_pem(pem_contents):\n start = 0\n certs = []\n while True:\n index = pem_contents.find(constants.BEGIN_CERTIFICATE_MARKER, start)\n if index == -1:\n break\n try:\n cert = x509.load_pem_x509_certificate(pem_contents[index::],\n default_backend())\n except Exception:\n LOG.exception(_(\"Load pem x509 certificate failed at file \"\n \"location: %s\") % index)\n raise exception.SysinvException(_(\n \"Failed to load pem x509 certificate\"))\n\n certs.append(cert)\n start = index + len(constants.BEGIN_CERTIFICATE_MARKER)\n return certs", "def _parse_location(self, response):\n if \"1700 S. Wentworth\" in response.text:\n return {\n \"address\": \"1700 S. Wentworth Avenue, Chicago, Illinois\",\n \"name\": \"Leonard M. Louie Fieldhouse\",\n }\n elif \"Zoom\" in response.text:\n return {\n \"address\": \"\",\n \"name\": \"Zoom\",\n }\n else:\n raise ValueError(\"Meeting address has changed\")", "def get_countryes(db_name=_db_indicators, country_txt_file=os.path.join('Source', 'work_countries.txt')):\n imf = cmm.READ_DB(db_name=None)\n country_list = cmm.read_countries(file_name=country_txt_file)\n print('CREATE IMF: reading countries from all needed datasets...', end=' ')\n coni = sa.create_engine('sqlite+pysqlite:///{db_name}'.format(db_name=db_name))\n dbSETS=pd.read_sql('SELECT DISTINCT Dataset from {INDI_NAME}'.format(INDI_NAME=cmm.strINDI_db_name), con=coni)\n\n cntrl=list()\n\n for k, d in dbSETS.iterrows():\n try:\n cntrl.append(pd.DataFrame(imf.get_datastructure_list(d['Dataset'])['Geographical Areas']).set_index('CL_AREA_{}'.format(d['Dataset'])))\n except KeyError:\n pass\n\n # pdfC = pd.concat([pd.DataFrame(imf.get_datastructure_list(d['Dataset'])['Geographical Areas']).set_index('CL_AREA_{}'.format(d['Dataset'])) for k, d in dbSETS.iterrows() ])\n pdfC = pd.concat(cntrl)\n\n pdfC=pdfC[pdfC.index.isin(country_list)]\n pdfC = pdfC[~pdfC.index.duplicated()]\n pdfC.index.name='id'\n pdfC=pdfC.rename(columns={'Geographical Areas':'Country'})\n print('done reading countries', end='\\n')\n return pdfC\n\n\n #print(dbSETS)", "def _get_tls_cert_details(url, domain_validator):\n result, x509 = domain_validator.in_abuse_list(url)\n cert_df = pd.DataFrame()\n if x509 is not None:\n cert_df = pd.DataFrame(\n {\n \"SN\": [x509.serial_number],\n \"Subject\": [[(i.value) for i in x509.subject]],\n \"Issuer\": [[(i.value) for i in x509.issuer]],\n \"Expired\": [x509.not_valid_after],\n \"InAbuseList\": result,\n }\n )\n return cert_df", "def all_matched_searches(affiliations, de_facto_affiliations):\n geolocator = Nominatim()\n backup_geolocator = Google(\"AIzaSyCc3U_YDbluAh_Eja8Zc4e4PX04ndyDXgE\")\n iso_3166_1 = pd.read_csv(os.path.abspath(os.path.join(__file__, os.pardir,\n \"ISO_3166_1.csv\")), na_filter=False)\n iso_3166_2_us = pd.read_csv(os.path.abspath(os.path.join(__file__,\n os.pardir, \"ISO_3166_2_US.csv\")), 
na_filter=False)\n iso_dict = {**{country['Alpha-2 code']: [country[\n 'English short name (upper/lower case)'], country[\n 'Alpha-2 code'], country['Alpha-3 code']] for country in\n iso_3166_1.to_dict(orient='records')}, **{state['Code']: [\n state[\"Subdivision name\"], state['Code'], state['Code']] for\n state in iso_3166_2_us.to_dict(orient='records')}, 'unknown': [\n 'unknown'] * 3}\n countries = {**{country['Alpha-2 code']: country['Alpha-2 code'] for\n country in iso_3166_1.to_dict(orient='records')}, **{country[\n 'Alpha-3 code']: country['Alpha-2 code'] for country in\n iso_3166_1.to_dict(orient='records')}, **{country[\n 'English short name (upper/lower case)']: country[\n 'Alpha-2 code'] for country in iso_3166_1.to_dict(orient=\n 'records')}, **{state['Code']: state['Code'] for state in\n iso_3166_2_us.to_dict(orient='records')}, **{state[\n 'Subdivision name']: state['Code'] for state in\n iso_3166_2_us.to_dict(orient='records')}, 'unknown': 'unknown',\n '?': 'unknown', 'Taiwan': 'TW', \"PRC\": \"CN\", \"PR China\": \"CN\",\n \"UK\": \"GB\", \"United Kingdom\": \"GB\", \"Vietnam\": \"VN\",\n \"South Korea\": \"KR\", \"Macedonia\": \"MK\",\n \"Macedonia (FYROM)\": \"MK\", \"Iran (Islamic Republic of)\": \"IR\"}\n us = {'US', 'USA', 'United States', 'U.S.A', \"United States of America\"}\n us_states = {state['Subdivision name']: state['Code'] for state in\n iso_3166_2_us.to_dict(orient='records')}\n usa_states = dict()\n for state in us_states:\n usa_states[countries[state]] = countries[state]\n usa_states[countries[state][-2:]] = countries[state]\n if state not in countries:\n countries[state] = us_states[state]\n us_states = {**us_states, **usa_states}\n del usa_states\n country_count = {country: 0 for country in iso_dict}\n for k, v in affiliations.items():\n time.sleep(1)\n if \"country\" not in affiliations[k]:\n address_components = None\n while not address_components:\n time.sleep(1)\n try:\n address_components = [x.strip() for x in\n geolocator.reverse(k, language=\n 'en').address.split(',')]\n except GeocoderServiceError as g:\n try:\n address_components = list({com_g.strip() for com_g in [\n com_i for com_h in [com[0].split(\n ',') for com in\n backup_geolocator.reverse(k,\n language='en')] for com_i in com_h\n ]})\n except:\n print(colored(g, 'yellow'))\n next\n if bool([u for u in us if u in address_components]):\n local_states = [state for state in us_states if state in\n address_components]\n if bool(local_states):\n for state in local_states :\n affiliations[k][\"country\"] = us_states[state]\n country_count[affiliations[k][\"country\"]] = \\\n country_count[\n affiliations[\n k][\n \"country\"]\n ] + 1\n else:\n for country in countries:\n if \"country\" not in affiliations[k]:\n if country != 'United States of America' and country \\\n in address_components:\n affiliations[k][\"country\"] = countries[country]\n country_count[affiliations[k][\"country\"]] = \\\n country_count[\n affiliations[\n k][\n \"country\"]\n ] + 1\n if \"country\" not in affiliations[k]:\n country = input(colored(\"{}\\n{}? 
\".format(str(\n address_components), str(affiliations[k][\n \"affiliations\"])), 'magenta'))\n if len(country):\n affiliations[k][\"country\"] = countries[country]\n country_count[affiliations[k][\"country\"]] = country_count[\n affiliations[\n k][\"country\"]]\\\n + 1\n if \"country\" in affiliations[k]:\n print(\"{}: {}\".format(iso_dict[affiliations[k][\"country\"]][0], str(\n address_components)))\n save_heatmap_data(affiliations)\n return(affiliations, country_count)", "def clean_counselor(counselor):\n counselor = dict(counselor)\n\n if not REQUIRED_COUNSELOR_KEYS.issubset(set(counselor.keys())):\n raise ValueError('missing keys in counselor')\n\n lat_lng_keys = ('agc_ADDR_LATITUDE', 'agc_ADDR_LONGITUDE')\n for key in lat_lng_keys:\n counselor[key] = float_or_none(counselor[key])\n\n for key in ('city', 'nme'):\n counselor[key] = title_case(counselor[key])\n\n counselor['email'] = reformat_email(counselor['email'])\n counselor['weburl'] = reformat_weburl(counselor['weburl'])\n\n return counselor", "def _load_county_geometry() -> geopandas.GeoDataFrame:\n\tfilename = shapefiles_folder / \"cb_2016_us_county_500k\"\n\ttable = read_geometry(filename)\n\n\ttable['regionCode'] = [f\"{i:>02}{j:>03}\" for i, j in zip(table['STATEFP'].values, table['COUNTYFP'].values)]\n\treturn table", "def getValidCertifications(self):\n certs = []\n today = date.today()\n for c in self.getCertifications():\n validfrom = c.getValidFrom() if c else None\n validto = c.getValidTo() if validfrom else None\n if not validfrom or not validto:\n continue\n validfrom = validfrom.asdatetime().date()\n validto = validto.asdatetime().date()\n if (today >= validfrom and today <= validto):\n certs.append(c)\n return certs", "def read_coastal(filename, plotregion):\n\n # Initialize all variables\n coast_x = []\n coast_y = []\n poly_x = []\n poly_y = []\n segnum = 0\n segments = 0\n\n # Read in file\n polygons = open(filename)\n\n # Parse polygons\n for line in polygons:\n tokens = line.split()\n if (tokens[0] == 'P') or (tokens[0] == 'L'):\n if (len(poly_x) > 0):\n coast_x.append(poly_x)\n coast_y.append(poly_y)\n poly_x = []\n poly_y = []\n segnum = 0\n segments = int(tokens[2])\n else:\n if (segnum >= segments):\n print(\"Invalid number of segments in \" +\n \"polygon from file %s\" % (file))\n return([], [])\n segnum = segnum + 1\n x = float(tokens[0])\n y = float(tokens[1])\n if (in_region([x, y], plotregion)):\n poly_x.append(x)\n poly_y.append(y)\n else:\n if (len(poly_x) > 0):\n coast_x.append(poly_x)\n coast_y.append(poly_y)\n poly_x = []\n poly_y = []\n\n # Remember to close file\n polygons.close()\n\n return coast_x, coast_y", "def test_normalize_missing_city_state_and_postal_code(self) -> None:\n try:\n address_missing_required_fields()\n except ValidationError as err:\n assert err.request_id is None\n assert err.source is ErrorSource.SHIPENGINE.value\n assert err.error_type is ErrorType.VALIDATION.value\n assert err.error_code is ErrorCode.FIELD_VALUE_REQUIRED.value\n assert (\n err.message\n == \"Invalid address. 
Either the postal code or the city/locality and state/province must be specified.\" # noqa\n )", "def ad_rep_city_state(obj):\n return '%s, %s' % (obj.ad_rep.geolocation_object.us_city.name,\n obj.ad_rep.geolocation_object.us_state.abbreviation)", "def cities_aibnb():\n objetos = storage.all(\"State\")\n c_dit = {}\n s_dit = {}\n for key, values in objetos.items():\n if \"State\" in key:\n s_dit[key] = values\n if \"City\" in key:\n c_dit[key] = values\n return render_template(\"8-cities_by_states.html\", city=c_dit, state=s_dit)", "def test_county_limits_by_state__no_args(self):\n response = self.client.get(self.url, {})\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(\n response.data, {\"detail\": \"Required parameter state is missing\"}\n )", "def get_country(self, data: dict):\n country_entries = data.get(\"P27\")\n if country_entries is None or len(country_entries) == 0:\n country_entries = data.get(\"P19\")\n if country_entries is None or len(country_entries) == 0:\n return [{\"country\": \"Unknown\", \"region\": \"Unknown\"}]\n countries = []\n for entry in country_entries:\n country = entry.get(\"mainsnak\").get(\"datavalue\").get(\"value\").get(\"id\")\n countries.append(self._reference.get_country(country))\n return countries", "def parse_courses():\n\n subjects = collections.OrderedDict()\n name = '' # the most recent course name acronym (ex. 'COMP')\n\n courses = re.sub(r'\\([^)]*\\)', '', COURSES).split() # Remove parens and their contents\n\n for course in courses:\n if course == 'OR':\n continue\n\n if course[0].isalpha():\n\n index = 0 # the upper bound character index of the subject name\n for char in course:\n if char.isalpha():\n index += 1\n else:\n break\n\n name = course[:index]\n number = course[index:index+4]\n else:\n number = course[:4]\n\n try:\n subjects[name].append(number)\n except KeyError:\n subjects[name] = [number]\n\n return subjects" ]
[ "0.52510595", "0.5242316", "0.50890213", "0.5040012", "0.49885833", "0.49216333", "0.49208397", "0.49044296", "0.48793742", "0.47967193", "0.4782242", "0.47812676", "0.4777004", "0.47749475", "0.47335306", "0.47322056", "0.47285786", "0.46640974", "0.46594405", "0.46486455", "0.46388897", "0.4636068", "0.46249333", "0.46237603", "0.4608417", "0.45780727", "0.45779064", "0.45614", "0.45545778", "0.4550156", "0.4545614", "0.45443305", "0.45381552", "0.45380008", "0.4530111", "0.45289776", "0.45263618", "0.45209417", "0.45164472", "0.45130393", "0.45052356", "0.449193", "0.44887963", "0.44868052", "0.44559446", "0.445472", "0.44333178", "0.44281238", "0.44254473", "0.4422911", "0.44177583", "0.44172457", "0.44145295", "0.44115925", "0.44085646", "0.44080228", "0.4405089", "0.43982038", "0.4395741", "0.4387519", "0.43815398", "0.43810207", "0.437576", "0.43746033", "0.43745357", "0.43727818", "0.4372532", "0.43661636", "0.4356271", "0.4356173", "0.4353149", "0.43391213", "0.4335275", "0.4328089", "0.4324858", "0.43247208", "0.4322454", "0.43201703", "0.4307919", "0.43077475", "0.4306636", "0.43034253", "0.42946625", "0.42770398", "0.42758802", "0.42670196", "0.42645562", "0.42640853", "0.42617786", "0.42582324", "0.42495304", "0.42495275", "0.4246031", "0.423972", "0.42344674", "0.42328295", "0.4231327", "0.42287955", "0.42273018", "0.4218661" ]
0.54531664
0
Parses the given section.
def parse_section(soup): section_tag = soup.find_all('a', {'class': 'advisory-severity-vote__message'}) section_scale = [code.string for code in section_tag] section = section_scale[0] if section_scale else None section_comment_tags = soup.find_all('li', {'class': 'ipl-zebra-list__item'}) section_comment_list = [comment.text.strip() for comment in section_comment_tags] comments = cleanup_comments(section_comment_list) return section, comments
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_section(self, root, fmt):\n return self.parse_tag(root, fmt)", "def parse(self):\n for section in self.sections:\n section.parse()", "def do_section(parser, token, template='parts/section.html', end='endsection'):\n bits = token.split_contents()[1:]\n if len(bits) is 0:\n title, attrs = '', {}\n elif len(bits) is 1:\n title, attrs = bits[0], {}\n elif len(bits) % 2 is 0:\n raise template.TemplateSyntaxError(\"Your attributes don't match up: %s\" % ', '.join(bits[1:]))\n else:\n title = bits[0]\n attrs = dict(zip(bits[1::2], bits[2::2]))\n nodelist = parser.parse((end,))\n parser.delete_first_token()\n return SectionNode(template, title, attrs, nodelist)", "def parse(self, content):\n self._sections = {}\n self._filters = []\n section = None\n\n def error(msg):\n print('autodl.cfg: line {}: {}'.format(i + 1, msg))\n # log('autodl.cfg: line {}: {}'.format(i + 1, msg))\n\n first_prog = re.compile(ur'^\\[\\s*([\\w\\-]+)\\s*(?:([^\\]]+))?\\s*]$')\n second_prog = re.compile(ur'^([\\w\\-]+)\\s*=(.*)$')\n lines = content['data'].split('\\n')\n for line in lines:\n i = 0\n line = line.strip()\n if line == '':\n continue\n\n first_array = first_prog.match(line)\n second_array = second_prog.match(line)\n if line[0] == '#':\n if section:\n section.add_comment(line)\n elif first_array:\n _type = first_array.group(1).strip().lower()\n try:\n _name = first_array.group(2).strip().lower()\n except AttributeError:\n _name = None\n section = self.get_section(_type, _name)\n elif second_array:\n if section is None:\n error('Missing a [section]')\n else:\n _option = second_array.group(1).strip().lower()\n _value = second_array.group(2).strip().lower()\n section.add_option(_option, _value)\n else:\n error('Ignoring line')\n i += 1", "def parse_section(section):\n data = {}\n for line in section.splitlines(False):\n if not line:\n continue\n if not line.startswith(' '):\n # new key/value\n key, _, value = line.partition(': ')\n data[key] = value\n else:\n # continuation of the previous value\n data[key] += line[1:]\n return data", "def parse_create_section(xml_course):\n\n attrs = [\n \"section\",\n 'crn',\n \"start-time\",\n \"end-time\",\n \"meeting-days\",\n \"location\",\n \"section-number\",\n \"instructor\"\n ]\n\n section = pull_attributes_from_xml(xml_course, attrs)\n\n section[\"places\"] = []\n\n # Create Place attribute pointer based on location string\n # Get places from Parse\n places = get_places()[\"results\"]\n # Get location info from section (of form [\"BRK 101\", \"TBA\"])\n all_locations = section[\"location\"].split(\", \")\n # Filter out TBA\n # TODO Maybe do something else with them\n locations = [location for location in all_locations if location != \"TBA\"]\n\n for location in locations:\n building_code = location.split(\" \")[0]\n for place in places:\n if place.get(\"symbol\") and place[\"symbol\"] == building_code:\n section[\"places\"].append(place[\"objectId\"])\n break;\n\n\n return section", "def parse_text(self, text: str) -> SectionDict:", "def _parse_psf_section(psf):\n conv = OplsPsfFile._convert\n line = psf.readline()\n while not line.strip():\n if not line:\n raise CharmmPsfEOF('Unexpected EOF in PSF file')\n else:\n line = psf.readline()\n if '!' 
in line:\n words = line[:line.index('!')].split()\n title = line[line.index('!')+1:].strip().upper()\n # Strip out description\n if ':' in title:\n title = title[:title.index(':')]\n else:\n raise CharmmPSFError('Could not determine section title')\n if len(words) == 1:\n pointers = conv(words[0], int, 'pointer')\n else:\n pointers = tuple([conv(w, int, 'pointer') for w in words])\n line = psf.readline().strip()\n if not line and title.startswith('NNB'):\n # This will correctly handle the NNB section (which has a spurious\n # blank line) as well as any sections that have 0 members.\n line = psf.readline().strip()\n data = []\n if title == 'NATOM' or title == 'NTITLE' or title == 'NUMLP NUMLPH' or title == 'NUMANISO':\n # Store these four sections as strings (ATOM section we will parse\n # later). The rest of the sections are integer pointers\n while line:\n data.append(line)\n line = psf.readline().strip()\n else:\n while line:\n words = line.split()\n data.extend([conv(w, int, 'PSF data') for w in words])\n line = psf.readline().strip()\n return title, pointers, data", "def parseSection(self, response):\n sel = Selector(response)\n sections = sel.xpath('//table[@class=\"sections responsive\"]//tr[not(@class=\"headers\")]')\n for s in sections:\n item = CourseItem(response.request.meta[\"item\"])\n item['section'] = s.xpath('@data-section-id').get().strip()\n item['instructors'] = s.css('.instructor::text').get()\n if item['instructors'] != None:\n item['instructors'].strip()\n item['instructors'] = [x.strip() for x in re.split(',', item['instructors'])]\n item['syllabus'] = s.css('.syllabus a::attr(href)').get()\n if item['syllabus'] != None:\n item['syllabus'].strip()\n return item\n \n\n \"\"\"\n Ignore the code below this. I was trying to get\n the times, days, and number registered from the class sections\n \"\"\"\n #times = s.xpath('//td[@class=\"time\"]/text()').get().strip()\n #times = re.split('-', times)\n #starttime = times[0]\n #endtime = times[1]\n #endt = dt.datetime.strptime(endtime, '%H:%M%p')\n # TODO: Check if \"am\"/\"pm\" from endt, & if endt hour is greater/less than startt \n #startt = dt.datetime.strptime(starttime, '%H:%M')\n #days = s.xpath('//td[@class=\"days\"]/text()').get().strip()\n #days = re.split(',', days)\n #numdays = len(days]\n \n #cap = s.xpath('//td[@class=\"registered\"]//a/text()').get().strip()\n #cap = re.split(' of ', cap.strip())\n #item['capacity'] = cap[1]", "def section(data):\n if len(data['index']) == 2 and data['index'][1][0].isdigit():\n element = {}\n element['is_section'] = True\n element['section_id'] = '-'.join(data['index'])\n if u\"§§ \" == data['title'][:3]:\n element['is_section_span'] = True\n else:\n element['is_section_span'] = False\n match = SECTION_TITLE_REGEX.match(data['title'])\n element['label'] = match.group(1)\n element['sub_label'] = match.group(2)\n return element", "def visit_section(self, node):\n self.section_level += 1\n self.body.append(self.starttag(node, \"section\"))", "def parse_get_section(xml_course):\n parse_section = parse_create_section(xml_course)\n query_constraints = {\n \"crn\": parse_section[\"crn\"]\n }\n params = urllib.urlencode({\"where\": json.dumps(query_constraints)})\n connection = httplib.HTTPSConnection(PARSE_API_URL, PARSE_API_PORT)\n connection.connect()\n connection.request(\n \"GET\",\n \"%s?%s\" % (SECTIONS_ENDPOINT, params),\n '',\n {\"X-Parse-Application-Id\": app_id, \"X-Parse-REST-API-Key\": rest_api_key}\n )\n response = json.loads(connection.getresponse().read())\n if 
response.get(\"results\"):\n return response[\"results\"][0]\n else:\n return None", "def find_section(amdpar_xml):\n siblings = [s for s in amdpar_xml.itersiblings()]\n\n if len(siblings) == 0:\n return find_lost_section(amdpar_xml)\n\n for sibling in siblings:\n if sibling.tag == 'SECTION':\n return sibling\n\n paragraphs = [s for s in siblings if s.tag == 'P']\n if len(paragraphs) > 0:\n return fix_section_node(paragraphs, amdpar_xml)", "def _get_section_data(self, section):\n \n # unit number\n apt_name = section['name']\n \n try:\n # get the number of bedrooms and bathrooms based \n # on the specific section dictionary\n bedrooms_text = section['bedrooms']['fullValue']\n bathrooms_text = section['bathrooms']['fullValue']\n bedrooms = self._extract_num(bedrooms_text)\n bathrooms = self._extract_num(bathrooms_text)\n except:\n bedrooms, bathrooms = np.nan, np.nan\n\n try:\n # get the square foot area of the unit \n space = float(section['floorSpace']['max'])\n except:\n space = np.nan\n\n try:\n # get the rent price of the unit \n price_text = section['priceRange']['formattedPrice']\n price_text = price_text.replace(',', '') \\\n .replace('$', '')\n price = self._extract_num(price_text)\n except:\n price = np.nan\n \n # construct the section data\n section_data = [\n apt_name,\n bedrooms,\n bathrooms,\n space,\n price,\n ]\n \n return section_data", "def parse_data(self, section):\n data = {}\n # We need to first search down the section to look for where the\n # first TEMPERATURE section starts.\n regex = re.compile(\"^TEMPERATURE\", re.M)\n search = regex.search(section)\n if search is None:\n raise CLIException(\"Failed to find TEMPERATURE, aborting\")\n pos = search.start()\n # Strip extraneous spaces\n meat = \"\\n\".join([s.rstrip() for s in section[pos:].split(\"\\n\")])\n # replace any 2+ \\n with just two\n meat = re.sub(r\"\\n{2,}\", \"\\n\\n\", meat)\n sections = meat.split(\"\\n\\n\")\n for _section in sections:\n lines = _section.split(\"\\n\")\n if lines[0].startswith(\"TEMPERATURE\"):\n parse_temperature(self, self.regime, lines[1:], data)\n elif lines[0].startswith(\"PRECIPITATION\"):\n parse_precipitation(self.regime, lines[1:], data)\n elif lines[0].startswith(\"SNOWFALL\"):\n parse_snowfall(self.regime, lines[1:], data)\n elif lines[0] in [\"SKY COVER\"]:\n parse_sky_coverage(lines, data)\n elif lines[0] in [\"WIND (MPH)\"] and len(lines) > 1:\n parse_wind(lines, data)\n\n return data", "def parse(self, section_dict):\n self.dict = section_dict\n for option in section_dict:\n if option not in self.optionnames:\n print(\"Warning: Unknown option: {:s} in section {:s}\".format(\n option, self.name), file=sys.stderr\n )\n for option, name in zip(self.options, self.optionnames):\n self.dict[name] = option.parse(self)\n return self.dict", "def _section(self, node, offset_mngr):\n infon = self.infon_dict(node)\n type_ = infon.get('type')\n text = self._text(node)\n if not text:\n # Text and annotations at sentence level.\n offset = offset_mngr.start(node)\n text, anno = [], []\n for sent in self._iterfind(node, 'sentence'):\n text.append(self._sentence(sent, offset_mngr))\n anno.extend(self._get_annotations(sent, offset_mngr))\n else:\n # Text and annotations at passage level.\n offset = offset_mngr.update(node, text)\n anno = list(self._get_annotations(node, offset_mngr))\n return type_, text, offset, infon, anno", "def _read_section(self, pointer, nr_of_leads):\n if pointer.id == 1:\n return self._section1(pointer)\n if pointer.id == 2:\n return self._section2(pointer)\n 
elif pointer.id == 3:\n return self._section3(pointer)\n elif pointer.id == 4:\n return self._section4(pointer)\n elif pointer.id == 5:\n return self._section5(pointer, nr_of_leads)\n elif pointer.id == 6:\n return self._section6(pointer, nr_of_leads)\n elif pointer.id == 7:\n return self._section7(pointer)\n elif pointer.id == 8:\n return self._section8(pointer)\n elif pointer.id == 9:\n return self._section9(pointer)\n elif pointer.id == 10:\n return self._section10(pointer)\n elif pointer.id == 11:\n return self._section11(pointer)\n elif pointer.id == 12:\n return self._section12(pointer)\n elif pointer.id > 12:\n print(\"WARN: Section Id %s is not implemented\" % str(pointer.id))\n return None", "def _parse_section(self, lines_iter, expected_header=None):\r\n if expected_header:\r\n line = lines_iter.next()\r\n if expected_header + ':\\n' != line:\r\n raise ParseError('Expected: \"%s:\". Found: \"%s\"' % (expected_header, line))\r\n n = self._parse_num_items(lines_iter)\r\n relation = defaultdict(list) # Values are lists, to accommodate relations.\r\n for i in xrange(n):\r\n k, _, v = lines_iter.next().partition(' -> ')\r\n if len(v) == 1: # Value on its own line.\r\n v = lines_iter.next()\r\n relation[k].append(v[:-1])\r\n return relation", "def preprocess_section(self, section):\n\n if self.is_google_format(section.content):\n return self._google_preprocessor.preprocess_section(section)\n\n return self._rst_preprocessor.preprocess_section(section)", "def _parse_data(self):\n current_block = []\n current_section = \"docstring\"\n\n # if we get a line that starts with #, this is a new comment or\n # part of a block comment. Otherwise, it means the current block\n # comment has ended.\n\n for this in self.data:\n # Beginning of a new section at top level\n if self.regex_section.findall(this):\n name = self.regex_section.findall(this)[0]\n current_section = name.strip(\":\")\n self.sections[current_section] = \"\".join(current_block)\n current_block = []\n current_section = None\n elif this.startswith(\"#\"): # a comment at top level\n current_block.append(this)\n elif this.strip() == \"\": # an empty line\n # this was the main comment, or an isolated comment\n current_block = []\n else: # a non-empty line to skip\n current_block = []\n\n for key in self._get_expected_sections():\n if key not in self.sections.keys():\n logger.warning(\"section %s not dealt by the parsing function\" % key)", "def makesection(section):\n s = []\n if section is None:\n return s\n try:\n for i in section.split(':'):\n s.append(int(i))\n except Exception as e:\n msg = 'Not able to convet section to list because %s' % e\n raise SpecError(msg)\n return s", "def _parse(self):\n\n self.specification = {}\n\n while True:\n try:\n line = self._lines.current\n if ':' in line:\n self.specification.update(self._parse_spec())\n elif line.startswith('NODE_COORD_SECTION'):\n next(self._lines)\n self.coords = self._parse_coords()\n elif line.startswith('EDGE_WEIGHT_SECTION'):\n next(self._lines)\n self.weights = self._parse_weights()\n elif line.startswith('DISPLAY_DATA_SECTION'):\n next(self._lines)\n self.display = self._parse_coords()\n else:\n break\n except StopIteration:\n break\n\n del self._lines", "def handle_section_import(section):\n for prop in section.properties:\n handle_property_import(prop)\n\n # Make sure properties down the rabbit hole are also treated.\n for sec in section.sections:\n handle_section_import(sec)", "def extract_section(soup, symbol):\n section = []\n\n # assume this is only happens at 
the end of the file\n if soup.contents[0] == u'\\n':\n return None, [], \"\"\n\n if len(soup.contents) == 2:\n if soup.contents[1].strip() == u'None.':\n # the section is noted as empty, forward to next section\n return soup.nextSibling.nextSibling, [], \"\"\n\n # it's most likely it's here, but not sure. oh well!\n title = soup.contents[0].string\n #print >> sys.stderr, \"SYMBOL:\", symbol, \"[\", title, \"]\"\n\n soup = soup.nextSibling.nextSibling\n\n lines = []\n while soup and len(soup.findAll(text=re.compile(\"[A-Z][a-z]+:\"))) == 0:\n # fix for Examples\n line = [e.strip() for e in soup.recursiveChildGenerator()\n if isinstance(e, unicode)]\n lines.append(' '.join(line))\n soup = soup.nextSibling\n\n if len(lines):\n soup_data = '\\n'.join(lines)\n\n # xml-ish markup fixup\n section = xml_markup_fixup(soup_data)\n\n return soup, section, title", "def parse_section_header(data, elf_header):\n if elf_header[\"shoff\"] == 0:\n print \" No section header\"\n return None\n \n if is64bit(elf_header):\n section_entry_str = section_64_entry_str\n section_entry_spec = section_64_entry_spec\n else:\n section_entry_str = section_32_entry_str\n section_entry_spec = section_32_entry_spec\n \n entry_len = struct.calcsize(section_entry_str)\n entries = {}\n offset = elf_header[\"shoff\"] \n for entry in range(elf_header[\"shnum\"]):\n vals = {}\n if len(data) < offset+entry_len:\n break\n val_data = struct.unpack(section_entry_str, data[offset:offset+entry_len]) \n for i, elem in enumerate(section_entry_spec):\n vals[elem[0]] = val_data[i] \n \n vals[\"flags\"] = get_section_flags(vals[\"flags\"])\n vals[\"type\"] = get_section_type(vals[\"type\"])\n \n entries[entry] = vals\n offset += entry_len\n \n if not entries:\n return {}\n \n sections = assign_section_names(data, entries, elf_header[\"shstrndx\"])\n return sections", "def _getsec(self, line):\n m = sectionheader_re.match(line)\n if not m:\n return\n self.anchors = anchors = []\n self.d[m.group(1)] = anchors\n self.getline = self._getanchor", "def parse_section(config, config_type):\n\n\n if 'also_skip' in config:\n also_skip = config['also_skip'].lower() == 'true' or config['also_skip'].lower() == '1'\n config.pop('also_skip')\n else:\n also_skip = False\n\n if config_type == 'SAVGOL':\n config = parse_savgol(config)\n elif config_type == 'BASELINE':\n config = {}\n elif config_type == 'SNV':\n config = {}\n elif config_type == 'RNV':\n config = parse_rnv(config)\n elif config_type == 'LSNV':\n config = parse_lsnv(config)\n elif config_type == 'TRIM':\n config = parse_trim(config)\n elif config_type == 'DETREND':\n config = parse_detrend(config)\n elif config_type == 'MSC':\n config = {}\n elif config_type == 'EMSC':\n config = parse_emsc(config)\n elif config_type == 'NORML':\n config = parse_norml(config)\n elif config_type == 'CLIP':\n config = parse_clip(config)\n elif config_type == 'SMOOTH':\n config = parse_smooth(config)\n elif config_type == 'RESAMPLE':\n config = parse_resample(config)\n elif config_type == 'DERIVATE':\n config = parse_derivate(config)\n else:\n raise TypeError('Preprocessing option \"{}\" not recognized!'.format(config_type))\n\n if also_skip:\n config['also_skip'] = also_skip\n\n return config", "def get_section(self, section: str, item: str = '') -> Union[dict, str]:\n\n if section.lower() in self.ini_config.sections():\n if item != '':\n return dict(self.ini_config[section.lower()])[item.lower()]\n else:\n return dict(self.ini_config[section.lower()])\n\n else:\n logging.error(f'No {section} in .ini 
config')\n return {}", "def _read(self, fp, fpname):\n cursect = None # None, or a dictionary\n optname = None\n lineno = 0\n e = None # None, or an exception\n while True:\n line = fp.readline()\n if not line:\n break\n lineno = lineno + 1\n # comment or blank line?\n if line.strip() == '' or line[0] in '#;':\n continue\n if line.split(None, 1)[0].lower() == 'rem' and line[0] in \"rR\":\n # no leading whitespace\n continue\n # continuation line?\n if line[0].isspace() and cursect is not None and optname:\n value = line.strip()\n if value:\n cursect[optname].append(value)\n # a section header or option header?\n else:\n # is it a section header?\n mo = self.SECTCRE.match(line)\n if mo:\n sectname = mo.group('header')\n if sectname in self._sections:\n cursect = self._sections[sectname]\n elif sectname == DEFAULTSECT:\n cursect = self._defaults\n else:\n cursect = self._dict()\n cursect['__name__'] = sectname\n self._sections[sectname] = cursect\n # So sections can't start with a continuation line\n optname = None\n # no section header in the file?\n elif cursect is None:\n raise MissingSectionHeaderError(fpname, lineno, line)\n # an option line?\n else:\n mo = self._optcre.match(line)\n if mo:\n optname, vi, optval = mo.group('option', 'vi', 'value')\n optname = self.optionxform(optname.rstrip())\n # This check is fine because the OPTCRE cannot\n # match if it would set optval to None\n if optval is not None:\n if vi in ('=', ':') and ';' in optval:\n # ';' is a comment delimiter only if it follows\n # a spacing character\n pos = optval.find(';')\n if pos != -1 and optval[pos-1].isspace():\n optval = optval[:pos]\n optval = optval.strip()\n # allow empty values\n if optval == '\"\"':\n optval = ''\n cursect[optname] = [optval]\n else:\n # valueless option handling\n cursect[optname] = optval\n else:\n # a non-fatal parsing error occurred. set up the\n # exception but keep going. 
the exception will be\n # raised at the end of the file and will contain a\n # list of all bogus lines\n if not e:\n e = ParsingError(fpname)\n e.append(lineno, repr(line))\n # if any parsing errors occurred, raise an exception\n if e:\n raise e\n\n # join the multi-line values collected while reading\n all_sections = [self._defaults]\n all_sections.extend(self._sections.values())\n for options in all_sections:\n for name, val in options.items():\n if isinstance(val, list):\n options[name] = '\\n'.join(val)", "def read_section(self, configuration_file=\"./conf.txt\", section=\"\"):\n parser = ConfigParser.ConfigParser()\n parser.read(configuration_file)\n\n sec = {}\n if parser.has_section(section):\n items = parser.items(section)\n for item in items:\n sec[item[0]] = item[1]\n else:\n raise ConfException(\"{0} not found in the {1} file\".format(section, configuration_file))\n return sec", "def _parse_handle_section(lines):\n data = {}\n key = ''\n next(lines)\n\n for line in lines:\n line = line.rstrip()\n if line.startswith('\\t\\t'):\n if isinstance(data[key], list):\n data[key].append(line.lstrip())\n elif line.startswith('\\t'):\n key, value = [i.strip() for i in line.lstrip().split(':', 1)]\n key = normalize(key)\n if value:\n data[key] = value\n else:\n data[key] = []\n else:\n break\n\n return data", "def parseSections(data):\n pro = _sectionSplit.split(data)\n sections = {}\n for x in xrange(1, len(pro), 2):\n sections[pro[x]] = pro[x+1]\n return sections", "def parse(self):\n node = self.start\n while self.end is None:\n if node.next_sibling is None:\n self.end = node\n else:\n node = node.next_sibling\n if node.name == 'ul':\n self.quotes.append(Quote(node))\n elif node.name in ['h' + str(i) for i in range(1, 6)]:\n node_level = int(node.name[1:])\n if node_level > self.level:\n s = Section(node)\n self.children.append(s)\n node = s.end\n else:\n self.end = node.previous_sibling", "def __getitem__(self, section):\n #first translate from CFN into HOT terminology if necessary\n if section not in self.SECTIONS:\n section = HOTemplate20130523._translate(\n section, self._CFN_TO_HOT_SECTIONS,\n _('\"%s\" is not a valid template section'))\n\n if section not in self.SECTIONS:\n raise KeyError(_('\"%s\" is not a valid template section') % section)\n if section in self.SECTIONS_NO_DIRECT_ACCESS:\n raise KeyError(\n _('Section %s can not be accessed directly.') % section)\n\n if section == self.MAPPINGS:\n return {}\n\n if section == self.DESCRIPTION:\n default = 'No description'\n else:\n default = {}\n\n # if a section is None (empty yaml section) return {}\n # to be consistent with an empty json section.\n the_section = self.t.get(section) or default\n\n # In some cases (e.g. 
parameters), also translate each entry of\n # a section into CFN format (case, naming, etc) so the rest of the\n # engine can cope with it.\n # This is a shortcut for now and might be changed in the future.\n if section == self.RESOURCES:\n return self._translate_resources(the_section)\n\n if section == self.OUTPUTS:\n return self._translate_outputs(the_section)\n\n return the_section", "def __getitem__(self, section_id):", "def _get_section(self, sections, section_id):\n for section in sections:\n\t if section['section_id'] == section_id:\n\t return section", "def _get_section(self, sections, section_id):\n for section in sections:\n\t if section['section_id'] == section_id:\n\t return section", "def get_section(self, section_name):\n section_name = JSONSchema.format_section_name(section_name).lower()\n try:\n return self._sections[section_name]\n except KeyError:\n raise AquaError('No section \"{0}\"'.format(section_name))", "def _get_page_sections(self, sectionNum=None, sectionName=None):\n self.section = {}\n self.sections = [] # list maintains order\n content = self.page.content\n lines = content.split(\"\\n\")\n currentSection = None\n for line in lines:\n if \"==\" in line:\n line = line.replace(\"Edit =\",\"\")\n line = line.replace(\"=\",\"\").lstrip().rstrip()\n self.section[line] = []\n currentSection = line\n self.sections.append(currentSection)\n elif currentSection is not None:\n line = line.lstrip().rstrip()\n self.section[currentSection].append(line)\n else:\n pass\n logger.info(\"Sections in page: \"+str(self.sections))\n # and return some section:\n if sectionNum is not None:\n if sectionNum > len(self.sections) or sectionNum < 0:\n sectionNum = 0\n return self.section[self.sections[sectionNum]]\n elif sectionName is not None:\n pass", "def fix_section_node(paragraphs, amdpar_xml):\n\n sections = [s for s in amdpar_xml.itersiblings(preceding=True)\n if s.tag == 'SECTION']\n\n # Let's only do this if we find one section tag.\n if len(sections) == 1:\n section = deepcopy(sections[0])\n for paragraph in paragraphs:\n section.append(deepcopy(paragraph))\n return section", "def section(self):\n return SECTION_NAME_TO_SECTION[self.section_name]", "def __init__(self, section):\n\n self.values = section", "def get_section(self, section_name: str) -> NetSection:\n return self.sections[section_name]", "def parse_config_sections(self, namespace, sections):\n with patch(\"snakeoil.cli.arghparse.ArgumentParser.error\", self._config_error):\n for section in (x for x in sections if x in self.config):\n config_args = [\n f\"--{k}={v}\" if v else f\"--{k}\" for k, v in self.config.items(section)\n ]\n namespace, args = self.parser.parse_known_optionals(config_args, namespace)\n if args:\n self.parser.error(f\"unknown arguments: {' '.join(args)}\")\n return namespace", "def deal_with_sections(self):\n self.data_sections = []\n self.create_parser_sections(self.soup)", "def read_section(geo_file_path, section_marker):\n read = False\n section = []\n\n with open(geo_file_path) as file:\n lines = file.readlines()\n for line in lines:\n if line.startswith(section_marker):\n read = True\n\n if line.startswith(SECTION_END_MARKER):\n # Don't stop, section can be split\n read = False\n\n if read:\n section.append(line)\n\n return section", "def __getitem__(self, section):\n result = self.get(section)\n\n if result is None:\n raise KeyError(section)\n\n return result", "def find_section_state(line, current_section, section_order, content, highlight_content):\n for section, pattern in 
SEC_PAT_DICT.items():\n if pattern.match(line):\n section_order.append(section)\n content[section] = []\n highlight_content[section] = []\n return section, 1\n\n if current_section is None:\n raise InvalidDataError(\"Could not identify section from line: {}\".format(line))\n else:\n return current_section, 1", "def upload_section(parse_section):\n # If parse_section[\"id\"] is not specified, use POST method otherwise use PUT and return the upload results\n url = SECTIONS_ENDPOINT\n if not parse_section.get(\"objectId\"):\n method = \"POST\"\n else:\n method = \"PUT\"\n url += '/' + parse_section[\"objectId\"]\n\n connection = httplib.HTTPSConnection(PARSE_API_URL, PARSE_API_PORT)\n connection.connect()\n connection.request(\n method,\n url,\n json.dumps(parse_section),\n {\"X-Parse-Application-Id\": app_id, \"X-Parse-REST-API-Key\": rest_api_key}\n )\n result = json.loads(connection.getresponse().read())\n if result.get(\"error\"): \n print \"Error: could not get Parse authorization.\"\n elif method == \"POST\":\n return result[\"objectId\"]\n else:\n return parse_section[\"objectId\"]", "def section():\n # type: () -> None\n idx = get_arg(\"idx\", False)\n href = get_arg(\"href\", False)\n category = get_arg(\"category\")\n offset = get_arg(\"offset\", 0)\n if idx:\n # Section menu\n url = locs.get_search_url(idx, page=offset)\n data = locs.get_json(url)\n paginate(data.get(\"pagination\"), category, idx)\n for data in data.get(\"facets\", [{}])[0].get(\"filters\"):\n title = data.get(\"title\").title()\n count = data.get(\"count\")\n add_menu_item(section,\n \"{} [{} items]\".format(title, count),\n {\"href\": data.get(\"on\"), \"category\": title})\n if href:\n # Playable items\n data = locs.get_json(href)\n parse_search_results(data, category)\n xbmcplugin.setContent(plugin.handle, \"videos\")\n xbmcplugin.setPluginCategory(plugin.handle, category)\n xbmcplugin.endOfDirectory(plugin.handle)", "def _parse_sections(cls, content: dict) -> Dict[str, Any]:\n sections = {}\n for name, section_cls in cls.classes.items():\n section = content.get(name)\n if section:\n try:\n sections[name] = section_cls.parse(section)\n except AttributeError:\n sections[name] = section_cls(section)\n else:\n sections[name] = section_cls()\n return sections", "def query(config_file, section, option=None):\n error, parser = _get_config_parsser(config_file)\n if error:\n return error, parser\n if option:\n try:\n value = parser.get(section, option)\n return 0, [value]\n except Exception, e:\n return 1, e\n elif section:\n try:\n sections = parser.items(section)\n except Exception, e:\n return 1, e\n return 0, sections\n else:\n sections = parser.sections()\n return 0, sections", "def openssl_config_get_section(section):\n result = {}\n parses = False\n for line in openssl_config_strip(openssl_config()).split(\"\\n\"):\n if line.lstrip().startswith(\"[\") and line.rstrip().endswith(\"]\") and line[1:-1].strip() == section:\n parses = True\n continue\n if parses:\n if line.strip().startswith(\"[\"):\n break\n (key, value) = line.strip().split(\"=\", 1)\n result[key.strip()] = value.strip()\n return result", "def has_section(self, section):\n\n return self.parser.has_section(section)", "def get_section_config(self, section):\n params = self._parse_params(ConfigStorage(self.config[section]))\n return params", "def test_get_valid_section(self):\n arm = self.ar[2009][11]\n ars = arm['general']\n self.assertTrue(isinstance(ars, awstats_reader.AwstatsSection))", "def test_no_section_by_section(self):\n notice = {\n 
\"document_number\": \"111-22\",\n \"fr_volume\": 22,\n \"cfr_part\": \"100\",\n \"publication_date\": \"2010-10-10\"\n }\n s = SectionBySection(None, notices=[notice])\n self.assertEqual(None, s.process(Node(label=['100', '22'])))", "def section(self, idx: int) -> int:\n if self.sections >= (idx + 1):\n return int(RE_DIGIT.match(self.string.split(\".\")[idx]).group(1))\n return 0", "def parse_sections(soup, report, baseUrl):\n parse_sections = False # To parse section wise set it to True else full content is parsed\n overview = False\n config = False\n usecase = False\n overview_content = \"\"\n config_content = \"\"\n usecases_content = \"\"\n isFullContent = False\n full_content = \"\"\n updateImgUrl(baseUrl, soup)\n for e in soup.contents:\n if not parse_sections:\n if 'h1' == str(e.name).lower():\n isFullContent = True\n if isFullContent:\n full_content += \"\\n\" + str(e)\n else:\n content_value = e.next\n if content_value == 'Overview':\n overview = True\n if content_value == 'Configuration':\n config = True\n overview = False\n if content_value == 'Use Cases':\n usecase = True\n config = False\n if overview == True and config == False and usecase == False:\n overview_content += \"\\n\" + str(e)\n if overview == False and config == True and usecase == False:\n config_content += \"\\n\" + str(e)\n if overview == False and config == False and usecase == True:\n usecases_content += \"\\n\" + str(e)\n\n if not parse_sections:\n report[\"content\"] = convert_to_base64(full_content)\n else:\n if overview_content:\n report[\"overview\"] = convert_to_base64(overview_content)\n if config_content:\n report[\"configuration\"] = convert_to_base64(config_content)\n if usecases_content:\n report[\"use_cases\"] = convert_to_base64(usecases_content)", "def parse_sections(article, as_list: bool = False):\n article_text = article.find(\"text\")\n divs = article_text.find_all(\"div\", attrs={\"xmlns\": \"http://www.tei-c.org/ns/1.0\"})\n sections = []\n for div in divs:\n div_list = list(div.children)\n if len(div_list) == 0:\n heading = \"\"\n text = \"\"\n elif len(div_list) == 1:\n if isinstance(div_list[0], NavigableString):\n heading = str(div_list[0])\n text = \"\"\n else:\n heading = \"\"\n text = div_list[0].text\n else:\n text = []\n heading = div_list[0]\n if isinstance(heading, NavigableString):\n heading = str(heading)\n p_all = list(div.children)[1:]\n else:\n heading = \"\"\n p_all = list(div.children)\n for p in p_all:\n if p is not None:\n try:\n text.append(p.text)\n except:\n pass\n if not as_list:\n text = \"\\n\".join(text)\n\n if heading is not \"\" or text is not \"\":\n ref_dict = calculate_number_of_references(div)\n sections.append(\n {\n \"heading\": heading,\n \"text\": text,\n \"n_publication_ref\": ref_dict[\"n_publication_ref\"],\n \"n_figure_ref\": ref_dict[\"n_figure_ref\"],\n }\n )\n return sections", "def parser(in_file,verbose):\n\n # perform high-level parsing into sections\n res_file_lines = [row for row in in_file]\n tokenized_lines = tools.split_and_prune_lines(res_file_lines)\n sections = tools.extracted_sections(tokenized_lines)\n\n # split out common sections and subsequent groups of results sections\n def is_results_sentinel_section(section):\n \"\"\" Identify mesh point separator \"pseudo-section\" header.\n\n (Helper function for res_parser_spncci.)\n \"\"\"\n (section_name,_) = section\n return (section_name == \"RESULTS\")\n\n grouped_sections = tools.split_when(is_results_sentinel_section,sections)\n common_sections = list(next(grouped_sections))\n 
grouped_results_sections = [list(section_group) for section_group in grouped_sections]\n\n if (verbose):\n print(\"Section counts\")\n print(\" Common sections:\",len(common_sections))\n for results_section_group in grouped_results_sections:\n print(\" Results sections (by group):\",len(results_section_group))\n\n # generate results objects by mesh point\n mesh_data = []\n if (grouped_results_sections):\n # there are results sections: actual mesh, not counting run\n for results_section_group in grouped_results_sections:\n full_section_group = common_sections + results_section_group\n results = spncci_results_data.SpNCCIResultsData()\n parse_mesh_point(results,full_section_group,section_handlers)\n mesh_data.append(results)\n else:\n # no results sections: counting run\n results = spncci_results_data.SpNCCIResultsData()\n parse_mesh_point(results,common_sections,section_handlers)\n mesh_data.append(results)\n\n return mesh_data", "def _read_header(self, valid_sections={}):\n linenum = self._linenum\n\n # It's possible that we'll be at the end of the file, with some blank\n # lines, or hand-editing (or bad diff generation) has led to some\n # blank lines before a header. We'll iterate through any blank lines\n # until we reach content or an End of File.\n while True:\n header, eof = self._read_until(b'\\n')\n\n if eof:\n return None\n\n if header.strip():\n break\n\n if self._file_newlines is None:\n # Given that we read up until a '\\n', one of these are guaranteed\n # to match.\n if header.endswith(b'\\r\\n'):\n self._file_newlines = b'\\r\\n'\n else:\n assert header.endswith(b'\\n')\n\n self._file_newlines = b'\\n'\n\n assert header.endswith(self._file_newlines)\n header = header[:-len(self._file_newlines)]\n\n m = self._HEADER_RE.match(header)\n\n if not m:\n raise DiffXParseError(\n 'Unexpected or improperly formatted header: %r' % header,\n linenum=linenum)\n\n # Validate the level and section ID.\n section_id = m.group('section_id').decode('ascii')\n\n if section_id not in valid_sections:\n raise DiffXParseError(\n 'Unknown or unexpected section ID \"%(section_id)s\". '\n 'Expected one of: %(valid_sections)s'\n % {\n 'section_id': section_id,\n 'valid_sections': ', '.join(\n '\"%s\"' % _valid_section\n for _valid_section in sorted(valid_sections)\n ),\n },\n linenum=linenum)\n\n section_type = m.group('section_type')\n level = len(m.group('level'))\n\n # Parse the options out of the header.\n options_str = m.group('options')\n options = {}\n\n if options_str:\n # Options should be present.\n #\n # As this is a reference implementation, this will be strict with\n # the format. 
There should be exactly one space between the\n # \"#<id>:\" and the options, one space between each comma-separated\n # pair, and each key and value are expected to match a specific set\n # of characters.\n for option_pair in options_str.split(b', '):\n option_key, option_value = option_pair.split(b'=', 1)\n\n if not self._HEADER_OPTION_KEY_RE.match(option_key):\n raise DiffXParseError(\n 'Header option key \"%s\" contains invalid characters'\n % option_key.decode('ascii'),\n linenum=linenum,\n column=header.index(option_pair))\n\n if not self._HEADER_OPTION_VALUE_RE.match(option_value):\n raise DiffXParseError(\n 'Header option value \"%(value)s\" for key \"%(key)s\" '\n 'contains invalid characters'\n % {\n 'key': option_key.decode('ascii'),\n 'value': option_value.decode('ascii'),\n },\n linenum=linenum,\n column=header.index(option_pair) + len(option_key) + 1)\n\n # These should safely decode, since we've validated the\n # characters above.\n option_key = option_key.decode('ascii')\n option_value = option_value.decode('ascii')\n\n # Convert the value to an integer, if it's a number.\n try:\n option_value = int(option_value)\n except ValueError:\n pass\n\n options[option_key] = option_value\n\n self._linenum += 1\n\n return {\n 'level': level,\n 'line': linenum,\n 'options': options,\n 'section': section_id,\n 'type': section_type.decode('ascii'),\n }", "def _sectionpointer(self):\n p = SectionPointer()\n p.id = self.reader.readint(2)\n # section length\n p.len = self.reader.readint(4)\n # index of section starting from zero\n p.index = self.reader.readint(4)\n return p", "def to_dict(self, section):\n\t\t\n\t\tdct = {}\n\t\t\n\t\tfor name, value in self.items(section):\n\t\t\tdct[name] = self.parse_value(value)\n\t\t\n\t\treturn dct", "def _parse(self):\n\n self.specification = {}\n\n while True:\n try:\n line = self._lines.current\n if ':' in line:\n self.specification.update(self._parse_spec())\n elif line.startswith('TOUR_SECTION'):\n next(self._lines)\n self.tour = self._parse_tour()\n else:\n break\n except StopIteration:\n break\n\n del self._lines\n\n if 'TYPE' in self.specification and \\\n self.specification['TYPE'] != 'TOUR':\n raise TypeError('Unsupported TSPLib file type. 
Only TOUR type \\\n is supported')", "def get_sections(data):\n print \" * Extracting sections\"\n sections = OrderedDict()\n\n results = re.finditer(r\"^([A-Z][A-Z]+)([ ]+.*)?$\", data, re.M)\n data_start = None\n data_end = None\n prev_section = None\n cur_section = None\n for res in results:\n print \" * Found\", res.groups()[0]\n data_end = res.start()\n if prev_section is not None:\n # Get rid of potential comments at the end of a line.\n _data = re.sub(r\"\\s*#.*\", \"\", data[data_start:data_end])\n sections[prev_section][\"data\"] = filter(None, _data.splitlines())\n data_start = res.end()\n cur_section = res.groups()[0]\n sections[cur_section] = {\"arguments\": res.groups()[1], \"data\": \"\"}\n prev_section = \"%s\" % cur_section # Only to be sure we get a brand new string...\n\n return sections", "def parse_mesh_point(self,sections,section_handlers):\n\n for (section_name,tokenized_lines) in sections:\n if section_name in section_handlers:\n section_handlers[section_name](self,tokenized_lines)", "def get_section_config_ini(self, cfile, section, dict_format=False):\r\n\r\n config = self.get_config_ini(cfile)\r\n if dict_format:\r\n # Retorno um dicionario\r\n return dict(config.items(section.upper()))\r\n else:\r\n # Retorna um objeto config\r\n return config.items(section.upper())", "def readShimadzuSection(section):\n xdata = []\n ydata = []\n for line in section:\n tt = line.split()\n if len(tt)==2:\n try:\n x=float(tt[0])\n except ValueError:\n continue\n try:\n y=float(tt[1])\n except ValueError:\n continue\n xdata.append(x)\n ydata.append(y)\n return xdata,ydata", "def get_section_by_name(self, section_name):\n sections = self.unravel_sections(self.get_sections())\n for section in sections:\n if section['name'] == section_name:\n return section['groupId'], section\n return None, None", "def get_index_from_section(section):\n return section.rsplit(\"(\", 1)[1].rstrip(\")\")", "def section(self, section_name):\n section = site_sections.get(section_name)\n if not section:\n if site_sections.get_section_names() == []:\n raise TypeError('class site_sections is not set up. Call autodoscover first.')\n section = site_sections.get(section_name)\n if not section:\n raise TypeError('Could not find section \\'{0}\\' in site_sections. 
You need to define a section class for this name in section.py.'.format(section_name))\n self._section = section()", "def get_conf_by_section(self, section):\n try:\n return get_conf(self.conf_file)[section]\n except:\n return None", "def parse_part(self):\n parts = []\n for part in re.split(r'\\*\\*\\* ([A-Z- ]+) \\*\\*\\*', self.hand_file): # return [ 'part1', 'splitter1', 'part2',..\n parts.append(part)\n\n for i in range(0, len(parts)):\n if i == 0:\n self.part_dict['HEADER'] = parts[i]\n if i % 2 != 0: # number is odd\n self.part_dict[parts[i]] = parts[i + 1]", "def load_sections():\n pass", "def get_section_number() -> int:\n section_num = input('Enter a section number (1 - 4): ')\n while not (section_num.isdigit() and wf.is_valid_section(int(section_num))):\n print('Invalid section number!')\n section_num = input('Enter a section number (1 - 4): ')\n return int(section_num)", "def parse(lines):\n if len(lines) == 0:\n return (None, lines)\n title = lines[0]\n if len(title) > 0 and \"*\" == title[0]:\n level = 1\n while title[level] == \"*\":\n level = level + 1\n if title[level] != \" \":\n return (None, lines)\n else:\n heading = title[level+1:]\n todo = heading[:4] == \"TODO\"\n done = heading[:4] == \"DONE\"\n if todo or done:\n heading = heading[4:]\n return (ORGSection(heading.strip(), level=level, TODO=todo, DONE=done), lines[1:])\n else:\n return (None, lines)", "def add_section(self, section_name: str) -> None:\n pass", "def add_section(self, section_name: str) -> None:\n pass", "def split_and_find_section(curr_text,curr_sec_name,prev_section,split_upto=0.2,split_bins=10):\n current_text_split = split_match(curr_sec_name,curr_text,split_upto=split_upto,split_bins=split_bins)\n # print(\"Found Splits,\",curr_sec_name,len(current_text_split))\n if len(current_text_split) == 0: \n # This means no splits were found \n return curr_text,False\n\n portion_before_section = current_text_split[0] \n\n if prev_section is not None:\n prev_section.text = portion_before_section\n # print(ss.name,\"added To Section \",prev_section.name,len(prev_section.text))\n portion_after_section = current_text_split[1:]\n curr_text = ''.join(portion_after_section)\n return curr_text,True", "def parseLesson(lesson_text):\n lesson_parsed = dict()\n\n lesson_parsed['section_starts'] = list()\n lesson_parsed['sections'] = list()\n lesson_parsed['end_of_lesson'] = None\n lesson_parsed['source'] = None\n lesson_parsed['title'] = None\n lesson_parsed['number'] = None\n\n end_of_lesson_markers = [\n r'^\\s*\\(End\\s+of\\s+lesson',\n r'^\\s*\\(End\\s+of\\s+lecture',\n r'^\\(End\\s+of\\s+recording',\n r'^\\[End\\s+of\\s+recording',\n r'^\\[End\\s+of\\s+the\\s+recording',\n r'^\\(End of the recording',\n r'^\\[End\\s+of\\s+the\\s+tape',\n r'^\\(End\\s+of\\s+audible\\s+portion\\s+of\\s+the\\s+lesson',\n r'^\\[Note\\:\\s+This\\s+is\\s+the\\s+end\\s+of\\s+the\\s+recording',\n ]\n\n # Do a parsing pass of the overall lesson text. 
This information will be used\n # to compile the lesson into a form useful for display and use markup.\n line_number = 0\n for line in lesson_text:\n mobj = re.search(r'^Source:\\s+(.+)$', line)\n if mobj:\n # WORKING HERE\n pass\n\n if re.search(r'^\\d+[a-z]?\\.', line):\n lesson_parsed['section_starts'].append(line_number)\n line_number += 1\n continue\n\n found_end_of_lesson = False \n for end_of_lesson_marker in end_of_lesson_markers:\n if re.search(end_of_lesson_marker, line):\n lesson_parsed['end_of_lesson'] = line_number\n line_number += 1\n found_end_of_lesson = True\n break\n\n if found_end_of_lesson:\n continue\n\n for end_of_lesson_marker in end_of_lesson_markers:\n if re.search(end_of_lesson_marker, line):\n lesson_parsed['end_of_lesson'] = line_number\n line_number += 1\n break\n\n if lesson_parsed['end_of_lesson'] is not None:\n continue\n\n # Remove tabs from the start of a line.\n # Replace tabs between the number and the start of text with a space.\n # Skip \"This page is included for printing and binding purposes\"\n # Footnotes \"[fnX]\" will need to be linked.\n\n line_number += 1\n\n\n # Now compile the lesson instructions into their own things.\n\n idx = 0\n\n new_section = list()\n for section_line in xrange(lesson_parsed['section_starts'][idx], lesson_parsed['section_starts'][idx+1]:\n section_start_text = lesson_text[section_line]\n lesson_parsed['sections'].append(lesson_text[section_line])\n\n\n\n return lesson_parsed", "def parse(self, section):\n # try to find alternatives if they exist\n alternatives = deepcopy(self.alternatives)\n while len(alternatives) != 0 and self.name not in section.dict:\n other_name = alternatives.pop(0)\n if other_name in section.dict:\n section.dict[self.name] = section.dict[other_name]\n del section.dict[other_name]\n break\n if not self.optional:\n assert_exists(self.name, section.dict, section.name)\n if self.name not in section.dict:\n return self.default\n else:\n if self.dtype != list:\n if self.dtype == bool:\n # this is necessary since ``bool(\"False\")`` returns ``True``.\n value = parse_bool(section, self.name)\n else:\n value = self.dtype(section.dict[self.name])\n if not self.validation_func(value):\n raise ValueError('Invalid input for option ' + self.name +\n ' in section ' + section.name)\n return value\n else:\n\n value = parse_list(section.dict[self.name], self.datatype)\n\n # value validation\n if not all_true(self.validation_func, value):\n raise ValueError('Invalid input for option ' + self.name +\n ' in section ' + section.name)\n\n shape = deepcopy(self.shape)\n\n # now we need to get the correct shape\n if shape == -1:\n # we don't care for the shape of this\n if not isinstance(value, list):\n value = [value]\n return value\n\n if isinstance(shape, str):\n # in this case we simply use the shape of the option with this name\n if shape not in section.dict:\n raise ValueError(self.name + ' in ' + section.name + ' has an invalid ' +\\\n 'shape because the options whose shape it should have ' +\\\n 'does not exist. Check your option definitions!')\n shape = get_shape(section.dict[shape])\n if isinstance(shape, int):\n shape = [shape]\n # shape is now a list, but it might still contain strings\n for i in range(len(shape)):\n if isinstance(shape[i], str):\n shape[i] = len(section.dict[shape[i]])\n\n\n\n # shape is now either a 'flat' shape, i.e. something like [2, 3, 2],\n # or an expanded shape, e.g. 
[2, [3, 3], [[2, 2, 2],[2, 2, 2]]]\n # if it's flat, it might contain dimensions with -1 that cannot be\n # autoexpanded. We first need to determine the shape of this dimension.\n if is_flat(shape):\n real_shape = get_shape(value)\n if isinstance(real_shape, (list, tuple)):\n # if it's just a single number we can expand it\n # Here I'm trying to find the flat shape of the value that was\n # given in the configuration file.\n flat_shape_value = try_flattening_shape(real_shape)\n # It might happen that we cannot flatten the shape, in this\n # case there are negative values remaining in flat_shape_value.\n # If there are, this means that there is a dimension\n # containing lists of different lengths.\n # In any case I will try to replace any -1 in ``shape``\n # with the value in ``flat_shape_value``.\n shape = get_positive_shape(shape, flat_shape_value)\n # Now we do a test for equality of the asserted shape and\n # the shape of the value found in the config file. Keep in\n # mind that there might be -1 values left.\n if flat_shape_value != shape[-len(flat_shape_value):]:\n raise ShapeError(self.name, section.name)\n # If there are -1's left we must ensure that the \"depth\" of\n # the given value, i.e. the number of dimensions, is higher\n # than the ``number of dimensions after the value preceding\n # the first -1`` + 1 .\n if any(map(lambda x: x == -1, shape)):\n depth = numdim(value)\n mindepth = len(shape) - shape.index(-1) + 1\n if depth < mindepth:\n raise ValueError('Option ' + self.name + ' in section ' +\n section.name + ' can not be expanded!')\n shape = expand_shape(shape)\n\n # Now we have an expanded shape, so only two tasks remain:\n # * auto-expansion\n # * shape validation\n value = expand_to_shape(shape, value)\n if not compare_shapes(shape, get_shape(value)):\n raise ShapeError(self.name, section.name)\n return value", "def parse_layout(self, sections, group_size):\n layout = self.layout\n for section in sections:\n for row in layout[section]:\n try:\n column = \"\".join(row).index(\"S\" * group_size)\n return (layout[section].index(row), column, section)\n except ValueError:\n # There is no room in this row\n pass\n # Could Not find an room for the group in all sections\n raise NoRoomError", "def input_file_parser(cls):\n \n # Loop through the file and store lines in an appropriate list that is passed to other class functions\n with open(cls.infile_name,'r') as infile:\n for line in infile: # Loop through the whole file\n if '$molecule' in line: # Search for a section header\n for line in infile: # Enter second loop over the lines in the section\n if '$end' in line: # If you find $end, stop loop as the section is finished\n break\n else: # Otherwise add the line to a list\n cls.molecule_lines.append(line.strip())\n if '$connection' in line: # Continue for other sections...\n for line in infile:\n if '$end' in line:\n break\n else:\n cls.connector_lines.append(line.strip())\n if '$options' in line: # Continue for other sections...\n for line in infile:\n if '$end' in line:\n break\n else:\n cls.options_lines.append(line.strip())\n\n return None", "def is_section(line: str) -> bool:\n return len(line) > 0 and (line[0] == '[' and line[len(line) - 1] == ']')", "def parse_question_data(self):\n section = ''\n subsection = ''\n quest = ''\n # The data falls into 4 cases\n # 1. Sections\n # 2. subsections\n # 3. questions\n # 4. 
answers.\n\n for line in self.question_data: \n\n if \":\" in line: # case #2\n subsection = line.split(\":\")[1] # split the line on the : into an array but only take the [1] element\n debug(\"Subsection: %s\" % subsection)\n \n elif \".\" in line: # this is either a question or an answer?\n \n if line.split(\".\")[0].isdigit(): # case #3 it's a question, split on . into an array and take the element to the left and ask if it's a digit.\n quest = line # Since we know it's something like \"3. Are you a warlock?\" we stick that in the quest varable.\n debug(\"Question: %s\" % quest)\n # Create a question object and stick it in the dictonary with the key being the question (since we know it'll be unique)\n self.questions[quest] = question(section, subsection, quest) # I know it's redundant to have the key and have a value.\n \n elif line.startswith(\".\"): # case #4 answer All the answers startswith \".\" \n debug(\"Answer: %s\" % line)\n # take the question and append it to the answers array in the question object.\n self.questions[quest].answers.append(line[2:]) # Trim the first two characters off the answer since it's \". the answer\"\n \n else: # case #1 # This is section like AMERICAN DEMOCRACY\n section = line # load the line from the file into the section variable\n debug(\"Section = %s\" % section)", "def _populate_section(self, algo_group, result_template):\n if algo_group.module == CONNECTIVITY_MODULE:\n result_template[KEY_SECTION] = 'connectivity'\n result_template[KEY_SUB_SECTION] = 'connectivity'\n result_template[KEY_SUBMENU_LIST] = self.connectivity_submenu\n elif algo_group.group_category.display:\n ### Visualizers on the Burst Page\n result_template[KEY_SECTION] = 'burst'\n result_template[KEY_SUB_SECTION] = 'view_' + algo_group.subsection_name\n\n elif algo_group.group_category.rawinput:\n ### Upload algorithms\n result_template[KEY_SECTION] = 'project'\n result_template[KEY_SUB_SECTION] = 'data'\n elif 'RAW_DATA' in algo_group.group_category.defaultdatastate:\n ### Creators\n result_template[KEY_SECTION] = 'stimulus'\n result_template[KEY_SUB_SECTION] = 'stimulus'\n else:\n ### Analyzers\n result_template[KEY_SECTION] = algo_group.group_category.displayname.lower()\n result_template[KEY_SUB_SECTION] = algo_group.subsection_name\n result_template[KEY_SUBMENU_LIST] = self.analyze_adapters", "def _parse(self):\n with open(self.input) as f:\n for line in f:\n if not line.lstrip().startswith(\"#\"): # comment\n stripped_line=line.split(\"#\")[0].strip()\n \n # Initialise an empty option dictionary with some good defaults\n if \"[\" in stripped_line:\n molname=stripped_line.split()[1]\n self.options[molname]=self.empty_option_dict.copy() # dict1=dict2 does not copy!\n self.options[molname][\"MolName\"]=molname\n if \":\" in stripped_line: \n # now process line by line\n if \"{\" not in stripped_line:\n key,value=[i.strip() for i in stripped_line.split(\":\")]\n\n if key not in self.options[molname].keys():\n raise BaseException(\"Option \\\"{}\\\" not known, please check your input file\".format(key))\n self.options[molname][key]=value \n else:\n # This is to define special lines that are given by a dictionary\n key,value=stripped_line.split(\":\",1) # split on first occurence\n if key==\"Addon\": # additional atoms to be added per molecule\n addondict=self.empty_addon_dict.copy()\n addondict_string = value.split(\"}\",-1)[0].split(\"{\",1)[1]\n for pair in addondict_string.split(\",\"):\n addonkey,addonvalue=[i.strip() for i in pair.split(\":\")]\n if addonkey not in 
addondict.keys():\n raise BaseException(\"Option \\\"{}\\\" in Addon section of molecule {} not known, please check your input file\".format(addonkey,molname))\n addondict[addonkey]=addonvalue\n value=addondict\n # Since addon keyword can be used many times, this is a list\n self.options[molname][key].append(value) \n self._check()", "def _create_section(self, parent, sectionid, title=None, term=None):\n\n idb = nodes.make_id(sectionid)\n section = nodes.section(ids=[idb])\n parent.append(section)\n\n if term:\n if term != '**':\n section.append(nodes.term('', term))\n\n definition = nodes.definition()\n section.append(definition)\n\n return definition\n\n if title:\n section.append(nodes.title('', title))\n\n return section", "def config_section_map(section):\n try:\n section_dict = {}\n # Parse the config file's sections into options\n options = CONFIG_PARSER.options(section)\n # Loop through each option\n for option in options:\n # Get the section and option and add it to the dictionary\n section_dict[option] = CONFIG_PARSER.get(section,option)\n if section_dict[option] == -1:\n click.secho(\"[*] Skipping: {}\".format(option),fg=\"yellow\")\n # Return the dictionary of settings and values\n return section_dict\n except configparser.Error as error:\n click.secho(\"[!] There was an error with: {}\".format(section),fg=\"red\")\n click.secho(\"L.. Details: {}\".format(error),fg=\"red\")", "def gcam_parse(cfgfile_name):\n\n ## initialize the structures that will receive the data we are\n ## parsing from the file\n capability_table = {}\n module_list = []\n\n ## cfgfile_name is a filename\n with open(cfgfile_name,\"r\") as cfgfile: \n section = None\n module = None\n sectnpat = re.compile(r'\\[(.+)\\]')\n keyvalpat = re.compile(r'(.+)=(.+)')\n\n for line in cfgfile:\n line = line.lstrip() # remove leading whitespace\n\n ## check for comments and blank lines. A line is a comment if\n ## the first non-whitespace character is a '#'\n if(line == \"\" or line[0] == '#'):\n continue\n\n ## check for section header. Section headers appear in square brackets: [gcam_module]\n sectnmatch = sectnpat.match(line)\n if sectnmatch:\n section = sectnmatch.group(1)\n print \"parser starting section: %s\" % section\n \n if not section.lower()==\"global\":\n ## Section header starts a new module\n ## create the new module: the section name is the module class\n ## TODO: is the input from the config file trusted enough to do it this way?\n modcreate = \"%s(capability_table)\" % section\n print \"modcreate statement: %s\\n\" % modcreate\n module = eval(modcreate)\n else:\n ## This is kind of a wart because I want to call\n ## the section \"global\", but I don't want to have\n ## a module called \"global\".\n module = GlobalParamsModule(capability_table)\n \n module_list.append(module) \n continue # nothing further to do for a section header line\n\n ## If we get this far, we have a nonblank line that is not a\n ## comment or a section header. 
We had better be in a section\n ## by now, or the config is malformed.\n if section==None:\n raise RuntimeError(\"Malformed config file: doesn't open with a section header.\")\n\n kvmatch = keyvalpat.match(line)\n if not kvmatch:\n raise RuntimeError(\"Malformed line in config file:\\n%s\"%line)\n\n key = kvmatch.group(1).lstrip().rstrip()\n val = kvmatch.group(2).lstrip().rstrip()\n\n print \"parser got key= %s\\tval= %s\" % (key, val)\n\n module.addparam(key, val)\n\n ## end of loop over config file lines\n ## end of with block: config file will be closed\n \n ## close out the parameter processing for all modules in the list\n for module in module_list:\n module.finalize_parsing()\n\n return (module_list, capability_table)", "def get_section(line: str) -> str:\n if len(line) < 2:\n raise Exception(\"Error: Section line can't be shorter than 2\")\n return line[1:len(line) - 1]", "def _parse_summary(self):\r\n if self._is_at_section():\r\n return\r\n\r\n summary = self._doc.read_to_next_empty_line()\r\n summary_str = \" \".join([s.strip() for s in summary]).strip()\r\n if re.compile('^([\\w., ]+=)?\\s*[\\w\\.]+\\(.*\\)$').match(summary_str):\r\n self['Signature'] = summary_str\r\n if not self._is_at_section():\r\n self['Summary'] = self._doc.read_to_next_empty_line()\r\n else:\r\n self['Summary'] = summary\r\n\r\n if not self._is_at_section():\r\n self['Extended Summary'] = self._read_to_next_section()", "def _parse_summary(self):\r\n if self._is_at_section():\r\n return\r\n\r\n summary = self._doc.read_to_next_empty_line()\r\n summary_str = \" \".join([s.strip() for s in summary]).strip()\r\n if re.compile('^([\\w., ]+=)?\\s*[\\w\\.]+\\(.*\\)$').match(summary_str):\r\n self['Signature'] = summary_str\r\n if not self._is_at_section():\r\n self['Summary'] = self._doc.read_to_next_empty_line()\r\n else:\r\n self['Summary'] = summary\r\n\r\n if not self._is_at_section():\r\n self['Extended Summary'] = self._read_to_next_section()", "def ReadConfigFileSection( config, section ):\n dict1 = {}\n dict1['config'] = section\n options = config.options(section)\n for option in options:\n try:\n dict1[option] = config.get(section, option)\n except:\n print >> sys.stderr, (\"Exception on %s!\" % option)\n dict1[option] = None\n return dict1", "def has_section(self, section):\n raise NotImplementedError()", "def _parse_option_section(conf, items, copt, opt, _allow_include=0):\n global config_stray_opts, _non_options, _list_options, _path_options\n\n for key, val in items:\n if key == 'include' and _allow_include:\n for inc in val.split(' '):\n _parse_option_section(conf, conf.items(inc), copt, opt, _allow_include=(_allow_include-1))\n\n for key, val in items:\n if key in _non_options:\n continue\n elif key in dir(copt):\n if key in _list_options:\n val = val.split(_list_options[key])\n elif isinstance(getattr(copt, key), list) or \\\n (key in ('modules',)):\n val = val.split(' ')\n elif isinstance(getattr(copt, key), bool):\n val = bool(val.lower() in ('1', 'true', 't', 'yes'))\n\n if not getattr(copt, key):\n setattr(opt, key, val)\n else:\n config_stray_opts.append((key, val))\n pass", "def WhereInSection(self, section, container=None):\n if section.startswith('.'):\n if container:\n short_name = container.short_name\n ret = self.Filter(lambda s: (s.container_short_name == short_name and s.\n section_name == section))\n else:\n ret = self.Filter(lambda s: s.section_name == section)\n ret.section_name = section\n else:\n if container:\n short_name = container.short_name\n ret = self.Filter(lambda s: 
(s.container_short_name == short_name and s.\n section in section))\n else:\n ret = self.Filter(lambda s: s.section in section)\n if section in SECTION_TO_SECTION_NAME:\n ret.section_name = SECTION_TO_SECTION_NAME[section]\n return ret", "def _parse(self):\n with open(_join(self.man_dir, self.man_fn)) as fp:\n lines = fp.readlines()\n \n desc_indxs = []\n for i, L in enumerate(lines):\n if \"#landuse\" in L or \" # landuse\" in L:\n desc_indxs.append(i-1)\n desc_indxs.append(i-2)\n desc_indxs.append(i-3)\n \n lines = [L[:L.find('#')].strip() for L in lines]\n lines = [L for i, L in enumerate(lines) if len(L) > 0 or i in desc_indxs]\n\n del desc_indxs\n \n self.datver = lines.pop(0)\n self.nofe = int(lines.pop(0))\n self.sim_years = int(lines.pop(0))\n \n # Read Plant Growth Section\n self.plants = PlantLoops(lines, self)\n\n # Read Operation Section\n self.ops = OpLoops(lines, self)\n \n # Read Initial Condition Section\n self.inis = IniLoops(lines, self)\n \n # Read Surface Effects Section\n self.surfs = SurfLoops(lines, self)\n \n # Read Contour Section\n self.contours = ContourLoops(lines, self)\n \n # Read Drainage Section\n self.drains = DrainLoops(lines, self)\n \n # Read Yearly Section\n self.years = YearLoops(lines, self)\n \n # Read Management Section \n self.man = ManagementLoop(lines, self)" ]
[ "0.7374695", "0.6986738", "0.68131596", "0.68070054", "0.6706419", "0.6671083", "0.65692985", "0.6440196", "0.64307415", "0.63216704", "0.62620395", "0.6214296", "0.6185266", "0.61728215", "0.6125363", "0.6123458", "0.61100674", "0.6085565", "0.6084057", "0.6030608", "0.6025441", "0.59840393", "0.59615666", "0.5952685", "0.5939843", "0.5919707", "0.5905797", "0.59011376", "0.589904", "0.5894113", "0.5889724", "0.5889252", "0.5889243", "0.58455557", "0.58340657", "0.5822028", "0.57795286", "0.57795286", "0.5753701", "0.5735672", "0.5729519", "0.5704484", "0.57031053", "0.5701221", "0.5694936", "0.5650653", "0.5634373", "0.5624686", "0.5622022", "0.56027806", "0.56017715", "0.55987805", "0.559115", "0.55882096", "0.5578909", "0.5568472", "0.55557334", "0.55417955", "0.5537224", "0.55358815", "0.5530767", "0.552837", "0.54954904", "0.54720896", "0.5470462", "0.5415189", "0.5413476", "0.538769", "0.53659344", "0.5361231", "0.5360092", "0.53448385", "0.5343301", "0.5342502", "0.5335347", "0.53303367", "0.53291214", "0.5329092", "0.5321903", "0.5321903", "0.5315668", "0.5311553", "0.53057516", "0.530361", "0.5299981", "0.5294414", "0.5276587", "0.5270721", "0.52609974", "0.5257545", "0.52480185", "0.5244104", "0.52358544", "0.5232443", "0.5232443", "0.5227342", "0.5219844", "0.5205529", "0.5196783", "0.5191409" ]
0.71823096
1
Main entry point for the script
def main():
    clear_screen()
    print("Establishing a connection with the IMDb service...")
    session = initialize_connection()
    another = True
    while another:
        clear_screen()
        search_term = input("What would you like me to look up for you? ")
        if search_term:
            clear_screen()
            print(f'Please wait while I search for "{search_term}"...')
            shows = search_for_title(session, search_term)
            clear_screen()
            print(f'Found {len(shows)} matches.')
            if shows:
                display_shows(shows)
            another_one = input("Would you like to search for a different title? ([y]/n)")
            if another_one.lower().startswith('n'):
                another = False
        else:
            break
    clear_screen()
    print('Bye!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main():\n return", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():\n pass", "def main(args):", "def main(args):", "def main(args=None):", "def main(args=None):", "def main() -> None:", "def main() -> None:", "def main() -> None:", "def main() -> None:", "def main():\n\tcli = Cli()\n\tcli.run()", "def main(self) -> None:\n pass", "def main(self):", "def main():\n\tpass", "def main(self):\r\n pass", "def script(self):", "def main() -> None:\n return", "def run():\n main()", "def main(ctx, verbose):\n return", "def main():\n CLI_APP.run()", "def main():\n print(\"It works!!! ;-)\")\n ###TODO### do something with the various methods/functions of this file", "def main():\n\n pass", "def main():\n ...", "def main():\n run_program()", "def main():\n Main()", "def main():\n args = parse_args()\n process_args(args)", "def test_script(self) -> None:\n main()", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main():\n pass", "def main(args=None):\n pass", "def main():\n print(\"is Running!\")", "def cli():\n pass", "def main():\n sys.exit(RBExt().run(sys.argv[1:]))", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():" ]
[ "0.8150395", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.81455755", "0.8089746", "0.8060743", "0.8060743", "0.79744905", "0.79744905", "0.78642195", "0.78642195", "0.78642195", "0.78642195", "0.7810321", "0.77918494", "0.77810603", "0.77596277", "0.7735638", "0.77261895", "0.75865424", "0.75562036", "0.7530838", "0.75012946", "0.7469691", "0.7449276", "0.74350226", "0.7434012", "0.74270445", "0.7406685", "0.739887", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.7393023", "0.73398817", "0.73222136", "0.7279563", "0.7269737", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873", "0.7259873" ]
0.0
-1
Return True if ``value`` is a health check.
def is_healthcheck(self, value):
    return is_healthcheck(value)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check(self, value) -> bool:\n return self._check_helper(value, raise_exceptions=False)", "def has_value(cls, value):\n return bool(isinstance(value, numbers.Number) or isinstance(value, time) or \\\n isinstance(value, datetime) or value)", "def is_true(value):\n \n return (value is True)", "def _check_value_type(self, value):\n if value is not None and self.value_type is not None:\n valid = isinstance(value, self.value_type)\n if not valid:\n return False\n return True", "def check(self, key, value):\n return self._check_key(key) is True and self._check_value(value) is True", "def is_valid_value(self, value):\n return value in self.values", "def is_bool(value):\n return isinstance(value, bool)", "def is_valid_value(self, value):\n return value in self.categories", "def isPass(value: Any) -> bool: # pragma: no cover\n if isinstance(value, bool):\n return True\n return PASS in value", "def has_value(cls, value):\n return any(value == item.value for item in cls)", "def has_value(cls, value):\n return any(value == item.value for item in cls)", "def check(self, value: ATTRIBUTE_TYPES) -> bool:\n if self.type == ConstraintTypes.EQUAL:\n return self.value == value\n if self.type == ConstraintTypes.NOT_EQUAL:\n return self.value != value\n if self.type == ConstraintTypes.LESS_THAN:\n return self.value < value\n if self.type == ConstraintTypes.LESS_THAN_EQ:\n return self.value <= value\n if self.type == ConstraintTypes.GREATER_THAN:\n return self.value > value\n if self.type == ConstraintTypes.GREATER_THAN_EQ:\n return self.value >= value\n if self.type == ConstraintTypes.WITHIN:\n low = self.value[0]\n high = self.value[1]\n return low <= value <= high\n if self.type == ConstraintTypes.IN:\n return value in self.value\n if self.type == ConstraintTypes.NOT_IN:\n return value not in self.value\n if self.type == ConstraintTypes.DISTANCE:\n if not isinstance(value, Location): # pragma: nocover\n raise ValueError(\"Value must be of type Location.\")\n location = cast(Location, self.value[0])\n distance = self.value[1]\n return location.distance(value) <= distance\n raise ValueError(\"Constraint type not recognized.\") # pragma: nocover", "def is_valid(self, value) -> 'True|str':\n if self.base_type is not None and not isinstance(value, self.base_type):\n return f'Value {value} is not type of {self.base_type}.'\n return True", "def _is_valid(self, value):\n\n # Entities have an istypeof method that can perform more sophisticated\n # type checking.\n if hasattr(self._type, \"istypeof\"):\n return self._type.istypeof(value)\n else:\n return isinstance(value, self._type)", "def has_value(value):\n return IsDictContainingValue(wrap_matcher(value))", "def parse_value(cls, value):\n return bool(value)", "def accepts(cls, value: Any) -> bool:\n try:\n cls.convert(value)\n return True\n except ValueError:\n return False", "def is_bool(value):\n try:\n strtobool(value)\n except ValueError:\n return False\n else:\n return True", "def is_valid_value(self, value):\n if not self.range:\n return False\n\n return value >= self.range[0] and value <= self.range[1]", "def is_valid_value(self, value: Any) -> bool:\n return self.type_registry.is_valid_nested(value)", "def is_valid_confidence(self, value: List) -> bool:\n\n if self._confidence_values is None or value is None:\n return True\n else:\n return value in self._confidence_values", "def validate(self, value):\n\n return True", "def validate_value(self, value: valueType) -> bool:\n if value is None:\n raise Exception\n return True", "def is_value_legit(self, 
value):\n return value in self.domain and value in self.possible_domain", "def matches(self, value):\n return value == self.attributes[AT.VALUE]", "def check(self, value):\n raise NotImplementedError", "def _check_helper(self, value, raise_exceptions=True) -> bool:\n if not isinstance(value, self.value_type):\n if raise_exceptions:\n raise InvalidParameterException(\n '%s: invalid type given: %s (required %s)' % (\n self.name, type(value),\n ', '.join([str(x) for x in self.value_type])\n )\n )\n return False\n\n return True", "def validate(self, value):\n return True", "def check(self, value):\n\t\t\n\t\tif value <= self.current_rate:\n\t\t\treturn True\n\t\telse:\n\t\t\treturn False", "def check_value(value, sensor):\n if not GraphModel.check_if_int(value):\n return False\n\n return (sensor == 't' and ba.min_temp < int(value) < ba.max_temp) or \\\n (sensor == 'l' and ba.min_light < int(value) < ba.max_light)", "def check_active(value):\r\n\tif value == \"False\":\r\n\t\treturn False\r\n\telse:\r\n\t\treturn True", "def has_value(cls, value):\n return value in [item.value for item in cls]", "def _checkBool(inputvalue, description='inputvalue'):\n _checkString(description, minlength=1, description='description string')\n if not isinstance(inputvalue, bool):\n raise TypeError('The {0} must be boolean. Given: {1!r}'.format(description, inputvalue))", "def boolean(value):\n if isinstance(value, str):\n if value.lower() in [\"0\", \"no\", \"n\", \"false\", \"f\", \"off\"]:\n return False\n elif value.lower() in [\"1\", \"yes\", \"y\", \"true\", \"t\", \"on\"]:\n return True\n else:\n raise ValueError(\"Invalid truth value '%s'\" % value)\n elif isinstance(value, bool):\n return value\n else:\n raise TypeError(\"invalid type %s, expect bool or str\" % type(value))", "def clean_value(self, value):\n if isinstance(value, str):\n return value.lower() in ('1', 'true')\n\n return value in (1, True)", "def could_be_boolean(val):\n if val == None:\n return False\n\n if isinstance(val, bool):\n return True\n\n if isinstance(val, (str, unicode)):\n if val.lower() in ['true', '1', 'false', '0']:\n return True\n\n if isinstance(val, (int, long)):\n if val in [0,1]:\n return True\n\n return False", "def check_health(self):\n return defer.succeed(True)", "def contains(cls, value):\n return value in cls.values()", "def value_checker(self, puzzle: List[int], value: int) -> bool:\n if len(puzzle) == 0:\n return False\n\n if len(puzzle) == 1:\n if puzzle[0] is value:\n return True\n else:\n return False\n\n mid = len(puzzle) // 2\n left = self.value_checker(puzzle[:mid], value)\n right = self.value_checker(puzzle[mid:], value)\n\n return left or right", "def has_child(self, value):\n for node in self.children:\n if node.value == value:\n return True\n\n return False", "def _validate_value(self, value):\n if self.limits[0] <= value <= self.limits[1]:\n return True\n else:\n return False", "def to_bool(value: str) -> bool:\n if value in [\"True\", \"true\", True]:\n return True\n if value in [\"False\", \"false\", False]:\n return False\n raise ValueError(f\"Value {value} cannot be converted into a bool\")", "def containsValue(self, value):\n for val in values():\n if val == value or val == value:\n return True\n return False", "def validate(self,value):\r\n return type(value) is self.datatype", "def as_bool(value):\n return str(value).lower() in ('1', 'true', 'on', 'yes')", "def contains(self, value):\n return value in self.values", "def _validate(self, value):\n return True", "def is_float(self, value):\n try:\n 
float(value)\n return True\n except ValueError:\n return False", "def check_value(self, value):", "def is_valid(self, value) -> 'True | str':\n if not value in self.options:\n return f'The value \"{value}\" must one from \"{self.options}\".'\n return True", "def has_value(self):\n return hasattr(self, '_value')", "def pythonvalue(self, value):\n return value in (\"true\", \"1\")", "def is_contagious(self):\n if self.health >= 0 and self.health <= 49:\n return True\n elif self.health >= 50 and self.health <= 100:\n return False", "def _check_value(self, value):\n raise NotImplementedError", "def is_valid(value: str) -> bool:\n if value is None:\n return not is_required\n return value in get_all_class_attr_values(constant_cls)", "def is_register(value):\n if REGISTER_0 <= value <= REGISTER_7:\n return True\n return False", "def cast_bool(value: str) -> bool:\n return value.lower() in {\"true\", \"yes\", \"1\"}", "def checkIsHours(value):\n\n if not isinstance(value, str):\n return False\n\n if '*' in value:\n return False\n elif '+' in value:\n return False\n elif '-' in value:\n return False\n else:\n return True", "def isMember(self, value):\n\n returnValue = False\n\n if self.min != None and self.max != None:\n if self.min <= value <= self.max:\n returnValue = True\n\n if self.values != None:\n if value in self.values:\n returnValue = True\n\n return returnValue", "def validate(self, value):\r\n if isinstance(value, self.Quoter):\r\n value = value.value\r\n return bool(value)", "def checkType(self, value):\n pass", "def template_check(value):\n if isinstance(value, str):\n return value.lower() == \"true\"\n return value", "def is_alive(self):\n if self.health > 0:\n return True\n return False", "def to_bool(value):\r\n return bool(value) or lower(value) in [\"true\", \"yes\", \"on\"]", "def contains(self, val: float) -> bool:\n return self._check_lower(val) and self._check_upper(val)", "def is_false(value):\n \n return (value is False)", "def is_health_monitor_alarm(alarm):\n is_health_alarm = False\n if len(alarm[\"OKActions\"]) > 0:\n action = alarm[\"OKActions\"][0]\n is_health_alarm = \"cloudwatch_forwarder\" in action\n return is_health_alarm", "def check(self, value):\n level = logging.OK\n for i, j in self.thresholds:\n if value > i: level = j\n \n return level", "def validate(self, key: keyType, value: valueType) -> bool:\n return self.validate_key(key) and self.validate_value(value)", "def hasPossibility(self, value):\n \n return value in self._possibilities", "def is_valid_value(self, value):\n raise NotImplementedError(\"subclass must implement is_valid_value()\")", "def healthcheck(url):\n try:\n r = requests.get('http://localhost:5000/healthcheck')\n output = r.json()\n _ = output['Success']\n return True\n except:\n return False", "def is_type(value):\n if isinstance(value, type):\n return issubclass(value, Type)\n return isinstance(value, Type)", "def _validate(self, value): # type: (Any) -> bool\n if self._validator is None:\n return True\n return self._validator(value)", "def contains(cls, value):\n return any(value == item.value for item in cls)", "def is_valid(self, value: Union[float, int]) -> bool:\n if self.min is not None:\n if self.include_min:\n if value < self.min:\n return False\n else:\n if value <= self.min:\n return False\n\n if self.max is not None:\n if self.include_max:\n if value > self.max:\n return False\n else:\n if value >= self.max:\n return False\n\n if self.step is None:\n return True\n\n if self.min is not None:\n value -= self.min\n return (value 
% self.step) == 0", "def is_of_type(cls, value) -> bool:\n # UTF8 = 'utf-8'\n # UTF16 = 'utf-16'\n # UTF32 = 'utf-32'\n # ASCII = 'ascii'\n # BINARY = 'binary'\n # OCTAL = 'octal'\n # HEXADECIMAL = 'hexadecimal'\n # CP1252 = 'cp1252'\n # WINDOWS1252 = 'windows-1252'\n # UNICODEESCAPE = 'unicode-escape'\n\n v = None\n if cls == cls.UTF8 or cls == cls.UTF16 or cls == cls.UTF32 or cls == cls.UNICODEESCAPE:\n try:\n v = bytes(value)\n except:\n return False\n\n if cls == cls.ASCII:\n try:\n v = ascii(value)\n except:\n return False\n\n if cls == cls.BINARY:\n try:\n v = bin(value)\n except:\n return False\n\n if cls == cls.OCTAL:\n try:\n v = oct(value)\n except:\n return False\n\n if cls == cls.HEXADECIMAL:\n try:\n v = hex(value)\n except:\n return False\n\n if cls == cls.WINDOWS1252 or cls == cls.CP1252:\n try:\n v = str(value)\n except:\n return False\n return True", "def isValid(self, value):\n return value is None if self._onlyNullAllowed else value is not None", "def _assert_valid_value_and_cast(self, value):\n if not isinstance(value, bool):\n raise AssertionError(\"{0} is not a valid boolean type\".\n format(value))\n return value", "def __verify_boolean_field(cls, plugin_instance, field_name, field_value):\n\n if not isinstance(field_value, bool):\n raise BadPluginError(\n class_name=type(plugin_instance).__name__, field_name=field_name\n )", "def _check_value(self, value, name, check_function):\n if check_function is not None:\n is_good = check_function(value) #May raise an exception\n assert is_good in [0,1,True,False]\n if not is_good:\n raise ValueError(\"Invalid parameter value %r for parameter %s\" \\\n % (value, name))", "def has_acceptable_type(self, value):\n if not value:\n return False\n if super().has_acceptable_type(value):\n return True\n # Hmmm ok maybe we're running under IPython:\n try:\n import IPython\n return isinstance(value, IPython.kernel.zmq.iostream.OutStream)\n except ImportError:\n return False", "def check_value(self, key: str, value: Any):\n # Check the value with a set of tests\n self._check_missing(key, value)\n self._check_allowed_values(key, value)\n self._check_data_type(key, value)\n self._check_value_range(key, value)", "def has(self, value):\n return Filter(self, value, 'has')", "def is_healthy(self) -> bool:\n try:\n self.health()\n except MeiliSearchError:\n return False\n return True", "def is_floatable(value):\n\n try:\n float(value)\n return True\n except:\n return False", "def is_alive(self):\r\n return self._health_points > 0", "def check_for_bool(check):", "def health_check(self):\n with self.session as session:\n try:\n query = session.execute('SELECT 1')\n except Exception as e:\n raise UnhealthyCheck()\n\n return True", "def to_bool(value):\n if str(value).lower() in (\"yes\", \"y\", \"true\", \"t\", \"1\"):\n return True\n if str(value).lower() in (\"no\", \"n\", \"false\", \"f\", \"0\", \"0.0\", \"\", \"none\", \"[]\", \"{}\"):\n return False\n raise Exception('Invalid value for boolean conversion: ' + str(value))", "def check_status(status):\n if status == 'success':\n return True\n return False", "def contains(self, value: T) -> bool:\n def traverse(children: list) -> bool:\n for node in children:\n if node.value == value:\n return True\n else: \n if traverse(node.children):\n return True\n \n if self.value == value:\n return True\n elif traverse(self.children):\n return True\n else:\n return False", "def pretty_bool(value):\r\n bool_dict = [True, \"True\", \"true\", \"T\", \"t\", \"1\"]\r\n return value in bool_dict", "def 
validate(self, value):\n return self._validate(value, self.name)", "def should_return(self, value):\n \n return False", "def is_eyaml_value(value: str) -> bool:\n if not isinstance(value, str):\n return False\n return value.replace(\"\\n\", \"\").replace(\" \", \"\").startswith(\"ENC[\")", "def isinstance_safe(value, type_):\n try:\n return isinstance(value, type_)\n except TypeError:\n # Cannot perform isinstance on some types\n return False", "def to_bool(value):\n if isinstance(value, str):\n if value == \"True\":\n return True\n elif value == \"False\":\n return False\n else:\n raise ValueError(f\"Invalid literal for to_bool(): '{value}'\")\n else:\n return bool(value)", "def isfloat(value):\r\n try:\r\n float(value)\r\n return True\r\n except ValueError:\r\n return False", "def __equals__(self, to_compare):\n try:\n # Try to compare - this likely fails when it is compared to a non\n # Health object\n return \\\n (self.is_gyrometer_calibration_ok == to_compare.is_gyrometer_calibration_ok) and \\\n (self.is_accelerometer_calibration_ok == to_compare.is_accelerometer_calibration_ok) and \\\n (self.is_magnetometer_calibration_ok == to_compare.is_magnetometer_calibration_ok) and \\\n (self.is_level_calibration_ok == to_compare.is_level_calibration_ok) and \\\n (self.is_local_position_ok == to_compare.is_local_position_ok) and \\\n (self.is_global_position_ok == to_compare.is_global_position_ok) and \\\n (self.is_home_position_ok == to_compare.is_home_position_ok)\n\n except AttributeError:\n return False" ]
[ "0.70862806", "0.6380764", "0.61718976", "0.598836", "0.5963879", "0.59439", "0.5902455", "0.5843611", "0.5777476", "0.57587725", "0.57587725", "0.57569546", "0.56969887", "0.56509334", "0.5631042", "0.55776083", "0.554896", "0.5548796", "0.55426407", "0.5515357", "0.55148315", "0.55135566", "0.5509645", "0.54972845", "0.5492789", "0.54825586", "0.54809374", "0.54606754", "0.5438963", "0.5437047", "0.5433334", "0.5418433", "0.54164135", "0.5394564", "0.53931004", "0.5382041", "0.53613126", "0.53526956", "0.5346653", "0.5315714", "0.5284689", "0.52758175", "0.52749056", "0.5265493", "0.5262916", "0.5255755", "0.5254238", "0.5252385", "0.52470225", "0.52353597", "0.5233146", "0.52327865", "0.5227089", "0.5226839", "0.5214891", "0.5212396", "0.5200195", "0.51999825", "0.5199072", "0.51976615", "0.51918894", "0.51846766", "0.51815784", "0.51755494", "0.5163229", "0.5161453", "0.5160674", "0.5153978", "0.51532936", "0.51478606", "0.5139793", "0.51339865", "0.51328623", "0.51223975", "0.51142085", "0.5109695", "0.5102495", "0.50995576", "0.50879437", "0.50835574", "0.5065376", "0.5061966", "0.5048536", "0.5034532", "0.5033358", "0.5029435", "0.50212157", "0.5010811", "0.5006647", "0.498996", "0.49810204", "0.4975632", "0.4971952", "0.49665827", "0.49591285", "0.49572837", "0.49565735", "0.4955506", "0.49517727", "0.49508357" ]
0.867911
0
Return copy of TestSuite where only health checks remain.
def filter_suite(self, suite):
    if isinstance(suite, unittest.TestSuite):
        suite_copy = self.suiteClass()
        for sub in suite:
            if isinstance(sub, unittest.TestSuite):
                suite_copy.addTest(self.filter_suite(sub))
            else:
                if self.is_healthcheck(sub):
                    suite_copy.addTest(sub)
    elif self.is_healthcheck(suite):
        suite_copy = suite.copy()
    return suite_copy
{
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
}
[ "def dry_run(self):\n self.result.report = self._new_test_report()\n\n for pyunit_testcase in self.cfg.testcases:\n testsuite_report = TestGroupReport(\n name=pyunit_testcase.__name__,\n uid=pyunit_testcase.__name__,\n category=ReportCategories.TESTSUITE,\n entries=[\n TestCaseReport(\n name=self._TESTCASE_NAME, uid=self._TESTCASE_NAME\n )\n ],\n )\n self.result.report.append(testsuite_report)\n\n return self.result", "def loadTestsFromTestCase(self, testCaseClass):\n suite = super(HealthCheckLoader, self).loadTestsFromTestCase(\n testCaseClass)\n return self.filter_suite(suite)", "def test_suite():\n suite = unittest.TestSuite()\n suite.addTest(unittest.makeSuite(TestIntegration))\n suite.addTest(unittest.makeSuite(TestSection))\n return suite", "def suite():\n\n utilsTests.init()\n\n suites = []\n suites += unittest.makeSuite(SourceHeavyFootprintTestCase)\n suites += unittest.makeSuite(utilsTests.MemoryTestCase)\n return unittest.TestSuite(suites)", "def suite():\n return unittest.makeSuite(OpenedTestCase)", "def suite():\n\n utilsTests.init()\n\n suites = []\n suites += unittest.makeSuite(StatisticsTestCase)\n suites += unittest.makeSuite(utilsTests.MemoryTestCase)\n return unittest.TestSuite(suites)", "def suite():\n suite = unittest.TestSuite()\n suite.addTest(unittest.makeSuite(GetDailyReportV1TestCase))\n suite.addTest(unittest.makeSuite(GetDailyReportV2TestCase))\n return suite", "def getTestSuite():\n test_suite = unittest.TestSuite([])\n\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestDistReaders))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestPySnpTools))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestDistributedBed))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestFileCache))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestUtilTools))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestIntRangeSet))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSnpDocStrings))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestPstDocStrings))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestKrDocStrings))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSnpGen))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestGenerate))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestExampleFile))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestPstMemMap))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSnpMemMap))\n test_suite.addTests(NaNCNCTestCases.factory_iterator())\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestPstReader))\n test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestKernelReader))\n\n return test_suite", "def getTestSuite():\n\n suite1 = unittest.TestLoader().loadTestsFromTestCase(TestDataProcs)\n return unittest.TestSuite([suite1,suite2])", "def suite():\n loader = unittest.TestLoader()\n mysuite = unittest.TestSuite()\n mysuite.addTest(loader.loadTestsFromTestCase(TestWorldComposite))\n\n return mysuite", "def suite():\n loader = unittest.TestLoader()\n mysuite = unittest.TestSuite()\n mysuite.addTest(loader.loadTestsFromTestCase(TestUtils))\n \n return mysuite", "def suite():\n test_suite = unittest.TestSuite()\n test_suite.addTest(unittest.makeSuite(globalOptimizerTest))\n test_suite.addTest(unittest.makeSuite(recursiveStepTest))\n return test_suite", 
"def get_tests(self):\n return self.tests[:]", "def runtestsuite(self, testsuite):\n if testsuite.status == TestStatus.READY:\n results = testsuite.run()\n else:\n results = ResultList()\n # Disable \"Expression is assigned to nothing\" warning\n # pylint: disable=W0106\n [handler.flush() for handler in self.logger.handlers]\n results.save(heads={'Build': '', 'Branch': self.args.branch})\n sys.stdout.flush()\n self._cleanup_resourceprovider()\n return results", "def remove_empty_suites(self):\n self.visit(EmptySuiteRemover())", "def suite():\n tsuite = unittest.TestSuite()\n tsuite.addTest(unittest.defaultTestLoader.loadTestsFromModule(sys.modules[__name__]))\n tsuite.addTest(unittest.defaultTestLoader.loadTestsFromModule(commandtests))\n tsuite.addTest(unittest.defaultTestLoader.loadTestsFromModule(locktests))\n return tsuite", "def loadTestsFromName(self, name, module=None):\n suite = super(HealthCheckLoader, self).loadTestsFromName(name, module)\n return self.filter_suite(suite)", "def suite():\n\n utilsTests.init()\n\n suites = []\n suites += unittest.makeSuite(HscDistortionTestCase)\n suites += unittest.makeSuite(utilsTests.MemoryTestCase)\n return unittest.TestSuite(suites)", "def suite():\n suite_obj = unittest.TestSuite()\n suite_obj.addTest(TestEssentials())\n return suite_obj", "def suite():\n tests.init()\n\n suites = []\n suites += unittest.makeSuite(MeasureSourcesTestCase)\n suites += unittest.makeSuite(ForcedMeasureSourcesTestCase)\n suites += unittest.makeSuite(tests.MemoryTestCase)\n return unittest.TestSuite(suites)", "def test_suite():\n testSuite = unittest.TestSuite()\n testSuite.addTest(test_polarization.test_suite())\n testSuite.addTest(test_xray.test_suite())\n testSuite.addTest(test_emspectrum.test_suite())\n return testSuite", "def suite():\n suite = unittest.TestSuite()\n suite.addTest(unittest.makeSuite(ListV1TestCase))\n return suite", "def loadTestsFromModule(self, module, *args, **kwargs):\n suite = super(HealthCheckLoader, self).loadTestsFromModule(\n module, *args, **kwargs)\n return self.filter_suite(suite)", "def suite():\n suite = unittest.TestSuite()\n suite.addTest(unittest.makeSuite(UpdateV1TestCase))\n return suite", "def make_suite():\n suite = unittest.TestSuite()\n return suite", "def suite():\n\tts = unittest.TestSuite()\n\tfor test_module in __all__:\n\t\tm = importlib.import_module(\"pyroclast.test.\" + test_module)\n\t\tfor n in dir(m):\n\t\t\tc = getattr(m, n)\n\t\t\tif is_test_case(c):\n\t\t\t\ts = unittest.TestLoader().loadTestsFromTestCase(c)\n\t\t\t\tts.addTests(s)\n\treturn ts", "def _flattenTestSuite(self, testSuite):\n l = []\n try:\n for test_suite in testSuite._tests:\n l = l + self._flattenTestSuite(test_suite)\n except AttributeError:\n l.append(testSuite)\n return l", "def suite():\n # patch it to work here\n package_def = 'app.test'\n\n suite = unittest.TestSuite()\n\n for other_suite in iter_suites(package_def):\n suite.addTest(other_suite)\n return suite", "def suite():\n utilsTests.init()\n suites = []\n suites += unittest.makeSuite(TestTrackingDb)\n return unittest.TestSuite(suites)", "def suite():\n suite = unittest.TestSuite()\n suite.addTest(unittest.makeSuite(CreateV1TestCase))\n suite.addTest(unittest.makeSuite(CreateV2TestCase))\n return suite", "def suite():\n suite = unittest.TestSuite()\n suite.addTest(unittest.makeSuite(GetAccountReportV1TestCase))\n return suite", "def suite(self):\n return TestLoader().loadTestsFromTestCase(SourcehandlerTest)", "def filter_tagexp(testsuite, tagexp):\n if not tagexp:\n return 
testsuite\n caselist = []\n for each in testsuite:\n if not isinstance(each, unittest.BaseTestSuite):\n if checktags(each, tagexp):\n caselist.append(each)\n else:\n caselist.append(filter_tagexp(each, tagexp))\n return testsuite.__class__(caselist)", "def makeTestSuiteV201109():\n suite = unittest.TestSuite()\n suite.addTests(unittest.makeSuite(TrafficEstimatorServiceTestV201109))\n return suite", "def test_suite():\n suite = unittest.TestSuite()\n suite.addTests(unittest.makeSuite(PrimesTests))\n suite.addTests(unittest.makeSuite(OtherTests))\n return suite", "def get_tests(self):\n subtests = itertools.chain(*(s.get_tests() for s in self.suites.values()))\n tt = [t for t in itertools.chain(self.tests,subtests)]\n return tt", "def local():\n suite = ServiceTestSuite()\n suite.addTest(unittest.makeSuite(Test, 'test_local'))\n return suite", "def suite():\n loader = unittest.TestLoader()\n testsuite = loader.loadTestsFromModule(sys.modules[__name__])\n return testsuite", "def suite():\n return unittest.TestLoader().loadTestsFromName(__name__)", "def suite():\n return unittest.makeSuite(ClientsTestCase)", "def testsuite():\n loader = unittest.TestLoader()\n ts = unittest.TestSuite()\n ts.addTests(loader.loadTestsFromTestCase(api_server_test.ApiServerTestCase))\n ts.addTests(loader.loadTestsFromTestCase(codec_test.CodecTestCase))\n return ts", "def _testset(testsuite):\n for each in testsuite:\n if unittest.suite._isnotsuite(each):\n yield each\n else:\n for each2 in _testset(each):\n yield each2", "def collect_cases(self, suite=False):\n cases = unittest.TestSuite()\n\n if suite:\n test_suites = []\n for file in os.listdir('.'):\n if self.suite_path in file:\n if os.path.isdir(file):\n test_suites.append(file)\n\n for test_suite in test_suites:\n self._collect_cases(cases, top_dir=test_suite)\n else:\n self._collect_cases(cases, top_dir=None)\n return cases", "def suite_confopt_test():\n loader = unittest.TestLoader()\n suite = unittest.TestSuite(loader.loadTestsFromTestCase(ConfOptTest))\n return suite", "def test_oldtestcases(self):\n\t\treturn oldtests()", "def suite():\n test_suite = unittest.TestSuite()\n test_suite.addTest(unittest.makeSuite(TestFunctionalSPF, \"test\"))\n return test_suite", "def suite_utilstest():\n loader = unittest.TestLoader()\n suite = unittest.TestSuite(loader.loadTestsFromTestCase(UtilsTest))\n return suite", "def make_suite():\n\n loader = unittest.TestLoader()\n suite = unittest.TestSuite()\n for test_class in test_classes():\n tests = loader.loadTestsFromTestCase(test_class)\n suite.addTests(tests)\n return suite", "def make_suite():\n\n loader = unittest.TestLoader()\n suite = unittest.TestSuite()\n for test_class in test_classes():\n tests = loader.loadTestsFromTestCase(test_class)\n suite.addTests(tests)\n return suite", "def suite():\n\n lsst.utils.tests.init()\n\n suites = []\n suites += unittest.makeSuite(SchemaTestCase)\n suites += unittest.makeSuite(lsst.utils.tests.MemoryTestCase)\n return unittest.TestSuite(suites)", "def children(self):\n tests = [t for t in self._collection if t.parent is self]\n suites = [t for t in self._collection.suites if t.parent is self]\n return suites + tests", "def test_suite():\n testSuite = unittest.TestSuite()\n testSuite.addTest(test_h5fs(\"test_mode\"))\n testSuite.addTest(test_h5fs(\"test_path_splitting\"))\n testSuite.addTest(test_h5fs(\"test_link_mixing\"))\n return testSuite", "def do_TestSuite(suite):\n cl = suite.__class__\n name = mangle_test_name(suite.test_name)\n dbsuite = 
get_or_create_TestSuite(name=name, valid=True, \n suiteimplementation=\"%s.%s\" % (cl.__module__, cl.__name__))\n dbsuite.subsuites = []\n dbsuite.testcases = []\n\n memo = set()\n for testentry in suite:\n if testentry.inst.__class__ in memo:\n continue\n memo.add(testentry.inst.__class__)\n if isinstance(testentry, core.SuiteEntry):\n newsuite = do_TestSuite(testentry.inst)\n dbsuite.subsuites.append(newsuite)\n else: # a TestEntry or TestSeriesEntry\n dbcase = do_TestEntry(testentry)\n dbsuite.testcases.append(dbcase)\n _dbsession.commit()\n return dbsuite", "def test_suite():\n return unittest.defaultTestLoader.loadTestsFromName(__name__)", "def test_suite():\n testSuite = unittest.TestSuite()\n\n testSuite.addTest(test_classfactory(\"test_inheritance\"))\n return testSuite", "def suite():\n\n utilsTests.init()\n\n suites = []\n suites += unittest.makeSuite(FitExponentialTestCase)\n suites += unittest.makeSuite(utilsTests.MemoryTestCase)\n return unittest.TestSuite(suites)", "def _suite(self):\n import mpi.test_application\n import mpi.test_communicator\n import mpi.test_launcher\n\n test_cases = []\n for mod in [\n mpi.test_application,\n mpi.test_communicator,\n mpi.test_launcher,\n ]:\n test_cases += mod.test_classes()\n \n suite = unittest.TestSuite()\n for test_case in test_cases:\n suite.addTest(unittest.makeSuite(test_case))\n\n return suite", "def _load_tests(self):\n tests = {\"enabled\":defaultdict(list),\n \"disabled\":defaultdict(list)}\n\n for test_path, test_type, test in self.iter_tests():\n enabled = not test.disabled()\n if not self.include_https and test.environment[\"protocol\"] == \"https\":\n enabled = False\n key = \"enabled\" if enabled else \"disabled\"\n tests[key][test_type].append(test)\n\n self.tests = tests[\"enabled\"]\n self.disabled_tests = tests[\"disabled\"]", "def test_keep_unobserved_subtest(self):\n self.write_contents(\n 'external/wpt/variant.html.ini', \"\"\"\\\n [variant.html?foo=baz]\n [subtest that should not be removed]\n expected: CRASH\n \"\"\")\n self.update(\n {\n 'results': [{\n 'test': '/variant.html?foo=baz',\n 'status': 'CRASH',\n 'subtests': [],\n }],\n },\n overwrite_conditions='no')\n self.write_contents(\n 'external/wpt/variant.html.ini', \"\"\"\\\n [variant.html?foo=baz]\n [subtest that should not be removed]\n expected: CRASH\n \"\"\")", "def suite():\n\tsuite1 = unittest.makeSuite(TestCrop, 'test')\n\tsuite2 = unittest.makeSuite(TestDiag, 'test')\n\tsuite3 = unittest.makeSuite(TestEye, 'test')\n\tsuite4 = unittest.makeSuite(TestMinDim, 'test') \n\tsuite5 = unittest.makeSuite(TestNnz, 'test')\n\tsuite6 = unittest.makeSuite(TestOnes, 'test')\n\tsuite7 = unittest.makeSuite(TestRand, 'test')\n\tsuite8 = unittest.makeSuite(TestRandSym, 'test')\n\tsuite9 = unittest.makeSuite(TestReplace, 'test')\n\tsuite10 = unittest.makeSuite(TestTriu, 'test')\n\tsuite11 = unittest.makeSuite(TestTril, 'test')\n\treturn unittest.TestSuite((suite1, suite2, suite3, suite4, suite5, suite6, suite7, suite8, suite9, suite10, suite11))", "def testsuite():\n \n tests = unittest.TestSuite()\n\n parse_tests = unittest.makeSuite(ParseTestCase, 'test')\n tests = unittest.TestSuite( (tests, parse_tests) )\n\n return tests", "def loadTestsFromNames(self, names, module=None):\n suite = super(HealthCheckLoader, self).loadTestsFromNames(names,\n module)\n return self.filter_suite(suite)", "def suite():\n utilsTests.init()\n\n suites = [\n unittest.makeSuite(RingsTestCase),\n unittest.makeSuite(utilsTests.MemoryTestCase),\n ]\n\n return unittest.TestSuite(suites)", 
"def tests(self):\n if self._tests is None:\n raise ValueError(\"Individual tests were not kept!\")\n\n return self._tests", "def suite():\n\n lsst_tests.init()\n\n suites = []\n suites += unittest.makeSuite(DipoleFitAlgorithmTest)\n suites += unittest.makeSuite(DipoleFitTaskTest)\n suites += unittest.makeSuite(DipoleFitTaskEdgeTest)\n suites += unittest.makeSuite(lsst_tests.MemoryTestCase)\n return unittest.TestSuite(suites)", "def runTestSuites(self):\n \n self.testsuitesToXML()\n \n\n tss = []\n jobStatus = {}\n for t in self.testsuites:\n d = t.testsuitedir\n runner = os.path.join(self.basepath, 'testSuiteRunner.py')\n tdir = os.path.join(d, 'testsuite.out')\n cmd = 'python %s %s>& %s' % (runner, d,tdir)\n #print 'about to popen the cmd: %s' % cmd\n tss.append((t.name, popen2.Popen3(cmd)))\n jobStatus[t.name] = ('running', nowSecs())\n ntests = len(tss)\n printJobStatus(jobStatus)\n\n while tss:\n toRemove = [p for p in tss if p[1].poll() != -1]\n if toRemove:\n [tss.remove(p) for p in toRemove]\n for p in toRemove:\n jobStatus[p[0]] = ('completed', nowSecs())\n\n printJobStatus(jobStatus)\n time.sleep(10)\n\n print 'all %d tests have completed' % ntests", "def suite():\n tests.init()\n\n suites = []\n suites += unittest.makeSuite(AngleTestCase)\n suites += unittest.makeSuite(tests.MemoryTestCase)\n return unittest.TestSuite(suites)", "def __test_suites(self, report_url: str) -> Sequence[Element]:\n root = self.__element_tree(report_url)\n return root.findall('suite')", "def createTestSuite():\n import tests.functional.tests as functional\n return unittest.TestLoader().loadTestsFromModule(functional)", "def test_remove_all_pass(self):\n self.write_contents(\n 'external/wpt/variant.html.ini', \"\"\"\\\n [variant.html?foo=baz]\n [formerly failing subtest]\n expected: FAIL\n \"\"\")\n self.update({\n 'results': [{\n 'test':\n '/variant.html?foo=baz',\n 'status':\n 'OK',\n 'subtests': [{\n 'name': 'formerly failing subtest',\n 'status': 'PASS',\n 'message': None,\n 'expected': 'FAIL',\n 'known_intermittent': [],\n }],\n }],\n })\n self.assertFalse(self.exists('external/wpt/variant.html.ini'))", "def _get_java_test_health(java_ast: CompilationUnit) -> JavaTestHealth:\n annotation_counter = collections.Counter()\n\n java_classes: List[ClassDeclaration] = java_ast.types\n for java_class in java_classes:\n if any(annotation.name == _DISABLED_TEST_ANNOTATION\n for annotation in java_class.annotations):\n all_test_methods = _collect_all_test_methods(java_class)\n annotation_counter[_DISABLED_TEST_ANNOTATION] += len(\n all_test_methods)\n continue\n elif any(\n re.fullmatch(_DISABLE_IF_TEST_PATTERN, annotation.name)\n for annotation in java_class.annotations):\n all_test_methods = _collect_all_test_methods(java_class)\n annotation_counter[_DISABLE_IF_TEST_ANNOTATION] += len(\n all_test_methods)\n continue\n\n java_methods: List[MethodDeclaration] = java_class.methods\n for java_method in java_methods:\n annotation_counter.update(\n _count_annotations(java_method.annotations))\n\n return JavaTestHealth(\n java_package=_get_java_package_name(java_ast),\n disabled_tests_count=annotation_counter[_DISABLED_TEST_ANNOTATION],\n disable_if_tests_count=annotation_counter[_DISABLE_IF_TEST_ANNOTATION],\n tests_count=annotation_counter[_TEST_ANNOTATION])", "def assert_tests_stable(tests):\n return (stress_test(t) for t in tests)", "def makeSuite(self):\n expector = self._expector\n\n class TC(unittest.TestCase):\n def runTest(self):\n \"\"\"\n This method spams the logger from the expector from its base 
level\n up to CRITICAL with a message that is fairly easy to regex match in\n the routines from the methods in test_log_stream.py.\n \"\"\"\n import logging\n lg_name = expector.logger_name\n lg = logging.getLogger(lg_name)\n start_level = logging.getLevelName('DEBUG_9')\n end_level = logging.getLevelName('CRITICAL_0')\n for lvl in range(start_level, end_level):\n lg.log(lvl, 'MATCH-START %s %d(%s) MATCH-END',\n lg_name, lvl, logging.getLevelName(lvl))\n\n return [TC()]", "def suite():\n suite = unittest.TestSuite()\n suite.addTest(ServicesMenuDropdownListTestCase(\"testServicesMenuDropdownListItems\"))\n return suite", "def regression_suites(self):\n return [c for c in self.suites.all() if TestSuite.TS_REGRESSION == c.xtype]", "def regression_suites(self):\n return [c for c in self.suites.all() if TestSuite.TS_REGRESSION == c.xtype]", "def test_check(self):\n return self._testCheck()", "def testcase_unused(ident, args):\n\n tcases_in_use = set([])\n\n violations = []\n tsuites = _index(args.testsuites_root, \"TSUITE\")\n tcases = _index(args.testcases_root, \"TCASE\")\n\n for ts_fname in tsuites:\n ts_fpath = os.sep.join([args.testsuites_root, ts_fname])\n ts_lines_all = (l.strip() for l in open(ts_fpath).read().splitlines())\n ts_lines = (l for l in ts_lines_all if len(l) > 1 and l[0] != \"#\")\n\n for tc_fname in ts_lines:\n tcases_in_use.add(tc_fname)\n\n for tc_fname in sorted(list(tcases - tcases_in_use)):\n violations.append(MESSAGES[ident] % tc_fname)\n\n return violations", "def get_named_suites():\n\n # Skip \"with_server\" and \"no_server\" because they do not define any test files to run.\n executor_only = set([\"with_server\", \"no_server\"])\n suite_names = [suite for suite in resmokeconfig.NAMED_SUITES if suite not in executor_only]\n suite_names.sort()\n return suite_names", "def prepareTestRunner(self, runner):\n return WarningFilterRunner(runner)", "def test_suite():\n testSuite = unittest.TestSuite()\n testSuite.addTest(test_regulargrid(\"test_indexing\"))\n testSuite.addTest(test_regulargrid(\"test_interpolate\"))\n return testSuite", "def test_suite():\n\tsuite = unittest.TestSuite()\n\tsuite.addTest(unittest.makeSuite(TestPloneDbFormsManager))\n\treturn suite", "def test_command_filter(self, testsuite_pattern=\"*\", testcase_pattern=\"*\"):\n cmd = self.test_command()\n\n # TODO: although Hobbes-test can select tests to run by \"--tests\"\n # command line option, but can not select them during listing.\n # May need to implement this feature if needed, now we just run\n # the test as a whole, even only one suite is requested run.\n\n # if testsuite_pattern not in (\"*\", self._VERIFICATION_SUITE_NAME):\n # cmd.extend([\"--tests\", testsuite_pattern])\n\n # At the beginning no testcase exists in test suite\n if testcase_pattern not in (\"*\", self._VERIFICATION_TESTCASE_NAME):\n self.logger.user_info(\n 'Should run testcases in pattern \"%s\", but cannot run'\n \" individual testcases thus will run the whole test suite\",\n testcase_pattern,\n )\n\n return cmd", "def get_suite(self, name=\"_\"):\n return self.mod_suites[name]", "def testsuite():\n return unittest.TestLoader().discover(os.path.dirname(__file__))", "def make_testsuite(testsuite: Dict) -> NoReturn:\n # validate testsuite format\n load_testsuite(testsuite)\n\n testsuite_config = testsuite[\"config\"]\n testsuite_path = testsuite_config[\"path\"]\n testsuite_variables = convert_variables(\n testsuite_config.get(\"variables\", {}), testsuite_path\n )\n\n logger.info(f\"start to make testsuite: 
{testsuite_path}\")\n\n # create directory with testsuite file name, put its testcases under this directory\n testsuite_path = ensure_file_abs_path_valid(testsuite_path)\n testsuite_dir, file_suffix = os.path.splitext(testsuite_path)\n # demo_testsuite.yml => demo_testsuite_yml\n testsuite_dir = f\"{testsuite_dir}_{file_suffix.lstrip('.')}\"\n\n for testcase in testsuite[\"testcases\"]:\n # get referenced testcase content\n testcase_file = testcase[\"testcase\"]\n testcase_path = __ensure_absolute(testcase_file)\n testcase_dict = load_test_file(testcase_path)\n testcase_dict.setdefault(\"config\", {})\n testcase_dict[\"config\"][\"path\"] = testcase_path\n\n # override testcase name\n testcase_dict[\"config\"][\"name\"] = testcase[\"name\"]\n # override base_url\n base_url = testsuite_config.get(\"base_url\") or testcase.get(\"base_url\")\n if base_url:\n testcase_dict[\"config\"][\"base_url\"] = base_url\n # override verify\n if \"verify\" in testsuite_config:\n testcase_dict[\"config\"][\"verify\"] = testsuite_config[\"verify\"]\n # override variables\n # testsuite testcase variables > testsuite config variables\n testcase_variables = convert_variables(\n testcase.get(\"variables\", {}), testcase_path\n )\n testcase_variables = merge_variables(testcase_variables, testsuite_variables)\n # testsuite testcase variables > testcase config variables\n testcase_dict[\"config\"][\"variables\"] = convert_variables(\n testcase_dict[\"config\"].get(\"variables\", {}), testcase_path\n )\n testcase_dict[\"config\"][\"variables\"].update(testcase_variables)\n\n # override weight\n if \"weight\" in testcase:\n testcase_dict[\"config\"][\"weight\"] = testcase[\"weight\"]\n\n # make testcase\n testcase_pytest_path = make_testcase(testcase_dict, testsuite_dir)\n pytest_files_run_set.add(testcase_pytest_path)", "def tests(self):\n return [self]", "def clear(self):\n for test in self.tests:\n del test\n self.tests = []\n for suite in self.suites:\n suite.clear()\n del suite\n self.suites = {}", "def suite():\r\n\r\n current = os.path.dirname(os.path.realpath(__file__))\r\n top = os.path.normpath(os.path.join(current, \"..\", \"..\"))\r\n return unittest.TestLoader().discover(current, pattern='test_*.py', top_level_dir=top)", "def filterOneTest(self, test_name):\n super(VtsKernelLibcutilsTest, self).filterOneTest(test_name)\n asserts.skipIf(\n test_name.split('.')[0] not in self.include_test_suite,\n 'Test case not selected.')", "def GetHWTestSuite(self):\n hw_tests = self._run.config['hw_tests']\n if not hw_tests:\n # TODO(milleral): Add HWTests back to lumpy-chrome-perf.\n raise unittest.SkipTest('Missing HWTest for %s' % (self._bot_id,))\n\n return hw_tests[0]", "def get(self):\n all_suites = [s.to_dict() for s in TestSuiteModel.get_list()]\n return flask.Response(json.dumps(all_suites), mimetype=\"application/json\")", "def local():\n suite = ServiceTestSuite()\n suite.addTest(unittest.makeSuite(AmazonTestCase, 'test_local'))\n return suite", "def gen_suite(tests):\n cases = [gen_case(test) for test in tests]\n return {\n 'cases': cases,\n 'scored': True,\n 'setup': '',\n 'teardown': '',\n 'type': 'doctest'\n }", "def getShortCircuitTests(self):\n return self._ShortCircuitTests", "def suite():\n tests.init()\n\n suites = []\n suites += unittest.makeSuite(DeconvolvedPsfPhotometryTestCase)\n suites += unittest.makeSuite(tests.MemoryTestCase)\n return unittest.TestSuite(suites)", "def test_suite():\n testSuite = unittest.TestSuite()\n testSuite.addTest(test_spec(\"test_cmd_parser\"))\n return testSuite", 
"def get_untested(self):\n return [result for result in self.values() if result.outcome == Result.UNTESTED]", "def suite():\n \n return unittest.TestSuite([\n LocationClassTestCase,\n LocationInstanceTestCase,\n ])", "def test_cases(self) -> list[str]:\n cases = []\n for t in self._test_cases:\n if t not in cases:\n cases.append(t)\n return cases" ]
[ "0.63560736", "0.6105774", "0.59605616", "0.58906287", "0.588734", "0.58871996", "0.5851725", "0.58468413", "0.5832818", "0.5828046", "0.57777184", "0.57675064", "0.57567066", "0.5658222", "0.5646921", "0.56121224", "0.56025547", "0.56018823", "0.5576602", "0.5569374", "0.55601853", "0.5557575", "0.55571836", "0.5554578", "0.5551725", "0.5541882", "0.55130064", "0.5491539", "0.54873747", "0.5486702", "0.54800844", "0.5479035", "0.5478919", "0.54371035", "0.5403451", "0.5364012", "0.5362185", "0.53575736", "0.53546894", "0.53497946", "0.53324974", "0.5323218", "0.5322925", "0.530917", "0.5303777", "0.5279528", "0.5277912", "0.5275036", "0.5275036", "0.5259233", "0.52557075", "0.52505565", "0.52278507", "0.52155006", "0.5202967", "0.5201804", "0.52011573", "0.519582", "0.51847637", "0.5178609", "0.5161068", "0.51559645", "0.51400214", "0.51387876", "0.5127415", "0.51062083", "0.5101775", "0.509752", "0.5093967", "0.50924194", "0.50481105", "0.50478274", "0.5006351", "0.5003169", "0.4999043", "0.4999043", "0.4990883", "0.49830568", "0.4982714", "0.4975774", "0.49492043", "0.4942366", "0.4940752", "0.49324697", "0.49321", "0.49319333", "0.49215233", "0.49128067", "0.49047917", "0.4885849", "0.4883819", "0.4882211", "0.48753378", "0.487278", "0.4861672", "0.4854968", "0.48446515", "0.4842759", "0.48286685", "0.48141924" ]
0.7639181
0
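The record above and the one below describe the same `HealthCheckLoader` idea: walk a `unittest` suite and keep only the tests accepted by an `is_healthcheck` predicate. A minimal runnable sketch of that behavior follows; the marker-attribute predicate and the two sample test classes are illustrative assumptions, since the records only show the filtering methods themselves.

import unittest


class HealthCheckLoader(unittest.TestLoader):
    def is_healthcheck(self, test):
        # Assumed predicate: the records never show how a health check is
        # recognized, so this sketch uses a boolean class marker.
        return getattr(test, "healthcheck", False)

    def filter_suite(self, suite):
        # Same logic as the record above: recurse into nested suites and
        # copy across only the tests flagged as health checks.
        if isinstance(suite, unittest.TestSuite):
            suite_copy = self.suiteClass()
            for sub in suite:
                if isinstance(sub, unittest.TestSuite):
                    suite_copy.addTest(self.filter_suite(sub))
                else:
                    if self.is_healthcheck(sub):
                        suite_copy.addTest(sub)
        elif self.is_healthcheck(suite):
            suite_copy = suite.copy()
        return suite_copy


class PingHealthCheck(unittest.TestCase):
    healthcheck = True  # marker attribute read by is_healthcheck above

    def test_ping(self):
        self.assertTrue(True)


class RegularTest(unittest.TestCase):
    def test_regular(self):
        self.assertTrue(True)


loader = HealthCheckLoader()
mixed = unittest.TestSuite([PingHealthCheck("test_ping"),
                            RegularTest("test_regular")])
filtered = loader.filter_suite(mixed)
print(filtered.countTestCases())  # 1 -- only the health check remains

Because the filtered copy is built with the loader's `suiteClass`, the original suite is left untouched.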
Load healthchecks from TestCase.
def loadTestsFromTestCase(self, testCaseClass):
    suite = super(HealthCheckLoader, self).loadTestsFromTestCase(
        testCaseClass)
    return self.filter_suite(suite)
{
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
}
[ "def test_health_get(self):\n pass", "def test_lint(self):\n l = self.l\n l.loadTestsFromTestCase\n l.loadTestsFromModule\n l.loadTestsFromName\n l.loadTestsFromNames", "def loadTestsFromModule(self, module, *args, **kwargs):\n suite = super(HealthCheckLoader, self).loadTestsFromModule(\n module, *args, **kwargs)\n return self.filter_suite(suite)", "def test_load_testcase(self):\n tests = self.loader.load(\"tests.sampletest.hellotest.HelloTest\")\n self.assertEqual(len(tests), 1)\n from tests.sampletest.hellotest import HelloTest\n\n self.assertEqual(type(tests[0]), HelloTest)", "def _load(self):\n p = os.path.join(paths.setup_dir, 'system_health.yaml')\n if os.path.isfile(p):\n with open(p, 'r') as rfile:\n config = yaml.load(rfile)\n if config:\n self._values = config['values']\n self._conditionals = config['conditionals']\n\n general = config['general']\n self._limit = general['limit']", "def test_fake_health_get(self):\n pass", "def _load_tests(self):\n tests = {\"enabled\":defaultdict(list),\n \"disabled\":defaultdict(list)}\n\n for test_path, test_type, test in self.iter_tests():\n enabled = not test.disabled()\n if not self.include_https and test.environment[\"protocol\"] == \"https\":\n enabled = False\n key = \"enabled\" if enabled else \"disabled\"\n tests[key][test_type].append(test)\n\n self.tests = tests[\"enabled\"]\n self.disabled_tests = tests[\"disabled\"]", "def test_load(self):\n (spec, check) = bundylogging.load()\n # It returns the checking function\n self.assertEqual(check, bundylogging.check)\n # The plugin stores it's spec\n self.assertEqual(spec, bundylogging.spec)", "def test_check_health(self):\n cache = DummyCache()\n ok, msg = cache.check_health()\n self.assertTrue(ok)", "def loadTestsFromName(self, name, module=None):\n suite = super(HealthCheckLoader, self).loadTestsFromName(name, module)\n return self.filter_suite(suite)", "def test_health(self):\n self.assert_request('get', '/_health')", "def loadTestsFromNames(self, names, module=None):\n suite = super(HealthCheckLoader, self).loadTestsFromNames(names,\n module)\n return self.filter_suite(suite)", "def useFailures(self):\n self.setupTests(tests = self.failures)", "def deserialize(data):\n healthchecks = []\n if data is None:\n return []\n for k, v in data.iteritems():\n hc = HealthCheck()\n hc._HealthCheck__data = v\n hc.name = k\n hc.script = v.get(\"Script\", \"\")\n hc.interval = v.get(\"Interval\", 0)\n hc.timeout = v.get(\"Timeout\", 0)\n hc.kill_count_limit = v.get(\"KillCountLimit\", default[\"KillCountLimit\"])\n hc.kill_exit_codes = v.get(\"KillExitCodes\", default[\"KillExitCodes\"])\n healthchecks.append(hc)\n return healthchecks", "def test_health_check(self):\n self.url = reverse(\"health-check\")\n response = self.client.get(self.url, **self.auth_headers)\n self.assertEqual(200, response.status_code)", "def test_healthcheck(self):\n self.assertEqual(\"OK\", \"OK\")", "def test_health_check(self):\n result = self.app.get('/v1/health')\n\n # assert the status code of the response 200 (OK)\n self.assertEqual(result.status_code, 200)\n self.assertEqual(result.data, b'UP')", "def test_health(self):\n res = self.client().get('/')\n data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertIn('health', data)\n self.assertEqual(data['health'], 'Running!!')", "def test_customize_test_loads(self):\n self.create_user_with_role(\n self.user.name, self.user.email, self.user.password, Role.tester)\n self.create_forktest(\"own-fork-commit\", TestPlatform.linux, 
regression_tests=[2])\n self.create_completed_regression_t_entries(3, [2])\n response = self.app.test_client().get('/test/3')\n self.assertEqual(response.status_code, 200)\n self.assert_template_used('test/by_id.html')\n regression_tests = RegressionTest.query.all()\n self.assertIn(regression_tests[1].command, str(response.data))\n self.assertNotIn(regression_tests[0].command, str(response.data))", "def test_load_testcase_in_module(self):\n tests = self.loader.load(\"tests.sampletest.InitTest\")\n self.assertEqual(len(tests), 1)\n from tests.sampletest import InitTest\n\n self.assertEqual(type(tests[0]), InitTest)", "def test_health_checks_constructed(self):\n\n node = Node(\n {\n 'healthchecks': [\n {\n 'command': '/some/basic/example',\n 'on_failure': None,\n 'on_failure_even_if_security_violation': False\n },\n\n {\n 'command': '/some/basic/example',\n 'on_failure': '/some/rescue-command',\n 'on_failure_even_if_security_violation': True\n },\n\n {\n 'command': '/some/basic/example'\n }\n ]\n },\n {},\n mock.Mock()\n )\n\n self.assertEqual(3, len(node.get_health_checks()))", "def healthcheck(parameters): \n\n print(\"In healthcheck module\")", "def test_get_hyperflex_health_list(self):\n pass", "def test_simple_health_check(self):\n response = self.client.open(\n '/awadallah/VaultsManager/1.0.0/health',\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_healthz(client):\n response = client.get(\"/healthz\")\n assert response.status_code == 200", "def test_health_endpoint(self):\n url = f\"{BASE_URL}/health\"\n response = requests.get(url)\n response_json = response.json()\n assert response.status_code == 200\n assert response_json['status'] == 200", "async def test_health_check(client: AsyncClient):\n\n response = await client.get(f\"/health-check\")\n assert response.status_code == 200\n\n data = response.json()\n assert data[\"service\"][\"status\"] == \"healthy\"\n assert data[\"service\"][\"error\"] is None\n assert data[\"database\"][\"status\"] == \"healthy\"\n assert data[\"database\"][\"error\"] is None", "def load_regression_tests():\n with open(TEST_RESOURCES_DIR / \"regression_vault.pickle\", \"rb\") as p:\n tests = pickle.load(p)\n\n return tests", "def test_health_monitor_basic(self):\n self._create_servers()\n self._start_servers()\n self._create_load_balancer()\n self._create_health_monitor()\n self._check_load_balancing()\n # stopping the primary server\n self._stop_server()\n # Asserting the traffic is sent only to the secondary server\n self._traffic_validation_after_stopping_server()", "def load_status_table():", "def _load_test_data(self):\n self._save_test_data()", "async def test_health():\n response = health()\n assert response\n assert {'status': 'ok'} == response", "def setUpClass(cls):\n r = http.get(urljoin(cls.uri, '/api/status'))\n print r.status_code, r.text\n assert http.get(urljoin(cls.uri, '/api/status')).status_code == 200, '/api/status not returning 200'", "def test_get_healthz(self):\n response = self.client.open(\n '/v1/healthz',\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def healthcheck(self):\n url = urljoin(self.url, \"/.well-known/healthcheck.json\")\n r = requests.get(url)\n return r.json()", "def check(job, logger, **kwargs):\n resources = Resource.objects.filter(\n attributes__field__name=\"health_check_config\",\n lifecycle='ACTIVE'\n ).distinct()\n set_progress(\n f\"Will run health checks for {resources.count()} 
resource(s): \"\n f\"{[resource.name for resource in resources]}\")\n\n check_results = []\n\n for resource in resources:\n logger.info(f\"Will run health checks for resource '{resource.name}'.\")\n config_dict = get_config_value(resource)\n failing_health_checks = 0\n\n # Run all the health checks configured for this resource.\n for health_check in config_dict.get('health_checks', {}):\n max_retries = health_check.get('max_retries', 3)\n retry_interval_seconds = health_check.get('retry_interval_seconds', 1)\n\n name = health_check.get('name')\n job.set_progress(f\"Beginning health check '{name}'.\")\n url = health_check.get('url')\n accepted_statuses = health_check.get('accepted_status_codes')\n timeout_seconds = health_check.get('timeout_seconds', 3)\n\n retry_attempts = 0\n while retry_attempts <= max_retries:\n try:\n if retry_attempts > 1:\n logger.info(f\"On retry attempt {retry_attempts}.\")\n status_code = requests.get(url, timeout=timeout_seconds).status_code\n\n if accepted_statuses and status_code not in accepted_statuses:\n # Failure.\n msg = (\n f\"HTTP Request returned {status_code}, \"\n f\"which is not in the accepted statuses: {accepted_statuses}\"\n f\"for health check '{name}'.\"\n )\n logger.debug(msg)\n retry_attempts += 1\n else:\n # Pass - We got a valid status. We can stop now.\n logger.info(f\"Health check '{name}' completed with success.\")\n break\n\n except Exception as e:\n # Bad, could be ConnectionError, which will count as a failure.\n logger.debug(e)\n retry_attempts += 1\n\n # Wait for the specified retry interval before trying again\n time.sleep(retry_interval_seconds)\n\n if retry_attempts == max_retries:\n job.set_progress(f\"Max retries exceeded for health check '{name}'.\")\n failing_health_checks += 1\n\n # Summarize this resource's health check results.\n data_dict = {\n 'time': datetime.datetime.now(),\n 'resource_id': resource.id,\n 'resource_name': resource.name,\n 'failing_health_checks': failing_health_checks,\n }\n\n check_results.append(data_dict)\n\n context = {\n \"health_check_results\": check_results,\n }\n\n # Return the dict to be processed by the \"Then\" action\n return 'SUCCESS', '', '', {'context': context}", "def test_health(self) -> None:\n self._response = self._app.get('/health')\n\n self.assertEqual(self._response.status, '200 OK')", "async def healthcheck(self):\n for service in self.services:\n await service.healthcheck()", "def _loadTest(self, features, labels):\n\t\tself.testX_, self.testY_, self.testLabel_ = self.__load(features, labels)", "def load_tests(test_dir, case_map):\n if not os.path.isdir(test_dir):\n raise exceptions.OkException(\n 'Assignment must have a {} directory'.format(test_dir))\n info_file = os.path.join(test_dir, INFO_FILE)\n if not os.path.isfile(info_file):\n raise exceptions.OkException(\n 'Directory {} must have a file called {}'.format(\n test_dir, INFO_FILE))\n sys.path.insert(0, os.path.abspath(test_dir))\n assignment = _get_info(case_map)\n _get_tests(test_dir, assignment, case_map)\n return assignment", "def load_tests(loader, tests, pattern):\n start_time = time.time() # 测试启动的时刻点\n suite = unittest.defaultTestLoader.discover(CASE_PATH, pattern='test_*.py')\n if config.SWITCH:\n test_result = unittest.TextTestRunner().run(suite) # 运行测试套件,并返回测试结果\n total_time = time.time() - start_time # 测试过程整体的耗时\n test_res_dict = dict_encode_test_results(\n test_result,\n run_time=total_time,\n pro_version='1.0' # 当前被测试的系统的版本号,依据目前系统的信息,如果服务端提供接口,则可以做成自动化的\n )\n test_report = TestReport()\n auth_res = 
test_report.get_api_auth()\n if auth_res:\n test_report.post_unit_test_data(test_res_dict)\n else:\n raise PermissionError('auth error...')\n else:\n with open(REPORT_PATH, 'wb') as files:\n runner = BSTestRunner.BSTestRunner(\n files,\n title='TestReport_{0}'.format(int(time.time())),\n description=u'自动化生成用例测试'\n )\n runner.run(suite)", "def pre_loadbalancer_healthmonitor_create(self, resource_dict):\n pass", "def test_002_load_data(self):\n __test = chess_storage.ChessStorage()\n __test_filename = consts.TEST_FILENAME\n __test_data = list(range(consts.TEST_LIST_LENGHT))\n __load_test = __test.load_data(consts.TEST_NOTEXISTFILE)\n self.assertEqual(__load_test, consts.ERROR_CODES[\"NO_FILE_EXIST\"])\n __load_test = __test.load_data(__test_filename)\n self.assertEqual(__load_test, __test_data)", "def setUp(self):\n sys.path.append(file_path)\n global alert\n\n import alert_checkpoint_time as alert\n global configs\n configs = { \"{{hdfs-site}}\" : {'dfs.namenode.http-address' : 'c6401.ambari.apache.org:50470',\n 'dfs.http.policy': 'HTTP_ONLY',\n 'dfs.namenode.checkpoint.period': 100,\n 'security_enabled': 'false',\n 'dfs.namenode.checkpoint.txns': 100},\n '{{hdfs-site/dfs.namenode.http-address}}': 'c6401.ambari.apache.org:50470',\n '{{hdfs-site/dfs.http.policy}}': 'HTTP_ONLY',\n '{{hdfs-site/dfs.namenode.checkpoint.period}}': 100,\n '{{cluster-env/security_enabled}}': 'false',\n '{{hdfs-site/dfs.namenode.checkpoint.txns}}': 100,\n }\n global parameters\n parameters = {\n 'connection.timeout': 200.0,\n 'checkpoint.time.warning.threshold': 2.0,\n 'checkpoint.time.critical.threshold': 4.0,\n 'checkpoint.txns.multiplier.warning.threshold': 2.0,\n 'checkpoint.txns.multiplier.critical.threshold': 4.0\n }", "def test_health_endpoint(client):\n\n result = client.get('/health')\n\n assert result.status_code == 200\n assert result.json == {'status': 'Ok'}", "async def _load_hsm_status(self) -> None:\n hsm: Dict[str, str] = await self._api_request(\"hsm\")\n _LOGGER.debug(\"Loaded hsm status\")\n self._hsm_status = hsm[\"hsm\"]", "def load_settings(self, config):\n self.exceptions = read_or_default(config, 'General', 'karma.exceptions',\n [], lambda val: val.split())", "def test_init_client(self):\n # TODO: dynamically importing dependancies from the file tested\n self.assertIn(\n \"describe_trusted_advisor_check_result\", dir(self.subclass.client)\n )", "def testLoadConfiguration(self):\n loader = Loader()\n loader.loadFromDirectory(self.__exampleDirectory)\n\n self.assertEqual(len(loader.taskHolders()), 1)\n\n self.assertEqual(\n os.path.basename(loader.taskHolders()[0].var('contextConfig')),\n 'config.hjson'\n )", "def init_failed_tests_dict():\n global g_failed_test_info_dict\n g_failed_tests_info_dict[\"TestName\"] = []\n g_failed_tests_info_dict[\"TestInfo\"] = []", "def load_data(self):\n try:\n data = etree.parse(self.resultfilename).getroot()\n except OSError:\n data = []\n\n testresults = []\n for testcase in data:\n category = Category.OK\n status = 'ok'\n module = testcase.get('classname')\n name = testcase.get('name')\n message = ''\n time = float(testcase.get('time'))\n extras = []\n\n for child in testcase:\n if child.tag in ('error', 'failure', 'skipped'):\n if child.tag == 'skipped':\n category = Category.SKIP\n else:\n category = Category.FAIL\n status = child.tag\n type_ = child.get('type')\n message = child.get('message', default='')\n if type_ and message:\n message = '{0}: {1}'.format(type_, message)\n elif type_:\n message = type_\n if child.text:\n extras.append(child.text)\n 
elif child.tag in ('system-out', 'system-err'):\n if child.tag == 'system-out':\n heading = _('Captured stdout')\n else:\n heading = _('Captured stderr')\n contents = child.text.rstrip('\\n')\n extras.append('----- {} -----\\n{}'.format(heading,\n contents))\n\n extra_text = '\\n\\n'.join(extras)\n testresults.append(\n TestResult(category, status, name, module, message, time,\n extra_text))\n\n return testresults", "def main(*args, **kwargs):\n if 'TEST_SHARD_STATUS_FILE' in os.environ:\n try:\n f = None\n try:\n f = open(os.environ['TEST_SHARD_STATUS_FILE'], 'w')\n f.write('')\n except IOError:\n sys.stderr.write('Error opening TEST_SHARD_STATUS_FILE (%s). Exiting.'\n % os.environ['TEST_SHARD_STATUS_FILE'])\n sys.exit(1)\n finally:\n if f is not None: f.close()\n\n if ('TEST_TOTAL_SHARDS' not in os.environ or\n 'TEST_SHARD_INDEX' not in os.environ):\n return unittest_main(*args, **kwargs)\n\n total_shards = int(os.environ['TEST_TOTAL_SHARDS'])\n shard_index = int(os.environ['TEST_SHARD_INDEX'])\n base_loader = TestLoader()\n\n delegate_get_names = base_loader.getTestCaseNames\n bucket_iterator = itertools.cycle(range(total_shards))\n\n def getShardedTestCaseNames(testCaseClass):\n filtered_names = []\n for testcase in sorted(delegate_get_names(testCaseClass)):\n bucket = next(bucket_iterator)\n if bucket == shard_index:\n filtered_names.append(testcase)\n return filtered_names\n\n # Override getTestCaseNames\n base_loader.getTestCaseNames = getShardedTestCaseNames\n\n kwargs['testLoader'] = base_loader\n unittest_main(*args, **kwargs)", "def testLoadConfigs(self):\n config_path = GetTestFilePath('unified_lab_config/valid_lab/hosts')\n pool = lab_config.UnifiedLabConfigPool(config_path)\n pool.LoadConfigs()\n self.assertIsNotNone(pool.GetLabConfig())\n self.assertIsNotNone(pool.GetHostConfigs('postsubmit'))\n self.assertIsNotNone(pool.GetHostConfigs('crystalball'))\n self.assertIsNotNone(pool.GetHostConfigs('crystalball-power'))", "def test_import(self):\n self.assertTrue(NagiosPerfdataCollector)", "def test_load(self):\n command = constituencies.Command()\n command.handle('load', silent=True)", "def test_load_stats(self):\n data = {'a': 'b'}\n stats_file = self._write('stats.json', data)\n settings = {\n 'webpack.stats_file': stats_file,\n }\n state = WebpackState(settings)\n stats = state.load_stats()\n self.assertEqual(stats, data)", "def setUp(self) -> None:\n print(\"testing Deaths Class...\")\n self.data_handler_1 = self._init_mocked_data_handler(json_file_path=\"json_files/deaths_mocked_data.json\",\n resource_id_enum=ResourceId.DEATHS_DATA_RESOURCE_ID)\n self._check_base_step_of_all_methods(data_handler=self.data_handler_1, class_type=Deaths)", "def test_missing_stats(self):\n state = WebpackState({})\n with self.assertRaises(IOError):\n state.load_stats()", "def test_get_scenarios(self):\n pass", "def health_check(task_service_id):\n logger.info(f\"Checking task service status for {task_service_id}\")\n task_service = TaskService.objects.get(kf_id=task_service_id)\n task_service.refresh_from_db()\n task_service.health_check()", "def pre_loadbalancer_healthmonitor_read(self, resource_id):\n pass", "def loadTestsFromTestCase(self, testCaseClass):\r\n if issubclass(testCaseClass, suite.TestSuite):\r\n raise TypeError(\"Test cases should not be derived from TestSuite.\"\r\n \" Maybe you meant to derive from TestCase?\")\r\n testCaseNames = self.getTestCaseNames(testCaseClass)\r\n if not testCaseNames and hasattr(testCaseClass, 'runTest'):\r\n testCaseNames = ['runTest']\r\n 
loaded_suite = self.suiteClass(map(testCaseClass, testCaseNames))\r\n return loaded_suite", "def load_test_bots(self):\n records = [\n ('0.0.0.0', self.create_past_date(5), self.create_past_date(4), self.create_past_date(3), 32, 'A status message'),\n ('0.0.0.1', self.create_past_date(0), self.create_past_date(0), self.create_past_date(0), 32, 'A status message'),\n ('0.0.0.2', self.create_past_date(7), self.create_past_date(6), self.create_past_date(5), 32, 'A status message'),\n ]\n for record in records:\n self.db_mgr.exec_cmd('''insert into bot_status (ip, last_startup_time, \n last_activity_time, last_shutdown_time,\n port, message) VALUES (%s, %s, %s, %s, %s, %s)''',\n *record)\n return [r[0] for r in records]", "def load_tests(loader, suite, patterns):\n\n # Try to add vagrant functional tests\n from .vagrant import base_boxes, VagrantFunctionTestCase, VagrantTestSuite\n boxes = base_boxes()\n if boxes:\n vagrant_suite = VagrantTestSuite(boxes)\n\n # Add a test case for each task in each fabfile\n fabfiles = os.path.join(os.path.dirname(__file__), 'fabfiles')\n for filename in sorted(os.listdir(fabfiles)):\n if fnmatch.fnmatch(filename, '[!_]*.py'):\n fabfile = os.path.join(fabfiles, filename)\n _, tasks, _ = load_fabfile(fabfile)\n for task in tasks.values():\n test = VagrantFunctionTestCase(task,\n description=short_doc(task))\n vagrant_suite.addTest(test)\n\n suite.addTest(vagrant_suite)\n\n return suite", "def test_dbhealth_check(client):\n res = client.get(\"/v0/dbcheck\")\n assert res.data == b\"Comet-API-v0\"", "def test_load_yaml_def(self):\n la_provider = self.la_provider\n with self.assertRaises((MsticpyException, ValueError)) as cm:\n file_path = Path(_TEST_DATA, \"data_q_meta_fail.yaml\")\n la_provider.import_query_file(query_file=file_path)\n self.assertIn(\"no data families defined\", str(cm.exception))\n\n with self.assertRaises((MsticpyException, ValueError)) as cm:\n file_path = Path(_TEST_DATA, \"data_q_source_fail_param.yaml\")\n la_provider.import_query_file(query_file=file_path)\n self.assertIn(\"Missing parameters are\", str(cm.exception))\n\n with self.assertRaises((MsticpyException, ValueError)) as cm:\n file_path = Path(_TEST_DATA, \"data_q_source_fail_type.yaml\")\n la_provider.import_query_file(query_file=file_path)\n self.assertIn(\"Parameters with missing types\", str(cm.exception))\n\n before_queries = len(list(la_provider.list_queries()))\n file_path = Path(_TEST_DATA, \"data_q_success.yaml\")\n la_provider.import_query_file(query_file=file_path)\n\n self.assertEqual(before_queries + 3, len(list(la_provider.list_queries())))", "def test_bad_status(self):\n self.stats['status'] = 'wat'\n with self.assertRaises(RuntimeError):\n self.webpack.get_bundle('main')", "def suite(self):\n return TestLoader().loadTestsFromTestCase(SourcehandlerTest)", "def test_all_http_stats(self):\n client = Client()\n response = client.get(reverse('home'))\n self.assertEqual(200, response.status_code)\n response = client.get(reverse('browse_produce'))\n self.assertEqual(200, response.status_code)\n response = client.get(reverse('browse_locations'))\n self.assertEqual(200, response.status_code)\n response = client.get(reverse('search'))\n self.assertEqual(200, response.status_code)\n response = client.get(reverse('faq'))\n self.assertEqual(200, response.status_code)", "def test_run_all_sql_data_checks(self):\n\n # we should run all the data checks through the main function to support\n # different sql data checker classes we may create in the future\n results = 
sql_data_checker.main()\n\n # if any data check threw an exception, its value in the dict will be None\n failed_data_check_ids = []\n for data_check_type in sorted(results, key=lambda key: key.data_check_type_id):\n if results[data_check_type] is None:\n failed_data_check_ids.append(data_check_type.data_check_type_id)\n\n # I want it to display all failed checks so I'm not doing a self.assertEqual(0, len(failed_data_check_ids))\n if len(failed_data_check_ids) != 0:\n self.fail('Failed SQL Data Check IDs: %s' % [str(s) for s in failed_data_check_ids])", "def __init__(self, applogger, function_name, client_id, client_secret) -> None:\n super(HealthCollector, self).__init__(applogger, function_name, client_id, client_secret)\n self.health_table_name = HEALTH_TABLE_NAME\n self.state = StateManager(\n connection_string=self.connection_string, file_path=\"health\"\n )", "def test_loader(cls):\r\n return _test_loader_factory(cls)", "def test_list_alerts(self):\n pass", "def loadTest (self, testId, testName):\n self.bigTr = False\n self.bigCache = { 'index': 0, 'testId': '', 'testName': '', \"nb-total\": 0 }\n self.logsItem.disableControls()\n \n if sys.version_info < (3,):\n testId = \"%s\" % testId\n testName = \"%s\" % testName\n \n # new in v12.1\n # reload all events from selected testcase each time for local testresult only\n if self.local:\n del self.scriptEvents\n self.scriptEvents = {}\n # end of new\n \n self.graphView.reset()\n self.resumeView.reset()\n self.logsView.reset()\n \n if self.isLoading:\n return\n \n self.isLoading = True\n \n if not self.isRunning:\n self.hexLogsView.reset() # fix bad behaviour to be confirm\n \n # new in v11.2\n self.logsView.setExpectedEventId(testId)\n\n if testId not in self.scriptEvents: \n if self.local and self.headerReady: \n self.setCursor(QCursor(Qt.BusyCursor) )\n self.loadLocalData(testId=testId, testName=testName)\n\n if testId in self.scriptEvents:\n nbMax = len(self.scriptEvents[ testId ])\n\n self.trace(\"test result: nb event detected in testcase %s\" % (nbMax) )\n \n self.logsItem.progressBar.setMaximum( nbMax )\n if len(self.scriptEvents[ testId ]) > 200:\n self.setCursor(QCursor(Qt.BusyCursor) )\n\n # add limitation to avoid freeze, new in v16\n if not self.isRunning:\n if len(self.scriptEvents[ testId ]) > int(Settings.instance().readValue( key = 'TestRun/chunk-events' )): \n nbMax = int(Settings.instance().readValue( key = 'TestRun/chunk-events' ))\n self.bigTr = True\n self.logsItem.progressBar.setMaximum( nbMax )\n \n i = 0\n for evt in self.scriptEvents[ testId ][:nbMax]:\n # add progress bar\n i += 1\n self.logsItem.progressBar.setValue(i)\n \n # exit loop if the expected event id changed\n if self.logsView.getExpectedEventId() != testId:\n break\n \n # load data\n if Settings.instance() is None:\n break\n \n row_pos = self.logsView.addEvent( event = evt, ihmId=i)\n self.resumeView.addEvent( event = evt, rowp = row_pos, ihmId=i )\n\n self.setCursor(QCursor(Qt.ArrowCursor) )\n self.isLoading = False\n \n if self.bigTr:\n # memorize somes informations\n self.bigCache['index'] = i\n self.bigCache['testId'] = testId\n self.bigCache['testName'] = testName\n self.bigCache['nb-total'] = len(self.scriptEvents[ testId ])\n \n self.logsItem.enableControls()\n self.parent.showMessageTray( msg='Test %s is truncated...' 
% self.name )", "def test_load_config_safe(self):\n self.__test_load_config_safe(\".scuba.yml\")", "def setUp(self):\n client = utils.create_test_datastore_client()\n self.resource = import_attempt.ImportAttemptByID(client)\n list_resource = import_attempt_list.ImportAttemptList(client)\n run_list_resource = system_run_list.SystemRunList(client)\n attempts = [{\n _ATTEMPT.provenance_url: 'google.com'\n }, {\n _ATTEMPT.provenance_url: 'facebook.com'\n }, {\n _ATTEMPT.provenance_url: 'bing.com'\n }]\n self.attempts = utils.ingest_import_attempts(run_list_resource,\n list_resource, attempts)", "def _test(\n self,\n headers: list,\n expected_warning: str = WARN_UNKNOWN,\n status_code: str = \"403\",\n disable_hshc: bool = False,\n ):\n head = [\n (\":scheme\", \"https\"),\n (\":method\", \"GET\"),\n ]\n head.extend(headers)\n\n client = self.base_scenario(\n frang_config=\"http_strict_host_checking true;\",\n requests=[head],\n disable_hshc=disable_hshc,\n )\n self.check_response(client, status_code=status_code, warning_msg=expected_warning)", "def test_get_stats(self):\n pass", "def health_check():\n return dict(api_status='OK')", "def _load_test_configs(filename, required_keys):\n # type: (str, List[str]) -> List[Dict]\n with open(filename, 'r') as f:\n tests = json.loads(f.read())\n _validate_test_configs(tests, filename, required_keys)\n return tests", "def setUpClass(cls):\n super(TestPatientStatsHistory, cls).setUpClass()\n cls.stats_data = {\n \"num_patients_visited\": 1,\n \"num_patients_home_quarantine\": 2,\n \"num_patients_isolation\": 3,\n \"num_patient_referred\": 4,\n }", "def test_load_avg_1():\n result = _run_metric('load_avg_1')\n assert result.exit_code == 0", "def _test1():\n logging.info(\"### TEST1 ###\")\n mainDirs = \"../../../../test/config\"\n conf = Conf()\n conf.load(\"status_log.properties\", mainDirs)\n conf.dump()\n\n # Default. No connection information\n sl0 = StatusLog()\n sl0.dump()\n \n # Use init() to initialize\n sl1 = StatusLog()\n sl1.init(host='nn02.corp.xad.com', port=3306, user='etl', password='foobar',\n dbname='xad_etl')\n sl1.dump()\n \n # Use conf to initialize. Prefix = 'status_log'\n sl2 = StatusLog(conf)\n sl2._checkInit()\n sl2.dump()\n \n # Use conf to initialize. 
Prefix = 'status_log_local'\n sl3 = StatusLog()\n sl3.confInit(conf, prefix='status_log_local')\n sl3.dump()\n \n #sl3.addStatus('TEST/statuslog_py/hourly', '2017/01/22/11')\n #sl3.addStatus('TEST/statuslog_py/DAILY', '2017/01/22')\n\n sl3._selectAll(50);\n \n _testStatus(sl3, 'TEST/statuslog_py/hourly', '2017/01/22/11')\n _testStatus(sl3, 'TEST/statuslog_py/hourly', '2017/01/22/12')", "def test_import_all_tests():\n with temporary_dir() as output_dir:\n copyfile(\n TEST_ROBOT_OUTPUT_FILES / \"robot_with_failures.xml\",\n Path(output_dir) / \"output.xml\",\n )\n robot_importer.import_robot_test_results(FlowTaskFactory(), output_dir)\n\n failing_test_results = models.TestResult.objects.filter(outcome=\"Fail\")\n passing_test_results = models.TestResult.objects.filter(outcome=\"Pass\")\n assert len(failing_test_results) == 3\n assert len(passing_test_results) == 1", "def test_check_health_success(self):\n ok, msg = self.db.check_health()\n self.assertTrue(ok)", "def test_check_health_success(self):\n ok, msg = self.db.check_health()\n self.assertTrue(ok)", "def test_check_health_success(self):\n ok, msg = self.db.check_health()\n self.assertTrue(ok)", "def load_test_users():\n return [load_test_angel(), load_test_troublemaker(), load_test_rebel()]", "def load(self):\n the_redis = DARedis()\n cases = the_redis.get_data(self.user_cases_key) or {}\n self.cases = cases", "def test_dashboard_is_up(dashboard_address):\n response = requests.get(f\"{dashboard_address}/health\")\n assert response.status_code == 200\n assert response.text == \"ok\"", "def test_load_file_contents():\n\n file_name = 'test_fooof_all'\n loaded_data = load_json(file_name, TEST_DATA_PATH)\n\n # Check settings\n for setting in OBJ_DESC['settings']:\n assert setting in loaded_data.keys()\n\n # Check results\n for result in OBJ_DESC['results']:\n assert result in loaded_data.keys()\n\n # Check results\n for datum in OBJ_DESC['data']:\n assert datum in loaded_data.keys()", "def test_load_avg_5():\n result = _run_metric('load_avg_5')\n assert result.exit_code == 0", "def test_get_status(self):\n pass", "def test_get_status(self):\n pass", "def load(self,streamdata,appID=None):\n if appID is None: appID = 0\n healthrules = self.__parse_healthrules_XML(streamdata)\n if len(healthrules) == 0:\n try:\n healthrules = json.loads(streamdata)\n except TypeError as error:\n sys.stderr.write(\"load_health_rule: \"+str(error)+\"\\n\")\n return 0\n\n if type(healthrules) is dict:\n self.entityDict.update({str(appID):[healthrules]})\n else:\n self.entityDict.update({str(appID):healthrules})\n\n return len(healthrules)", "def _to_test_health_dicts(test_health_list: List[TestHealthInfo]\n ) -> List[JsonSafeDict]:\n java_test_health_list = []\n for test_health in test_health_list:\n if test_health.java_test_health:\n java_test_health_list.append(_to_test_health_dict(test_health))\n else:\n logging.warning(\n f'Skipped non-Java test \"{test_health.test_name}\"; currently'\n 'only Java tests are supported.')\n\n return java_test_health_list", "def load(cfg):\n\n checks = collections.OrderedDict()\n if cfg and cfg.get('checks', None):\n for name, options in cfg['checks'].iteritems():\n check_type = options.get('type', 'command')\n\n if not options:\n check_type = 'override'\n\n if check_type not in _types:\n msg = \"unknown check type '{}'\".format(check_type)\n raise CheckInvalid(msg)\n\n checks[name] = _types[check_type](name=name, **options)\n\n return checks", "def test_compute_glycemic_load(self):\n pass", "def setUpClass(cls):\n cls.data = 
get_image_dict()\n cls.resource = BossResourceBasic(cls.data)\n\n cls.config = load_test_config_file()\n\n cls.state_client = redis.StrictRedis(host=cls.config[\"aws\"][\"cache-state\"], port=6379, db=1,\n decode_responses=False)\n\n cls.config_data = {\"state_client\": cls.state_client}", "def before_each_test(self, request):\n self.test_counter = Counter()\n self.check_ref = request.config.getvalue(\"check_ref\")\n self.create_ref = request.config.getvalue(\"create_ref\")" ]
[ "0.6312396", "0.62286896", "0.61631715", "0.6092404", "0.6075559", "0.60417944", "0.6005269", "0.6000439", "0.5768636", "0.57478154", "0.57459795", "0.57209116", "0.5693739", "0.5679516", "0.56593746", "0.56551856", "0.56339574", "0.55915415", "0.55771744", "0.5574354", "0.5552758", "0.55081075", "0.54977244", "0.5481446", "0.5387548", "0.53715044", "0.5365683", "0.53294945", "0.5291307", "0.5280294", "0.52792764", "0.5273942", "0.5243651", "0.5242761", "0.5241945", "0.52365804", "0.52276045", "0.52193916", "0.5219094", "0.52180845", "0.52168137", "0.52006966", "0.5191119", "0.5183683", "0.5174619", "0.51546896", "0.51508266", "0.514643", "0.5144289", "0.51144725", "0.5094968", "0.5069582", "0.5058101", "0.5050754", "0.5048542", "0.50378203", "0.50344473", "0.5032149", "0.50252146", "0.50220954", "0.5015213", "0.5003044", "0.50028414", "0.49976602", "0.49918786", "0.4981089", "0.49779788", "0.49660608", "0.49574405", "0.49397564", "0.4937477", "0.49345526", "0.49326172", "0.49247032", "0.49243286", "0.49232224", "0.4923164", "0.49155912", "0.4903728", "0.48963717", "0.48942664", "0.48933414", "0.48881382", "0.48864576", "0.4884312", "0.4884312", "0.4884312", "0.48826605", "0.48811588", "0.488008", "0.48784643", "0.48711944", "0.48673028", "0.48673028", "0.486459", "0.48644495", "0.4861914", "0.4860061", "0.48576644", "0.48558608" ]
0.68875015
0
Load healthchecks from module.
def loadTestsFromModule(self, module, *args, **kwargs):
    suite = super(HealthCheckLoader, self).loadTestsFromModule(
        module, *args, **kwargs)
    return self.filter_suite(suite)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def healthcheck(parameters): \n\n print(\"In healthcheck module\")", "def _load(self):\n p = os.path.join(paths.setup_dir, 'system_health.yaml')\n if os.path.isfile(p):\n with open(p, 'r') as rfile:\n config = yaml.load(rfile)\n if config:\n self._values = config['values']\n self._conditionals = config['conditionals']\n\n general = config['general']\n self._limit = general['limit']", "def loadTestsFromName(self, name, module=None):\n suite = super(HealthCheckLoader, self).loadTestsFromName(name, module)\n return self.filter_suite(suite)", "def loadTestsFromNames(self, names, module=None):\n suite = super(HealthCheckLoader, self).loadTestsFromNames(names,\n module)\n return self.filter_suite(suite)", "def test_load(self):\n (spec, check) = bundylogging.load()\n # It returns the checking function\n self.assertEqual(check, bundylogging.check)\n # The plugin stores it's spec\n self.assertEqual(spec, bundylogging.spec)", "def load_module(name):\n return __import__(\"metaswitch.%s\" % name,\n fromlist=[\"ROUTES\"])", "def load_module(self, module_name): # pragma: no cover\r\n try:\r\n module = import_module('SoftLayer.CLI.modules.%s' % module_name)\r\n for _, obj in inspect.getmembers(module):\r\n if inspect.isclass(obj) and issubclass(obj, CLIRunnable):\r\n self.add_plugin(obj)\r\n return module\r\n except ImportError:\r\n raise InvalidModule(module_name)", "def load_module(module):\n try:\n return import_module(module)\n except ImportError:\n sys.stderr.write('Unable to load the module: %s.\\n' % module)\n exit(-1)", "def test_health_get(self):\n pass", "def healthcheck(self):\n url = urljoin(self.url, \"/.well-known/healthcheck.json\")\n r = requests.get(url)\n return r.json()", "async def _load_hsm_status(self) -> None:\n hsm: Dict[str, str] = await self._api_request(\"hsm\")\n _LOGGER.debug(\"Loaded hsm status\")\n self._hsm_status = hsm[\"hsm\"]", "def test_lint(self):\n l = self.l\n l.loadTestsFromTestCase\n l.loadTestsFromModule\n l.loadTestsFromName\n l.loadTestsFromNames", "def loadModule(*args, allModules: bool=True, load: AnyStr=\"\", scan: bool=True,\n **kwargs)->List[AnyStr]:\n pass", "def deserialize(data):\n healthchecks = []\n if data is None:\n return []\n for k, v in data.iteritems():\n hc = HealthCheck()\n hc._HealthCheck__data = v\n hc.name = k\n hc.script = v.get(\"Script\", \"\")\n hc.interval = v.get(\"Interval\", 0)\n hc.timeout = v.get(\"Timeout\", 0)\n hc.kill_count_limit = v.get(\"KillCountLimit\", default[\"KillCountLimit\"])\n hc.kill_exit_codes = v.get(\"KillExitCodes\", default[\"KillExitCodes\"])\n healthchecks.append(hc)\n return healthchecks", "def __init__(self):\n ScriptedLoadableModuleLogic.__init__(self)", "def test_load_testcase_in_module(self):\n tests = self.loader.load(\"tests.sampletest.InitTest\")\n self.assertEqual(len(tests), 1)\n from tests.sampletest import InitTest\n\n self.assertEqual(type(tests[0]), InitTest)", "def test_fake_health_get(self):\n pass", "def test_check_health(self):\n cache = DummyCache()\n ok, msg = cache.check_health()\n self.assertTrue(ok)", "def load_module(cls, *args, **kwargs): # real signature unknown\n pass", "def load_module(cls, *args, **kwargs): # real signature unknown\n pass", "def load_module(cls, *args, **kwargs): # real signature unknown\n pass", "def _load_module(self):\n self.log(logging.INFO, \"Checking file\", (self.filename, os.getpid()))\n\n try:\n return self.load_module(self.filename)\n except KeyboardInterrupt:\n raise\n except BaseException as e:\n # don't re-raise the error, just proceed without a 
module object\n # this can happen with scripts that aren't intended to be imported\n if not self.has_file_level_ignore():\n traceback.print_exc()\n if self.tree.body:\n node = self.tree.body[0]\n else:\n node = None\n self.show_error(\n node,\n \"Failed to import {} due to {!r}\".format(self.filename, e),\n error_code=ErrorCode.import_failed,\n )\n return None, False", "async def healthcheck(self):\n for service in self.services:\n await service.healthcheck()", "def health_check():\n return dict(api_status='OK')", "def load(cfg):\n\n checks = collections.OrderedDict()\n if cfg and cfg.get('checks', None):\n for name, options in cfg['checks'].iteritems():\n check_type = options.get('type', 'command')\n\n if not options:\n check_type = 'override'\n\n if check_type not in _types:\n msg = \"unknown check type '{}'\".format(check_type)\n raise CheckInvalid(msg)\n\n checks[name] = _types[check_type](name=name, **options)\n\n return checks", "def test_simple_health_check(self):\n response = self.client.open(\n '/awadallah/VaultsManager/1.0.0/health',\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def load_snakes():\n\n module_path = dirname(__file__)\n return _safe_unpickle(join(module_path, 'snakes.pickle'))", "def modules_load(machine_config):\n\t#---modules in LOCAL configuration must be loaded before checking version\n\timport importlib\n\tif 'module_path' in machine_config: module_path = machine_config['module_path']\n\telse:\n\t\tmodule_parent = os.environ.get('MODULESHOME','/usr/share/Modules/default')\n\t\tmodule_path = os.path.join(module_parent,'init','python.py')\n\tincoming = {}\n\tif sys.version_info<(3,0): execfile(module_path,incoming)\n\telse: exec(open(module_path).read(),incoming)\n\t#---note that modules that rely on dynamically-linked C-code must use EnvironmentModules\n\tmodlist = machine_config['modules']\n\tif type(modlist)==str: modlist = modlist.split(',')\n\tfor mod in modlist:\n\t\t#---always unload gromacs to ensure correct version\n\t\tincoming['module']('unload','gromacs')\n\t\tprint('[STATUS] module load %s'%mod)\n\t\tincoming['module']('load',mod)", "def health_check():\n printed_something = False\n\n job_checks = {}\n job_names = []\n for job in config.enabled_jobs:\n spec = nomad.parse(get_job(job.template))\n printed_something |= bool(nomad.check_events_and_logs(job.name))\n for service, checks in nomad.get_health_checks_from_spec(spec):\n if not checks:\n log.warn(f'service {service} has no health checks')\n continue\n job_checks[service] = checks\n job_names.append(job.name)\n printed_something |= nomad.wait_for_service_health_checks(consul, job_names, job_checks, nowait=True)\n\n if printed_something:\n log.error('Problems detected; see logs above.')\n sys.exit(1)\n else:\n log.info('No problems detected.')", "def test_get_hyperflex_health_list(self):\n pass", "def test_health(self):\n self.assert_request('get', '/_health')", "def loadmodule(self, name):\n\n if name in self._modules:\n return self._modules[name]()\n\n raise Error(\"No such module: {0}\".format(name))", "def load_basic(self, conanfile_path, graph_lock=None, display=\"\", remotes=None,\n update=None, check_update=None):\n return self.load_basic_module(conanfile_path, graph_lock, display, remotes,\n update, check_update)[0]", "def load(self):\n self.__additionnal_datas.update({\n 'hostname_fqdn': socket.getfqdn(),\n 'hostname': socket.gethostname(),\n 'ipcheck_version': self.__ipcheck_version,\n })\n # parse all trigger section in config 
file\n for section in filter(lambda s: self.RE_EXTENSION_SECTION.match(s) is not None, self.cp.sections()):\n conf = dict(self.cp.items(section))\n conf['name'] = section.partition('.')[2].lower()\n ext_name = conf['name']\n # check if the trigger name contains only alpha caracters\n if not ext_name.isalpha():\n self.__logger.error('[extension] Extension name \"%s\" must contains only alphabetical caracters', ext_name)\n continue\n # import process\n try:\n m = __import__('ipcheckadvanced.extension.' + ext_name, fromlist=['Extension'])\n ext = m.Extension()\n if not isinstance(ext, ExtensionBase):\n # inheritance error\n self.__logger.error('[extension] Extension \"%s\" must inherit from TriggerHandler class',\n ext_name)\n continue\n ext.logger = logging.getLogger('ipcheck.' + ext_name)\n ext.configuration = conf\n ext.event_receiver = self\n if ext.load():\n self.__extensions.append(ext)\n self.__logger.debug('[extension] loaded extension %s', ext_name)\n else:\n # loading error\n self.__logger.error('[extension] Extension \"%s\" cannot be loaded', ext_name)\n except ImportError as e:\n self.__logger.error('[extension] Extension \"%s\" name cannot be found in extension directory %s',\n ext_name, str(e))\n except NotImplementedError as e:\n self.__logger.error('[extension] Extension \"%s\" must implement the method \"%s\"',\n ext_name, str(e))\n except KeyError as e:\n self.__logger.error('[extension] Extension \"%s\" require %s missing parameters see extension documentation',\n ext_name, str(e))\n except Exception as e:\n self.__logger.error('[extension] Extension \"%s\" has encountered an unknown error: %s',\n ext_name, str(e))\n self.__logger.exception(e)\n # # return false if no trigger have been loaded\n return self.hasExtensions()", "def test_health_checks_constructed(self):\n\n node = Node(\n {\n 'healthchecks': [\n {\n 'command': '/some/basic/example',\n 'on_failure': None,\n 'on_failure_even_if_security_violation': False\n },\n\n {\n 'command': '/some/basic/example',\n 'on_failure': '/some/rescue-command',\n 'on_failure_even_if_security_violation': True\n },\n\n {\n 'command': '/some/basic/example'\n }\n ]\n },\n {},\n mock.Mock()\n )\n\n self.assertEqual(3, len(node.get_health_checks()))", "def test_health_check(self):\n result = self.app.get('/v1/health')\n\n # assert the status code of the response 200 (OK)\n self.assertEqual(result.status_code, 200)\n self.assertEqual(result.data, b'UP')", "def add_module(self, module):\n getattr(module, 'load_bench')(self)", "def loadConfigModule(name, options, tags):\n if isinstance(name, str):\n LOG.info('Loading %s', name)\n d = {}\n module = __import__(name[:-3], d, d)\n else:\n module = reload(name)\n onload = module.__dict__.get('onload')\n if callable(onload):\n try:\n onload(options, tags)\n except:\n LOG.fatal('Exception while loading %s', name)\n raise\n return module", "def load_banner_module(self):\n self.banner_module = None\n try:\n if self.force_json:\n banner_module_name = \"BannerJson\"\n else:\n banner_module_name = self.config_dict[\"BannerModule\"]\n \n except KeyError:\n print(\"***********************************\")\n print(\"* No BannerModule defined! 
EXITING \")\n print(\"***********************************\")\n sys.exit(1)\n\n try:\n banner_module = importlib.import_module(banner_module_name)\n except Exception as e:\n print(f\"******************************************\")\n print(f\"* Exception loading {banner_module_name} \")\n print(f\"* {e} \")\n print(f\"* in: {sys.path} \")\n print(f\"******************************************\")\n print(f\"Script-Path: {os.path.dirname(os.path.realpath(__file__))}\")\n print(f\"Working-Path: {os.getcwd()}\")\n sys.exit(1)\n\n if hasattr(banner_module, \"create_instance\"):\n self.banner_module = banner_module\n # print(f\"Successfully registered banner plugin '{banner_module_name}'\")\n else:\n print(\"************************************************\")\n print(f\"* BannerModule {banner_context_name}\")\n print(\"* is missing create_instance function; EXITING \")\n print(\"************************************************\")\n sys.exit(1)\n\n # TODO The banner is probably more a property of the router\n self.banner = self.banner_module.create_instance()", "def check(self, test_modules=__all__):\n\n # if test suite is being running from within forcebalance module, append the forcebalance prefix\n if __name__==\"forcebalance.test.__init__\":\n test_modules = [\"forcebalance.test.\" + test_module for test_module in test_modules]\n\n for test_module in test_modules:\n __import__(test_module)", "def AddModule (self, module):\n getattr (module, 'load_bench') (self)", "def loadTestsFromTestCase(self, testCaseClass):\n suite = super(HealthCheckLoader, self).loadTestsFromTestCase(\n testCaseClass)\n return self.filter_suite(suite)", "def load_from_module(self, module: ModuleType) -> None:\n for key in dir(module):\n if key.startswith(\"__\") and key.endswith(\"__\"):\n continue\n value = getattr(module, key)\n self[key] = value", "def __init__(self):\n self.module_params = utils.get_vplex_management_host_parameters()\n self.module_params.update(get_vplex_storageview_parameters())\n self.resource_fail_msg = \"Failed to collect resources\"\n self.fail_msg = \"Could not collect resources in {0}\"\n\n self.module = AnsibleModule(\n argument_spec=self.module_params,\n supports_check_mode=False\n )\n\n # Check for external libraries\n lib_status, message = utils.external_library_check()\n if not lib_status:\n LOG.error(message)\n self.module.fail_json(msg=message)\n\n # Check for Python vplexapi sdk\n if HAS_VPLEXAPI_SDK is False:\n self.module.fail_json(msg=\"Ansible modules for VPLEX require \"\n \"the vplexapi python library to be \"\n \"installed. 
Please install the library \"\n \"before using these modules.\")\n\n self.cl_name = self.module.params['cluster_name']\n if not self.cl_name:\n msg = \"Following is required: cluster_name\"\n LOG.error(msg)\n self.module.fail_json(msg=msg)\n\n # Create the configuration instance to communicate\n # with vplexapi\n self.client = utils.config_vplexapi(self.module.params)\n # Validating the user inputs\n if isinstance(self.client, tuple):\n err_code, msg = self.client\n LOG.error(msg)\n self.module.fail_json(msg=msg)\n\n vplex_setup = utils.get_vplex_setup(self.client)\n LOG.info(vplex_setup)\n # Checking if the cluster is reachable\n (err_code, msg) = utils.verify_cluster_name(self.client, self.cl_name)\n if err_code != 200:\n if \"Resource not found\" in msg:\n msg = \"Could not find resource {0}\".format(self.cl_name)\n LOG.error(msg)\n self.module.fail_json(msg=msg)\n\n # Create an instance to communicate with storageview VPLEX api\n self.cls = utils.ClustersApi(api_client=self.client)\n self.storageview = utils.ExportsApi(api_client=self.client)\n self.virtualvolume = utils.VirtualVolumeApi(api_client=self.client)\n self.maps = utils.MapsApi(api_client=self.client)\n self.distvv = utils.DistributedStorageApi(api_client=self.client)\n\n # Module parameters\n self.st_name = self.module.params['storage_view_name']\n self.new_st_name = self.module.params['new_storage_view_name']\n self.ports = self.module.params['ports']\n self.pt_state = self.module.params['port_state']\n self.initiators = self.module.params['initiators']\n self.ini_state = self.module.params['initiator_state']\n self.virvols = self.module.params['virtual_volumes']\n self.virvol_state = self.module.params['virtual_volume_state']\n self.vir_vol = {}\n\n # result is a dictionary that contains changed status and\n # storage view details\n self.result = {\"changed\": False, \"storageview_details\": {}}", "def __init__(self, module):\n self.state_change = False\n self.swift = None\n\n # Load AnsibleModule\n self.module = module", "def test_load_testcase(self):\n tests = self.loader.load(\"tests.sampletest.hellotest.HelloTest\")\n self.assertEqual(len(tests), 1)\n from tests.sampletest.hellotest import HelloTest\n\n self.assertEqual(type(tests[0]), HelloTest)", "def healthcheck():\n return make_response(jsonify(status=200, message='Healthy'), status.HTTP_200_OK)", "async def check_health():\n return {\"healthy\": True}", "def load(self, eng):\n eng.eval(\"load_system('simulink_househeat')\", nargout=0)", "def load_from_module_path(self, filename: str) -> None:\n # pylint: disable=import-outside-toplevel\n import importlib.util\n spec = importlib.util.spec_from_file_location(\"base_config\", filename)\n module = importlib.util.module_from_spec(spec)\n if spec.loader is not None:\n spec.loader.exec_module(module)\n else:\n raise Exception(\"Could not get module loader from spec\")\n self.load_from_module(module)", "def health_check():\n app.logger.info(\"Health Check!\")\n return Response(\"All Good!\", status=200)", "def load(self):\n\n self.commands = {\n # Usual text commands (e.g. 
\"/echo 123\")\n 'user': {},\n 'owner': {\n 'load': self.load,\n 'modprobe': self.modprobe,\n 'rmmod': self.rmmod\n },\n # Modules for bot's reaction to a different message types\n 'text': {},\n 'photo': {},\n 'audio': {},\n 'video': {},\n 'sticker': {},\n 'voice': {}\n }\n\n for file in os.listdir('modules'):\n if file.endswith('.py'):\n command_type, command = file.split('_', 1)\n self.modprobe(self, command[:-3])", "def load_reportlab(finder, module):\n finder.IncludeModule(\"reportlab.rl_settings\")", "def test_health(self) -> None:\n self._response = self._app.get('/health')\n\n self.assertEqual(self._response.status, '200 OK')", "def ping():\n \"\"\"Get the estimator object for this instance, loading it if it's not already loaded.\"\"\"\n checker = os.listdir('/opt/ml')\n health = checker is not None # health check here\n status = 200 if health else 404\n return flask.Response(response='\\n', status=status, mimetype='application/json')", "async def test_health():\n response = health()\n assert response\n assert {'status': 'ok'} == response", "def health_check(name, target='TCP:22', healthy_threashold=2, unhealthy_threashold=3, interval=30, timeout=3):\n hc = HealthCheck(title=name + 'healthcheck')\n hc.HealthyThreshold = healthy_threashold\n hc.UnhealthyThreshold = unhealthy_threashold\n hc.Interval = interval\n hc.Target = target\n hc.Timeout = timeout\n return hc", "def load_basic_module(self, conanfile_path, graph_lock=None, display=\"\", remotes=None,\n update=None, check_update=None, tested_python_requires=None):\n cached = self._cached_conanfile_classes.get(conanfile_path)\n if cached:\n conanfile = cached[0](display)\n conanfile._conan_helpers = self._conanfile_helpers\n if hasattr(conanfile, \"init\") and callable(conanfile.init):\n with conanfile_exception_formatter(conanfile, \"init\"):\n conanfile.init()\n return conanfile, cached[1]\n\n try:\n module, conanfile = parse_conanfile(conanfile_path)\n if tested_python_requires:\n conanfile.python_requires = tested_python_requires\n\n if self._pyreq_loader:\n self._pyreq_loader.load_py_requires(conanfile, self, graph_lock, remotes,\n update, check_update)\n\n conanfile.recipe_folder = os.path.dirname(conanfile_path)\n conanfile.recipe_path = Path(conanfile.recipe_folder)\n\n # Load and populate dynamic fields from the data file\n conan_data = self._load_data(conanfile_path)\n conanfile.conan_data = conan_data\n\n self._cached_conanfile_classes[conanfile_path] = (conanfile, module)\n result = conanfile(display)\n\n result._conan_helpers = self._conanfile_helpers\n if hasattr(result, \"init\") and callable(result.init):\n with conanfile_exception_formatter(result, \"init\"):\n result.init()\n return result, module\n except ConanException as e:\n raise ConanException(\"Error loading conanfile at '{}': {}\".format(conanfile_path, e))", "def testLoadConfigs(self):\n config_path = GetTestFilePath('unified_lab_config/valid_lab/hosts')\n pool = lab_config.UnifiedLabConfigPool(config_path)\n pool.LoadConfigs()\n self.assertIsNotNone(pool.GetLabConfig())\n self.assertIsNotNone(pool.GetHostConfigs('postsubmit'))\n self.assertIsNotNone(pool.GetHostConfigs('crystalball'))\n self.assertIsNotNone(pool.GetHostConfigs('crystalball-power'))", "def main():\n argument_spec = {\n 'gather_subset': dict(default=['software_info', 'software_images',\n 'host_name', 'platform_name',\n 'management_interface',\n 'software_version', 'fans',\n 'power_supplies', 'product_info',\n 'physical_interfaces',\n 'resource_utilization', 'domain_name'],\n 
type='list',\n choices=['software_info', 'software_images',\n 'host_name', 'platform_name',\n 'management_interface',\n 'software_version',\n 'config', 'fans', 'power_supplies',\n 'product_info', 'physical_interfaces',\n 'resource_utilization', 'domain_name']),\n 'gather_network_resources': dict(type='list',\n choices=['interfaces', 'vlans',\n 'vrfs'])\n }\n\n argument_spec.update(aoscx_http_argument_spec)\n\n module = AnsibleModule(argument_spec=argument_spec,\n supports_check_mode=True)\n\n module._connection = get_connection(module) # noqa\n\n warnings = []\n if module.params[\"gather_subset\"] == \"!config\":\n warnings.append(\n 'default value for `gather_subset` will be changed '\n 'to `min` from `!config` v2.11 onwards')\n\n result = Facts(module).get_facts()\n\n ansible_facts, additional_warnings = result\n warnings.extend(additional_warnings)\n\n module.exit_json(ansible_facts=ansible_facts, warnings=warnings)", "def _load_defined_tasks():\n task_path = Path(__file__).parent.resolve() / \"nalu_tasks\"\n py_files = glob.glob(str(task_path / \"[a-z]*.py\"))\n modset = {Path(ff).stem for ff in py_files}\n for pymod in modset:\n importlib.import_module(\".%s\"%pymod, 'exawind.nalu.nalu_tasks')", "def health(self) -> Dict[str, str]:\n return self.http.get(self.config.paths.health)", "def pre_loadbalancer_healthmonitor_read(self, resource_id):\n pass", "def load():\n add_default_configuration('logging', {\n 'level': 'WARNING'\n })", "def mod_load(self):\n raise NotImplementedError(\"Mod load isn't overriden\")", "def health_check(cls):\n cb = cls.CACHE_BACKEND()\n return cb.health_check()", "def load_modules(bot, config):\n for item in MODULES:\n importlib.import_module(\"cogs.\" + item).setup(bot, config)", "def load_module(cls, bytes, options=None):\n\t\traise NotImplementedError(\"load_module must be implemented\")", "def test_check_non_existing_module(self) -> None:\n with self.assertRaises(ClientErrorException):\n check_module(\"non_existing_module\")", "def health():\n return jsonify({\n 'status': 'UP',\n 'dependencies': {\n 'predixpy': predix.version,\n 'python': sys.version,\n }\n })", "def notify_module_loaded(module):\n name = get_module_name(module)\n hooks = _post_import_hooks.get(name, [])\n\n for hook in hooks:\n try:\n hook(module)\n except Exception:\n log.warning('hook \"%s\" for module \"%s\" failed', hook, name, exc_info=True)", "def pre_loadbalancer_healthmonitor_create(self, resource_dict):\n pass", "def command_load(interface,command,args):\n try:\n modules.add_module(args)\n interface.reply(\"Loaded %s\"%args)\n except ImportError, e:\n interface.reply(str(e))\n except modules.ModuleAlreadyLoaded, e:\n interface.reply(str(e))", "def loadmodule( conf ):\n try:\n #conf = routes[ route ]\n # try to load the module\n module_name = conf['module']['name']\n module_path = conf['module']['path']\n \n mod_name, file_ext = os.path.splitext( os.path.split( module_path )[ -1] )\n if file_ext.lower() == '.py':\n py_mod = imp.load_source( mod_name, module_path )\n elif file_ext.lower() == '.pyc':\n py_mod = imp.load_compiled( mod_name, module_path )\n else:\n raise Exception(\"Cannot handle module for route: \" + route )\n except Exception, e:\n import traceback\n traceback.print_exc( file=sys.stdout )\n # TODO log error + msg\n return py_mod", "def test_load_simple_module():\n loader = Loader()\n main_fname = loader.load(\"https://gist.githubusercontent.com/miohtama/80391980c2e73b285cfe/raw/dd89a55497ba33a6014453d9bb7432ab424c01cf/kivyhello.py#main\")\n mod = 
path_to_mod_name(main_fname)\n result = loader.run(mod, \"hello\")\n assert result == \"Hello there\"\n loader.close()", "def load_conf_modules():\n for modname in _list_module_names():\n mod = importutils.import_module('monasca_api.conf.' + modname)\n required_funcs = ['register_opts', 'list_opts']\n for func in required_funcs:\n if hasattr(mod, func):\n yield mod", "def load_checks_to_execute(\n bulk_checks_metadata: dict,\n bulk_compliance_frameworks: dict,\n checks_file: str,\n check_list: list,\n service_list: list,\n severities: list,\n compliance_frameworks: list,\n categories: set,\n provider: str,\n) -> set:\n checks_to_execute = set()\n\n # Handle if there are checks passed using -c/--checks\n if check_list:\n for check_name in check_list:\n checks_to_execute.add(check_name)\n\n # Handle if there are some severities passed using --severity\n elif severities:\n for check in bulk_checks_metadata:\n # Check check's severity\n if bulk_checks_metadata[check].Severity in severities:\n checks_to_execute.add(check)\n\n # Handle if there are checks passed using -C/--checks-file\n elif checks_file:\n try:\n checks_to_execute = parse_checks_from_file(checks_file, provider)\n except Exception as e:\n logger.error(f\"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}\")\n\n # Handle if there are services passed using -s/--services\n elif service_list:\n checks_to_execute = recover_checks_from_service(service_list, provider)\n\n # Handle if there are compliance frameworks passed using --compliance\n elif compliance_frameworks:\n try:\n checks_to_execute = parse_checks_from_compliance_framework(\n compliance_frameworks, bulk_compliance_frameworks\n )\n except Exception as e:\n logger.error(f\"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}\")\n\n # Handle if there are categories passed using --categories\n elif categories:\n for cat in categories:\n for check in bulk_checks_metadata:\n # Check check's categories\n if cat in bulk_checks_metadata[check].Categories:\n checks_to_execute.add(check)\n\n # If there are no checks passed as argument\n else:\n try:\n # Get all check modules to run with the specific provider\n checks = recover_checks_from_provider(provider)\n except Exception as e:\n logger.error(f\"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}\")\n else:\n for check_info in checks:\n # Recover check name from import path (last part)\n # Format: \"providers.{provider}.services.{service}.{check_name}.{check_name}\"\n check_name = check_info[0]\n checks_to_execute.add(check_name)\n\n return checks_to_execute", "def test_health_endpoint(self):\n url = f\"{BASE_URL}/health\"\n response = requests.get(url)\n response_json = response.json()\n assert response.status_code == 200\n assert response_json['status'] == 200", "def load(self, path):\n\n try:\n with open(path) as f:\n try:\n self.hooks = yaml.load(f.read())\n except ScannerError:\n self.warning('Error loading {0} hooks - Is it '\n 'correctly formatted?'.format(path))\n else:\n self.out('Loading hooks')\n except IOError:\n self.warning('{0} not found'.format(path))", "def load(self, configs, container):\n pass;", "def import_module(module: str):\n logging.info('Importing %s.', module)\n try:\n importlib.import_module(module)\n except RuntimeError as e:\n if (str(e) ==\n 'Attempted to add a new configurable after the config was locked.'):\n raise RuntimeError(\n 'Your Task/Mixture module contains gin configurables that must be '\n 'loaded before gin flag parsing. 
One fix is to add '\n f\"'import {module}' in your gin file.\")\n raise e", "def main():\n\n # the AnsibleModule object will be our abstraction for working with Ansible.\n # This includes instantiation, a couple of common attr that will be the\n # args/params passed to the execution, as well as if the module\n # supports check mode\n module = AnsibleModule(\n argument_spec=dict(\n hostvars=dict(type='raw', required=True),\n report_timestamp=dict(type=str, required=False, default=''),\n registered_dict_name=dict(type=str, required=False, default=\"get_sas_host_details_results\"),\n include_hotfix_report=dict(type=bool, required=False, default=True),\n hotfix_url = dict(type=str, required=True),\n hotfix_master_file = dict(type=str, required=True)\n ),\n supports_check_mode=True\n )\n\n # get module parameters\n hostvars = module.params['hostvars']\n report_timestamp = module.params['report_timestamp']\n registered_dict_name = module.params['registered_dict_name']\n include_hotfix_report = module.params['include_hotfix_report']\n hotfix_url = module.params['hotfix_url']\n hotfix_master_file = module.params['hotfix_master_file']\n\n # Starting in Ansible 2.8.1, there is the potential for hostvars\n # to be passed as a byte string, if the dict is too large\n # This will convert the str back to a dict before proceeding\n if isinstance(hostvars, str):\n hostvars = ast.literal_eval(hostvars.decode())\n\n results = dict()\n results['sas_hosts'] = dict()\n results['created'] = report_timestamp\n\n for inventory_hostname, host_vars in hostvars.items():\n\n # set up returnable values\n unreachable = True\n failed = True\n failure_details = dict(\n msg=\"\",\n rc=0,\n stderr=\"\",\n stdout=\"\",\n )\n\n # get the host details dict\n host_details = host_vars.get(registered_dict_name)\n\n # check if the host has the registered dict\n if host_details is not None:\n\n # host details exist, so host was reachable\n unreachable = False\n\n # check if the host failed\n failed = host_details['failed']\n\n # if the module reported a failure, collect details\n if failed:\n failure_details['msg'] = host_details['msg']\n failure_details['rc'] = host_details['rc']\n failure_details['stderr'] = host_details['module_stderr']\n failure_details['stdout'] = host_details['module_stdout']\n else:\n # get module results\n host_results = host_details.get('sas_host_details')\n\n if host_results is not None:\n results['sas_hosts'].update(host_results)\n else:\n failed = True\n\n # if the results dict could not be found, mark the host as unreachable\n if failed or unreachable:\n host_groups = host_vars.get('group_names')\n\n if host_groups is not None and 'sas_all' in host_groups:\n hostname = host_vars.get('ansible_fqdn')\n if hostname is None or hostname == \"\":\n hostname = host_vars.get('ansible_hostname')\n if hostname is None or hostname == \"\":\n hostname = host_vars.get('ansible_host')\n if hostname is None or hostname == \"\":\n hostname = host_vars.get('inventory_hostname')\n if hostname is None or hostname == \"\":\n hostname = inventory_hostname\n\n try:\n host_groups.remove('sas_all')\n host_groups.remove('sas-all')\n except ValueError:\n pass # do nothing\n\n results['sas_hosts'][hostname] = dict(\n _id=hostname.replace('.', '-'),\n _unreachable=unreachable,\n _failed=failed,\n _failure_details=failure_details,\n ansible_host_groups=host_groups\n )\n else:\n pass # this host isn't in sas_all so there's no need to try and report on it\n\n 
##################################################################################\n # This section will find all of the hotfixes available and add them to the report.\n ##################################################################################\n\n # There are a few data structures that are complicated enough to warrant a description:\n # fullReport\n # This will hold all of the data in a format condusive to printing it out in the final report. This is how\n # It is structured:\n # fullReport (dict):\n # key=Hot Fix Name, point to another dict:\n # key=\"released\", points to a string containing the release date of the hotfix.\n # key= \"installed\", points to a boolean that will reflect whether any of the packages used by this hotfix are installed on any of the machines in the deployment.\n # key=\"upToDate\", point to a boolean that will reflest whether ALL of the packages used by this hotfix are up to date on ALL of the machines in the deployment.\n # key=\"sasnote\", points to another dict:\n # key=SASNote number, points to the description of the SASNote.\n # key=\"package\", points to another dict:\n # key=\"platform\" , points to another dict:\n # key=OS, points to another dict:\n # key=\"version\", points to the string of the version of the package.\n # key=\"installed\", points to a boolean which reflects whether this package is installed on any machine in the deployment.\n # key=\"upToDate\", points to a boolean which reflects whether this package is up to data on ALL of the machines in the deployment.\n # key=\"os\", points to the fully qualified name of the operating system.\n # key=\"arch\", points to the architecture of the OS (NOTE: This does not exist on Windows systems.)\n # key=\"alreadyUpdated\", points to a boolean, which is used to keep track of whether the upToDate has already been set.\n # key=\"installedVersions\", points to another dict:\n # key=machineName, points to a 2 element list:\n # [0]=string containing package version that is currently installed.\n # [1]=boolean reflecting whether this version is at or above the package delevered in this hotfix.\n #\n ###########################################################################\n #\n # packageToHotFix\n # This will hold a dict of lists:\n # key: package name, pointing to a 2 element list:\n # [0] OS\n # [1] The Hotfix that this package is associated with.\n #\n ###########################################################################\n #\n # environmentReportDict\n # This is inherited from the environment report, but it's probably worth documenting what it looks like.\n # There is a lot of data inerherited, and I'm only describing what is used in this script.\n # environmentReportDict\n # key=hostname (for each machine in the deployment), pointing to another dict:\n # key=\"OS\", pointing to string for the OS family.\n # key=\"arch\", pointing to the string for the architecture of the host.\n # key=\"sas_packages\", pointing to another dict:\n # key=package number, pointing to another dict:\n # key=\"attributes\", pointing to another dict:\n # key=\"version\", pointing to a string of the package versions currently installed on the host.\n ############################################################################\n\n results[\"include_hotfix_report\"] = include_hotfix_report\n if include_hotfix_report:\n # This is the URL from which to pull the hotfix files.\n if hotfix_url[-1:] == '/':\n baseURL = hotfix_url\n else:\n baseURL = hotfix_url + '/'\n # This is the master file that lists which other 
files should be examined for the actual hotfixes themselves.\n masterFile = hotfix_master_file\n # This is the top level object to store the hotfix report information (see above).\n fullReport = {}\n # This is a dict of package to hotfixes (see above).\n packageToHotfix = {}\n # This boolean will help with debugging.\n debug = False\n\n try:\n # Parse the master file to obtain where the hotfix files are.\n masterFileXML = urllib2.urlopen(baseURL + masterFile)\n\n # Parse the master file and build a list of all files.\n allFilesRoot = ET.fromstring(masterFileXML.read())\n results[\"contact_hotfix_website\"] = True\n except urllib2.URLError :\n results[\"contact_hotfix_website\"] = False\n results[\"master_website\"] = baseURL + masterFile\n if debug:\n print(\"***** Error parsing \" + baseURL + masterFile)\n print(traceback.format_exc())\n print(\"***** No hot fix information obtained. Skipping hot fix report.\\n\\n\")\n\n if results[\"contact_hotfix_website\"]:\n # Loop through the files discoverd in the master file\n if debug:\n print(\"Building hot fix report, based on master file input.\")\n for file_tag in allFilesRoot.findall('File'):\n currentFile = file_tag.get('fileName')\n fileToParse = baseURL + currentFile\n # Retrieve each file.\n # Inside of each file, the lines are keyed by the hot fix id. There are three types of lines, in order:\n # 1) id and release date\n # 2) id, sasnote, sasnotetitle\n # 3) id, OS, package.\n # This script loops through to build a dictionary of dictonaries with the basic structure:\n # ID\n # Release Date\n # SASNotes\n # SASNote and Title\n # ...\n # Packages\n # Package Name, Version, and OS\n try:\n currentFileXML = urllib2.urlopen(fileToParse)\n currentFileRoot = ET.fromstring(currentFileXML.read())\n updateID = \"\"\n for update_tag in currentFileRoot.findall('update'):\n currentUpdate = update_tag.get('id')\n releaseDate = update_tag.get('released')\n # To get the top level Dictionary seeded with the hot fix Name and release date.\n if releaseDate is not None:\n if currentUpdate in fullReport:\n if debug:\n print(\"WARNING! Hot Fix \" + currentUpdate + \" already discovered. Skipping\")\n updateID = \"DUPLICATE-SKIP\"\n else:\n # The SCXXXX hot fixes are special. The package files are only included in\n # Viya_<version>_<platform>_home.xml files. 
So, the entries in the\n # scheduled_update_<platform>_<shipevent>.xml files can be skipped.\n if currentUpdate.startswith(\"SC\") and currentFile.find(\"scheduled_update_\") < 0:\n continue\n updateID = currentUpdate\n fullReport[updateID] = {}\n fullReport[updateID][\"release_date\"] = releaseDate\n fullReport[updateID][\"installed\"] = False\n fullReport[updateID][\"upToDate\"] = False\n # To get the SASNote information under the hot fix\n else:\n if updateID == \"DUPLICATE-SKIP\":\n continue\n sasNote = update_tag.get('sasnote')\n sasNoteTitle = update_tag.get('sasnoteTitle')\n if sasNote is not None:\n if \"sasnote\" not in fullReport[updateID]:\n fullReport[updateID][\"sasnote\"] = {}\n # This string needs to be encoded because some non-ASCII characters are\n # in some of the titles.\n fullReport[updateID][\"sasnote\"][sasNote] = sasNoteTitle.encode('utf-8')\n # To get the Package information under the hot fix.\n else:\n os = update_tag.get(\"os\")\n fullPackage = update_tag.get(\"package\")\n if fullPackage is not None:\n if \"package\" not in fullReport[updateID]:\n fullReport[updateID][\"package\"] = {}\n\n lastPeriodIndex = fullPackage.rfind(\".\")\n # Format the package information.\n # Windows does not have a dash in the version; Linux does. So, we need to break differently,\n # depending on the OS.\n if os.lower().find(\"windows\") > -1:\n versionStartIndex = fullPackage.rfind(\"-\")\n achitectureStartIndex = -1\n versionEndIndex = lastPeriodIndex\n osFamily = \"Windows\"\n else:\n versionStartIndex = fullPackage.rfind(\"-\", 0, fullPackage.rfind(\"-\"))\n # Linux has architecture in the package. This will be stored in its own key.\n achitectureStartIndex = fullPackage.rfind(\".\", 0, lastPeriodIndex)\n # SLES has the string 'suse' in its package. 
This will strip it out (as well as an extra .).\n if os.lower().find(\"suse\") > -1:\n versionEndIndex = achitectureStartIndex - 5\n osFamily = \"Suse\"\n else:\n if os.lower().find(\"yocto\") > -1:\n versionEndIndex = achitectureStartIndex - 6\n osFamily = \"Yocto\"\n else:\n if os.lower().find(\"ubuntu\") > -1:\n versionStartIndex = fullPackage.rfind(\"_\", 0, fullPackage.rfind(\"_\"))\n versionEndIndex = fullPackage.rfind(\"_\")\n achitectureStartIndex = versionEndIndex\n osFamily = \"Ubuntu\"\n else:\n if os.lower().find(\"red hat enterprise linux 7\") > -1:\n versionStartIndex = versionStartIndex = fullPackage.rfind(\":\")\n versionEndIndex = len(fullPackage)\n achitectureStartIndex = -1\n osFamily = \"RedHat\"\n else:\n versionEndIndex = achitectureStartIndex\n osFamily = \"RedHat\"\n package = fullPackage[:versionStartIndex]\n packageVersion = fullPackage[versionStartIndex + 1:versionEndIndex]\n architecture = fullPackage[achitectureStartIndex + 1:lastPeriodIndex]\n\n if package not in fullReport[updateID][\"package\"]:\n fullReport[updateID][\"package\"][package] = {}\n if \"platform\" not in fullReport[updateID][\"package\"][package]:\n fullReport[updateID][\"package\"][package][\"platform\"] = {}\n if osFamily not in fullReport[updateID][\"package\"][package][\"platform\"]:\n fullReport[updateID][\"package\"][package][\"platform\"][osFamily] = {}\n fullReport[updateID][\"package\"][package][\"platform\"][osFamily][\"version\"] = packageVersion\n fullReport[updateID][\"package\"][package][\"platform\"][osFamily][\"installed\"] = False\n fullReport[updateID][\"package\"][package][\"platform\"][osFamily][\"upToDate\"] = False\n fullReport[updateID][\"package\"][package][\"platform\"][osFamily][\"os\"] = os\n fullReport[updateID][\"package\"][package][\"platform\"][osFamily][\"installedVersions\"] = {}\n if achitectureStartIndex != -1:\n fullReport[updateID][\"package\"][package][\"platform\"][osFamily][\"arch\"] = architecture\n # This property is used to make sure that when evaluating the installed packages,\n # the upToDate=false does not get overridden by a True at the end.\n fullReport[updateID][\"package\"][package][\"platform\"][osFamily][\"alreadyUpdated\"] = False\n\n # Add to the package to hot fix dict.\n if package not in packageToHotfix:\n packageToHotfix[package] = []\n packageToHotfix[package].append([osFamily, updateID])\n\n except ET.ParseError:\n if debug:\n print(\"***** Error parsing \" + fileToParse)\n print(traceback.format_exc())\n print(\"***** Skipping file.\\n\\n\")\n except urllib2.HTTPError:\n if debug:\n print(\"***** Cannot access \" + fileToParse)\n print(traceback.format_exc())\n print(\"***** Skipping the file.\\n\\n\")\n except:\n if debug:\n print(\"***** Error encountered with \" + fileToParse)\n print(traceback.format_exc())\n print(\"***** Skipping the file.\\n\\n\")\n\n if debug:\n print(\"**** Build complete. 
Here are the hot fixes:\")\n print_Full_Report(fullReport)\n print(\"***********************************************************************************\")\n print(\"**** Here is the package to hot fix dict:\")\n print(\"***********************************************************************************\")\n for current_package in packageToHotfix:\n print(\" \" + current_package)\n for machine_list in packageToHotfix[current_package]:\n print(\" \" + machine_list[0] + \" @ \" + machine_list[1] + \".\")\n print(\"***********************************************************************************\")\n print(\"Report built.\")\n print(\"Accessing environment Data.\")\n\n for currentMachine in results['sas_hosts']:\n if not results['sas_hosts'][currentMachine][\"_unreachable\"] and not results['sas_hosts'][currentMachine][\"_failed\"]:\n currentOS = results['sas_hosts'][currentMachine]['os']['family']\n for currentPackage in results['sas_hosts'][currentMachine]['sas_packages']:\n if currentPackage in packageToHotfix:\n for osHotfix in packageToHotfix[currentPackage]:\n if osHotfix[0] == currentOS:\n currentHotfix = osHotfix[1]\n installedVersion = \\\n results['sas_hosts'][currentMachine]['sas_packages'][currentPackage]['attributes']['version']\n if installedVersion.endswith('.suse'):\n installedVersion = installedVersion[:-5]\n else:\n if installedVersion.endswith('.yocto'):\n installedVersion = installedVersion[:-6]\n else:\n if '_' in installedVersion:\n installedVersion = installedVersion[0:installedVersion.rfind(\"_\")]\n hotfixVersion = fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"version\"]\n upToDate = compare_versions(installedVersion, hotfixVersion) >= 0\n fullReport[currentHotfix][\"installed\"] = True\n fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"installed\"] = True\n # If a previous pacakage marked updateToDate=True, it can still be pulled back to false if another package isn't\n # up to date. 
If the previous package was marked upToDate=False, the hotfix cannot be marked True.\n if not fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"alreadyUpdated\"] or \\\n (fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"alreadyUpdated\"] and\n fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"upToDate\"]):\n fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"upToDate\"] = upToDate\n fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"alreadyUpdated\"] = True\n fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"installedVersions\"][currentMachine] = [installedVersion, upToDate]\n\n if debug:\n print(\"Comparing environment data to hotfix data.\")\n for currentHotFix in fullReport:\n cumulativeOverallUpToDate = True\n # This will only allow the top-level \"upToDate\" property to be set if there is a package installed on this OS.\n allowTopLevelUpdate = False\n for currentPackage in fullReport[currentHotFix][\"package\"]:\n cumulativeOSUpToDate = True\n for currentOS in fullReport[currentHotFix][\"package\"][currentPackage][\"platform\"]:\n if len(fullReport[currentHotFix][\"package\"][currentPackage][\"platform\"][currentOS][\"installedVersions\"]) > 0:\n cumulativeOSUpToDate = cumulativeOSUpToDate and \\\n fullReport[currentHotFix][\"package\"][currentPackage][\"platform\"][currentOS][\n \"upToDate\"]\n allowTopLevelUpdate = True\n\n cumulativeOverallUpToDate = cumulativeOverallUpToDate and cumulativeOSUpToDate\n if allowTopLevelUpdate:\n fullReport[currentHotFix][\"upToDate\"] = cumulativeOverallUpToDate\n\n # Now that the fullReport has been updated, go back and add to results for the final report.\n results[\"available_hotfixes\"] = {}\n results[\"installed_hotfixes\"] = {}\n\n for currentHotfix in fullReport:\n if not fullReport[currentHotfix][\"installed\"]:\n continue\n if fullReport[currentHotfix][\"upToDate\"]:\n hotfix_dict_to_use = \"installed_hotfixes\"\n else:\n hotfix_dict_to_use = \"available_hotfixes\"\n results[hotfix_dict_to_use][currentHotfix] = {}\n results[hotfix_dict_to_use][currentHotfix][\"release_date\"] = fullReport[currentHotfix][\"release_date\"]\n results[hotfix_dict_to_use][currentHotfix][\"packages\"] = []\n for currentPackage in fullReport[currentHotfix][\"package\"]:\n for currentOS in fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"]:\n if not fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"installed\"]:\n continue\n for currentHost in fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"installedVersions\"]:\n temp_dict = {}\n temp_dict[\"hostname\"] = currentHost\n temp_dict[\"package\"] = currentPackage\n temp_dict[\"installed_version\"] = fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"installedVersions\"][currentHost][0]\n temp_dict[\"hotfix_version\"] = fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"version\"]\n temp_dict[\"up_to_date\"] = fullReport[currentHotfix][\"package\"][currentPackage][\"platform\"][currentOS][\"installedVersions\"][currentHost][1]\n results[hotfix_dict_to_use][currentHotfix][\"packages\"].append(temp_dict)\n # Format the SAS Note description so that we can respect any HTML tags that are included in the text.\n results[hotfix_dict_to_use][currentHotfix][\"sas_notes\"] = {}\n for current_number 
in fullReport[currentHotfix][\"sasnote\"]:\n # Honor any HTML that is coming through.\n temp_sasnote_description = fullReport[currentHotfix][\"sasnote\"][current_number]\n temp_sasnote_description = temp_sasnote_description.replace(\"&lt;\", \"<\")\n temp_sasnote_description = temp_sasnote_description.replace(\"&gt;\", \">\")\n # Build a link to the URL for the SAS Note.\n hot_fix_prefix = current_number[:2]\n hot_fix_postfix = current_number[2:]\n sas_note_url = \"http://support.sas.com/kb/\" + hot_fix_prefix + \"/\" + hot_fix_postfix + \".html\"\n sas_note_html_link = \"<a href=\\\"\" + sas_note_url + \"\\\">\" + current_number + \"</a>\"\n results[hotfix_dict_to_use][currentHotfix][\"sas_notes\"][current_number] = {\"sas_note_link\": sas_note_html_link, \"description\": temp_sasnote_description}\n\n # In the event of a successful module execution, you will want to\n # simply call AnsibleModule.exit_json(), passing the key/value results\n #\n # changed will always be 'False' since we'll never alter state on a host\n module.exit_json(changed=False, processed_host_details=results)",
    "def test_load(self):\n command = constituencies.Command()\n command.handle('load', silent=True)",
    "async def api_healthcheck(self) -> Optional[Exception]:\n try:\n await self._client.get(\"/health\")\n return None\n except Exception as exc:\n return exc",
    "def get_health_check(self):\n return util.create_response(output=\"OK\")",
    "def _load(self):\n module = importlib.import_module(self.__name__)\n self._parent_module_globals[self._local_name] = module\n\n if self._warning:\n logger.warning(self._warning)\n # Make sure to only warn once.\n self._warning = None\n\n # Update this object's dict so that if someone keeps a reference to the\n # LazyLoader, lookups are efficient (__getattr__ is only called on lookups\n # that fail).\n self.__dict__.update(module.__dict__)\n return module",
    "def _load_vulnerabilities_report_file(file_name):\n with open(os.path.join(module_path, test_name, file_name)) as file:\n json_data = json.load(file)\n return ImageVulnerabilitiesReport.from_json(json_data)",
    "def load(cls, name):\n try:\n return importlib.import_module(cls._plugins[name])\n except Exception as err:\n print(\"** could not load command [%s]:\\n%s\" % (name, err))",
    "def _load_modules(self):\n modules = []\n agent_cls_list = dashboard_utils.get_all_modules(\n dashboard_utils.DashboardAgentModule\n )\n for cls in agent_cls_list:\n logger.info(\n \"Loading %s: %s\", dashboard_utils.DashboardAgentModule.__name__, cls\n )\n c = cls(self)\n modules.append(c)\n logger.info(\"Loaded %d modules.\", len(modules))\n return modules",
    "def test_module(self):\n pass",
    "def load_kernel_modules():\n if not os.path.isdir(W1ThermSensor.BASE_DIRECTORY):\n os.system(\"modprobe w1-gpio >/dev/null 2>&1\")\n os.system(\"modprobe w1-therm >/dev/null 2>&1\")\n\n for _ in range(W1ThermSensor.RETRY_ATTEMPTS):\n if os.path.isdir(\n W1ThermSensor.BASE_DIRECTORY\n ): # w1 therm modules loaded correctly\n break\n time.sleep(W1ThermSensor.RETRY_DELAY_SECONDS)\n else:\n raise KernelModuleLoadError()",
    "def testLoadConfiguration(self):\n loader = Loader()\n loader.loadFromDirectory(self.__exampleDirectory)\n\n self.assertEqual(len(loader.taskHolders()), 1)\n\n self.assertEqual(\n os.path.basename(loader.taskHolders()[0].var('contextConfig')),\n 'config.hjson'\n )",
    "def get_healthcheck() -> Response:\n\n try:\n with get_cursor(db_creds, commit=False) as cur:\n cur.execute(\"SELECT * FROM events.healthchecks\")\n data = cur.fetchall()\n return 
jsonify(status_code=200, data=data)\n except psycopg2.Error as e:\n return jsonify(\n message=f\"Psycopg2 driver error: {type(e)}\",\n args=e.args,\n status_code=500,\n error_type=\"Internal Server Error\",\n )\n except Exception as e:\n return jsonify(\n message=f\"Internal Server Error: {type(e)}\",\n args=e.args,\n status_code=500,\n error_type=\"Internal Server Error\",\n )", "def test_health_check(self):\n self.url = reverse(\"health-check\")\n response = self.client.get(self.url, **self.auth_headers)\n self.assertEqual(200, response.status_code)", "def test_load_full_reload_hooks(self):\n class TestExtension(Extension):\n pass\n\n extension = self.setup_extension(TestExtension)\n\n self.assertEqual(len(self.manager.get_installed_extensions()), 1)\n self.assertEqual(len(self.manager.get_enabled_extensions()), 1)\n\n URLHook(extension, ())\n self.assertEqual(len(URLHook.hooks), 1)\n self.assertEqual(URLHook.hooks[0].extension, extension)\n\n self.manager.load(full_reload=True)\n\n self.assertEqual(len(URLHook.hooks), 0)", "async def _reload(self, ctx, *, module: str=None):\n if module is None or module == \"all\":\n await ctx.message.add_reaction('\\N{HOURGLASS}')\n try:\n for extension in startup_extensions:\n self.bot.unload_extension(extension)\n self.bot.load_extension(extension)\n except Exception as e:\n await ctx.message.remove_reaction('\\N{HOURGLASS}', ctx.me)\n await ctx.message.add_reaction('\\N{CROSS MARK}')\n await ctx.send('{}: {}'.format(type(e).__name__, e))\n traceback.print_exc()\n else:\n await ctx.message.remove_reaction('\\N{HOURGLASS}', ctx.me)\n await ctx.message.add_reaction('\\N{WHITE HEAVY CHECK MARK}')\n else:\n await ctx.message.add_reaction('\\N{HOURGLASS}')\n try:\n self.bot.unload_extension(module)\n self.bot.load_extension(module)\n except Exception as e:\n await ctx.message.remove_reaction('\\N{HOURGLASS}', ctx.me)\n await ctx.message.add_reaction('\\N{CROSS MARK}')\n await ctx.send('{}: {}'.format(type(e).__name__, e))\n traceback.print_exc()\n else:\n await ctx.message.remove_reaction('\\N{HOURGLASS}', ctx.me)\n await ctx.message.add_reaction('\\N{WHITE HEAVY CHECK MARK}')", "def health_check(self, *, scope: Scope) -> HealthCheckStatus:", "def main():\n module = AnsibleModule(\n argument_spec=dict(\n host=dict(type='str', required=True),\n destination=dict(type='str', required=True),\n repeat_count=dict(type='int', default=5),\n vrf_name=dict(type='str'),\n min_success_rate=dict(type='int', default=100)\n ),\n supports_check_mode=True\n )\n\n if module.check_mode:\n module.exit_json(changed=False)\n\n try:\n retvals = ping(module.params['host'],\n module.params['destination'],\n module.params['repeat_count'],\n module.params['vrf_name'])\n except Exception as exc:\n module.fail_json(msg='Reachability validation failed ({})'.format(exc))\n\n retvals['changed'] = False\n\n if retvals['success_rate'] >= module.params['min_success_rate']:\n module.exit_json(**retvals)\n else:\n module.fail_json(msg=('Success rate lower than expected ({}<{})').\n format(retvals['success_rate'],\n module.params['min_success_rate']))", "async def test_health_check(client: AsyncClient):\n\n response = await client.get(f\"/health-check\")\n assert response.status_code == 200\n\n data = response.json()\n assert data[\"service\"][\"status\"] == \"healthy\"\n assert data[\"service\"][\"error\"] is None\n assert data[\"database\"][\"status\"] == \"healthy\"\n assert data[\"database\"][\"error\"] is None", "def setup_module():\n\n c = Config()\n if c.get('general', 
'in_production'): # pragma: no cover\n raise RuntimeError(\"DO NOT run destructive test on production system\")\n\n \"Pull in the filesystem dump from a previous mirth run\"\n mi = MirthInteraction()\n mi.restore_database()\n\n \"Run a quick sanity check, whole module requires a populated db\"\n connection = db_connection('warehouse')\n count = connection.session.query(HL7_Msh).count()\n connection.disconnect()\n\n if count < 4000:\n err = \"Minimal expected count of records not present. \"\\\n \"Be sure to run 'process_testfiles_via_mirth' as a prerequisite\"\n raise RuntimeError(err)" ]
[ "0.6399756", "0.6244733", "0.600857", "0.5915704", "0.57699925", "0.569914", "0.565888", "0.5624556", "0.5607272", "0.560578", "0.55228066", "0.5457219", "0.541206", "0.5375242", "0.53745407", "0.53611326", "0.5358373", "0.5354235", "0.53232366", "0.53232366", "0.53232366", "0.5315311", "0.5311305", "0.52602094", "0.5241662", "0.5178809", "0.51606846", "0.51494527", "0.51355046", "0.5128314", "0.5126949", "0.51217943", "0.51194346", "0.5118271", "0.5101392", "0.50923246", "0.5080689", "0.5048432", "0.5047157", "0.5044515", "0.5030429", "0.5025104", "0.5021741", "0.5021508", "0.5016173", "0.5014396", "0.5013898", "0.5012933", "0.5001878", "0.49885282", "0.49866953", "0.49863148", "0.49781564", "0.4976862", "0.49707946", "0.4968651", "0.4966756", "0.49630225", "0.4962661", "0.49518102", "0.4943962", "0.4940712", "0.49397352", "0.49346033", "0.49321133", "0.4930362", "0.49273548", "0.48919442", "0.48872623", "0.48866203", "0.4881183", "0.4878875", "0.48705617", "0.48681712", "0.48611352", "0.4860951", "0.48605946", "0.48570904", "0.48529613", "0.4852859", "0.48416188", "0.4838839", "0.48385474", "0.4836752", "0.48360276", "0.48277575", "0.48239225", "0.48207206", "0.4804278", "0.4799753", "0.47967234", "0.47955018", "0.4794321", "0.47929445", "0.47852206", "0.47845414", "0.47761536", "0.47711825", "0.47688082", "0.47685757" ]
0.6768524
0