file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
matgen.go | // Copyright ©2017 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package testlapack
import (
"math"
"golang.org/x/exp/rand"
"gonum.org/v1/gonum/blas"
"gonum.org/v1/gonum/blas/blas64"
"gonum.org/v1/gonum/floats"
)
// Dlatm1 computes the entries of dst as specified by mode, cond and rsign.
//
// mode describes how dst will be computed:
// |mode| == 1: dst[0] = 1 and dst[1:n] = 1/cond
// |mode| == 2: dst[:n-1] = 1 and dst[n-1] = 1/cond
// |mode| == 3: dst[i] = cond^{-i/(n-1)}, i=0,...,n-1
// |mode| == 4: dst[i] = 1 - i*(1-1/cond)/(n-1)
// |mode| == 5: dst[i] = random number in the range (1/cond, 1) such that
// their logarithms are uniformly distributed
// |mode| == 6: dst[i] = random number from the distribution given by dist
// If mode is negative, the order of the elements of dst will be reversed.
// For other values of mode Dlatm1 will panic.
//
// If rsign is true and mode is not ±6, each entry of dst will be multiplied by 1
// or -1 with probability 0.5
//
// dist specifies the type of distribution to be used when mode == ±6:
// dist == 1: Uniform[0,1)
// dist == 2: Uniform[-1,1)
// dist == 3: Normal(0,1)
// For other values of dist Dlatm1 will panic.
//
// rnd is used as a source of random numbers.
func Dlatm1(dst []float64, mode int, cond float64, rsign bool, dist int, rnd *rand.Rand) {
	// amode is the magnitude of mode; the sign of mode only controls the
	// final reversal of dst.
	amode := mode
	if amode < 0 {
		amode = -amode
	}
	if amode < 1 || 6 < amode {
		panic("testlapack: invalid mode")
	}
	if cond < 1 {
		panic("testlapack: cond < 1")
	}
	if amode == 6 && (dist < 1 || 3 < dist) {
		panic("testlapack: invalid dist")
	}
	n := len(dst)
	if n == 0 {
		// Quick return for an empty destination.
		return
	}
	switch amode {
	case 1:
		// Leading entry 1, all remaining entries 1/cond.
		dst[0] = 1
		for i := 1; i < n; i++ {
			dst[i] = 1 / cond
		}
	case 2:
		// All entries 1 except the last, which is 1/cond.
		for i := 0; i < n-1; i++ {
			dst[i] = 1
		}
		dst[n-1] = 1 / cond
	case 3:
		// Geometric decay: dst[i] = cond^{-i/(n-1)}.
		dst[0] = 1
		if n > 1 {
			alpha := math.Pow(cond, -1/float64(n-1))
			for i := 1; i < n; i++ {
				dst[i] = math.Pow(alpha, float64(i))
			}
		}
	case 4:
		// Arithmetic decay from 1 down to 1/cond:
		// dst[i] = 1 - i*(1-1/cond)/(n-1).
		dst[0] = 1
		if n > 1 {
			condInv := 1 / cond
			alpha := (1 - condInv) / float64(n-1)
			for i := 1; i < n; i++ {
				dst[i] = float64(n-i-1)*alpha + condInv
			}
		}
	case 5:
		// Random entries in (1/cond, 1) whose logarithms are uniform.
		alpha := math.Log(1 / cond)
		for i := range dst {
			dst[i] = math.Exp(alpha * rnd.Float64())
		}
	case 6:
		// Random entries from the distribution selected by dist.
		switch dist {
		case 1:
			// Uniform [0,1).
			for i := range dst {
				dst[i] = rnd.Float64()
			}
		case 2:
			// Uniform [-1,1).
			for i := range dst {
				dst[i] = 2*rnd.Float64() - 1
			}
		case 3:
			// Standard normal.
			for i := range dst {
				dst[i] = rnd.NormFloat64()
			}
		}
	}
	// Attach random signs unless mode 6 already produced signed entries.
	if rsign && amode != 6 {
		for i, v := range dst {
			if rnd.Float64() < 0.5 {
				dst[i] = -v
			}
		}
	}
	// A negative mode reverses the order of the computed entries.
	if mode < 0 {
		for i := 0; i < n/2; i++ {
			dst[i], dst[n-i-1] = dst[n-i-1], dst[i]
		}
	}
}
// Dlagsy generates an n×n symmetric matrix A, by pre- and post- multiplying a
// real diagonal matrix D with a random orthogonal matrix:
// A = U * D * U^T.
//
// work must have length at least 2*n, otherwise Dlagsy will panic.
//
// The parameter k is unused but it must satisfy
// 0 <= k <= n-1.
func Dlagsy(n, k int, d []float64, a []float64, lda int, rnd *rand.Rand, work []float64) {
	checkMatrix(n, n, a, lda)
	if k < 0 || max(0, n-1) < k {
		panic("testlapack: invalid value of k")
	}
	if len(d) != n {
		panic("testlapack: bad length of d")
	}
	if len(work) < 2*n {
		panic("testlapack: insufficient work length")
	}
	// Initialize lower triangle of A to diagonal matrix.
	for i := 1; i < n; i++ {
		for j := 0; j < i; j++ {
			a[i*lda+j] = 0
		}
	}
	for i := 0; i < n; i++ {
		a[i*lda+i] = d[i]
	}
	bi := blas64.Implementation()
	// Generate lower triangle of symmetric matrix by applying a sequence of
	// random Householder reflections U from both sides, A = U*D*Uᵀ.
	for i := n - 2; i >= 0; i-- {
		// Draw a random Gaussian vector u of length n-i into work[:n-i].
		for j := 0; j < n-i; j++ {
			work[j] = rnd.NormFloat64()
		}
		wn := bi.Dnrm2(n-i, work[:n-i], 1)
		wa := math.Copysign(wn, work[0])
		var tau float64
		if wn != 0 {
			// Form the Householder vector (implicit leading 1 in
			// work[0]) and its scalar factor tau. If wn == 0 then
			// tau stays 0 and the reflection is the identity.
			wb := work[0] + wa
			bi.Dscal(n-i-1, 1/wb, work[1:n-i], 1)
			work[0] = 1
			tau = wb / wa
		}
		// Apply random reflection to A[i:n,i:n] from the left and the
		// right.
		//
		// Compute y := tau * A * u, stored in work[n:2n-i].
		bi.Dsymv(blas.Lower, n-i, tau, a[i*lda+i:], lda, work[:n-i], 1, 0, work[n:2*n-i], 1)
		// Compute v := y - 1/2 * tau * ( y, u ) * u.
		alpha := -0.5 * tau * bi.Ddot(n-i, work[n:2*n-i], 1, work[:n-i], 1)
		bi.Daxpy(n-i, alpha, work[:n-i], 1, work[n:2*n-i], 1)
		// Apply the transformation as a rank-2 update to A[i:n,i:n].
		bi.Dsyr2(blas.Lower, n-i, -1, work[:n-i], 1, work[n:2*n-i], 1, a[i*lda+i:], lda)
	}
	// Store full symmetric matrix by mirroring the lower triangle.
	for i := 1; i < n; i++ {
		for j := 0; j < i; j++ {
			a[j*lda+i] = a[i*lda+j]
		}
	}
}
// Dlagge generates a real general m×n matrix A, by pre- and post-multiplying
// a real diagonal matrix D with random orthogonal matrices:
// A = U*D*V.
//
// d must have length min(m,n), and work must have length m+n, otherwise Dlagge
// will panic.
//
// The parameters ku and kl are unused but they must satisfy
// 0 <= kl <= m-1,
// 0 <= ku <= n-1.
func Dlagge(m, n, kl, ku int, d []float64, a []float64, lda int, rnd *rand.Rand, work []float64) {
	checkMatrix(m, n, a, lda)
	if kl < 0 || max(0, m-1) < kl {
		panic("testlapack: invalid value of kl")
	}
	if ku < 0 || max(0, n-1) < ku {
		panic("testlapack: invalid value of ku")
	}
	if len(d) != min(m, n) {
		panic("testlapack: bad length of d")
	}
	if len(work) < m+n {
		panic("testlapack: insufficient work length")
	}
	// Initialize A to diagonal matrix.
	for i := 0; i < m; i++ {
		for j := 0; j < n; j++ {
			a[i*lda+j] = 0
		}
	}
	for i := 0; i < min(m, n); i++ {
		a[i*lda+i] = d[i]
	}
	// Quick exit if the user wants a diagonal matrix.
	// if kl == 0 && ku == 0 {
	// 	return
	// }
	bi := blas64.Implementation()
	// Pre- and post-multiply A by random orthogonal matrices built as
	// products of Householder reflections.
	for i := min(m, n) - 1; i >= 0; i-- {
		if i < m-1 {
			// Draw a random reflection of length m-i into
			// work[:m-i]; work[0] holds the implicit leading 1 and
			// tau is its scalar factor (0 means identity).
			for j := 0; j < m-i; j++ {
				work[j] = rnd.NormFloat64()
			}
			wn := bi.Dnrm2(m-i, work[:m-i], 1)
			wa := math.Copysign(wn, work[0])
			var tau float64
			if wn != 0 {
				wb := work[0] + wa
				bi.Dscal(m-i-1, 1/wb, work[1:m-i], 1)
				work[0] = 1
				tau = wb / wa
			}
			// Multiply A[i:m,i:n] by random reflection from the left.
			bi.Dgemv(blas.Trans, m-i, n-i,
				1, a[i*lda+i:], lda, work[:m-i], 1,
				0, work[m:m+n-i], 1)
			bi.Dger(m-i, n-i,
				-tau, work[:m-i], 1, work[m:m+n-i], 1,
				a[i*lda+i:], lda)
		}
		if i < n-1 {
			// Same construction for a reflection of length n-i,
			// applied from the right.
			for j := 0; j < n-i; j++ {
				work[j] = rnd.NormFloat64()
			}
			wn := bi.Dnrm2(n-i, work[:n-i], 1)
			wa := math.Copysign(wn, work[0])
			var tau float64
			if wn != 0 {
				wb := work[0] + wa
				bi.Dscal(n-i-1, 1/wb, work[1:n-i], 1)
				work[0] = 1
				tau = wb / wa
			}
			// Multiply A[i:m,i:n] by random reflection from the right.
			bi.Dgemv(blas.NoTrans, m-i, n-i,
				1, a[i*lda+i:], lda, work[:n-i], 1,
				0, work[n:n+m-i], 1)
			bi.Dger(m-i, n-i,
				-tau, work[n:n+m-i], 1, work[:n-i], 1,
				a[i*lda+i:], lda)
		}
	}
	// TODO(vladimir-ch): Reduce number of subdiagonals to kl and number of
	// superdiagonals to ku.
}
// dlarnv fills dst with random numbers from a uniform or normal distribution
// specified by dist:
// dist=1: uniform(0,1),
// dist=2: uniform(-1,1),
// dist=3: normal(0,1).
// For other values of dist dlarnv will panic.
func dlarnv(dst []float64, dist int, rnd *rand.Rand) {
	// Select the sampler for the requested distribution, then fill dst.
	// The per-element RNG call sequence is identical for every dist.
	var sample func() float64
	switch dist {
	case 1:
		sample = rnd.Float64 // uniform (0,1)
	case 2:
		sample = func() float64 { return 2*rnd.Float64() - 1 } // uniform (-1,1)
	case 3:
		sample = rnd.NormFloat64 // standard normal
	default:
		panic("testlapack: invalid dist")
	}
	for i := range dst {
		dst[i] = sample()
	}
}
// dlattr generates an n×n triangular test matrix A with its properties uniquely
// determined by imat and uplo, and returns whether A has unit diagonal. If diag
// is blas.Unit, the diagonal elements are set to NaN so that any accidental use
// of the diagonal is detected.
//
// trans specifies whether the matrix A or its transpose will be used.
//
// If imat is greater than 10, dlattr also generates the right hand side of the
// linear system A*x=b, or A^T*x=b. Valid values of imat are 7, and all between 11
// and 19, inclusive.
//
// b must have length n, and work must have length 3*n, otherwise dlattr will
// panic.
func dlattr(imat int, uplo blas.Uplo, trans blas.Transpose, n int, a []float64, lda int, b, work []float64, rnd *rand.Rand) (diag blas.Diag) {
	checkMatrix(n, n, a, lda)
	if len(b) != n {
		panic("testlapack: bad length of b")
	}
	if len(work) < 3*n {
		panic("testlapack: insufficient length of work")
	}
	if uplo != blas.Upper && uplo != blas.Lower {
		panic("testlapack: bad uplo")
	}
	if trans != blas.Trans && trans != blas.NoTrans {
		panic("testlapack: bad trans")
	}
	if n == 0 {
		return blas.NonUnit
	}
	// Machine constants used to construct extreme-scale test matrices.
	ulp := dlamchE * dlamchB
	smlnum := dlamchS
	bignum := (1 - ulp) / smlnum
	bi := blas64.Implementation()
	switch imat {
	default:
		// TODO(vladimir-ch): Implement the remaining cases.
		panic("testlapack: invalid or unimplemented imat")
	case 7:
		// Identity matrix. The diagonal is set to NaN.
		diag = blas.Unit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				a[i*lda+i] = math.NaN()
				for j := i + 1; j < n; j++ {
					a[i*lda+j] = 0
				}
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				for j := 0; j < i; j++ {
					a[i*lda+j] = 0
				}
				a[i*lda+i] = math.NaN()
			}
		}
	case 11:
		// Generate a triangular matrix with elements between -1 and 1,
		// give the diagonal norm 2 to make it well-conditioned, and
		// make the right hand side large so that it requires scaling.
		diag = blas.NonUnit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n-1; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
			}
		case blas.Lower:
			for i := 1; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
			}
		}
		for i := 0; i < n; i++ {
			a[i*lda+i] = math.Copysign(2, a[i*lda+i])
		}
		// Set the right hand side so that the largest value is bignum.
		dlarnv(b, 2, rnd)
		imax := bi.Idamax(n, b, 1)
		bscal := bignum / math.Max(1, b[imax])
		bi.Dscal(n, bscal, b, 1)
	case 12:
		// Make the first diagonal element in the solve small to cause
		// immediate overflow when dividing by T[j,j]. The off-diagonal
		// elements are small (cnorm[j] < 1).
		diag = blas.NonUnit
		tscal := 1 / math.Max(1, float64(n-1))
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
				bi.Dscal(n-i-1, tscal, a[i*lda+i+1:], 1)
				a[i*lda+i] = math.Copysign(1, a[i*lda+i])
			}
			a[(n-1)*lda+n-1] *= smlnum
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
				bi.Dscal(i, tscal, a[i*lda:], 1)
				a[i*lda+i] = math.Copysign(1, a[i*lda+i])
			}
			a[0] *= smlnum
		}
		dlarnv(b, 2, rnd)
	case 13:
		// Make the first diagonal element in the solve small to cause
		// immediate overflow when dividing by T[j,j]. The off-diagonal
		// elements are O(1) (cnorm[j] > 1).
		diag = blas.NonUnit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
				a[i*lda+i] = math.Copysign(1, a[i*lda+i])
			}
			a[(n-1)*lda+n-1] *= smlnum
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
				a[i*lda+i] = math.Copysign(1, a[i*lda+i])
			}
			a[0] *= smlnum
		}
		dlarnv(b, 2, rnd)
	case 14:
		// T is diagonal with small numbers on the diagonal to
		// make the growth factor underflow, but a small right hand side
		// chosen so that the solution does not overflow.
		diag = blas.NonUnit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				for j := i + 1; j < n; j++ {
					a[i*lda+j] = 0
				}
				// Alternate smlnum and 1 on the diagonal in
				// pairs, counted from the last row.
				if (n-1-i)&0x2 == 0 {
					a[i*lda+i] = smlnum
				} else {
					a[i*lda+i] = 1
				}
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				for j := 0; j < i; j++ {
					a[i*lda+j] = 0
				}
				if i&0x2 == 0 {
					a[i*lda+i] = smlnum
				} else {
					a[i*lda+i] = 1
				}
			}
		}
		// Set the right hand side alternately zero and small.
		switch uplo {
		case blas.Upper:
			b[0] = 0
			for i := n - 1; i > 0; i -= 2 {
				b[i] = 0
				b[i-1] = smlnum
			}
		case blas.Lower:
			for i := 0; i < n-1; i += 2 {
				b[i] = 0
				b[i+1] = smlnum
			}
			b[n-1] = 0
		}
	case 15:
		// Make the diagonal elements small to cause gradual overflow
		// when dividing by T[j,j]. To control the amount of scaling
		// needed, the matrix is bidiagonal.
		diag = blas.NonUnit
		texp := 1 / math.Max(1, float64(n-1))
		tscal := math.Pow(smlnum, texp)
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				a[i*lda+i] = tscal
				if i < n-1 {
					a[i*lda+i+1] = -1
				}
				for j := i + 2; j < n; j++ {
					a[i*lda+j] = 0
				}
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				for j := 0; j < i-1; j++ {
					a[i*lda+j] = 0
				}
				if i > 0 {
					a[i*lda+i-1] = -1
				}
				a[i*lda+i] = tscal
			}
		}
		dlarnv(b, 2, rnd)
	case 16:
		// One zero diagonal element.
		diag = blas.NonUnit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
				a[i*lda+i] = math.Copysign(2, a[i*lda+i])
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
				a[i*lda+i] = math.Copysign(2, a[i*lda+i])
			}
		}
		// Zero out the middle diagonal element.
		iy := n / 2
		a[iy*lda+iy] = 0
		dlarnv(b, 2, rnd)
		bi.Dscal(n, 2, b, 1)
	case 17:
		// Make the offdiagonal elements large to cause overflow when
		// adding a column of T. In the non-transposed case, the matrix
		// is constructed to cause overflow when adding a column in
		// every other step.
		diag = blas.NonUnit
		tscal := (1 - ulp) / (dlamchS / ulp)
		texp := 1.0
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				for j := i; j < n; j++ {
					a[i*lda+j] = 0
				}
			}
			// Fill the first row and the diagonal in pairs of
			// columns, doubling the right-hand-side magnitude at
			// every step.
			for j := n - 1; j >= 1; j -= 2 {
				a[j] = -tscal / float64(n+1)
				a[j*lda+j] = 1
				b[j] = texp * (1 - ulp)
				a[j-1] = -tscal / float64(n+1) / float64(n+2)
				a[(j-1)*lda+j-1] = 1
				b[j-1] = texp * float64(n*n+n-1)
				texp *= 2
			}
			b[0] = float64(n+1) / float64(n+2) * tscal
		case blas.Lower:
			for i := 0; i < n; i++ {
				for j := 0; j <= i; j++ {
					a[i*lda+j] = 0
				}
			}
			for j := 0; j < n-1; j += 2 {
				a[(n-1)*lda+j] = -tscal / float64(n+1)
				a[j*lda+j] = 1
				b[j] = texp * (1 - ulp)
				a[(n-1)*lda+j+1] = -tscal / float64(n+1) / float64(n+2)
				a[(j+1)*lda+j+1] = 1
				b[j+1] = texp * float64(n*n+n-1)
				texp *= 2
			}
			b[n-1] = float64(n+1) / float64(n+2) * tscal
		}
	case 18:
		// Generate a unit triangular matrix with elements between -1
		// and 1, and make the right hand side large so that it requires
		// scaling. The diagonal is set to NaN.
		diag = blas.Unit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				a[i*lda+i] = math.NaN()
				dlarnv(a[i*lda+i+1:i*lda+n], 2, rnd)
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i], 2, rnd)
				a[i*lda+i] = math.NaN()
			}
		}
		// Set the right hand side so that the largest value is bignum.
		dlarnv(b, 2, rnd)
		iy := bi.Idamax(n, b, 1)
		bnorm := math.Abs(b[iy])
		bscal := bignum / math.Max(1, bnorm)
		bi.Dscal(n, bscal, b, 1)
	case 19:
		// Generate a triangular matrix with elements between
		// bignum/(n-1) and bignum so that at least one of the column
		// norms will exceed bignum.
		// Dlatrs cannot handle this case for (typically) n>5.
		diag = blas.NonUnit
		tleft := bignum / math.Max(1, float64(n-1))
		tscal := bignum * (float64(n-1) / math.Max(1, float64(n)))
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
				for j := i; j < n; j++ {
					aij := a[i*lda+j]
					a[i*lda+j] = math.Copysign(tleft, aij) + tscal*aij
				}
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
				for j := 0; j <= i; j++ {
					aij := a[i*lda+j]
					a[i*lda+j] = math.Copysign(tleft, aij) + tscal*aij
				}
			}
		}
		dlarnv(b, 2, rnd)
		bi.Dscal(n, 2, b, 1)
	}
	// Flip the matrix if the transpose will be used. The anti-transpose
	// swap keeps the triangular structure in the same uplo half.
	if trans == blas.Trans {
		switch uplo {
		case blas.Upper:
			for j := 0; j < n/2; j++ {
				bi.Dswap(n-2*j-1, a[j*lda+j:], 1, a[(j+1)*lda+n-j-1:], -lda)
			}
		case blas.Lower:
			for j := 0; j < n/2; j++ {
				bi.Dswap(n-2*j-1, a[j*lda+j:], lda, a[(n-j-1)*lda+j+1:], -1)
			}
		}
	}
	return diag
}
func checkMatrix(m, n int, a []float64, lda int) {
if m < 0 {
panic("testlapack: m < 0")
}
if n < 0 {
panic("testlapack: n < 0")
}
if lda < max(1, n) {
panic("testlapack: lda < max(1, n)")
}
if len(a) < (m-1)*lda+n {
pa | randomOrthogonal returns an n×n random orthogonal matrix.
func randomOrthogonal(n int, rnd *rand.Rand) blas64.General {
	// Start from the identity and accumulate n-1 Householder reflections,
	// mimicking a QR factorization of a random Gaussian matrix.
	q := eye(n, n)
	x := make([]float64, n)
	v := make([]float64, n)
	for j := 0; j < n-1; j++ {
		// x represents the j-th column of a random matrix.
		for i := 0; i < j; i++ {
			x[i] = 0
		}
		for i := j; i < n; i++ {
			x[i] = rnd.NormFloat64()
		}
		// Compute v that represents the elementary reflector that
		// annihilates the subdiagonal elements of x.
		reflector(v, x, j)
		// Compute Q * H_j and store the result into Q.
		applyReflector(q, q, v)
	}
	return q
}
// reflector generates a Householder reflector v that zeros out subdiagonal
// entries in the j-th column of a matrix.
func reflector(v, col []float64, j int) {
	n := len(col)
	switch {
	case len(v) != n:
		panic("slice length mismatch")
	case j < 0 || n <= j:
		panic("invalid column index")
	}
	// Start from the zero vector.
	for i := range v {
		v[i] = 0
	}
	// Nothing below the diagonal to annihilate.
	if j == n-1 {
		return
	}
	norm := floats.Norm(col[j:], 2)
	if norm == 0 {
		// Zero column: leave v zero so the reflection is the identity.
		return
	}
	// Shift the pivot away from zero to avoid cancellation, copy the tail
	// of the column, and normalize the reflector to unit length.
	v[j] = col[j] + math.Copysign(norm, col[j])
	copy(v[j+1:], col[j+1:])
	floats.Scale(1/floats.Norm(v[j:], 2), v[j:])
}
// applyReflector computes Q*H where H is a Householder matrix represented by
// the Householder reflector v.
// applyReflector computes Q*H, where H = I - 2*v*vᵀ/(vᵀ*v) is the Householder
// matrix represented by the reflector v, and stores the result into qh.
// qh and q may share backing storage.
func applyReflector(qh blas64.General, q blas64.General, v []float64) {
	n := len(v)
	if qh.Rows != n || qh.Cols != n {
		panic("bad size of qh")
	}
	if q.Rows != n || q.Cols != n {
		panic("bad size of q")
	}

	// qv = Q * v.
	qv := make([]float64, n)
	blas64.Gemv(blas.NoTrans, 1, q, blas64.Vector{Data: v, Inc: 1}, 0, blas64.Vector{Data: qv, Inc: 1})

	// norm2 = vᵀ * v.
	var norm2 float64
	for _, vi := range v {
		norm2 += vi * vi
	}
	if norm2 == 0 {
		// v is zero, so H is the identity and Q*H = Q. The previous
		// implementation divided by norm2 here, producing Inf/NaN.
		for i := 0; i < n; i++ {
			copy(qh.Data[i*qh.Stride:i*qh.Stride+n], q.Data[i*q.Stride:i*q.Stride+n])
		}
		return
	}

	// Q*H = Q - (2/(vᵀ*v)) * (Q*v) * vᵀ. The previous implementation scaled
	// the whole result by 1/(vᵀ*v), which is correct only for unit-norm v;
	// this form is exact for any nonzero v (and identical for unit v, the
	// vectors produced by reflector).
	scale := 2 / norm2
	for i := 0; i < n; i++ {
		for j := 0; j < n; j++ {
			qh.Data[i*qh.Stride+j] = q.Data[i*q.Stride+j] - scale*qv[i]*v[j]
		}
	}
}
| nic("testlapack: insufficient matrix slice length")
}
}
// | conditional_block |
matgen.go | // Copyright ©2017 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package testlapack
import (
"math"
"golang.org/x/exp/rand"
"gonum.org/v1/gonum/blas"
"gonum.org/v1/gonum/blas/blas64"
"gonum.org/v1/gonum/floats"
)
// Dlatm1 computes the entries of dst as specified by mode, cond and rsign.
//
// mode describes how dst will be computed:
// |mode| == 1: dst[0] = 1 and dst[1:n] = 1/cond
// |mode| == 2: dst[:n-1] = 1 and dst[n-1] = 1/cond
// |mode| == 3: dst[i] = cond^{-i/(n-1)}, i=0,...,n-1
// |mode| == 4: dst[i] = 1 - i*(1-1/cond)/(n-1)
// |mode| == 5: dst[i] = random number in the range (1/cond, 1) such that
// their logarithms are uniformly distributed
// |mode| == 6: dst[i] = random number from the distribution given by dist
// If mode is negative, the order of the elements of dst will be reversed.
// For other values of mode Dlatm1 will panic.
//
// If rsign is true and mode is not ±6, each entry of dst will be multiplied by 1
// or -1 with probability 0.5
//
// dist specifies the type of distribution to be used when mode == ±6:
// dist == 1: Uniform[0,1)
// dist == 2: Uniform[-1,1)
// dist == 3: Normal(0,1)
// For other values of dist Dlatm1 will panic.
//
// rnd is used as a source of random numbers.
func Dlatm1(dst []float64, mode int, cond float64, rsign bool, dist int, rnd *rand.Rand) {
	// amode is the magnitude of mode; the sign of mode only controls the
	// final reversal of dst.
	amode := mode
	if amode < 0 {
		amode = -amode
	}
	if amode < 1 || 6 < amode {
		panic("testlapack: invalid mode")
	}
	if cond < 1 {
		panic("testlapack: cond < 1")
	}
	if amode == 6 && (dist < 1 || 3 < dist) {
		panic("testlapack: invalid dist")
	}
	n := len(dst)
	if n == 0 {
		// Quick return for an empty destination.
		return
	}
	switch amode {
	case 1:
		// Leading entry 1, all remaining entries 1/cond.
		dst[0] = 1
		for i := 1; i < n; i++ {
			dst[i] = 1 / cond
		}
	case 2:
		// All entries 1 except the last, which is 1/cond.
		for i := 0; i < n-1; i++ {
			dst[i] = 1
		}
		dst[n-1] = 1 / cond
	case 3:
		// Geometric decay: dst[i] = cond^{-i/(n-1)}.
		dst[0] = 1
		if n > 1 {
			alpha := math.Pow(cond, -1/float64(n-1))
			for i := 1; i < n; i++ {
				dst[i] = math.Pow(alpha, float64(i))
			}
		}
	case 4:
		// Arithmetic decay from 1 down to 1/cond:
		// dst[i] = 1 - i*(1-1/cond)/(n-1).
		dst[0] = 1
		if n > 1 {
			condInv := 1 / cond
			alpha := (1 - condInv) / float64(n-1)
			for i := 1; i < n; i++ {
				dst[i] = float64(n-i-1)*alpha + condInv
			}
		}
	case 5:
		// Random entries in (1/cond, 1) whose logarithms are uniform.
		alpha := math.Log(1 / cond)
		for i := range dst {
			dst[i] = math.Exp(alpha * rnd.Float64())
		}
	case 6:
		// Random entries from the distribution selected by dist.
		switch dist {
		case 1:
			// Uniform [0,1).
			for i := range dst {
				dst[i] = rnd.Float64()
			}
		case 2:
			// Uniform [-1,1).
			for i := range dst {
				dst[i] = 2*rnd.Float64() - 1
			}
		case 3:
			// Standard normal.
			for i := range dst {
				dst[i] = rnd.NormFloat64()
			}
		}
	}
	// Attach random signs unless mode 6 already produced signed entries.
	if rsign && amode != 6 {
		for i, v := range dst {
			if rnd.Float64() < 0.5 {
				dst[i] = -v
			}
		}
	}
	// A negative mode reverses the order of the computed entries.
	if mode < 0 {
		for i := 0; i < n/2; i++ {
			dst[i], dst[n-i-1] = dst[n-i-1], dst[i]
		}
	}
}
// Dlagsy generates an n×n symmetric matrix A, by pre- and post- multiplying a
// real diagonal matrix D with a random orthogonal matrix:
// A = U * D * U^T.
//
// work must have length at least 2*n, otherwise Dlagsy will panic.
//
// The parameter k is unused but it must satisfy
// 0 <= k <= n-1.
func Dlagsy(n, k int, d []float64, a []float64, lda int, rnd *rand.Rand, work []float64) {
	checkMatrix(n, n, a, lda)
	if k < 0 || max(0, n-1) < k {
		panic("testlapack: invalid value of k")
	}
	if len(d) != n {
		panic("testlapack: bad length of d")
	}
	if len(work) < 2*n {
		panic("testlapack: insufficient work length")
	}
	// Initialize lower triangle of A to diagonal matrix.
	for i := 1; i < n; i++ {
		for j := 0; j < i; j++ {
			a[i*lda+j] = 0
		}
	}
	for i := 0; i < n; i++ {
		a[i*lda+i] = d[i]
	}
	bi := blas64.Implementation()
	// Generate lower triangle of symmetric matrix by applying a sequence of
	// random Householder reflections U from both sides, A = U*D*Uᵀ.
	for i := n - 2; i >= 0; i-- {
		// Draw a random Gaussian vector u of length n-i into work[:n-i].
		for j := 0; j < n-i; j++ {
			work[j] = rnd.NormFloat64()
		}
		wn := bi.Dnrm2(n-i, work[:n-i], 1)
		wa := math.Copysign(wn, work[0])
		var tau float64
		if wn != 0 {
			// Form the Householder vector (implicit leading 1 in
			// work[0]) and its scalar factor tau. If wn == 0 then
			// tau stays 0 and the reflection is the identity.
			wb := work[0] + wa
			bi.Dscal(n-i-1, 1/wb, work[1:n-i], 1)
			work[0] = 1
			tau = wb / wa
		}
		// Apply random reflection to A[i:n,i:n] from the left and the
		// right.
		//
		// Compute y := tau * A * u, stored in work[n:2n-i].
		bi.Dsymv(blas.Lower, n-i, tau, a[i*lda+i:], lda, work[:n-i], 1, 0, work[n:2*n-i], 1)
		// Compute v := y - 1/2 * tau * ( y, u ) * u.
		alpha := -0.5 * tau * bi.Ddot(n-i, work[n:2*n-i], 1, work[:n-i], 1)
		bi.Daxpy(n-i, alpha, work[:n-i], 1, work[n:2*n-i], 1)
		// Apply the transformation as a rank-2 update to A[i:n,i:n].
		bi.Dsyr2(blas.Lower, n-i, -1, work[:n-i], 1, work[n:2*n-i], 1, a[i*lda+i:], lda)
	}
	// Store full symmetric matrix by mirroring the lower triangle.
	for i := 1; i < n; i++ {
		for j := 0; j < i; j++ {
			a[j*lda+i] = a[i*lda+j]
		}
	}
}
// Dlagge generates a real general m×n matrix A, by pre- and post-multiplying
// a real diagonal matrix D with random orthogonal matrices:
// A = U*D*V.
//
// d must have length min(m,n), and work must have length m+n, otherwise Dlagge
// will panic.
//
// The parameters ku and kl are unused but they must satisfy
// 0 <= kl <= m-1,
// 0 <= ku <= n-1.
func Dlagge(m, n, kl, ku int, d []float64, a []float64, lda int, rnd *rand.Rand, work []float64) {
checkMatrix(m, n, a, lda)
if kl < 0 || max(0, m-1) < kl {
panic("testlapack: invalid value of kl")
}
if ku < 0 || max(0, n-1) < ku {
panic("testlapack: invalid value of ku")
}
if len(d) != min(m, n) {
panic("testlapack: bad length of d")
}
if len(work) < m+n {
panic("testlapack: insufficient work length")
}
// Initialize A to diagonal matrix.
for i := 0; i < m; i++ {
for j := 0; j < n; j++ {
a[i*lda+j] = 0
}
}
for i := 0; i < min(m, n); i++ {
a[i*lda+i] = d[i]
}
// Quick exit if the user wants a diagonal matrix.
// if kl == 0 && ku == 0 {
// return
// }
bi := blas64.Implementation()
// Pre- and post-multiply A by random orthogonal matrices.
for i := min(m, n) - 1; i >= 0; i-- {
if i < m-1 {
for j := 0; j < m-i; j++ {
work[j] = rnd.NormFloat64()
}
wn := bi.Dnrm2(m-i, work[:m-i], 1)
wa := math.Copysign(wn, work[0])
var tau float64
if wn != 0 {
wb := work[0] + wa
bi.Dscal(m-i-1, 1/wb, work[1:m-i], 1)
work[0] = 1
tau = wb / wa
}
// Multiply A[i:m,i:n] by random reflection from the left.
bi.Dgemv(blas.Trans, m-i, n-i,
1, a[i*lda+i:], lda, work[:m-i], 1,
0, work[m:m+n-i], 1)
bi.Dger(m-i, n-i, | }
if i < n-1 {
for j := 0; j < n-i; j++ {
work[j] = rnd.NormFloat64()
}
wn := bi.Dnrm2(n-i, work[:n-i], 1)
wa := math.Copysign(wn, work[0])
var tau float64
if wn != 0 {
wb := work[0] + wa
bi.Dscal(n-i-1, 1/wb, work[1:n-i], 1)
work[0] = 1
tau = wb / wa
}
// Multiply A[i:m,i:n] by random reflection from the right.
bi.Dgemv(blas.NoTrans, m-i, n-i,
1, a[i*lda+i:], lda, work[:n-i], 1,
0, work[n:n+m-i], 1)
bi.Dger(m-i, n-i,
-tau, work[n:n+m-i], 1, work[:n-i], 1,
a[i*lda+i:], lda)
}
}
// TODO(vladimir-ch): Reduce number of subdiagonals to kl and number of
// superdiagonals to ku.
}
// dlarnv fills dst with random numbers from a uniform or normal distribution
// specified by dist:
// dist=1: uniform(0,1),
// dist=2: uniform(-1,1),
// dist=3: normal(0,1).
// For other values of dist dlarnv will panic.
func dlarnv(dst []float64, dist int, rnd *rand.Rand) {
	// Select the sampler for the requested distribution, then fill dst.
	// The per-element RNG call sequence is identical for every dist.
	var sample func() float64
	switch dist {
	case 1:
		sample = rnd.Float64 // uniform (0,1)
	case 2:
		sample = func() float64 { return 2*rnd.Float64() - 1 } // uniform (-1,1)
	case 3:
		sample = rnd.NormFloat64 // standard normal
	default:
		panic("testlapack: invalid dist")
	}
	for i := range dst {
		dst[i] = sample()
	}
}
// dlattr generates an n×n triangular test matrix A with its properties uniquely
// determined by imat and uplo, and returns whether A has unit diagonal. If diag
// is blas.Unit, the diagonal elements are set to NaN so that any accidental use
// of the diagonal is detected.
//
// trans specifies whether the matrix A or its transpose will be used.
//
// If imat is greater than 10, dlattr also generates the right hand side of the
// linear system A*x=b, or A^T*x=b. Valid values of imat are 7, and all between 11
// and 19, inclusive.
//
// b must have length n, and work must have length 3*n, otherwise dlattr will
// panic.
func dlattr(imat int, uplo blas.Uplo, trans blas.Transpose, n int, a []float64, lda int, b, work []float64, rnd *rand.Rand) (diag blas.Diag) {
	checkMatrix(n, n, a, lda)
	if len(b) != n {
		panic("testlapack: bad length of b")
	}
	if len(work) < 3*n {
		panic("testlapack: insufficient length of work")
	}
	if uplo != blas.Upper && uplo != blas.Lower {
		panic("testlapack: bad uplo")
	}
	if trans != blas.Trans && trans != blas.NoTrans {
		panic("testlapack: bad trans")
	}
	if n == 0 {
		return blas.NonUnit
	}
	// Machine constants used to construct extreme-scale test matrices.
	ulp := dlamchE * dlamchB
	smlnum := dlamchS
	bignum := (1 - ulp) / smlnum
	bi := blas64.Implementation()
	switch imat {
	default:
		// TODO(vladimir-ch): Implement the remaining cases.
		panic("testlapack: invalid or unimplemented imat")
	case 7:
		// Identity matrix. The diagonal is set to NaN.
		diag = blas.Unit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				a[i*lda+i] = math.NaN()
				for j := i + 1; j < n; j++ {
					a[i*lda+j] = 0
				}
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				for j := 0; j < i; j++ {
					a[i*lda+j] = 0
				}
				a[i*lda+i] = math.NaN()
			}
		}
	case 11:
		// Generate a triangular matrix with elements between -1 and 1,
		// give the diagonal norm 2 to make it well-conditioned, and
		// make the right hand side large so that it requires scaling.
		diag = blas.NonUnit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n-1; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
			}
		case blas.Lower:
			for i := 1; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
			}
		}
		for i := 0; i < n; i++ {
			a[i*lda+i] = math.Copysign(2, a[i*lda+i])
		}
		// Set the right hand side so that the largest value is bignum.
		dlarnv(b, 2, rnd)
		imax := bi.Idamax(n, b, 1)
		bscal := bignum / math.Max(1, b[imax])
		bi.Dscal(n, bscal, b, 1)
	case 12:
		// Make the first diagonal element in the solve small to cause
		// immediate overflow when dividing by T[j,j]. The off-diagonal
		// elements are small (cnorm[j] < 1).
		diag = blas.NonUnit
		tscal := 1 / math.Max(1, float64(n-1))
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
				bi.Dscal(n-i-1, tscal, a[i*lda+i+1:], 1)
				a[i*lda+i] = math.Copysign(1, a[i*lda+i])
			}
			a[(n-1)*lda+n-1] *= smlnum
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
				bi.Dscal(i, tscal, a[i*lda:], 1)
				a[i*lda+i] = math.Copysign(1, a[i*lda+i])
			}
			a[0] *= smlnum
		}
		dlarnv(b, 2, rnd)
	case 13:
		// Make the first diagonal element in the solve small to cause
		// immediate overflow when dividing by T[j,j]. The off-diagonal
		// elements are O(1) (cnorm[j] > 1).
		diag = blas.NonUnit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
				a[i*lda+i] = math.Copysign(1, a[i*lda+i])
			}
			a[(n-1)*lda+n-1] *= smlnum
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
				a[i*lda+i] = math.Copysign(1, a[i*lda+i])
			}
			a[0] *= smlnum
		}
		dlarnv(b, 2, rnd)
	case 14:
		// T is diagonal with small numbers on the diagonal to
		// make the growth factor underflow, but a small right hand side
		// chosen so that the solution does not overflow.
		diag = blas.NonUnit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				for j := i + 1; j < n; j++ {
					a[i*lda+j] = 0
				}
				// Alternate smlnum and 1 on the diagonal in
				// pairs, counted from the last row.
				if (n-1-i)&0x2 == 0 {
					a[i*lda+i] = smlnum
				} else {
					a[i*lda+i] = 1
				}
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				for j := 0; j < i; j++ {
					a[i*lda+j] = 0
				}
				if i&0x2 == 0 {
					a[i*lda+i] = smlnum
				} else {
					a[i*lda+i] = 1
				}
			}
		}
		// Set the right hand side alternately zero and small.
		switch uplo {
		case blas.Upper:
			b[0] = 0
			for i := n - 1; i > 0; i -= 2 {
				b[i] = 0
				b[i-1] = smlnum
			}
		case blas.Lower:
			for i := 0; i < n-1; i += 2 {
				b[i] = 0
				b[i+1] = smlnum
			}
			b[n-1] = 0
		}
	case 15:
		// Make the diagonal elements small to cause gradual overflow
		// when dividing by T[j,j]. To control the amount of scaling
		// needed, the matrix is bidiagonal.
		diag = blas.NonUnit
		texp := 1 / math.Max(1, float64(n-1))
		tscal := math.Pow(smlnum, texp)
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				a[i*lda+i] = tscal
				if i < n-1 {
					a[i*lda+i+1] = -1
				}
				for j := i + 2; j < n; j++ {
					a[i*lda+j] = 0
				}
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				for j := 0; j < i-1; j++ {
					a[i*lda+j] = 0
				}
				if i > 0 {
					a[i*lda+i-1] = -1
				}
				a[i*lda+i] = tscal
			}
		}
		dlarnv(b, 2, rnd)
	case 16:
		// One zero diagonal element.
		diag = blas.NonUnit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
				a[i*lda+i] = math.Copysign(2, a[i*lda+i])
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
				a[i*lda+i] = math.Copysign(2, a[i*lda+i])
			}
		}
		// Zero out the middle diagonal element.
		iy := n / 2
		a[iy*lda+iy] = 0
		dlarnv(b, 2, rnd)
		bi.Dscal(n, 2, b, 1)
	case 17:
		// Make the offdiagonal elements large to cause overflow when
		// adding a column of T. In the non-transposed case, the matrix
		// is constructed to cause overflow when adding a column in
		// every other step.
		diag = blas.NonUnit
		tscal := (1 - ulp) / (dlamchS / ulp)
		texp := 1.0
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				for j := i; j < n; j++ {
					a[i*lda+j] = 0
				}
			}
			// Fill the first row and the diagonal in pairs of
			// columns, doubling the right-hand-side magnitude at
			// every step.
			for j := n - 1; j >= 1; j -= 2 {
				a[j] = -tscal / float64(n+1)
				a[j*lda+j] = 1
				b[j] = texp * (1 - ulp)
				a[j-1] = -tscal / float64(n+1) / float64(n+2)
				a[(j-1)*lda+j-1] = 1
				b[j-1] = texp * float64(n*n+n-1)
				texp *= 2
			}
			b[0] = float64(n+1) / float64(n+2) * tscal
		case blas.Lower:
			for i := 0; i < n; i++ {
				for j := 0; j <= i; j++ {
					a[i*lda+j] = 0
				}
			}
			for j := 0; j < n-1; j += 2 {
				a[(n-1)*lda+j] = -tscal / float64(n+1)
				a[j*lda+j] = 1
				b[j] = texp * (1 - ulp)
				a[(n-1)*lda+j+1] = -tscal / float64(n+1) / float64(n+2)
				a[(j+1)*lda+j+1] = 1
				b[j+1] = texp * float64(n*n+n-1)
				texp *= 2
			}
			b[n-1] = float64(n+1) / float64(n+2) * tscal
		}
	case 18:
		// Generate a unit triangular matrix with elements between -1
		// and 1, and make the right hand side large so that it requires
		// scaling. The diagonal is set to NaN.
		diag = blas.Unit
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				a[i*lda+i] = math.NaN()
				dlarnv(a[i*lda+i+1:i*lda+n], 2, rnd)
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i], 2, rnd)
				a[i*lda+i] = math.NaN()
			}
		}
		// Set the right hand side so that the largest value is bignum.
		dlarnv(b, 2, rnd)
		iy := bi.Idamax(n, b, 1)
		bnorm := math.Abs(b[iy])
		bscal := bignum / math.Max(1, bnorm)
		bi.Dscal(n, bscal, b, 1)
	case 19:
		// Generate a triangular matrix with elements between
		// bignum/(n-1) and bignum so that at least one of the column
		// norms will exceed bignum.
		// Dlatrs cannot handle this case for (typically) n>5.
		diag = blas.NonUnit
		tleft := bignum / math.Max(1, float64(n-1))
		tscal := bignum * (float64(n-1) / math.Max(1, float64(n)))
		switch uplo {
		case blas.Upper:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
				for j := i; j < n; j++ {
					aij := a[i*lda+j]
					a[i*lda+j] = math.Copysign(tleft, aij) + tscal*aij
				}
			}
		case blas.Lower:
			for i := 0; i < n; i++ {
				dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
				for j := 0; j <= i; j++ {
					aij := a[i*lda+j]
					a[i*lda+j] = math.Copysign(tleft, aij) + tscal*aij
				}
			}
		}
		dlarnv(b, 2, rnd)
		bi.Dscal(n, 2, b, 1)
	}
	// Flip the matrix if the transpose will be used. The anti-transpose
	// swap keeps the triangular structure in the same uplo half.
	if trans == blas.Trans {
		switch uplo {
		case blas.Upper:
			for j := 0; j < n/2; j++ {
				bi.Dswap(n-2*j-1, a[j*lda+j:], 1, a[(j+1)*lda+n-j-1:], -lda)
			}
		case blas.Lower:
			for j := 0; j < n/2; j++ {
				bi.Dswap(n-2*j-1, a[j*lda+j:], lda, a[(n-j-1)*lda+j+1:], -1)
			}
		}
	}
	return diag
}
func checkMatrix(m, n int, a []float64, lda int) {
if m < 0 {
panic("testlapack: m < 0")
}
if n < 0 {
panic("testlapack: n < 0")
}
if lda < max(1, n) {
panic("testlapack: lda < max(1, n)")
}
if len(a) < (m-1)*lda+n {
panic("testlapack: insufficient matrix slice length")
}
}
// randomOrthogonal returns an n×n random orthogonal matrix.
func randomOrthogonal(n int, rnd *rand.Rand) blas64.General {
q := eye(n, n)
x := make([]float64, n)
v := make([]float64, n)
for j := 0; j < n-1; j++ {
// x represents the j-th column of a random matrix.
for i := 0; i < j; i++ {
x[i] = 0
}
for i := j; i < n; i++ {
x[i] = rnd.NormFloat64()
}
// Compute v that represents the elementary reflector that
// annihilates the subdiagonal elements of x.
reflector(v, x, j)
// Compute Q * H_j and store the result into Q.
applyReflector(q, q, v)
}
return q
}
// reflector generates a Householder reflector v that zeros out subdiagonal
// entries in the j-th column of a matrix.
func reflector(v, col []float64, j int) {
n := len(col)
if len(v) != n {
panic("slice length mismatch")
}
if j < 0 || n <= j {
panic("invalid column index")
}
for i := range v {
v[i] = 0
}
if j == n-1 {
return
}
s := floats.Norm(col[j:], 2)
if s == 0 {
return
}
v[j] = col[j] + math.Copysign(s, col[j])
copy(v[j+1:], col[j+1:])
s = floats.Norm(v[j:], 2)
floats.Scale(1/s, v[j:])
}
// applyReflector computes Q*H where H is a Householder matrix represented by
// the Householder reflector v.
func applyReflector(qh blas64.General, q blas64.General, v []float64) {
n := len(v)
if qh.Rows != n || qh.Cols != n {
panic("bad size of qh")
}
if q.Rows != n || q.Cols != n {
panic("bad size of q")
}
qv := make([]float64, n)
blas64.Gemv(blas.NoTrans, 1, q, blas64.Vector{Data: v, Inc: 1}, 0, blas64.Vector{Data: qv, Inc: 1})
for i := 0; i < n; i++ {
for j := 0; j < n; j++ {
qh.Data[i*qh.Stride+j] = q.Data[i*q.Stride+j]
}
}
for i := 0; i < n; i++ {
for j := 0; j < n; j++ {
qh.Data[i*qh.Stride+j] -= 2 * qv[i] * v[j]
}
}
var norm2 float64
for _, vi := range v {
norm2 += vi * vi
}
norm2inv := 1 / norm2
for i := 0; i < n; i++ {
for j := 0; j < n; j++ {
qh.Data[i*qh.Stride+j] *= norm2inv
}
}
} | -tau, work[:m-i], 1, work[m:m+n-i], 1,
a[i*lda+i:], lda) | random_line_split |
matgen.go | // Copyright ©2017 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package testlapack
import (
"math"
"golang.org/x/exp/rand"
"gonum.org/v1/gonum/blas"
"gonum.org/v1/gonum/blas/blas64"
"gonum.org/v1/gonum/floats"
)
// Dlatm1 computes the entries of dst as specified by mode, cond and rsign.
//
// mode describes how dst will be computed:
// |mode| == 1: dst[0] = 1 and dst[1:n] = 1/cond
// |mode| == 2: dst[:n-1] = 1/cond and dst[n-1] = 1
// |mode| == 3: dst[i] = cond^{-i/(n-1)}, i=0,...,n-1
// |mode| == 4: dst[i] = 1 - i*(1-1/cond)/(n-1)
// |mode| == 5: dst[i] = random number in the range (1/cond, 1) such that
// their logarithms are uniformly distributed
// |mode| == 6: dst[i] = random number from the distribution given by dist
// If mode is negative, the order of the elements of dst will be reversed.
// For other values of mode Dlatm1 will panic.
//
// If rsign is true and mode is not ±6, each entry of dst will be multiplied by 1
// or -1 with probability 0.5
//
// dist specifies the type of distribution to be used when mode == ±6:
// dist == 1: Uniform[0,1)
// dist == 2: Uniform[-1,1)
// dist == 3: Normal(0,1)
// For other values of dist Dlatm1 will panic.
//
// rnd is used as a source of random numbers.
func Dlatm1(dst []float64, mode int, cond float64, rsign bool, dist int, rnd *rand.Rand) {
amode := mode
if amode < 0 {
amode = -amode
}
if amode < 1 || 6 < amode {
panic("testlapack: invalid mode")
}
if cond < 1 {
panic("testlapack: cond < 1")
}
if amode == 6 && (dist < 1 || 3 < dist) {
panic("testlapack: invalid dist")
}
n := len(dst)
if n == 0 {
return
}
switch amode {
case 1:
dst[0] = 1
for i := 1; i < n; i++ {
dst[i] = 1 / cond
}
case 2:
for i := 0; i < n-1; i++ {
dst[i] = 1
}
dst[n-1] = 1 / cond
case 3:
dst[0] = 1
if n > 1 {
alpha := math.Pow(cond, -1/float64(n-1))
for i := 1; i < n; i++ {
dst[i] = math.Pow(alpha, float64(i))
}
}
case 4:
dst[0] = 1
if n > 1 {
condInv := 1 / cond
alpha := (1 - condInv) / float64(n-1)
for i := 1; i < n; i++ {
dst[i] = float64(n-i-1)*alpha + condInv
}
}
case 5:
alpha := math.Log(1 / cond)
for i := range dst {
dst[i] = math.Exp(alpha * rnd.Float64())
}
case 6:
switch dist {
case 1:
for i := range dst {
dst[i] = rnd.Float64()
}
case 2:
for i := range dst {
dst[i] = 2*rnd.Float64() - 1
}
case 3:
for i := range dst {
dst[i] = rnd.NormFloat64()
}
}
}
if rsign && amode != 6 {
for i, v := range dst {
if rnd.Float64() < 0.5 {
dst[i] = -v
}
}
}
if mode < 0 {
for i := 0; i < n/2; i++ {
dst[i], dst[n-i-1] = dst[n-i-1], dst[i]
}
}
}
// Dlagsy generates an n×n symmetric matrix A, by pre- and post- multiplying a
// real diagonal matrix D with a random orthogonal matrix:
// A = U * D * U^T.
//
// work must have length at least 2*n, otherwise Dlagsy will panic.
//
// The parameter k is unused but it must satisfy
// 0 <= k <= n-1.
func Dlagsy(n, k int, d []float64, a []float64, lda int, rnd *rand.Rand, work []float64) {
checkMatrix(n, n, a, lda)
if k < 0 || max(0, n-1) < k {
panic("testlapack: invalid value of k")
}
if len(d) != n {
panic("testlapack: bad length of d")
}
if len(work) < 2*n {
panic("testlapack: insufficient work length")
}
// Initialize lower triangle of A to diagonal matrix.
for i := 1; i < n; i++ {
for j := 0; j < i; j++ {
a[i*lda+j] = 0
}
}
for i := 0; i < n; i++ {
a[i*lda+i] = d[i]
}
bi := blas64.Implementation()
// Generate lower triangle of symmetric matrix.
for i := n - 2; i >= 0; i-- {
for j := 0; j < n-i; j++ {
work[j] = rnd.NormFloat64()
}
wn := bi.Dnrm2(n-i, work[:n-i], 1)
wa := math.Copysign(wn, work[0])
var tau float64
if wn != 0 {
wb := work[0] + wa
bi.Dscal(n-i-1, 1/wb, work[1:n-i], 1)
work[0] = 1
tau = wb / wa
}
// Apply random reflection to A[i:n,i:n] from the left and the
// right.
//
// Compute y := tau * A * u.
bi.Dsymv(blas.Lower, n-i, tau, a[i*lda+i:], lda, work[:n-i], 1, 0, work[n:2*n-i], 1)
// Compute v := y - 1/2 * tau * ( y, u ) * u.
alpha := -0.5 * tau * bi.Ddot(n-i, work[n:2*n-i], 1, work[:n-i], 1)
bi.Daxpy(n-i, alpha, work[:n-i], 1, work[n:2*n-i], 1)
// Apply the transformation as a rank-2 update to A[i:n,i:n].
bi.Dsyr2(blas.Lower, n-i, -1, work[:n-i], 1, work[n:2*n-i], 1, a[i*lda+i:], lda)
}
// Store full symmetric matrix.
for i := 1; i < n; i++ {
for j := 0; j < i; j++ {
a[j*lda+i] = a[i*lda+j]
}
}
}
// Dlagge generates a real general m×n matrix A, by pre- and post-multiplying
// a real diagonal matrix D with random orthogonal matrices:
// A = U*D*V.
//
// d must have length min(m,n), and work must have length m+n, otherwise Dlagge
// will panic.
//
// The parameters ku and kl are unused but they must satisfy
// 0 <= kl <= m-1,
// 0 <= ku <= n-1.
func Dlagge(m, n, kl, ku int, d []float64, a []float64, lda int, rnd *rand.Rand, work []float64) {
checkMatrix(m, n, a, lda)
if kl < 0 || max(0, m-1) < kl {
panic("testlapack: invalid value of kl")
}
if ku < 0 || max(0, n-1) < ku {
panic("testlapack: invalid value of ku")
}
if len(d) != min(m, n) {
panic("testlapack: bad length of d")
}
if len(work) < m+n {
panic("testlapack: insufficient work length")
}
// Initialize A to diagonal matrix.
for i := 0; i < m; i++ {
for j := 0; j < n; j++ {
a[i*lda+j] = 0
}
}
for i := 0; i < min(m, n); i++ {
a[i*lda+i] = d[i]
}
// Quick exit if the user wants a diagonal matrix.
// if kl == 0 && ku == 0 {
// return
// }
bi := blas64.Implementation()
// Pre- and post-multiply A by random orthogonal matrices.
for i := min(m, n) - 1; i >= 0; i-- {
if i < m-1 {
for j := 0; j < m-i; j++ {
work[j] = rnd.NormFloat64()
}
wn := bi.Dnrm2(m-i, work[:m-i], 1)
wa := math.Copysign(wn, work[0])
var tau float64
if wn != 0 {
wb := work[0] + wa
bi.Dscal(m-i-1, 1/wb, work[1:m-i], 1)
work[0] = 1
tau = wb / wa
}
// Multiply A[i:m,i:n] by random reflection from the left.
bi.Dgemv(blas.Trans, m-i, n-i,
1, a[i*lda+i:], lda, work[:m-i], 1,
0, work[m:m+n-i], 1)
bi.Dger(m-i, n-i,
-tau, work[:m-i], 1, work[m:m+n-i], 1,
a[i*lda+i:], lda)
}
if i < n-1 {
for j := 0; j < n-i; j++ {
work[j] = rnd.NormFloat64()
}
wn := bi.Dnrm2(n-i, work[:n-i], 1)
wa := math.Copysign(wn, work[0])
var tau float64
if wn != 0 {
wb := work[0] + wa
bi.Dscal(n-i-1, 1/wb, work[1:n-i], 1)
work[0] = 1
tau = wb / wa
}
// Multiply A[i:m,i:n] by random reflection from the right.
bi.Dgemv(blas.NoTrans, m-i, n-i,
1, a[i*lda+i:], lda, work[:n-i], 1,
0, work[n:n+m-i], 1)
bi.Dger(m-i, n-i,
-tau, work[n:n+m-i], 1, work[:n-i], 1,
a[i*lda+i:], lda)
}
}
// TODO(vladimir-ch): Reduce number of subdiagonals to kl and number of
// superdiagonals to ku.
}
// dlarnv fills dst with random numbers from a uniform or normal distribution
// specified by dist:
// dist=1: uniform(0,1),
// dist=2: uniform(-1,1),
// dist=3: normal(0,1).
// For other values of dist dlarnv will panic.
func dlarnv(dst []float64, dist int, rnd *rand.Rand) {
sw | dlattr generates an n×n triangular test matrix A with its properties uniquely
// determined by imat and uplo, and returns whether A has unit diagonal. If diag
// is blas.Unit, the diagonal elements are set so that A[k,k]=k.
//
// trans specifies whether the matrix A or its transpose will be used.
//
// If imat is greater than 10, dlattr also generates the right hand side of the
// linear system A*x=b, or A^T*x=b. Valid values of imat are 7, and all between 11
// and 19, inclusive.
//
// b mush have length n, and work must have length 3*n, and dlattr will panic
// otherwise.
func dlattr(imat int, uplo blas.Uplo, trans blas.Transpose, n int, a []float64, lda int, b, work []float64, rnd *rand.Rand) (diag blas.Diag) {
checkMatrix(n, n, a, lda)
if len(b) != n {
panic("testlapack: bad length of b")
}
if len(work) < 3*n {
panic("testlapack: insufficient length of work")
}
if uplo != blas.Upper && uplo != blas.Lower {
panic("testlapack: bad uplo")
}
if trans != blas.Trans && trans != blas.NoTrans {
panic("testlapack: bad trans")
}
if n == 0 {
return blas.NonUnit
}
ulp := dlamchE * dlamchB
smlnum := dlamchS
bignum := (1 - ulp) / smlnum
bi := blas64.Implementation()
switch imat {
default:
// TODO(vladimir-ch): Implement the remaining cases.
panic("testlapack: invalid or unimplemented imat")
case 7:
// Identity matrix. The diagonal is set to NaN.
diag = blas.Unit
switch uplo {
case blas.Upper:
for i := 0; i < n; i++ {
a[i*lda+i] = math.NaN()
for j := i + 1; j < n; j++ {
a[i*lda+j] = 0
}
}
case blas.Lower:
for i := 0; i < n; i++ {
for j := 0; j < i; j++ {
a[i*lda+j] = 0
}
a[i*lda+i] = math.NaN()
}
}
case 11:
// Generate a triangular matrix with elements between -1 and 1,
// give the diagonal norm 2 to make it well-conditioned, and
// make the right hand side large so that it requires scaling.
diag = blas.NonUnit
switch uplo {
case blas.Upper:
for i := 0; i < n-1; i++ {
dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
}
case blas.Lower:
for i := 1; i < n; i++ {
dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
}
}
for i := 0; i < n; i++ {
a[i*lda+i] = math.Copysign(2, a[i*lda+i])
}
// Set the right hand side so that the largest value is bignum.
dlarnv(b, 2, rnd)
imax := bi.Idamax(n, b, 1)
bscal := bignum / math.Max(1, b[imax])
bi.Dscal(n, bscal, b, 1)
case 12:
// Make the first diagonal element in the solve small to cause
// immediate overflow when dividing by T[j,j]. The off-diagonal
// elements are small (cnorm[j] < 1).
diag = blas.NonUnit
tscal := 1 / math.Max(1, float64(n-1))
switch uplo {
case blas.Upper:
for i := 0; i < n; i++ {
dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
bi.Dscal(n-i-1, tscal, a[i*lda+i+1:], 1)
a[i*lda+i] = math.Copysign(1, a[i*lda+i])
}
a[(n-1)*lda+n-1] *= smlnum
case blas.Lower:
for i := 0; i < n; i++ {
dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
bi.Dscal(i, tscal, a[i*lda:], 1)
a[i*lda+i] = math.Copysign(1, a[i*lda+i])
}
a[0] *= smlnum
}
dlarnv(b, 2, rnd)
case 13:
// Make the first diagonal element in the solve small to cause
// immediate overflow when dividing by T[j,j]. The off-diagonal
// elements are O(1) (cnorm[j] > 1).
diag = blas.NonUnit
switch uplo {
case blas.Upper:
for i := 0; i < n; i++ {
dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
a[i*lda+i] = math.Copysign(1, a[i*lda+i])
}
a[(n-1)*lda+n-1] *= smlnum
case blas.Lower:
for i := 0; i < n; i++ {
dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
a[i*lda+i] = math.Copysign(1, a[i*lda+i])
}
a[0] *= smlnum
}
dlarnv(b, 2, rnd)
case 14:
// T is diagonal with small numbers on the diagonal to
// make the growth factor underflow, but a small right hand side
// chosen so that the solution does not overflow.
diag = blas.NonUnit
switch uplo {
case blas.Upper:
for i := 0; i < n; i++ {
for j := i + 1; j < n; j++ {
a[i*lda+j] = 0
}
if (n-1-i)&0x2 == 0 {
a[i*lda+i] = smlnum
} else {
a[i*lda+i] = 1
}
}
case blas.Lower:
for i := 0; i < n; i++ {
for j := 0; j < i; j++ {
a[i*lda+j] = 0
}
if i&0x2 == 0 {
a[i*lda+i] = smlnum
} else {
a[i*lda+i] = 1
}
}
}
// Set the right hand side alternately zero and small.
switch uplo {
case blas.Upper:
b[0] = 0
for i := n - 1; i > 0; i -= 2 {
b[i] = 0
b[i-1] = smlnum
}
case blas.Lower:
for i := 0; i < n-1; i += 2 {
b[i] = 0
b[i+1] = smlnum
}
b[n-1] = 0
}
case 15:
// Make the diagonal elements small to cause gradual overflow
// when dividing by T[j,j]. To control the amount of scaling
// needed, the matrix is bidiagonal.
diag = blas.NonUnit
texp := 1 / math.Max(1, float64(n-1))
tscal := math.Pow(smlnum, texp)
switch uplo {
case blas.Upper:
for i := 0; i < n; i++ {
a[i*lda+i] = tscal
if i < n-1 {
a[i*lda+i+1] = -1
}
for j := i + 2; j < n; j++ {
a[i*lda+j] = 0
}
}
case blas.Lower:
for i := 0; i < n; i++ {
for j := 0; j < i-1; j++ {
a[i*lda+j] = 0
}
if i > 0 {
a[i*lda+i-1] = -1
}
a[i*lda+i] = tscal
}
}
dlarnv(b, 2, rnd)
case 16:
// One zero diagonal element.
diag = blas.NonUnit
switch uplo {
case blas.Upper:
for i := 0; i < n; i++ {
dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
a[i*lda+i] = math.Copysign(2, a[i*lda+i])
}
case blas.Lower:
for i := 0; i < n; i++ {
dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
a[i*lda+i] = math.Copysign(2, a[i*lda+i])
}
}
iy := n / 2
a[iy*lda+iy] = 0
dlarnv(b, 2, rnd)
bi.Dscal(n, 2, b, 1)
case 17:
// Make the offdiagonal elements large to cause overflow when
// adding a column of T. In the non-transposed case, the matrix
// is constructed to cause overflow when adding a column in
// every other step.
diag = blas.NonUnit
tscal := (1 - ulp) / (dlamchS / ulp)
texp := 1.0
switch uplo {
case blas.Upper:
for i := 0; i < n; i++ {
for j := i; j < n; j++ {
a[i*lda+j] = 0
}
}
for j := n - 1; j >= 1; j -= 2 {
a[j] = -tscal / float64(n+1)
a[j*lda+j] = 1
b[j] = texp * (1 - ulp)
a[j-1] = -tscal / float64(n+1) / float64(n+2)
a[(j-1)*lda+j-1] = 1
b[j-1] = texp * float64(n*n+n-1)
texp *= 2
}
b[0] = float64(n+1) / float64(n+2) * tscal
case blas.Lower:
for i := 0; i < n; i++ {
for j := 0; j <= i; j++ {
a[i*lda+j] = 0
}
}
for j := 0; j < n-1; j += 2 {
a[(n-1)*lda+j] = -tscal / float64(n+1)
a[j*lda+j] = 1
b[j] = texp * (1 - ulp)
a[(n-1)*lda+j+1] = -tscal / float64(n+1) / float64(n+2)
a[(j+1)*lda+j+1] = 1
b[j+1] = texp * float64(n*n+n-1)
texp *= 2
}
b[n-1] = float64(n+1) / float64(n+2) * tscal
}
case 18:
// Generate a unit triangular matrix with elements between -1
// and 1, and make the right hand side large so that it requires
// scaling. The diagonal is set to NaN.
diag = blas.Unit
switch uplo {
case blas.Upper:
for i := 0; i < n; i++ {
a[i*lda+i] = math.NaN()
dlarnv(a[i*lda+i+1:i*lda+n], 2, rnd)
}
case blas.Lower:
for i := 0; i < n; i++ {
dlarnv(a[i*lda:i*lda+i], 2, rnd)
a[i*lda+i] = math.NaN()
}
}
// Set the right hand side so that the largest value is bignum.
dlarnv(b, 2, rnd)
iy := bi.Idamax(n, b, 1)
bnorm := math.Abs(b[iy])
bscal := bignum / math.Max(1, bnorm)
bi.Dscal(n, bscal, b, 1)
case 19:
// Generate a triangular matrix with elements between
// bignum/(n-1) and bignum so that at least one of the column
// norms will exceed bignum.
// Dlatrs cannot handle this case for (typically) n>5.
diag = blas.NonUnit
tleft := bignum / math.Max(1, float64(n-1))
tscal := bignum * (float64(n-1) / math.Max(1, float64(n)))
switch uplo {
case blas.Upper:
for i := 0; i < n; i++ {
dlarnv(a[i*lda+i:i*lda+n], 2, rnd)
for j := i; j < n; j++ {
aij := a[i*lda+j]
a[i*lda+j] = math.Copysign(tleft, aij) + tscal*aij
}
}
case blas.Lower:
for i := 0; i < n; i++ {
dlarnv(a[i*lda:i*lda+i+1], 2, rnd)
for j := 0; j <= i; j++ {
aij := a[i*lda+j]
a[i*lda+j] = math.Copysign(tleft, aij) + tscal*aij
}
}
}
dlarnv(b, 2, rnd)
bi.Dscal(n, 2, b, 1)
}
// Flip the matrix if the transpose will be used.
if trans == blas.Trans {
switch uplo {
case blas.Upper:
for j := 0; j < n/2; j++ {
bi.Dswap(n-2*j-1, a[j*lda+j:], 1, a[(j+1)*lda+n-j-1:], -lda)
}
case blas.Lower:
for j := 0; j < n/2; j++ {
bi.Dswap(n-2*j-1, a[j*lda+j:], lda, a[(n-j-1)*lda+j+1:], -1)
}
}
}
return diag
}
func checkMatrix(m, n int, a []float64, lda int) {
if m < 0 {
panic("testlapack: m < 0")
}
if n < 0 {
panic("testlapack: n < 0")
}
if lda < max(1, n) {
panic("testlapack: lda < max(1, n)")
}
if len(a) < (m-1)*lda+n {
panic("testlapack: insufficient matrix slice length")
}
}
// randomOrthogonal returns an n×n random orthogonal matrix.
func randomOrthogonal(n int, rnd *rand.Rand) blas64.General {
q := eye(n, n)
x := make([]float64, n)
v := make([]float64, n)
for j := 0; j < n-1; j++ {
// x represents the j-th column of a random matrix.
for i := 0; i < j; i++ {
x[i] = 0
}
for i := j; i < n; i++ {
x[i] = rnd.NormFloat64()
}
// Compute v that represents the elementary reflector that
// annihilates the subdiagonal elements of x.
reflector(v, x, j)
// Compute Q * H_j and store the result into Q.
applyReflector(q, q, v)
}
return q
}
// reflector generates a Householder reflector v that zeros out subdiagonal
// entries in the j-th column of a matrix.
func reflector(v, col []float64, j int) {
n := len(col)
if len(v) != n {
panic("slice length mismatch")
}
if j < 0 || n <= j {
panic("invalid column index")
}
for i := range v {
v[i] = 0
}
if j == n-1 {
return
}
s := floats.Norm(col[j:], 2)
if s == 0 {
return
}
v[j] = col[j] + math.Copysign(s, col[j])
copy(v[j+1:], col[j+1:])
s = floats.Norm(v[j:], 2)
floats.Scale(1/s, v[j:])
}
// applyReflector computes Q*H where H is a Householder matrix represented by
// the Householder reflector v.
func applyReflector(qh blas64.General, q blas64.General, v []float64) {
n := len(v)
if qh.Rows != n || qh.Cols != n {
panic("bad size of qh")
}
if q.Rows != n || q.Cols != n {
panic("bad size of q")
}
qv := make([]float64, n)
blas64.Gemv(blas.NoTrans, 1, q, blas64.Vector{Data: v, Inc: 1}, 0, blas64.Vector{Data: qv, Inc: 1})
for i := 0; i < n; i++ {
for j := 0; j < n; j++ {
qh.Data[i*qh.Stride+j] = q.Data[i*q.Stride+j]
}
}
for i := 0; i < n; i++ {
for j := 0; j < n; j++ {
qh.Data[i*qh.Stride+j] -= 2 * qv[i] * v[j]
}
}
var norm2 float64
for _, vi := range v {
norm2 += vi * vi
}
norm2inv := 1 / norm2
for i := 0; i < n; i++ {
for j := 0; j < n; j++ {
qh.Data[i*qh.Stride+j] *= norm2inv
}
}
}
| itch dist {
default:
panic("testlapack: invalid dist")
case 1:
for i := range dst {
dst[i] = rnd.Float64()
}
case 2:
for i := range dst {
dst[i] = 2*rnd.Float64() - 1
}
case 3:
for i := range dst {
dst[i] = rnd.NormFloat64()
}
}
}
// | identifier_body |
page.rs | use serde::Deserialize;
use yew::format::{Json, Nothing};
use yew::prelude::*;
use yew::services::fetch::{FetchService, FetchTask, Request, Response};
use yew::services::ConsoleService;
use yew::Properties;
use super::entry::{Entry, EntryProps};
use super::modal::{is_media, MediaType, Modal, ModalProps};
use crate::{App, AppAnchor, AppRoute, SERVER_URL};
use anyhow::{anyhow, Error};
#[derive(Deserialize, Clone, PartialEq, Debug)]
pub struct Dir {
title: String,
base_path: String,
read_only: bool,
files: Vec<EntryProps>,
folders: Vec<EntryProps>,
}
#[derive(Debug)]
pub enum PageMsg {
Page(Dir),
File,
Error(Error),
Modal(String),
ModalNext,
ModalPrevious,
}
#[derive(Properties, Clone, PartialEq)]
pub struct PageProps {
pub path: String,
pub page: Option<Dir>,
}
pub struct Page {
link: ComponentLink<Self>,
props: PageProps,
modal: ModalProps,
task: Option<FetchTask>,
loaded: Option<String>,
error: Option<Error>,
show_loading: bool,
}
impl Component for Page {
type Message = PageMsg;
type Properties = PageProps;
fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
Self {
link,
props,
modal: ModalProps::default(),
task: None,
loaded: None,
error: None,
show_loading: true,
}
}
fn | (&mut self, msg: Self::Message) -> ShouldRender {
match msg {
PageMsg::Page(page) => {
self.props.page = Some(page);
self.error = None;
self.show_loading = false;
true
}
// TODO: This means non-media (non-modal display) files
// end up loading twice. Once on request,
// and then again on popup. Parent directy also reloads.
// Not sure the best solution at the moment.
// (Start with HEAD request instead of GET?)
PageMsg::File => {
let url = format!("{}{}", *SERVER_URL, &self.props.path);
web_sys::window()
.unwrap()
.open_with_url_and_target(&url, "_new_file")
.unwrap();
self.show_loading = false;
self.error = None;
// Show containing dir by navigating to parent directory
if let Some(index) = &self.props.path.rfind('/') {
let path = &self.props.path[0..index + 1];
App::replace_route(path.to_string());
}
false
}
PageMsg::Error(error) => {
ConsoleService::error(format!("Invalid response: {:?}", error).as_str());
self.error = Some(error);
self.show_loading = false;
self.modal = ModalProps::default();
true
}
PageMsg::Modal(src) => {
ConsoleService::info(format!("Loading modal for: {:?}", src).as_str());
self.modal.src = src.to_string();
self.modal.media = MediaType::from_path(src.as_str());
self.show_loading = false;
true
}
PageMsg::ModalNext => {
let src = format!("/{}", self.next_file());
App::change_route(src);
true
}
PageMsg::ModalPrevious => {
let src = format!("/{}", self.prev_file());
App::change_route(src);
true
}
}
}
fn change(&mut self, props: Self::Properties) -> ShouldRender {
if self.props.path != props.path {
ConsoleService::info(format!("Page Changed: {:?}", props.path).as_str());
if is_media(props.path.as_str()) {
// Trigger modal
self.link
.callback(PageMsg::Modal)
.emit(props.path.to_owned());
self.show_loading = true;
} else {
// Only re-fetch page if not already loaded
if self.loaded.is_none() || self.loaded.as_ref().unwrap() != &props.path {
self.loaded = Some(props.path.to_owned());
self.task = self.fetch_page(props.path.as_str());
self.show_loading = true;
} else {
self.show_loading = false;
}
// Reset Modal
self.modal = ModalProps::default();
}
self.props.path = props.path;
true
} else {
false
}
}
fn rendered(&mut self, first_render: bool) {
// On page init, the path may be a dir or a file
// display modal if it's a file + load directory the file is in
if first_render {
let fetch_path: &str;
if is_media(&self.props.path.as_str()) {
// Trigger modal
self.link
.callback(PageMsg::Modal)
.emit(self.props.path.to_owned());
// Get dir of file
let index = self.props.path.rfind('/').unwrap();
fetch_path = &self.props.path[0..index + 1];
} else {
fetch_path = &self.props.path;
}
self.loaded = Some(fetch_path.to_string());
self.task = self.fetch_page(fetch_path);
}
if let Some(data) = &self.props.page {
if !data.title.is_empty() {
App::set_title(data.title.to_string());
}
}
}
fn view(&self) -> Html {
let mut title = "";
let mut base_path = "";
let content = if let Some(data) = &self.props.page {
title = data.title.as_str();
base_path = data.base_path.as_str();
let folders = data.folders.iter().map(|e| {
html! {
<Entry
name={ e.name.to_owned() }
path={ e.path.to_owned() }
size={ e.size.to_owned() }
date={ e.date.to_owned() }
date_string={ e.date_string.to_owned() }
thumb={ e.thumb.to_owned() }
ext={ e.ext.to_owned() }
etype="folder"
/>
}
});
let files = data.files.iter().map(|e| {
html! {
<Entry
name={ e.name.to_owned() }
path={ e.path.to_owned() }
size={ e.size.to_owned() }
date={ e.date.to_owned() }
date_string={ e.date_string.to_owned() }
thumb={ e.thumb.to_owned() }
ext={ e.ext.to_owned() }
etype="file"
/>
}
});
html! {
<div class="row gx-5">
{ for folders }
{ for files }
</div>
}
} else {
html! {}
};
// Convert title into links for each subdir
let combined = if title == String::from("") {
base_path.to_string()
} else {
format!("{}{}/", base_path, title)
};
let split = combined.split_inclusive('/').enumerate();
let clone = split.clone();
let html_title = split.map(|s| {
// Note: Not happy with a loop of "clone" calls
// but the strings have to be duplicated anyway.
// Current solution adds one extra "clone" than is ideally necessary
let link = clone
.clone()
.filter(|&(i, _)| i <= s.0)
.map(|(_, e)| e)
.collect::<String>();
let text = &s.1;
html! {
<AppAnchor route={ AppRoute::Entry(link) }>{ text }</AppAnchor>
}
});
let loading = if self.show_loading {
html! {<span class="loading"></span>}
} else {
html! {}
};
let error = if self.error.is_some() {
html! {<h2 class="text-danger">{ "Error: " }{ self.error.as_ref().unwrap() }</h2>}
} else {
html! {}
};
html! {
<>
<Modal src={ self.modal.src.to_owned() } media={ self.modal.media.to_owned() } />
<h1 id="title">
{ for html_title }
{ loading }
</h1>
{ error }
{ content }
</>
}
}
}
impl Page {
fn fetch_page(&self, path: &str) -> Option<FetchTask> {
// TODO: This results in double "//" in path.
// Not a major issue, but should be accounted for
let url = format!("{}{}", *SERVER_URL, path);
let request = Request::get(url.as_str())
.body(Nothing)
.expect("Could not load from API");
let callback = self
.link
.callback(|response: Response<Json<Result<Dir, Error>>>| {
let status = response.status();
if !status.is_success() {
let err = anyhow!(
"Error: {} ({})",
&status.canonical_reason().unwrap(),
&status.as_str()
);
return PageMsg::Error(err);
}
let content = response.headers().get("content-type");
if content.is_none() {
return PageMsg::Error(anyhow!("Invalid Content Type"));
} else if content.unwrap() != &"application/json" {
return PageMsg::File;
}
let Json(data) = response.into_body();
match data {
Ok(dir) => PageMsg::Page(dir),
Err(err) => PageMsg::Error(err),
}
});
let task = FetchService::fetch(request, callback).expect("Could not load page");
Some(task)
}
// Determine the next file in modal sequence
fn next_file(&self) -> String {
let findex = &self.modal.src.rfind('/').expect("complete path");
let srcname = &self.modal.src[*findex + 1..];
let page = &self.props.page.as_ref().unwrap();
let files = &page.files;
let current = files.iter().position(|e| e.name == srcname);
if let Some(index) = current {
if index + 1 >= files.len() {
files.first().unwrap().path.to_owned()
} else {
files.get(index + 1).unwrap().path.to_owned()
}
} else {
"".to_string()
}
}
// Determine the prev file in modal sequence
fn prev_file(&self) -> String {
let findex = &self.modal.src.rfind('/').expect("complete path");
let srcname = &self.modal.src[*findex + 1..];
let page = &self.props.page.as_ref().unwrap();
let files = &page.files;
let current = files.iter().position(|e| e.name == srcname);
if let Some(index) = current {
if (index as i8) - 1 < 0 {
files.last().unwrap().path.to_owned()
} else {
files.get(index - 1).unwrap().path.to_owned()
}
} else {
"".to_string()
}
}
}
| update | identifier_name |
page.rs | use serde::Deserialize;
use yew::format::{Json, Nothing};
use yew::prelude::*;
use yew::services::fetch::{FetchService, FetchTask, Request, Response};
use yew::services::ConsoleService;
use yew::Properties;
use super::entry::{Entry, EntryProps};
use super::modal::{is_media, MediaType, Modal, ModalProps};
use crate::{App, AppAnchor, AppRoute, SERVER_URL};
use anyhow::{anyhow, Error};
#[derive(Deserialize, Clone, PartialEq, Debug)]
pub struct Dir {
title: String,
base_path: String,
read_only: bool,
files: Vec<EntryProps>,
folders: Vec<EntryProps>,
}
#[derive(Debug)]
pub enum PageMsg {
Page(Dir),
File,
Error(Error),
Modal(String),
ModalNext,
ModalPrevious,
}
#[derive(Properties, Clone, PartialEq)]
pub struct PageProps {
pub path: String,
pub page: Option<Dir>,
}
pub struct Page {
link: ComponentLink<Self>,
props: PageProps,
modal: ModalProps,
task: Option<FetchTask>,
loaded: Option<String>,
error: Option<Error>,
show_loading: bool,
}
impl Component for Page {
type Message = PageMsg;
type Properties = PageProps;
fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
Self {
link,
props,
modal: ModalProps::default(),
task: None,
loaded: None,
error: None,
show_loading: true,
}
}
fn update(&mut self, msg: Self::Message) -> ShouldRender {
match msg {
PageMsg::Page(page) => {
self.props.page = Some(page);
self.error = None;
self.show_loading = false;
true
}
// TODO: This means non-media (non-modal display) files
// end up loading twice. Once on request,
// and then again on popup. Parent directy also reloads.
// Not sure the best solution at the moment.
// (Start with HEAD request instead of GET?)
PageMsg::File => {
let url = format!("{}{}", *SERVER_URL, &self.props.path);
web_sys::window()
.unwrap()
.open_with_url_and_target(&url, "_new_file")
.unwrap();
self.show_loading = false;
self.error = None;
// Show containing dir by navigating to parent directory
if let Some(index) = &self.props.path.rfind('/') {
let path = &self.props.path[0..index + 1];
App::replace_route(path.to_string());
}
false
}
PageMsg::Error(error) => {
ConsoleService::error(format!("Invalid response: {:?}", error).as_str());
self.error = Some(error);
self.show_loading = false;
self.modal = ModalProps::default();
true
}
PageMsg::Modal(src) => {
ConsoleService::info(format!("Loading modal for: {:?}", src).as_str());
self.modal.src = src.to_string();
self.modal.media = MediaType::from_path(src.as_str());
self.show_loading = false;
true
}
PageMsg::ModalNext => {
let src = format!("/{}", self.next_file());
App::change_route(src);
true
}
PageMsg::ModalPrevious => {
let src = format!("/{}", self.prev_file());
App::change_route(src);
true
}
}
}
fn change(&mut self, props: Self::Properties) -> ShouldRender {
if self.props.path != props.path {
ConsoleService::info(format!("Page Changed: {:?}", props.path).as_str());
if is_media(props.path.as_str()) {
// Trigger modal
self.link
.callback(PageMsg::Modal)
.emit(props.path.to_owned());
self.show_loading = true;
} else {
// Only re-fetch page if not already loaded
if self.loaded.is_none() || self.loaded.as_ref().unwrap() != &props.path {
self.loaded = Some(props.path.to_owned());
self.task = self.fetch_page(props.path.as_str());
self.show_loading = true;
} else {
self.show_loading = false;
}
// Reset Modal
self.modal = ModalProps::default();
}
self.props.path = props.path;
true
} else {
false
}
}
fn rendered(&mut self, first_render: bool) {
// On page init, the path may be a dir or a file
// display modal if it's a file + load directory the file is in
if first_render {
let fetch_path: &str;
if is_media(&self.props.path.as_str()) {
// Trigger modal
self.link
.callback(PageMsg::Modal)
.emit(self.props.path.to_owned());
// Get dir of file
let index = self.props.path.rfind('/').unwrap();
fetch_path = &self.props.path[0..index + 1];
} else {
fetch_path = &self.props.path;
}
self.loaded = Some(fetch_path.to_string());
self.task = self.fetch_page(fetch_path);
}
if let Some(data) = &self.props.page {
if !data.title.is_empty() {
App::set_title(data.title.to_string());
}
}
}
fn view(&self) -> Html {
let mut title = "";
let mut base_path = "";
let content = if let Some(data) = &self.props.page {
title = data.title.as_str();
base_path = data.base_path.as_str();
let folders = data.folders.iter().map(|e| {
html! {
<Entry
name={ e.name.to_owned() }
path={ e.path.to_owned() }
size={ e.size.to_owned() }
date={ e.date.to_owned() }
date_string={ e.date_string.to_owned() }
thumb={ e.thumb.to_owned() }
ext={ e.ext.to_owned() }
etype="folder"
/>
}
});
let files = data.files.iter().map(|e| {
html! {
<Entry
name={ e.name.to_owned() }
path={ e.path.to_owned() }
size={ e.size.to_owned() }
date={ e.date.to_owned() }
date_string={ e.date_string.to_owned() }
thumb={ e.thumb.to_owned() }
ext={ e.ext.to_owned() }
etype="file"
/>
}
});
html! {
<div class="row gx-5">
{ for folders }
{ for files }
</div>
}
} else {
html! {}
};
// Convert title into links for each subdir
let combined = if title == String::from("") {
base_path.to_string()
} else {
format!("{}{}/", base_path, title)
};
let split = combined.split_inclusive('/').enumerate();
let clone = split.clone();
let html_title = split.map(|s| {
// Note: Not happy with a loop of "clone" calls
// but the strings have to be duplicated anyway.
// Current solution adds one extra "clone" than is ideally necessary
let link = clone
.clone()
.filter(|&(i, _)| i <= s.0)
.map(|(_, e)| e)
.collect::<String>();
let text = &s.1;
html! {
<AppAnchor route={ AppRoute::Entry(link) }>{ text }</AppAnchor>
} | } else {
html! {}
};
let error = if self.error.is_some() {
html! {<h2 class="text-danger">{ "Error: " }{ self.error.as_ref().unwrap() }</h2>}
} else {
html! {}
};
html! {
<>
<Modal src={ self.modal.src.to_owned() } media={ self.modal.media.to_owned() } />
<h1 id="title">
{ for html_title }
{ loading }
</h1>
{ error }
{ content }
</>
}
}
}
impl Page {
fn fetch_page(&self, path: &str) -> Option<FetchTask> {
// TODO: This results in double "//" in path.
// Not a major issue, but should be accounted for
let url = format!("{}{}", *SERVER_URL, path);
let request = Request::get(url.as_str())
.body(Nothing)
.expect("Could not load from API");
let callback = self
.link
.callback(|response: Response<Json<Result<Dir, Error>>>| {
let status = response.status();
if !status.is_success() {
let err = anyhow!(
"Error: {} ({})",
&status.canonical_reason().unwrap(),
&status.as_str()
);
return PageMsg::Error(err);
}
let content = response.headers().get("content-type");
if content.is_none() {
return PageMsg::Error(anyhow!("Invalid Content Type"));
} else if content.unwrap() != &"application/json" {
return PageMsg::File;
}
let Json(data) = response.into_body();
match data {
Ok(dir) => PageMsg::Page(dir),
Err(err) => PageMsg::Error(err),
}
});
let task = FetchService::fetch(request, callback).expect("Could not load page");
Some(task)
}
// Determine the next file in modal sequence
fn next_file(&self) -> String {
let findex = &self.modal.src.rfind('/').expect("complete path");
let srcname = &self.modal.src[*findex + 1..];
let page = &self.props.page.as_ref().unwrap();
let files = &page.files;
let current = files.iter().position(|e| e.name == srcname);
if let Some(index) = current {
if index + 1 >= files.len() {
files.first().unwrap().path.to_owned()
} else {
files.get(index + 1).unwrap().path.to_owned()
}
} else {
"".to_string()
}
}
// Determine the prev file in modal sequence
fn prev_file(&self) -> String {
let findex = &self.modal.src.rfind('/').expect("complete path");
let srcname = &self.modal.src[*findex + 1..];
let page = &self.props.page.as_ref().unwrap();
let files = &page.files;
let current = files.iter().position(|e| e.name == srcname);
if let Some(index) = current {
if (index as i8) - 1 < 0 {
files.last().unwrap().path.to_owned()
} else {
files.get(index - 1).unwrap().path.to_owned()
}
} else {
"".to_string()
}
}
} | });
let loading = if self.show_loading {
html! {<span class="loading"></span>} | random_line_split |
page.rs | use serde::Deserialize;
use yew::format::{Json, Nothing};
use yew::prelude::*;
use yew::services::fetch::{FetchService, FetchTask, Request, Response};
use yew::services::ConsoleService;
use yew::Properties;
use super::entry::{Entry, EntryProps};
use super::modal::{is_media, MediaType, Modal, ModalProps};
use crate::{App, AppAnchor, AppRoute, SERVER_URL};
use anyhow::{anyhow, Error};
#[derive(Deserialize, Clone, PartialEq, Debug)]
pub struct Dir {
title: String,
base_path: String,
read_only: bool,
files: Vec<EntryProps>,
folders: Vec<EntryProps>,
}
#[derive(Debug)]
pub enum PageMsg {
Page(Dir),
File,
Error(Error),
Modal(String),
ModalNext,
ModalPrevious,
}
#[derive(Properties, Clone, PartialEq)]
pub struct PageProps {
pub path: String,
pub page: Option<Dir>,
}
pub struct Page {
link: ComponentLink<Self>,
props: PageProps,
modal: ModalProps,
task: Option<FetchTask>,
loaded: Option<String>,
error: Option<Error>,
show_loading: bool,
}
impl Component for Page {
type Message = PageMsg;
type Properties = PageProps;
fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
Self {
link,
props,
modal: ModalProps::default(),
task: None,
loaded: None,
error: None,
show_loading: true,
}
}
fn update(&mut self, msg: Self::Message) -> ShouldRender {
match msg {
PageMsg::Page(page) => {
self.props.page = Some(page);
self.error = None;
self.show_loading = false;
true
}
// TODO: This means non-media (non-modal display) files
// end up loading twice. Once on request,
// and then again on popup. Parent directy also reloads.
// Not sure the best solution at the moment.
// (Start with HEAD request instead of GET?)
PageMsg::File => {
let url = format!("{}{}", *SERVER_URL, &self.props.path);
web_sys::window()
.unwrap()
.open_with_url_and_target(&url, "_new_file")
.unwrap();
self.show_loading = false;
self.error = None;
// Show containing dir by navigating to parent directory
if let Some(index) = &self.props.path.rfind('/') {
let path = &self.props.path[0..index + 1];
App::replace_route(path.to_string());
}
false
}
PageMsg::Error(error) => {
ConsoleService::error(format!("Invalid response: {:?}", error).as_str());
self.error = Some(error);
self.show_loading = false;
self.modal = ModalProps::default();
true
}
PageMsg::Modal(src) => |
PageMsg::ModalNext => {
let src = format!("/{}", self.next_file());
App::change_route(src);
true
}
PageMsg::ModalPrevious => {
let src = format!("/{}", self.prev_file());
App::change_route(src);
true
}
}
}
fn change(&mut self, props: Self::Properties) -> ShouldRender {
if self.props.path != props.path {
ConsoleService::info(format!("Page Changed: {:?}", props.path).as_str());
if is_media(props.path.as_str()) {
// Trigger modal
self.link
.callback(PageMsg::Modal)
.emit(props.path.to_owned());
self.show_loading = true;
} else {
// Only re-fetch page if not already loaded
if self.loaded.is_none() || self.loaded.as_ref().unwrap() != &props.path {
self.loaded = Some(props.path.to_owned());
self.task = self.fetch_page(props.path.as_str());
self.show_loading = true;
} else {
self.show_loading = false;
}
// Reset Modal
self.modal = ModalProps::default();
}
self.props.path = props.path;
true
} else {
false
}
}
fn rendered(&mut self, first_render: bool) {
// On page init, the path may be a dir or a file
// display modal if it's a file + load directory the file is in
if first_render {
let fetch_path: &str;
if is_media(&self.props.path.as_str()) {
// Trigger modal
self.link
.callback(PageMsg::Modal)
.emit(self.props.path.to_owned());
// Get dir of file
let index = self.props.path.rfind('/').unwrap();
fetch_path = &self.props.path[0..index + 1];
} else {
fetch_path = &self.props.path;
}
self.loaded = Some(fetch_path.to_string());
self.task = self.fetch_page(fetch_path);
}
if let Some(data) = &self.props.page {
if !data.title.is_empty() {
App::set_title(data.title.to_string());
}
}
}
fn view(&self) -> Html {
let mut title = "";
let mut base_path = "";
let content = if let Some(data) = &self.props.page {
title = data.title.as_str();
base_path = data.base_path.as_str();
let folders = data.folders.iter().map(|e| {
html! {
<Entry
name={ e.name.to_owned() }
path={ e.path.to_owned() }
size={ e.size.to_owned() }
date={ e.date.to_owned() }
date_string={ e.date_string.to_owned() }
thumb={ e.thumb.to_owned() }
ext={ e.ext.to_owned() }
etype="folder"
/>
}
});
let files = data.files.iter().map(|e| {
html! {
<Entry
name={ e.name.to_owned() }
path={ e.path.to_owned() }
size={ e.size.to_owned() }
date={ e.date.to_owned() }
date_string={ e.date_string.to_owned() }
thumb={ e.thumb.to_owned() }
ext={ e.ext.to_owned() }
etype="file"
/>
}
});
html! {
<div class="row gx-5">
{ for folders }
{ for files }
</div>
}
} else {
html! {}
};
// Convert title into links for each subdir
let combined = if title == String::from("") {
base_path.to_string()
} else {
format!("{}{}/", base_path, title)
};
let split = combined.split_inclusive('/').enumerate();
let clone = split.clone();
let html_title = split.map(|s| {
// Note: Not happy with a loop of "clone" calls
// but the strings have to be duplicated anyway.
// Current solution adds one extra "clone" than is ideally necessary
let link = clone
.clone()
.filter(|&(i, _)| i <= s.0)
.map(|(_, e)| e)
.collect::<String>();
let text = &s.1;
html! {
<AppAnchor route={ AppRoute::Entry(link) }>{ text }</AppAnchor>
}
});
let loading = if self.show_loading {
html! {<span class="loading"></span>}
} else {
html! {}
};
let error = if self.error.is_some() {
html! {<h2 class="text-danger">{ "Error: " }{ self.error.as_ref().unwrap() }</h2>}
} else {
html! {}
};
html! {
<>
<Modal src={ self.modal.src.to_owned() } media={ self.modal.media.to_owned() } />
<h1 id="title">
{ for html_title }
{ loading }
</h1>
{ error }
{ content }
</>
}
}
}
impl Page {
fn fetch_page(&self, path: &str) -> Option<FetchTask> {
// TODO: This results in double "//" in path.
// Not a major issue, but should be accounted for
let url = format!("{}{}", *SERVER_URL, path);
let request = Request::get(url.as_str())
.body(Nothing)
.expect("Could not load from API");
let callback = self
.link
.callback(|response: Response<Json<Result<Dir, Error>>>| {
let status = response.status();
if !status.is_success() {
let err = anyhow!(
"Error: {} ({})",
&status.canonical_reason().unwrap(),
&status.as_str()
);
return PageMsg::Error(err);
}
let content = response.headers().get("content-type");
if content.is_none() {
return PageMsg::Error(anyhow!("Invalid Content Type"));
} else if content.unwrap() != &"application/json" {
return PageMsg::File;
}
let Json(data) = response.into_body();
match data {
Ok(dir) => PageMsg::Page(dir),
Err(err) => PageMsg::Error(err),
}
});
let task = FetchService::fetch(request, callback).expect("Could not load page");
Some(task)
}
// Determine the next file in modal sequence
fn next_file(&self) -> String {
let findex = &self.modal.src.rfind('/').expect("complete path");
let srcname = &self.modal.src[*findex + 1..];
let page = &self.props.page.as_ref().unwrap();
let files = &page.files;
let current = files.iter().position(|e| e.name == srcname);
if let Some(index) = current {
if index + 1 >= files.len() {
files.first().unwrap().path.to_owned()
} else {
files.get(index + 1).unwrap().path.to_owned()
}
} else {
"".to_string()
}
}
// Determine the prev file in modal sequence
fn prev_file(&self) -> String {
let findex = &self.modal.src.rfind('/').expect("complete path");
let srcname = &self.modal.src[*findex + 1..];
let page = &self.props.page.as_ref().unwrap();
let files = &page.files;
let current = files.iter().position(|e| e.name == srcname);
if let Some(index) = current {
if (index as i8) - 1 < 0 {
files.last().unwrap().path.to_owned()
} else {
files.get(index - 1).unwrap().path.to_owned()
}
} else {
"".to_string()
}
}
}
| {
ConsoleService::info(format!("Loading modal for: {:?}", src).as_str());
self.modal.src = src.to_string();
self.modal.media = MediaType::from_path(src.as_str());
self.show_loading = false;
true
} | conditional_block |
lib.rs | use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::collections::UnorderedMap;
use near_sdk::serde::{Deserialize, Serialize};
use near_contract_standards::non_fungible_token::metadata::{
NFTContractMetadata, NonFungibleTokenMetadataProvider, TokenMetadata, NFT_METADATA_SPEC,
};
use near_contract_standards::non_fungible_token::{Token, TokenId};
use near_contract_standards::non_fungible_token::NonFungibleToken;
use near_sdk::collections::LazyOption;
use near_sdk::json_types::ValidAccountId;
use near_sdk::{
setup_alloc, env, near_bindgen, AccountId, BorshStorageKey, Promise, PromiseOrValue,
};
#[derive(BorshSerialize, BorshStorageKey)]
enum StorageKey {
NonFungibleToken,
Metadata,
TokenMetadata,
Enumeration,
Approval,
}
setup_alloc!();
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct UserInfo {
pub name: String,
pub dob: String,
pub national_id: String,
pub from: Issuer,
pub owner: ValidAccountId
}
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct TokenSerialize {
pub token_id: String,
pub owner_id: String,
pub metadata: TokenMetadata,
pub tx: String,
}
#[derive(BorshDeserialize, BorshSerialize, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct CertInfo {
pub user_info: UserInfo,
pub is_first_approved: bool,
}
#[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize)]
pub struct SmartCertificateContract{
owner: AccountId, //Owners of this contract, the only person can add more issuers
issuers: UnorderedMap<AccountId, Issuer>, //List of issuers, only issuers in this list can create a cert
need_user_approved: UnorderedMap<String, CertInfo>,
ready_deploy_nft: UnorderedMap<String, CertInfo>,
//NFT Define
nft_cert: UnorderedMap<String, TokenSerialize>,
nft_token: NonFungibleToken,
metadata: LazyOption<NFTContractMetadata>,
}
// #[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct Issuer {
pub name: String,
pub issuer_id: AccountId
}
impl Default for SmartCertificateContract {
fn default() -> Self |
}
const DATA_IMAGE_SVG_NEAR_ICON: &str = "data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 288 288'%3E%3Cg id='l' data-name='l'%3E%3Cpath d='M187.58,79.81l-30.1,44.69a3.2,3.2,0,0,0,4.75,4.2L191.86,103a1.2,1.2,0,0,1,2,.91v80.46a1.2,1.2,0,0,1-2.12.77L102.18,77.93A15.35,15.35,0,0,0,90.47,72.5H87.34A15.34,15.34,0,0,0,72,87.84V201.16A15.34,15.34,0,0,0,87.34,216.5h0a15.35,15.35,0,0,0,13.08-7.31l30.1-44.69a3.2,3.2,0,0,0-4.75-4.2L96.14,186a1.2,1.2,0,0,1-2-.91V104.61a1.2,1.2,0,0,1,2.12-.77l89.55,107.23a15.35,15.35,0,0,0,11.71,5.43h3.13A15.34,15.34,0,0,0,216,201.16V87.84A15.34,15.34,0,0,0,200.66,72.5h0A15.35,15.35,0,0,0,187.58,79.81Z'/%3E%3C/g%3E%3C/svg%3E";
#[near_bindgen]
impl SmartCertificateContract {
#[init]
pub fn new(nft_owner: ValidAccountId) -> Self {
assert!(!env::state_exists(), "The contract is already initialized");
assert!(
env::is_valid_account_id(env::predecessor_account_id().as_bytes()),
"The NEAR Foundation account ID is invalid"
);
let metadata = NFTContractMetadata {
spec: NFT_METADATA_SPEC.to_string(),
name: "Example NEAR non-fungible token".to_string(),
symbol: "EXAMPLE".to_string(),
icon: Some(DATA_IMAGE_SVG_NEAR_ICON.to_string()),
base_uri: None,
reference: None,
reference_hash: None,
};
SmartCertificateContract {
owner: env::predecessor_account_id(),
issuers: UnorderedMap::new(b"i".to_vec()),
need_user_approved: UnorderedMap::new(b"n".to_vec()),
ready_deploy_nft: UnorderedMap::new(b"r".to_vec()),
nft_cert: UnorderedMap::new(b"nft".to_vec()),
nft_token: NonFungibleToken::new(
StorageKey::NonFungibleToken,
nft_owner,
Some(StorageKey::TokenMetadata),
Some(StorageKey::Enumeration),
Some(StorageKey::Approval),
),
metadata: LazyOption::new(StorageKey::Metadata, Some(&metadata)),
}
}
pub fn add_issuer(&mut self, issuer_account: AccountId, name: String) -> bool {
assert!(
env::is_valid_account_id(issuer_account.as_bytes()),
"The given account ID is invalid"
);
self.assert_called_by_foundation();
if !self.issuers.get(&issuer_account).is_some() {
let new_issuer = Issuer {
name: name,
issuer_id: issuer_account.clone(),
};
self.issuers.insert(&issuer_account, &new_issuer);
return true;
}
return false;
}
pub fn create_cert(&mut self, user_account_id: ValidAccountId, name: String, dob: String, national_id: String) {
self.assert_called_by_issuers();
let issuer = self.issuers.get(&env::predecessor_account_id()).clone().unwrap();
let user = UserInfo {
name: name,
dob: dob,
national_id: national_id,
from: issuer.clone(),
owner: user_account_id.clone()
};
let cert_info = CertInfo {
user_info: user,
is_first_approved: false
};
let id = self.generate_cert_key(user_account_id.clone(), env::predecessor_account_id());
self.need_user_approved.insert(&id, &cert_info);
}
pub fn user_approved(&mut self, id: String) {
let cert = self.need_user_approved.get(&id).unwrap();
let new_cert = CertInfo {
user_info: cert.user_info.clone(),
is_first_approved: true
};
env::log(
format!(
"new cert @{}",
new_cert.user_info.owner
).as_bytes()
);
self.need_user_approved.remove(&id);
self.ready_deploy_nft.insert(&id, &new_cert);
}
#[payable]
pub fn nft_mint(
&mut self,
id: String,
) -> Token {
self.assert_called_by_foundation();
let cert = self.ready_deploy_nft.get(&id).unwrap();
let owner = cert.user_info.clone().owner;
let token = self.nft_token.mint((self.nft_cert.len() + 1).to_string(), owner, Some(self.create_meta_data(cert)));
let token_serialize = TokenSerialize {
token_id: token.token_id.clone(),
owner_id: token.owner_id.clone(),
metadata: token.metadata.clone().unwrap(),
tx: "".to_string()
};
self.nft_cert.insert(&id.clone(), &token_serialize);
return token;
}
pub fn finalize(&mut self, id: String, txid: String) {
let mut token = self.nft_cert.get(&id.clone()).unwrap();
token.tx = txid.clone();
self.ready_deploy_nft.remove(&id.clone());
self.nft_cert.remove(&id.clone());
self.nft_cert.insert(&txid.clone(), &token);
}
/*****************/
/* View Methods */
/*****************/
pub fn get_cert_info(&self, txid: String) -> TokenMetadata {
let cert = self.nft_cert.get(&txid.clone()).unwrap();
return cert.metadata;
}
pub fn get_issuers(&self) -> Vec<(AccountId, Issuer)> {
return self.issuers.to_vec();
}
pub fn get_certs(&self) -> Vec<(String, TokenSerialize)> {
return self
.nft_cert
.iter()
.collect();
}
pub fn get_un_approved_cert(&self, owner_id: String) -> Vec<(String, CertInfo)> {
return self.need_user_approved
.iter()
.filter(|(_k, v)| String::from(v.user_info.owner.clone()) == owner_id)
.collect();
}
pub fn get_ready_deploy_cert(&self) -> Vec<(String, CertInfo)> {
return self.ready_deploy_nft
.iter()
.collect();
}
/************/
/* Utils */
/************/
fn generate_cert_key(&self, user: ValidAccountId, issuer: AccountId) -> String {
return [String::from(user), issuer].join("_");
}
fn create_meta_data(&self, cert: CertInfo) -> TokenMetadata {
let description = cert.user_info.name + &String::from("'s Certificate issued by ") + &cert.user_info.from.name;
return TokenMetadata {
title: Some("Certificate".into()),
description: Some(description.into()),
media: None,
media_hash: None,
copies: Some(1u64),
issued_at: None,
expires_at: None,
starts_at: None,
updated_at: None,
extra: None,
reference: None,
reference_hash: None,
};
}
/************/
/* Internal */
/************/
fn assert_called_by_foundation(&self) {
assert_eq!(
&env::predecessor_account_id(),
&self.owner,
"Can only be called by NEAR Foundation"
);
}
fn assert_called_by_issuers(&self) {
assert!(
self.issuers.get(&env::predecessor_account_id()).is_some(),
"Only call by issuers"
);
}
}
near_contract_standards::impl_non_fungible_token_core!(SmartCertificateContract, nft_token);
near_contract_standards::impl_non_fungible_token_approval!(SmartCertificateContract, nft_token);
near_contract_standards::impl_non_fungible_token_enumeration!(SmartCertificateContract, nft_token);
#[near_bindgen]
impl NonFungibleTokenMetadataProvider for SmartCertificateContract {
fn nft_metadata(&self) -> NFTContractMetadata {
self.metadata.get().unwrap()
}
}
| {
env::panic(b"SmartCertificate contract should be initialized before usage")
} | identifier_body |
lib.rs | use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::collections::UnorderedMap;
use near_sdk::serde::{Deserialize, Serialize};
use near_contract_standards::non_fungible_token::metadata::{
NFTContractMetadata, NonFungibleTokenMetadataProvider, TokenMetadata, NFT_METADATA_SPEC,
};
use near_contract_standards::non_fungible_token::{Token, TokenId};
use near_contract_standards::non_fungible_token::NonFungibleToken;
use near_sdk::collections::LazyOption;
use near_sdk::json_types::ValidAccountId;
use near_sdk::{
setup_alloc, env, near_bindgen, AccountId, BorshStorageKey, Promise, PromiseOrValue,
};
#[derive(BorshSerialize, BorshStorageKey)]
enum StorageKey {
NonFungibleToken,
Metadata,
TokenMetadata,
Enumeration,
Approval,
}
setup_alloc!();
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct UserInfo {
pub name: String,
pub dob: String,
pub national_id: String,
pub from: Issuer,
pub owner: ValidAccountId
}
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct TokenSerialize {
pub token_id: String,
pub owner_id: String,
pub metadata: TokenMetadata,
pub tx: String,
}
#[derive(BorshDeserialize, BorshSerialize, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct CertInfo {
pub user_info: UserInfo,
pub is_first_approved: bool,
}
#[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize)]
pub struct SmartCertificateContract{
owner: AccountId, //Owners of this contract, the only person can add more issuers
issuers: UnorderedMap<AccountId, Issuer>, //List of issuers, only issuers in this list can create a cert
need_user_approved: UnorderedMap<String, CertInfo>,
ready_deploy_nft: UnorderedMap<String, CertInfo>,
//NFT Define
nft_cert: UnorderedMap<String, TokenSerialize>,
nft_token: NonFungibleToken,
metadata: LazyOption<NFTContractMetadata>,
}
// #[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct Issuer {
pub name: String,
pub issuer_id: AccountId
}
impl Default for SmartCertificateContract {
fn default() -> Self {
env::panic(b"SmartCertificate contract should be initialized before usage")
}
}
const DATA_IMAGE_SVG_NEAR_ICON: &str = "data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 288 288'%3E%3Cg id='l' data-name='l'%3E%3Cpath d='M187.58,79.81l-30.1,44.69a3.2,3.2,0,0,0,4.75,4.2L191.86,103a1.2,1.2,0,0,1,2,.91v80.46a1.2,1.2,0,0,1-2.12.77L102.18,77.93A15.35,15.35,0,0,0,90.47,72.5H87.34A15.34,15.34,0,0,0,72,87.84V201.16A15.34,15.34,0,0,0,87.34,216.5h0a15.35,15.35,0,0,0,13.08-7.31l30.1-44.69a3.2,3.2,0,0,0-4.75-4.2L96.14,186a1.2,1.2,0,0,1-2-.91V104.61a1.2,1.2,0,0,1,2.12-.77l89.55,107.23a15.35,15.35,0,0,0,11.71,5.43h3.13A15.34,15.34,0,0,0,216,201.16V87.84A15.34,15.34,0,0,0,200.66,72.5h0A15.35,15.35,0,0,0,187.58,79.81Z'/%3E%3C/g%3E%3C/svg%3E";
#[near_bindgen]
impl SmartCertificateContract {
#[init]
pub fn new(nft_owner: ValidAccountId) -> Self {
assert!(!env::state_exists(), "The contract is already initialized");
assert!(
env::is_valid_account_id(env::predecessor_account_id().as_bytes()),
"The NEAR Foundation account ID is invalid"
);
let metadata = NFTContractMetadata {
spec: NFT_METADATA_SPEC.to_string(),
name: "Example NEAR non-fungible token".to_string(),
symbol: "EXAMPLE".to_string(),
icon: Some(DATA_IMAGE_SVG_NEAR_ICON.to_string()),
base_uri: None,
reference: None,
reference_hash: None,
};
SmartCertificateContract {
owner: env::predecessor_account_id(),
issuers: UnorderedMap::new(b"i".to_vec()),
need_user_approved: UnorderedMap::new(b"n".to_vec()),
ready_deploy_nft: UnorderedMap::new(b"r".to_vec()),
nft_cert: UnorderedMap::new(b"nft".to_vec()),
nft_token: NonFungibleToken::new(
StorageKey::NonFungibleToken,
nft_owner,
Some(StorageKey::TokenMetadata),
Some(StorageKey::Enumeration),
Some(StorageKey::Approval),
),
metadata: LazyOption::new(StorageKey::Metadata, Some(&metadata)),
}
}
pub fn add_issuer(&mut self, issuer_account: AccountId, name: String) -> bool {
assert!(
env::is_valid_account_id(issuer_account.as_bytes()),
"The given account ID is invalid"
);
self.assert_called_by_foundation();
if !self.issuers.get(&issuer_account).is_some() |
return false;
}
pub fn create_cert(&mut self, user_account_id: ValidAccountId, name: String, dob: String, national_id: String) {
self.assert_called_by_issuers();
let issuer = self.issuers.get(&env::predecessor_account_id()).clone().unwrap();
let user = UserInfo {
name: name,
dob: dob,
national_id: national_id,
from: issuer.clone(),
owner: user_account_id.clone()
};
let cert_info = CertInfo {
user_info: user,
is_first_approved: false
};
let id = self.generate_cert_key(user_account_id.clone(), env::predecessor_account_id());
self.need_user_approved.insert(&id, &cert_info);
}
pub fn user_approved(&mut self, id: String) {
let cert = self.need_user_approved.get(&id).unwrap();
let new_cert = CertInfo {
user_info: cert.user_info.clone(),
is_first_approved: true
};
env::log(
format!(
"new cert @{}",
new_cert.user_info.owner
).as_bytes()
);
self.need_user_approved.remove(&id);
self.ready_deploy_nft.insert(&id, &new_cert);
}
#[payable]
pub fn nft_mint(
&mut self,
id: String,
) -> Token {
self.assert_called_by_foundation();
let cert = self.ready_deploy_nft.get(&id).unwrap();
let owner = cert.user_info.clone().owner;
let token = self.nft_token.mint((self.nft_cert.len() + 1).to_string(), owner, Some(self.create_meta_data(cert)));
let token_serialize = TokenSerialize {
token_id: token.token_id.clone(),
owner_id: token.owner_id.clone(),
metadata: token.metadata.clone().unwrap(),
tx: "".to_string()
};
self.nft_cert.insert(&id.clone(), &token_serialize);
return token;
}
pub fn finalize(&mut self, id: String, txid: String) {
let mut token = self.nft_cert.get(&id.clone()).unwrap();
token.tx = txid.clone();
self.ready_deploy_nft.remove(&id.clone());
self.nft_cert.remove(&id.clone());
self.nft_cert.insert(&txid.clone(), &token);
}
/*****************/
/* View Methods */
/*****************/
pub fn get_cert_info(&self, txid: String) -> TokenMetadata {
let cert = self.nft_cert.get(&txid.clone()).unwrap();
return cert.metadata;
}
pub fn get_issuers(&self) -> Vec<(AccountId, Issuer)> {
return self.issuers.to_vec();
}
pub fn get_certs(&self) -> Vec<(String, TokenSerialize)> {
return self
.nft_cert
.iter()
.collect();
}
pub fn get_un_approved_cert(&self, owner_id: String) -> Vec<(String, CertInfo)> {
return self.need_user_approved
.iter()
.filter(|(_k, v)| String::from(v.user_info.owner.clone()) == owner_id)
.collect();
}
pub fn get_ready_deploy_cert(&self) -> Vec<(String, CertInfo)> {
return self.ready_deploy_nft
.iter()
.collect();
}
/************/
/* Utils */
/************/
fn generate_cert_key(&self, user: ValidAccountId, issuer: AccountId) -> String {
return [String::from(user), issuer].join("_");
}
fn create_meta_data(&self, cert: CertInfo) -> TokenMetadata {
let description = cert.user_info.name + &String::from("'s Certificate issued by ") + &cert.user_info.from.name;
return TokenMetadata {
title: Some("Certificate".into()),
description: Some(description.into()),
media: None,
media_hash: None,
copies: Some(1u64),
issued_at: None,
expires_at: None,
starts_at: None,
updated_at: None,
extra: None,
reference: None,
reference_hash: None,
};
}
/************/
/* Internal */
/************/
fn assert_called_by_foundation(&self) {
assert_eq!(
&env::predecessor_account_id(),
&self.owner,
"Can only be called by NEAR Foundation"
);
}
fn assert_called_by_issuers(&self) {
assert!(
self.issuers.get(&env::predecessor_account_id()).is_some(),
"Only call by issuers"
);
}
}
near_contract_standards::impl_non_fungible_token_core!(SmartCertificateContract, nft_token);
near_contract_standards::impl_non_fungible_token_approval!(SmartCertificateContract, nft_token);
near_contract_standards::impl_non_fungible_token_enumeration!(SmartCertificateContract, nft_token);
#[near_bindgen]
impl NonFungibleTokenMetadataProvider for SmartCertificateContract {
fn nft_metadata(&self) -> NFTContractMetadata {
self.metadata.get().unwrap()
}
}
| {
let new_issuer = Issuer {
name: name,
issuer_id: issuer_account.clone(),
};
self.issuers.insert(&issuer_account, &new_issuer);
return true;
} | conditional_block |
lib.rs | use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::collections::UnorderedMap;
use near_sdk::serde::{Deserialize, Serialize};
use near_contract_standards::non_fungible_token::metadata::{
NFTContractMetadata, NonFungibleTokenMetadataProvider, TokenMetadata, NFT_METADATA_SPEC,
};
use near_contract_standards::non_fungible_token::{Token, TokenId};
use near_contract_standards::non_fungible_token::NonFungibleToken;
use near_sdk::collections::LazyOption;
use near_sdk::json_types::ValidAccountId;
use near_sdk::{
setup_alloc, env, near_bindgen, AccountId, BorshStorageKey, Promise, PromiseOrValue,
};
#[derive(BorshSerialize, BorshStorageKey)]
enum StorageKey {
NonFungibleToken,
Metadata,
TokenMetadata,
Enumeration,
Approval,
}
setup_alloc!();
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct UserInfo {
pub name: String,
pub dob: String,
pub national_id: String,
pub from: Issuer,
pub owner: ValidAccountId
}
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct TokenSerialize {
pub token_id: String,
pub owner_id: String,
pub metadata: TokenMetadata,
pub tx: String,
}
#[derive(BorshDeserialize, BorshSerialize, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct CertInfo {
pub user_info: UserInfo,
pub is_first_approved: bool,
}
#[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize)]
pub struct SmartCertificateContract{
owner: AccountId, //Owners of this contract, the only person can add more issuers
issuers: UnorderedMap<AccountId, Issuer>, //List of issuers, only issuers in this list can create a cert
need_user_approved: UnorderedMap<String, CertInfo>,
ready_deploy_nft: UnorderedMap<String, CertInfo>,
//NFT Define
nft_cert: UnorderedMap<String, TokenSerialize>,
nft_token: NonFungibleToken,
metadata: LazyOption<NFTContractMetadata>,
}
// #[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct Issuer {
pub name: String,
pub issuer_id: AccountId
}
impl Default for SmartCertificateContract {
fn default() -> Self {
env::panic(b"SmartCertificate contract should be initialized before usage")
}
}
const DATA_IMAGE_SVG_NEAR_ICON: &str = "data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 288 288'%3E%3Cg id='l' data-name='l'%3E%3Cpath d='M187.58,79.81l-30.1,44.69a3.2,3.2,0,0,0,4.75,4.2L191.86,103a1.2,1.2,0,0,1,2,.91v80.46a1.2,1.2,0,0,1-2.12.77L102.18,77.93A15.35,15.35,0,0,0,90.47,72.5H87.34A15.34,15.34,0,0,0,72,87.84V201.16A15.34,15.34,0,0,0,87.34,216.5h0a15.35,15.35,0,0,0,13.08-7.31l30.1-44.69a3.2,3.2,0,0,0-4.75-4.2L96.14,186a1.2,1.2,0,0,1-2-.91V104.61a1.2,1.2,0,0,1,2.12-.77l89.55,107.23a15.35,15.35,0,0,0,11.71,5.43h3.13A15.34,15.34,0,0,0,216,201.16V87.84A15.34,15.34,0,0,0,200.66,72.5h0A15.35,15.35,0,0,0,187.58,79.81Z'/%3E%3C/g%3E%3C/svg%3E";
#[near_bindgen]
impl SmartCertificateContract {
#[init]
pub fn new(nft_owner: ValidAccountId) -> Self {
assert!(!env::state_exists(), "The contract is already initialized");
assert!(
env::is_valid_account_id(env::predecessor_account_id().as_bytes()),
"The NEAR Foundation account ID is invalid"
);
let metadata = NFTContractMetadata {
spec: NFT_METADATA_SPEC.to_string(),
name: "Example NEAR non-fungible token".to_string(),
symbol: "EXAMPLE".to_string(),
icon: Some(DATA_IMAGE_SVG_NEAR_ICON.to_string()),
base_uri: None,
reference: None,
reference_hash: None,
};
SmartCertificateContract {
owner: env::predecessor_account_id(),
issuers: UnorderedMap::new(b"i".to_vec()),
need_user_approved: UnorderedMap::new(b"n".to_vec()),
ready_deploy_nft: UnorderedMap::new(b"r".to_vec()),
nft_cert: UnorderedMap::new(b"nft".to_vec()),
nft_token: NonFungibleToken::new(
StorageKey::NonFungibleToken,
nft_owner,
Some(StorageKey::TokenMetadata),
Some(StorageKey::Enumeration),
Some(StorageKey::Approval),
),
metadata: LazyOption::new(StorageKey::Metadata, Some(&metadata)),
}
}
| assert!(
env::is_valid_account_id(issuer_account.as_bytes()),
"The given account ID is invalid"
);
self.assert_called_by_foundation();
if !self.issuers.get(&issuer_account).is_some() {
let new_issuer = Issuer {
name: name,
issuer_id: issuer_account.clone(),
};
self.issuers.insert(&issuer_account, &new_issuer);
return true;
}
return false;
}
pub fn create_cert(&mut self, user_account_id: ValidAccountId, name: String, dob: String, national_id: String) {
self.assert_called_by_issuers();
let issuer = self.issuers.get(&env::predecessor_account_id()).clone().unwrap();
let user = UserInfo {
name: name,
dob: dob,
national_id: national_id,
from: issuer.clone(),
owner: user_account_id.clone()
};
let cert_info = CertInfo {
user_info: user,
is_first_approved: false
};
let id = self.generate_cert_key(user_account_id.clone(), env::predecessor_account_id());
self.need_user_approved.insert(&id, &cert_info);
}
pub fn user_approved(&mut self, id: String) {
let cert = self.need_user_approved.get(&id).unwrap();
let new_cert = CertInfo {
user_info: cert.user_info.clone(),
is_first_approved: true
};
env::log(
format!(
"new cert @{}",
new_cert.user_info.owner
).as_bytes()
);
self.need_user_approved.remove(&id);
self.ready_deploy_nft.insert(&id, &new_cert);
}
#[payable]
pub fn nft_mint(
&mut self,
id: String,
) -> Token {
self.assert_called_by_foundation();
let cert = self.ready_deploy_nft.get(&id).unwrap();
let owner = cert.user_info.clone().owner;
let token = self.nft_token.mint((self.nft_cert.len() + 1).to_string(), owner, Some(self.create_meta_data(cert)));
let token_serialize = TokenSerialize {
token_id: token.token_id.clone(),
owner_id: token.owner_id.clone(),
metadata: token.metadata.clone().unwrap(),
tx: "".to_string()
};
self.nft_cert.insert(&id.clone(), &token_serialize);
return token;
}
pub fn finalize(&mut self, id: String, txid: String) {
let mut token = self.nft_cert.get(&id.clone()).unwrap();
token.tx = txid.clone();
self.ready_deploy_nft.remove(&id.clone());
self.nft_cert.remove(&id.clone());
self.nft_cert.insert(&txid.clone(), &token);
}
/*****************/
/* View Methods */
/*****************/
pub fn get_cert_info(&self, txid: String) -> TokenMetadata {
let cert = self.nft_cert.get(&txid.clone()).unwrap();
return cert.metadata;
}
pub fn get_issuers(&self) -> Vec<(AccountId, Issuer)> {
return self.issuers.to_vec();
}
pub fn get_certs(&self) -> Vec<(String, TokenSerialize)> {
return self
.nft_cert
.iter()
.collect();
}
pub fn get_un_approved_cert(&self, owner_id: String) -> Vec<(String, CertInfo)> {
return self.need_user_approved
.iter()
.filter(|(_k, v)| String::from(v.user_info.owner.clone()) == owner_id)
.collect();
}
pub fn get_ready_deploy_cert(&self) -> Vec<(String, CertInfo)> {
return self.ready_deploy_nft
.iter()
.collect();
}
/************/
/* Utils */
/************/
fn generate_cert_key(&self, user: ValidAccountId, issuer: AccountId) -> String {
return [String::from(user), issuer].join("_");
}
fn create_meta_data(&self, cert: CertInfo) -> TokenMetadata {
let description = cert.user_info.name + &String::from("'s Certificate issued by ") + &cert.user_info.from.name;
return TokenMetadata {
title: Some("Certificate".into()),
description: Some(description.into()),
media: None,
media_hash: None,
copies: Some(1u64),
issued_at: None,
expires_at: None,
starts_at: None,
updated_at: None,
extra: None,
reference: None,
reference_hash: None,
};
}
/************/
/* Internal */
/************/
fn assert_called_by_foundation(&self) {
assert_eq!(
&env::predecessor_account_id(),
&self.owner,
"Can only be called by NEAR Foundation"
);
}
fn assert_called_by_issuers(&self) {
assert!(
self.issuers.get(&env::predecessor_account_id()).is_some(),
"Only call by issuers"
);
}
}
near_contract_standards::impl_non_fungible_token_core!(SmartCertificateContract, nft_token);
near_contract_standards::impl_non_fungible_token_approval!(SmartCertificateContract, nft_token);
near_contract_standards::impl_non_fungible_token_enumeration!(SmartCertificateContract, nft_token);
#[near_bindgen]
impl NonFungibleTokenMetadataProvider for SmartCertificateContract {
fn nft_metadata(&self) -> NFTContractMetadata {
self.metadata.get().unwrap()
}
} | pub fn add_issuer(&mut self, issuer_account: AccountId, name: String) -> bool { | random_line_split |
lib.rs | use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::collections::UnorderedMap;
use near_sdk::serde::{Deserialize, Serialize};
use near_contract_standards::non_fungible_token::metadata::{
NFTContractMetadata, NonFungibleTokenMetadataProvider, TokenMetadata, NFT_METADATA_SPEC,
};
use near_contract_standards::non_fungible_token::{Token, TokenId};
use near_contract_standards::non_fungible_token::NonFungibleToken;
use near_sdk::collections::LazyOption;
use near_sdk::json_types::ValidAccountId;
use near_sdk::{
setup_alloc, env, near_bindgen, AccountId, BorshStorageKey, Promise, PromiseOrValue,
};
#[derive(BorshSerialize, BorshStorageKey)]
enum StorageKey {
NonFungibleToken,
Metadata,
TokenMetadata,
Enumeration,
Approval,
}
setup_alloc!();
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct UserInfo {
pub name: String,
pub dob: String,
pub national_id: String,
pub from: Issuer,
pub owner: ValidAccountId
}
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct TokenSerialize {
pub token_id: String,
pub owner_id: String,
pub metadata: TokenMetadata,
pub tx: String,
}
#[derive(BorshDeserialize, BorshSerialize, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct CertInfo {
pub user_info: UserInfo,
pub is_first_approved: bool,
}
#[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize)]
pub struct SmartCertificateContract{
owner: AccountId, //Owners of this contract, the only person can add more issuers
issuers: UnorderedMap<AccountId, Issuer>, //List of issuers, only issuers in this list can create a cert
need_user_approved: UnorderedMap<String, CertInfo>,
ready_deploy_nft: UnorderedMap<String, CertInfo>,
//NFT Define
nft_cert: UnorderedMap<String, TokenSerialize>,
nft_token: NonFungibleToken,
metadata: LazyOption<NFTContractMetadata>,
}
// #[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize, Clone, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct Issuer {
pub name: String,
pub issuer_id: AccountId
}
impl Default for SmartCertificateContract {
fn default() -> Self {
env::panic(b"SmartCertificate contract should be initialized before usage")
}
}
const DATA_IMAGE_SVG_NEAR_ICON: &str = "data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 288 288'%3E%3Cg id='l' data-name='l'%3E%3Cpath d='M187.58,79.81l-30.1,44.69a3.2,3.2,0,0,0,4.75,4.2L191.86,103a1.2,1.2,0,0,1,2,.91v80.46a1.2,1.2,0,0,1-2.12.77L102.18,77.93A15.35,15.35,0,0,0,90.47,72.5H87.34A15.34,15.34,0,0,0,72,87.84V201.16A15.34,15.34,0,0,0,87.34,216.5h0a15.35,15.35,0,0,0,13.08-7.31l30.1-44.69a3.2,3.2,0,0,0-4.75-4.2L96.14,186a1.2,1.2,0,0,1-2-.91V104.61a1.2,1.2,0,0,1,2.12-.77l89.55,107.23a15.35,15.35,0,0,0,11.71,5.43h3.13A15.34,15.34,0,0,0,216,201.16V87.84A15.34,15.34,0,0,0,200.66,72.5h0A15.35,15.35,0,0,0,187.58,79.81Z'/%3E%3C/g%3E%3C/svg%3E";
#[near_bindgen]
impl SmartCertificateContract {
#[init]
pub fn new(nft_owner: ValidAccountId) -> Self {
assert!(!env::state_exists(), "The contract is already initialized");
assert!(
env::is_valid_account_id(env::predecessor_account_id().as_bytes()),
"The NEAR Foundation account ID is invalid"
);
let metadata = NFTContractMetadata {
spec: NFT_METADATA_SPEC.to_string(),
name: "Example NEAR non-fungible token".to_string(),
symbol: "EXAMPLE".to_string(),
icon: Some(DATA_IMAGE_SVG_NEAR_ICON.to_string()),
base_uri: None,
reference: None,
reference_hash: None,
};
SmartCertificateContract {
owner: env::predecessor_account_id(),
issuers: UnorderedMap::new(b"i".to_vec()),
need_user_approved: UnorderedMap::new(b"n".to_vec()),
ready_deploy_nft: UnorderedMap::new(b"r".to_vec()),
nft_cert: UnorderedMap::new(b"nft".to_vec()),
nft_token: NonFungibleToken::new(
StorageKey::NonFungibleToken,
nft_owner,
Some(StorageKey::TokenMetadata),
Some(StorageKey::Enumeration),
Some(StorageKey::Approval),
),
metadata: LazyOption::new(StorageKey::Metadata, Some(&metadata)),
}
}
pub fn | (&mut self, issuer_account: AccountId, name: String) -> bool {
assert!(
env::is_valid_account_id(issuer_account.as_bytes()),
"The given account ID is invalid"
);
self.assert_called_by_foundation();
if !self.issuers.get(&issuer_account).is_some() {
let new_issuer = Issuer {
name: name,
issuer_id: issuer_account.clone(),
};
self.issuers.insert(&issuer_account, &new_issuer);
return true;
}
return false;
}
pub fn create_cert(&mut self, user_account_id: ValidAccountId, name: String, dob: String, national_id: String) {
self.assert_called_by_issuers();
let issuer = self.issuers.get(&env::predecessor_account_id()).clone().unwrap();
let user = UserInfo {
name: name,
dob: dob,
national_id: national_id,
from: issuer.clone(),
owner: user_account_id.clone()
};
let cert_info = CertInfo {
user_info: user,
is_first_approved: false
};
let id = self.generate_cert_key(user_account_id.clone(), env::predecessor_account_id());
self.need_user_approved.insert(&id, &cert_info);
}
pub fn user_approved(&mut self, id: String) {
let cert = self.need_user_approved.get(&id).unwrap();
let new_cert = CertInfo {
user_info: cert.user_info.clone(),
is_first_approved: true
};
env::log(
format!(
"new cert @{}",
new_cert.user_info.owner
).as_bytes()
);
self.need_user_approved.remove(&id);
self.ready_deploy_nft.insert(&id, &new_cert);
}
#[payable]
pub fn nft_mint(
&mut self,
id: String,
) -> Token {
self.assert_called_by_foundation();
let cert = self.ready_deploy_nft.get(&id).unwrap();
let owner = cert.user_info.clone().owner;
let token = self.nft_token.mint((self.nft_cert.len() + 1).to_string(), owner, Some(self.create_meta_data(cert)));
let token_serialize = TokenSerialize {
token_id: token.token_id.clone(),
owner_id: token.owner_id.clone(),
metadata: token.metadata.clone().unwrap(),
tx: "".to_string()
};
self.nft_cert.insert(&id.clone(), &token_serialize);
return token;
}
pub fn finalize(&mut self, id: String, txid: String) {
let mut token = self.nft_cert.get(&id.clone()).unwrap();
token.tx = txid.clone();
self.ready_deploy_nft.remove(&id.clone());
self.nft_cert.remove(&id.clone());
self.nft_cert.insert(&txid.clone(), &token);
}
/*****************/
/* View Methods */
/*****************/
pub fn get_cert_info(&self, txid: String) -> TokenMetadata {
let cert = self.nft_cert.get(&txid.clone()).unwrap();
return cert.metadata;
}
pub fn get_issuers(&self) -> Vec<(AccountId, Issuer)> {
return self.issuers.to_vec();
}
pub fn get_certs(&self) -> Vec<(String, TokenSerialize)> {
return self
.nft_cert
.iter()
.collect();
}
pub fn get_un_approved_cert(&self, owner_id: String) -> Vec<(String, CertInfo)> {
return self.need_user_approved
.iter()
.filter(|(_k, v)| String::from(v.user_info.owner.clone()) == owner_id)
.collect();
}
pub fn get_ready_deploy_cert(&self) -> Vec<(String, CertInfo)> {
return self.ready_deploy_nft
.iter()
.collect();
}
/************/
/* Utils */
/************/
fn generate_cert_key(&self, user: ValidAccountId, issuer: AccountId) -> String {
return [String::from(user), issuer].join("_");
}
fn create_meta_data(&self, cert: CertInfo) -> TokenMetadata {
let description = cert.user_info.name + &String::from("'s Certificate issued by ") + &cert.user_info.from.name;
return TokenMetadata {
title: Some("Certificate".into()),
description: Some(description.into()),
media: None,
media_hash: None,
copies: Some(1u64),
issued_at: None,
expires_at: None,
starts_at: None,
updated_at: None,
extra: None,
reference: None,
reference_hash: None,
};
}
/************/
/* Internal */
/************/
fn assert_called_by_foundation(&self) {
assert_eq!(
&env::predecessor_account_id(),
&self.owner,
"Can only be called by NEAR Foundation"
);
}
fn assert_called_by_issuers(&self) {
assert!(
self.issuers.get(&env::predecessor_account_id()).is_some(),
"Only call by issuers"
);
}
}
near_contract_standards::impl_non_fungible_token_core!(SmartCertificateContract, nft_token);
near_contract_standards::impl_non_fungible_token_approval!(SmartCertificateContract, nft_token);
near_contract_standards::impl_non_fungible_token_enumeration!(SmartCertificateContract, nft_token);
#[near_bindgen]
impl NonFungibleTokenMetadataProvider for SmartCertificateContract {
fn nft_metadata(&self) -> NFTContractMetadata {
self.metadata.get().unwrap()
}
}
| add_issuer | identifier_name |
label_leaves_in_expr_with_numbered_intervals.py | # -*- encoding: utf-8 -*-
from abjad.tools import scoretools
from abjad.tools import scoretools
from abjad.tools import markuptools
from abjad.tools import scoretools
from abjad.tools import pitchtools
from abjad.tools.topleveltools import attach
from abjad.tools.topleveltools import iterate
def label_leaves_in_expr_with_numbered_intervals(expr, markup_direction=Up):
| r"""Label leaves in `expr` with numbered intervals:
::
>>> notes = scoretools.make_notes(
... [0, 25, 11, -4, -14, -13, 9, 10, 6, 5],
... [Duration(1, 8)],
... )
>>> staff = Staff(notes)
>>> labeltools.label_leaves_in_expr_with_numbered_intervals(staff)
.. doctest::
>>> print(format(staff))
\new Staff {
c'8 ^ \markup { +25 }
cs'''8 ^ \markup { -14 }
b'8 ^ \markup { -15 }
af8 ^ \markup { -10 }
bf,8 ^ \markup { +1 }
b,8 ^ \markup { +22 }
a'8 ^ \markup { +1 }
bf'8 ^ \markup { -4 }
fs'8 ^ \markup { -1 }
f'8
}
::
>>> show(staff) # doctest: +SKIP
Returns none.
"""
for note in iterate(expr).by_class(scoretools.Note):
logical_voice_iterator = iterate(note).by_logical_voice_from_component(
scoretools.Leaf,
)
try:
next(logical_voice_iterator)
next_leaf = next(logical_voice_iterator)
if isinstance(next_leaf, scoretools.Note):
mci = pitchtools.NumberedInterval.from_pitch_carriers(
note, next_leaf)
markup = markuptools.Markup(mci, markup_direction)
attach(markup, note)
except StopIteration:
pass | identifier_body | |
label_leaves_in_expr_with_numbered_intervals.py | # -*- encoding: utf-8 -*-
from abjad.tools import scoretools
from abjad.tools import scoretools
from abjad.tools import markuptools
from abjad.tools import scoretools
from abjad.tools import pitchtools
from abjad.tools.topleveltools import attach
from abjad.tools.topleveltools import iterate
def label_leaves_in_expr_with_numbered_intervals(expr, markup_direction=Up):
r"""Label leaves in `expr` with numbered intervals:
::
>>> notes = scoretools.make_notes(
... [0, 25, 11, -4, -14, -13, 9, 10, 6, 5],
... [Duration(1, 8)],
... )
>>> staff = Staff(notes)
>>> labeltools.label_leaves_in_expr_with_numbered_intervals(staff)
.. doctest::
>>> print(format(staff))
\new Staff {
c'8 ^ \markup { +25 }
cs'''8 ^ \markup { -14 }
b'8 ^ \markup { -15 }
af8 ^ \markup { -10 }
bf,8 ^ \markup { +1 }
b,8 ^ \markup { +22 }
a'8 ^ \markup { +1 }
bf'8 ^ \markup { -4 }
fs'8 ^ \markup { -1 }
f'8
}
::
>>> show(staff) # doctest: +SKIP
Returns none.
"""
for note in iterate(expr).by_class(scoretools.Note):
logical_voice_iterator = iterate(note).by_logical_voice_from_component(
scoretools.Leaf,
)
try:
next(logical_voice_iterator)
next_leaf = next(logical_voice_iterator)
if isinstance(next_leaf, scoretools.Note):
|
except StopIteration:
pass
| mci = pitchtools.NumberedInterval.from_pitch_carriers(
note, next_leaf)
markup = markuptools.Markup(mci, markup_direction)
attach(markup, note) | conditional_block |
label_leaves_in_expr_with_numbered_intervals.py | # -*- encoding: utf-8 -*-
from abjad.tools import scoretools
from abjad.tools import scoretools
from abjad.tools import markuptools
from abjad.tools import scoretools
from abjad.tools import pitchtools
from abjad.tools.topleveltools import attach
from abjad.tools.topleveltools import iterate
def | (expr, markup_direction=Up):
r"""Label leaves in `expr` with numbered intervals:
::
>>> notes = scoretools.make_notes(
... [0, 25, 11, -4, -14, -13, 9, 10, 6, 5],
... [Duration(1, 8)],
... )
>>> staff = Staff(notes)
>>> labeltools.label_leaves_in_expr_with_numbered_intervals(staff)
.. doctest::
>>> print(format(staff))
\new Staff {
c'8 ^ \markup { +25 }
cs'''8 ^ \markup { -14 }
b'8 ^ \markup { -15 }
af8 ^ \markup { -10 }
bf,8 ^ \markup { +1 }
b,8 ^ \markup { +22 }
a'8 ^ \markup { +1 }
bf'8 ^ \markup { -4 }
fs'8 ^ \markup { -1 }
f'8
}
::
>>> show(staff) # doctest: +SKIP
Returns none.
"""
for note in iterate(expr).by_class(scoretools.Note):
logical_voice_iterator = iterate(note).by_logical_voice_from_component(
scoretools.Leaf,
)
try:
next(logical_voice_iterator)
next_leaf = next(logical_voice_iterator)
if isinstance(next_leaf, scoretools.Note):
mci = pitchtools.NumberedInterval.from_pitch_carriers(
note, next_leaf)
markup = markuptools.Markup(mci, markup_direction)
attach(markup, note)
except StopIteration:
pass
| label_leaves_in_expr_with_numbered_intervals | identifier_name |
label_leaves_in_expr_with_numbered_intervals.py | # -*- encoding: utf-8 -*-
from abjad.tools import scoretools
from abjad.tools import scoretools
from abjad.tools import markuptools
from abjad.tools import scoretools
from abjad.tools import pitchtools
from abjad.tools.topleveltools import attach |
def label_leaves_in_expr_with_numbered_intervals(expr, markup_direction=Up):
r"""Label leaves in `expr` with numbered intervals:
::
>>> notes = scoretools.make_notes(
... [0, 25, 11, -4, -14, -13, 9, 10, 6, 5],
... [Duration(1, 8)],
... )
>>> staff = Staff(notes)
>>> labeltools.label_leaves_in_expr_with_numbered_intervals(staff)
.. doctest::
>>> print(format(staff))
\new Staff {
c'8 ^ \markup { +25 }
cs'''8 ^ \markup { -14 }
b'8 ^ \markup { -15 }
af8 ^ \markup { -10 }
bf,8 ^ \markup { +1 }
b,8 ^ \markup { +22 }
a'8 ^ \markup { +1 }
bf'8 ^ \markup { -4 }
fs'8 ^ \markup { -1 }
f'8
}
::
>>> show(staff) # doctest: +SKIP
Returns none.
"""
for note in iterate(expr).by_class(scoretools.Note):
logical_voice_iterator = iterate(note).by_logical_voice_from_component(
scoretools.Leaf,
)
try:
next(logical_voice_iterator)
next_leaf = next(logical_voice_iterator)
if isinstance(next_leaf, scoretools.Note):
mci = pitchtools.NumberedInterval.from_pitch_carriers(
note, next_leaf)
markup = markuptools.Markup(mci, markup_direction)
attach(markup, note)
except StopIteration:
pass | from abjad.tools.topleveltools import iterate
| random_line_split |
in-memory-data.service.ts | import {InMemoryDbService} from "angular-in-memory-web-api";
export class InMemoryDataService implements InMemoryDbService {
createDb() {
let heroes = [
{ id: 11, name: 'Mr. Nice' },
{ id: 12, name: 'Narco' },
{ id: 13, name: 'Bombasto' },
{ id: 14, name: 'Celeritas' },
{ id: 15, name: 'Magneta' },
{ id: 16, name: 'RubberMan' },
{ id: 17, name: 'Dynama' },
{ id: 18, name: 'Dr IQ' }, | ];
return {heroes};
}
} | { id: 19, name: 'Magma' },
{ id: 20, name: 'Tornado' } | random_line_split |
in-memory-data.service.ts | import {InMemoryDbService} from "angular-in-memory-web-api";
export class InMemoryDataService implements InMemoryDbService {
createDb() |
}
| {
let heroes = [
{ id: 11, name: 'Mr. Nice' },
{ id: 12, name: 'Narco' },
{ id: 13, name: 'Bombasto' },
{ id: 14, name: 'Celeritas' },
{ id: 15, name: 'Magneta' },
{ id: 16, name: 'RubberMan' },
{ id: 17, name: 'Dynama' },
{ id: 18, name: 'Dr IQ' },
{ id: 19, name: 'Magma' },
{ id: 20, name: 'Tornado' }
];
return {heroes};
} | identifier_body |
in-memory-data.service.ts | import {InMemoryDbService} from "angular-in-memory-web-api";
export class InMemoryDataService implements InMemoryDbService {
| () {
let heroes = [
{ id: 11, name: 'Mr. Nice' },
{ id: 12, name: 'Narco' },
{ id: 13, name: 'Bombasto' },
{ id: 14, name: 'Celeritas' },
{ id: 15, name: 'Magneta' },
{ id: 16, name: 'RubberMan' },
{ id: 17, name: 'Dynama' },
{ id: 18, name: 'Dr IQ' },
{ id: 19, name: 'Magma' },
{ id: 20, name: 'Tornado' }
];
return {heroes};
}
}
| createDb | identifier_name |
args.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Global initialization and retrieval of command line arguments.
//!
//! On some platforms these are stored during runtime startup,
//! and on some they are retrieved from the system on demand.
#![allow(dead_code)] // runtime init functions not used during testing
use ffi::OsString;
use marker::PhantomData;
use vec;
/// One-time global initialization.
pub unsafe fn init(argc: isize, argv: *const *const u8) { imp::init(argc, argv) }
/// One-time global cleanup.
pub unsafe fn cleanup() { imp::cleanup() }
/// Returns the command line arguments
pub fn args() -> Args {
imp::args()
}
pub struct Args {
iter: vec::IntoIter<OsString>,
_dont_send_or_sync_me: PhantomData<*mut ()>,
}
impl Args {
pub fn inner_debug(&self) -> &[OsString] {
self.iter.as_slice()
}
}
impl Iterator for Args {
type Item = OsString;
fn next(&mut self) -> Option<OsString> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
impl ExactSizeIterator for Args {
fn len(&self) -> usize { self.iter.len() }
}
impl DoubleEndedIterator for Args {
fn next_back(&mut self) -> Option<OsString> { self.iter.next_back() }
}
mod imp {
use os::unix::prelude::*;
use mem;
use ffi::{CStr, OsString};
use marker::PhantomData;
use libc;
use super::Args;
use sys_common::mutex::Mutex;
static mut GLOBAL_ARGS_PTR: usize = 0;
static LOCK: Mutex = Mutex::new();
pub unsafe fn | (argc: isize, argv: *const *const u8) {
let args = (0..argc).map(|i| {
CStr::from_ptr(*argv.offset(i) as *const libc::c_char).to_bytes().to_vec()
}).collect();
LOCK.lock();
let ptr = get_global_ptr();
assert!((*ptr).is_none());
(*ptr) = Some(box args);
LOCK.unlock();
}
pub unsafe fn cleanup() {
LOCK.lock();
*get_global_ptr() = None;
LOCK.unlock();
}
pub fn args() -> Args {
let bytes = clone().unwrap_or(Vec::new());
let v: Vec<OsString> = bytes.into_iter().map(|v| {
OsStringExt::from_vec(v)
}).collect();
Args { iter: v.into_iter(), _dont_send_or_sync_me: PhantomData }
}
fn clone() -> Option<Vec<Vec<u8>>> {
unsafe {
LOCK.lock();
let ptr = get_global_ptr();
let ret = (*ptr).as_ref().map(|s| (**s).clone());
LOCK.unlock();
return ret
}
}
fn get_global_ptr() -> *mut Option<Box<Vec<Vec<u8>>>> {
unsafe { mem::transmute(&GLOBAL_ARGS_PTR) }
}
}
| init | identifier_name |
args.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Global initialization and retrieval of command line arguments.
//!
//! On some platforms these are stored during runtime startup,
//! and on some they are retrieved from the system on demand.
#![allow(dead_code)] // runtime init functions not used during testing
use ffi::OsString;
use marker::PhantomData;
use vec;
/// One-time global initialization.
pub unsafe fn init(argc: isize, argv: *const *const u8) { imp::init(argc, argv) }
/// One-time global cleanup.
pub unsafe fn cleanup() { imp::cleanup() }
/// Returns the command line arguments
pub fn args() -> Args {
imp::args()
}
pub struct Args {
iter: vec::IntoIter<OsString>,
_dont_send_or_sync_me: PhantomData<*mut ()>,
}
impl Args {
pub fn inner_debug(&self) -> &[OsString] {
self.iter.as_slice()
}
}
impl Iterator for Args {
type Item = OsString;
fn next(&mut self) -> Option<OsString> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
impl ExactSizeIterator for Args {
fn len(&self) -> usize { self.iter.len() }
}
| fn next_back(&mut self) -> Option<OsString> { self.iter.next_back() }
}
mod imp {
use os::unix::prelude::*;
use mem;
use ffi::{CStr, OsString};
use marker::PhantomData;
use libc;
use super::Args;
use sys_common::mutex::Mutex;
static mut GLOBAL_ARGS_PTR: usize = 0;
static LOCK: Mutex = Mutex::new();
pub unsafe fn init(argc: isize, argv: *const *const u8) {
let args = (0..argc).map(|i| {
CStr::from_ptr(*argv.offset(i) as *const libc::c_char).to_bytes().to_vec()
}).collect();
LOCK.lock();
let ptr = get_global_ptr();
assert!((*ptr).is_none());
(*ptr) = Some(box args);
LOCK.unlock();
}
pub unsafe fn cleanup() {
LOCK.lock();
*get_global_ptr() = None;
LOCK.unlock();
}
pub fn args() -> Args {
let bytes = clone().unwrap_or(Vec::new());
let v: Vec<OsString> = bytes.into_iter().map(|v| {
OsStringExt::from_vec(v)
}).collect();
Args { iter: v.into_iter(), _dont_send_or_sync_me: PhantomData }
}
fn clone() -> Option<Vec<Vec<u8>>> {
unsafe {
LOCK.lock();
let ptr = get_global_ptr();
let ret = (*ptr).as_ref().map(|s| (**s).clone());
LOCK.unlock();
return ret
}
}
fn get_global_ptr() -> *mut Option<Box<Vec<Vec<u8>>>> {
unsafe { mem::transmute(&GLOBAL_ARGS_PTR) }
}
} | impl DoubleEndedIterator for Args { | random_line_split |
args.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Global initialization and retrieval of command line arguments.
//!
//! On some platforms these are stored during runtime startup,
//! and on some they are retrieved from the system on demand.
#![allow(dead_code)] // runtime init functions not used during testing
use ffi::OsString;
use marker::PhantomData;
use vec;
/// One-time global initialization.
pub unsafe fn init(argc: isize, argv: *const *const u8) { imp::init(argc, argv) }
/// One-time global cleanup.
pub unsafe fn cleanup() { imp::cleanup() }
/// Returns the command line arguments
pub fn args() -> Args {
imp::args()
}
pub struct Args {
iter: vec::IntoIter<OsString>,
_dont_send_or_sync_me: PhantomData<*mut ()>,
}
impl Args {
pub fn inner_debug(&self) -> &[OsString] |
}
impl Iterator for Args {
type Item = OsString;
fn next(&mut self) -> Option<OsString> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
impl ExactSizeIterator for Args {
fn len(&self) -> usize { self.iter.len() }
}
impl DoubleEndedIterator for Args {
fn next_back(&mut self) -> Option<OsString> { self.iter.next_back() }
}
mod imp {
use os::unix::prelude::*;
use mem;
use ffi::{CStr, OsString};
use marker::PhantomData;
use libc;
use super::Args;
use sys_common::mutex::Mutex;
static mut GLOBAL_ARGS_PTR: usize = 0;
static LOCK: Mutex = Mutex::new();
pub unsafe fn init(argc: isize, argv: *const *const u8) {
let args = (0..argc).map(|i| {
CStr::from_ptr(*argv.offset(i) as *const libc::c_char).to_bytes().to_vec()
}).collect();
LOCK.lock();
let ptr = get_global_ptr();
assert!((*ptr).is_none());
(*ptr) = Some(box args);
LOCK.unlock();
}
pub unsafe fn cleanup() {
LOCK.lock();
*get_global_ptr() = None;
LOCK.unlock();
}
pub fn args() -> Args {
let bytes = clone().unwrap_or(Vec::new());
let v: Vec<OsString> = bytes.into_iter().map(|v| {
OsStringExt::from_vec(v)
}).collect();
Args { iter: v.into_iter(), _dont_send_or_sync_me: PhantomData }
}
fn clone() -> Option<Vec<Vec<u8>>> {
unsafe {
LOCK.lock();
let ptr = get_global_ptr();
let ret = (*ptr).as_ref().map(|s| (**s).clone());
LOCK.unlock();
return ret
}
}
fn get_global_ptr() -> *mut Option<Box<Vec<Vec<u8>>>> {
unsafe { mem::transmute(&GLOBAL_ARGS_PTR) }
}
}
| {
self.iter.as_slice()
} | identifier_body |
windows_1257.rs | // AUTOGENERATED FROM index-windows-1257.txt, ORIGINAL COMMENT FOLLOWS:
//
// Any copyright is dedicated to the Public Domain.
// http://creativecommons.org/publicdomain/zero/1.0/
//
// For details on index-windows-1257.txt see the Encoding Standard
// http://encoding.spec.whatwg.org/
static FORWARD_TABLE: &'static [u16] = &[
8364, 129, 8218, 131, 8222, 8230, 8224, 8225, 136, 8240, 138, 8249, 140,
168, 711, 184, 144, 8216, 8217, 8220, 8221, 8226, 8211, 8212, 152, 8482,
154, 8250, 156, 175, 731, 159, 160, 65535, 162, 163, 164, 65535, 166, 167,
216, 169, 342, 171, 172, 173, 174, 198, 176, 177, 178, 179, 180, 181, 182,
183, 248, 185, 343, 187, 188, 189, 190, 230, 260, 302, 256, 262, 196, 197,
280, 274, 268, 201, 377, 278, 290, 310, 298, 315, 352, 323, 325, 211, 332,
213, 214, 215, 370, 321, 346, 362, 220, 379, 381, 223, 261, 303, 257, 263,
228, 229, 281, 275, 269, 233, 378, 279, 291, 311, 299, 316, 353, 324, 326,
243, 333, 245, 246, 247, 371, 322, 347, 363, 252, 380, 382, 729,
];
#[inline]
pub fn forward(code: u8) -> u16 {
FORWARD_TABLE[(code - 0x80) as uint]
}
static BACKWARD_TABLE_LOWER: &'static [u8] = &[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 129, 0, 131, 0, 0, 0, 0, 136,
0, 138, 0, 140, 0, 0, 0, 144, 0, 0, 0, 0, 0, 0, 0, 152, 0, 154, 0, 156, 0,
0, 159, 160, 0, 162, 163, 164, 0, 166, 167, 141, 169, 0, 171, 172, 173,
174, 157, 176, 177, 178, 179, 180, 181, 182, 183, 143, 185, 0, 187, 188,
189, 190, 0, 0, 0, 0, 0, 196, 197, 175, 0, 0, 201, 0, 0, 0, 0, 0, 0, 0, 0,
0, 211, 0, 213, 214, 215, 168, 0, 0, 0, 220, 0, 0, 223, 0, 0, 0, 0, 228,
229, 191, 0, 0, 233, 0, 0, 0, 0, 0, 0, 0, 0, 0, 243, 0, 245, 246, 247, 184,
0, 0, 0, 252, 0, 0, 0, 194, 226, 0, 0, 192, 224, 195, 227, 0, 0, 0, 0, 200,
232, 0, 0, 0, 0, 199, 231, 0, 0, 203, 235, 198, 230, 0, 0, 0, 0, 0, 0, 0,
0, 204, 236, 0, 0, 0, 0, 0, 0, 206, 238, 0, 0, 193, 225, 0, 0, 0, 0, 0, 0,
205, 237, 0, 0, 0, 207, 239, 0, 0, 0, 0, 217, 249, 209, 241, 210, 242, 0,
0, 0, 0, 0, 212, 244, 0, 0, 0, 0, 0, 0, 0, 0, 170, 186, 0, 0, 218, 250, 0,
0, 0, 0, 208, 240, 0, 0, 0, 0, 0, 0, 0, 0, 219, 251, 0, 0, 0, 0, 0, 0, 216,
248, 0, 0, 0, 0, 0, 202, 234, 221, 253, 222, 254, 0, 0, 0, 0, 0, 0, 0, 0,
142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 158, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 150, 151, 0, 0, 0, 145, 146, 130, 0, 147, 148, 132, 0, 134, 135,
149, 0, 0, 0, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 137, 0, 0, 0, 0, 0, 0, 0, 0,
139, 155, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 153, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
static BACKWARD_TABLE_UPPER: &'static [u16] = &[
0, 0, 64, 128, 192, 256, 0, 0, 0, 0, 0, 320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 384, 0, 448, 0, 512,
];
#[inline]
pub fn backward(code: u32) -> u8 {
let offset = (code >> 6) as uint;
let offset = if offset < 133 | else {0};
BACKWARD_TABLE_LOWER[offset + ((code & 63) as uint)]
}
#[cfg(test)]
single_byte_tests!()
| {BACKWARD_TABLE_UPPER[offset] as uint} | conditional_block |
windows_1257.rs | // AUTOGENERATED FROM index-windows-1257.txt, ORIGINAL COMMENT FOLLOWS:
//
// Any copyright is dedicated to the Public Domain.
// http://creativecommons.org/publicdomain/zero/1.0/
//
// For details on index-windows-1257.txt see the Encoding Standard
// http://encoding.spec.whatwg.org/
static FORWARD_TABLE: &'static [u16] = &[
8364, 129, 8218, 131, 8222, 8230, 8224, 8225, 136, 8240, 138, 8249, 140,
168, 711, 184, 144, 8216, 8217, 8220, 8221, 8226, 8211, 8212, 152, 8482,
154, 8250, 156, 175, 731, 159, 160, 65535, 162, 163, 164, 65535, 166, 167,
216, 169, 342, 171, 172, 173, 174, 198, 176, 177, 178, 179, 180, 181, 182,
183, 248, 185, 343, 187, 188, 189, 190, 230, 260, 302, 256, 262, 196, 197,
280, 274, 268, 201, 377, 278, 290, 310, 298, 315, 352, 323, 325, 211, 332,
213, 214, 215, 370, 321, 346, 362, 220, 379, 381, 223, 261, 303, 257, 263,
228, 229, 281, 275, 269, 233, 378, 279, 291, 311, 299, 316, 353, 324, 326,
243, 333, 245, 246, 247, 371, 322, 347, 363, 252, 380, 382, 729,
];
#[inline]
pub fn forward(code: u8) -> u16 |
static BACKWARD_TABLE_LOWER: &'static [u8] = &[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 129, 0, 131, 0, 0, 0, 0, 136,
0, 138, 0, 140, 0, 0, 0, 144, 0, 0, 0, 0, 0, 0, 0, 152, 0, 154, 0, 156, 0,
0, 159, 160, 0, 162, 163, 164, 0, 166, 167, 141, 169, 0, 171, 172, 173,
174, 157, 176, 177, 178, 179, 180, 181, 182, 183, 143, 185, 0, 187, 188,
189, 190, 0, 0, 0, 0, 0, 196, 197, 175, 0, 0, 201, 0, 0, 0, 0, 0, 0, 0, 0,
0, 211, 0, 213, 214, 215, 168, 0, 0, 0, 220, 0, 0, 223, 0, 0, 0, 0, 228,
229, 191, 0, 0, 233, 0, 0, 0, 0, 0, 0, 0, 0, 0, 243, 0, 245, 246, 247, 184,
0, 0, 0, 252, 0, 0, 0, 194, 226, 0, 0, 192, 224, 195, 227, 0, 0, 0, 0, 200,
232, 0, 0, 0, 0, 199, 231, 0, 0, 203, 235, 198, 230, 0, 0, 0, 0, 0, 0, 0,
0, 204, 236, 0, 0, 0, 0, 0, 0, 206, 238, 0, 0, 193, 225, 0, 0, 0, 0, 0, 0,
205, 237, 0, 0, 0, 207, 239, 0, 0, 0, 0, 217, 249, 209, 241, 210, 242, 0,
0, 0, 0, 0, 212, 244, 0, 0, 0, 0, 0, 0, 0, 0, 170, 186, 0, 0, 218, 250, 0,
0, 0, 0, 208, 240, 0, 0, 0, 0, 0, 0, 0, 0, 219, 251, 0, 0, 0, 0, 0, 0, 216,
248, 0, 0, 0, 0, 0, 202, 234, 221, 253, 222, 254, 0, 0, 0, 0, 0, 0, 0, 0,
142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 158, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 150, 151, 0, 0, 0, 145, 146, 130, 0, 147, 148, 132, 0, 134, 135,
149, 0, 0, 0, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 137, 0, 0, 0, 0, 0, 0, 0, 0,
139, 155, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 153, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
static BACKWARD_TABLE_UPPER: &'static [u16] = &[
0, 0, 64, 128, 192, 256, 0, 0, 0, 0, 0, 320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 384, 0, 448, 0, 512,
];
#[inline]
pub fn backward(code: u32) -> u8 {
let offset = (code >> 6) as uint;
let offset = if offset < 133 {BACKWARD_TABLE_UPPER[offset] as uint} else {0};
BACKWARD_TABLE_LOWER[offset + ((code & 63) as uint)]
}
#[cfg(test)]
single_byte_tests!()
| {
FORWARD_TABLE[(code - 0x80) as uint]
} | identifier_body |
windows_1257.rs | // AUTOGENERATED FROM index-windows-1257.txt, ORIGINAL COMMENT FOLLOWS:
//
// Any copyright is dedicated to the Public Domain.
// http://creativecommons.org/publicdomain/zero/1.0/
//
// For details on index-windows-1257.txt see the Encoding Standard
// http://encoding.spec.whatwg.org/
static FORWARD_TABLE: &'static [u16] = &[
8364, 129, 8218, 131, 8222, 8230, 8224, 8225, 136, 8240, 138, 8249, 140,
168, 711, 184, 144, 8216, 8217, 8220, 8221, 8226, 8211, 8212, 152, 8482,
154, 8250, 156, 175, 731, 159, 160, 65535, 162, 163, 164, 65535, 166, 167,
216, 169, 342, 171, 172, 173, 174, 198, 176, 177, 178, 179, 180, 181, 182,
183, 248, 185, 343, 187, 188, 189, 190, 230, 260, 302, 256, 262, 196, 197,
280, 274, 268, 201, 377, 278, 290, 310, 298, 315, 352, 323, 325, 211, 332,
213, 214, 215, 370, 321, 346, 362, 220, 379, 381, 223, 261, 303, 257, 263,
228, 229, 281, 275, 269, 233, 378, 279, 291, 311, 299, 316, 353, 324, 326,
243, 333, 245, 246, 247, 371, 322, 347, 363, 252, 380, 382, 729,
];
#[inline]
pub fn forward(code: u8) -> u16 {
FORWARD_TABLE[(code - 0x80) as uint]
}
static BACKWARD_TABLE_LOWER: &'static [u8] = &[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 129, 0, 131, 0, 0, 0, 0, 136,
0, 138, 0, 140, 0, 0, 0, 144, 0, 0, 0, 0, 0, 0, 0, 152, 0, 154, 0, 156, 0,
0, 159, 160, 0, 162, 163, 164, 0, 166, 167, 141, 169, 0, 171, 172, 173,
174, 157, 176, 177, 178, 179, 180, 181, 182, 183, 143, 185, 0, 187, 188,
189, 190, 0, 0, 0, 0, 0, 196, 197, 175, 0, 0, 201, 0, 0, 0, 0, 0, 0, 0, 0,
0, 211, 0, 213, 214, 215, 168, 0, 0, 0, 220, 0, 0, 223, 0, 0, 0, 0, 228,
229, 191, 0, 0, 233, 0, 0, 0, 0, 0, 0, 0, 0, 0, 243, 0, 245, 246, 247, 184,
0, 0, 0, 252, 0, 0, 0, 194, 226, 0, 0, 192, 224, 195, 227, 0, 0, 0, 0, 200,
232, 0, 0, 0, 0, 199, 231, 0, 0, 203, 235, 198, 230, 0, 0, 0, 0, 0, 0, 0,
0, 204, 236, 0, 0, 0, 0, 0, 0, 206, 238, 0, 0, 193, 225, 0, 0, 0, 0, 0, 0,
205, 237, 0, 0, 0, 207, 239, 0, 0, 0, 0, 217, 249, 209, 241, 210, 242, 0,
0, 0, 0, 0, 212, 244, 0, 0, 0, 0, 0, 0, 0, 0, 170, 186, 0, 0, 218, 250, 0,
0, 0, 0, 208, 240, 0, 0, 0, 0, 0, 0, 0, 0, 219, 251, 0, 0, 0, 0, 0, 0, 216,
248, 0, 0, 0, 0, 0, 202, 234, 221, 253, 222, 254, 0, 0, 0, 0, 0, 0, 0, 0,
142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 158, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 150, 151, 0, 0, 0, 145, 146, 130, 0, 147, 148, 132, 0, 134, 135,
149, 0, 0, 0, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 137, 0, 0, 0, 0, 0, 0, 0, 0,
139, 155, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 153, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
static BACKWARD_TABLE_UPPER: &'static [u16] = &[
0, 0, 64, 128, 192, 256, 0, 0, 0, 0, 0, 320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 384, 0, 448, 0, 512,
];
#[inline]
pub fn | (code: u32) -> u8 {
let offset = (code >> 6) as uint;
let offset = if offset < 133 {BACKWARD_TABLE_UPPER[offset] as uint} else {0};
BACKWARD_TABLE_LOWER[offset + ((code & 63) as uint)]
}
#[cfg(test)]
single_byte_tests!()
| backward | identifier_name |
windows_1257.rs | // AUTOGENERATED FROM index-windows-1257.txt, ORIGINAL COMMENT FOLLOWS:
//
// Any copyright is dedicated to the Public Domain.
// http://creativecommons.org/publicdomain/zero/1.0/
//
// For details on index-windows-1257.txt see the Encoding Standard
// http://encoding.spec.whatwg.org/
static FORWARD_TABLE: &'static [u16] = &[
8364, 129, 8218, 131, 8222, 8230, 8224, 8225, 136, 8240, 138, 8249, 140,
168, 711, 184, 144, 8216, 8217, 8220, 8221, 8226, 8211, 8212, 152, 8482,
154, 8250, 156, 175, 731, 159, 160, 65535, 162, 163, 164, 65535, 166, 167,
216, 169, 342, 171, 172, 173, 174, 198, 176, 177, 178, 179, 180, 181, 182,
183, 248, 185, 343, 187, 188, 189, 190, 230, 260, 302, 256, 262, 196, 197,
280, 274, 268, 201, 377, 278, 290, 310, 298, 315, 352, 323, 325, 211, 332,
213, 214, 215, 370, 321, 346, 362, 220, 379, 381, 223, 261, 303, 257, 263,
228, 229, 281, 275, 269, 233, 378, 279, 291, 311, 299, 316, 353, 324, 326,
243, 333, 245, 246, 247, 371, 322, 347, 363, 252, 380, 382, 729,
];
#[inline]
pub fn forward(code: u8) -> u16 {
FORWARD_TABLE[(code - 0x80) as uint]
}
static BACKWARD_TABLE_LOWER: &'static [u8] = &[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 129, 0, 131, 0, 0, 0, 0, 136,
0, 138, 0, 140, 0, 0, 0, 144, 0, 0, 0, 0, 0, 0, 0, 152, 0, 154, 0, 156, 0,
0, 159, 160, 0, 162, 163, 164, 0, 166, 167, 141, 169, 0, 171, 172, 173,
174, 157, 176, 177, 178, 179, 180, 181, 182, 183, 143, 185, 0, 187, 188,
189, 190, 0, 0, 0, 0, 0, 196, 197, 175, 0, 0, 201, 0, 0, 0, 0, 0, 0, 0, 0,
0, 211, 0, 213, 214, 215, 168, 0, 0, 0, 220, 0, 0, 223, 0, 0, 0, 0, 228,
229, 191, 0, 0, 233, 0, 0, 0, 0, 0, 0, 0, 0, 0, 243, 0, 245, 246, 247, 184,
0, 0, 0, 252, 0, 0, 0, 194, 226, 0, 0, 192, 224, 195, 227, 0, 0, 0, 0, 200,
232, 0, 0, 0, 0, 199, 231, 0, 0, 203, 235, 198, 230, 0, 0, 0, 0, 0, 0, 0,
0, 204, 236, 0, 0, 0, 0, 0, 0, 206, 238, 0, 0, 193, 225, 0, 0, 0, 0, 0, 0,
205, 237, 0, 0, 0, 207, 239, 0, 0, 0, 0, 217, 249, 209, 241, 210, 242, 0,
0, 0, 0, 0, 212, 244, 0, 0, 0, 0, 0, 0, 0, 0, 170, 186, 0, 0, 218, 250, 0,
0, 0, 0, 208, 240, 0, 0, 0, 0, 0, 0, 0, 0, 219, 251, 0, 0, 0, 0, 0, 0, 216,
248, 0, 0, 0, 0, 0, 202, 234, 221, 253, 222, 254, 0, 0, 0, 0, 0, 0, 0, 0,
142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 158, 0, 0, | 149, 0, 0, 0, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 137, 0, 0, 0, 0, 0, 0, 0, 0,
139, 155, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 153, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
static BACKWARD_TABLE_UPPER: &'static [u16] = &[
0, 0, 64, 128, 192, 256, 0, 0, 0, 0, 0, 320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 384, 0, 448, 0, 512,
];
#[inline]
pub fn backward(code: u32) -> u8 {
let offset = (code >> 6) as uint;
let offset = if offset < 133 {BACKWARD_TABLE_UPPER[offset] as uint} else {0};
BACKWARD_TABLE_LOWER[offset + ((code & 63) as uint)]
}
#[cfg(test)]
single_byte_tests!() | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 150, 151, 0, 0, 0, 145, 146, 130, 0, 147, 148, 132, 0, 134, 135, | random_line_split |
empathy.py | # -*- coding: UTF-8 -*-
# vim: set noexpandtab ts=8 sw=8:
__kupfer_name__ = _("Empathy")
__kupfer_sources__ = ("ContactsSource", )
__kupfer_actions__ = ("ChangeStatus", 'OpenChat')
__description__ = _("Access to Empathy Contacts")
__version__ = "2010-10-17"
__author__ = "Jakh Daven <tuxcanfly@gmail.com>"
import dbus
import time
from kupfer import icons
from kupfer import plugin_support
from kupfer import pretty
from kupfer.objects import Leaf, Action, Source, AppLeaf
from kupfer.weaklib import dbus_signal_connect_weakly
from kupfer.obj.helplib import PicklingHelperMixin
from kupfer.obj.apps import AppLeafContentMixin
from kupfer.obj.grouping import ToplevelGroupingSource
from kupfer.obj.contacts import ContactLeaf, JabberContact, JABBER_JID_KEY
__kupfer_settings__ = plugin_support.PluginSettings(
{
"key" : "show_offline",
"label": _("Show offline contacts"),
"type": bool,
"value": False,
},
)
plugin_support.check_dbus_connection()
_STATUSES = {
'available': _('Available'),
'away': _('Away'),
'dnd': _('Busy'),
'xa': _('Not Available'),
'hidden': _('Invisible'),
'offline': _('Offline')
}
_ATTRIBUTES = {
'alias': 'org.freedesktop.Telepathy.Connection.Interface.Aliasing/alias',
'presence': 'org.freedesktop.Telepathy.Connection.Interface.SimplePresence/presence',
'contact_caps': 'org.freedesktop.Telepathy.Connection.Interface.ContactCapabilities.DRAFT/caps',
'jid': 'org.freedesktop.Telepathy.Connection/contact-id',
'caps': 'org.freedesktop.Telepathy.Connection.Interface.Capabilities/caps',
}
ACCOUNTMANAGER_PATH = "/org/freedesktop/Telepathy/AccountManager"
ACCOUNTMANAGER_IFACE = "org.freedesktop.Telepathy.AccountManager"
ACCOUNT_IFACE = "org.freedesktop.Telepathy.Account"
CHANNEL_GROUP_IFACE = "org.freedesktop.Telepathy.Channel.Interface.Group"
CONTACT_IFACE = "org.freedesktop.Telepathy.Connection.Interface.Contacts"
SIMPLE_PRESENCE_IFACE = "org.freedesktop.Telepathy.Connection.Interface.SimplePresence"
DBUS_PROPS_IFACE = "org.freedesktop.DBus.Properties"
CHANNELDISPATCHER_IFACE = "org.freedesktop.Telepathy.ChannelDispatcher"
CHANNELDISPATCHER_PATH = "/org/freedesktop/Telepathy/ChannelDispatcher"
CHANNEL_TYPE = "org.freedesktop.Telepathy.Channel.ChannelType"
CHANNEL_TYPE_TEXT = "org.freedesktop.Telepathy.Channel.Type.Text"
CHANNEL_TARGETHANDLE = "org.freedesktop.Telepathy.Channel.TargetHandle"
CHANNEL_TARGETHANDLETYPE = "org.freedesktop.Telepathy.Channel.TargetHandleType"
EMPATHY_CLIENT_IFACE = "org.freedesktop.Telepathy.Client.Empathy"
EMPATHY_ACCOUNT_KEY = "EMPATHY_ACCOUNT"
EMPATHY_CONTACT_ID = "EMPATHY_CONTACT_ID"
def _create_dbus_connection():
try:
sbus = dbus.SessionBus()
proxy_obj = sbus.get_object(ACCOUNTMANAGER_IFACE, ACCOUNTMANAGER_PATH)
dbus_iface = dbus.Interface(proxy_obj, DBUS_PROPS_IFACE)
return dbus_iface
except dbus.DBusException as exc:
pretty.print_exc(__name__)
class EmpathyContact(JabberContact):
def __init__(self, jid, name, status, resources, account, contact_id):
empathy_slots= { EMPATHY_ACCOUNT_KEY: account, EMPATHY_CONTACT_ID: contact_id }
JabberContact.__init__(self, jid, name, status, resources, empathy_slots)
def repr_key(self):
return "".join((self.object[JABBER_JID_KEY], self.object[EMPATHY_ACCOUNT_KEY]))
def get_gicon(self):
return icons.ComposedIconSmall(self.get_icon_name(), "empathy")
class AccountStatus(Leaf):
pass
class OpenChat(Action):
def __init__(self):
Action.__init__(self, _('Open Chat'))
def activate(self, leaf):
bus = dbus.SessionBus()
jid = JABBER_JID_KEY in leaf and leaf[JABBER_JID_KEY]
account = bus.get_object(ACCOUNTMANAGER_IFACE, leaf[EMPATHY_ACCOUNT_KEY])
contact_id = leaf[EMPATHY_CONTACT_ID]
channel_dispatcher_iface = bus.get_object(CHANNELDISPATCHER_IFACE, CHANNELDISPATCHER_PATH)
ticks = dbus.Int64(time.time())
channel_request_params = dbus.Dictionary()
channel_request_params[CHANNEL_TYPE] = dbus.String(CHANNEL_TYPE_TEXT, variant_level=1)
channel_request_params[CHANNEL_TARGETHANDLETYPE] = dbus.UInt32(1, variant_level=1)
channel_request_params[CHANNEL_TARGETHANDLE] = contact_id
message_channel_path = channel_dispatcher_iface.EnsureChannel(account, channel_request_params, ticks, EMPATHY_CLIENT_IFACE)
channel_request = bus.get_object(ACCOUNTMANAGER_IFACE, message_channel_path)
channel_request.Proceed()
def get_icon_name(self):
return 'empathy'
def item_types(self):
yield ContactLeaf
def valid_for_item(self, item):
return EMPATHY_ACCOUNT_KEY in item and item[EMPATHY_ACCOUNT_KEY]
class ChangeStatus(Action):
''' Change global status '''
def __init__(self):
Action.__init__(self, _('Change Global Status To...'))
def activate(self, leaf, iobj):
bus = dbus.SessionBus()
interface = _create_dbus_connection()
for valid_account in interface.Get(ACCOUNTMANAGER_IFACE, "ValidAccounts"):
account = bus.get_object(ACCOUNTMANAGER_IFACE, valid_account)
connection_status = account.Get(ACCOUNT_IFACE, "ConnectionStatus")
if connection_status != 0:
continue
if iobj.object == "offline":
false = dbus.Boolean(0, variant_level=1)
account.Set(ACCOUNT_IFACE, "Enabled", false)
else:
connection_path = account.Get(ACCOUNT_IFACE, "Connection")
connection_iface = connection_path.replace("/", ".")[1:]
connection = bus.get_object(connection_iface, connection_path)
simple_presence = dbus.Interface(connection, SIMPLE_PRESENCE_IFACE)
simple_presence.SetPresence(iobj.object, _STATUSES.get(iobj.object))
def item_types(self):
yield AppLeaf
def valid_for_item(self, leaf):
return leaf.get_id() == 'empathy'
def requires_object(self):
return True
def object_types(self):
yield AccountStatus
def object_source(self, for_item=None):
return StatusSource()
class ContactsSource(AppLeafContentMixin, ToplevelGroupingSource,
PicklingHelperMixin):
''' Get contacts from all on-line accounts in Empathy via DBus '''
appleaf_content_id = 'empathy'
def __init__(self, name=_('Empathy Contacts')):
super(ContactsSource, self).__init__(name, "Contacts")
self._version = 2
self.unpickle_finish()
def pickle_prepare(self):
self._contacts = []
def unpickle_finish(self):
self.mark_for_update()
self._contacts = []
def initialize(self):
ToplevelGroupingSource.initialize(self)
def get_items(self):
interface = _create_dbus_connection()
if interface is not None:
self._contacts = list(self._find_all_contacts(interface))
else:
self._contacts = []
return self._contacts
def _find_all_contacts(self, interface):
|
def get_icon_name(self):
return 'empathy'
def provides(self):
yield ContactLeaf
class StatusSource(Source):
def __init__(self):
Source.__init__(self, _("Empathy Account Status"))
def get_items(self):
for status, name in _STATUSES.iteritems():
yield AccountStatus(status, name)
def provides(self):
yield AccountStatus
| show_offline = __kupfer_settings__["show_offline"]
bus = dbus.SessionBus()
for valid_account in interface.Get(ACCOUNTMANAGER_IFACE, "ValidAccounts"):
account = bus.get_object(ACCOUNTMANAGER_IFACE, valid_account)
connection_status = account.Get(ACCOUNT_IFACE, "ConnectionStatus")
if connection_status != 0:
continue
connection_path = account.Get(ACCOUNT_IFACE, "Connection")
connection_iface = connection_path.replace("/", ".")[1:]
connection = bus.get_object(connection_iface, connection_path)
channels = connection.ListChannels()
for channel in channels:
contact_group = bus.get_object(connection_iface, channel[0])
try:
contacts = contact_group.Get(CHANNEL_GROUP_IFACE, "Members")
except dbus.exceptions.DBusException, ex:
self.output_info(ex)
contacts = None
if contacts:
contacts = [c for c in contacts]
contact_attributes = connection.Get(CONTACT_IFACE, "ContactAttributeInterfaces")
contact_attributes = [str(a) for a in contact_attributes]
contact_details = connection.GetContactAttributes(contacts, contact_attributes, False)
for contact, details in contact_details.iteritems():
try:
status_code = details[_ATTRIBUTES.get("presence")][1]
except KeyError, ex:
self.output_info('Presence could not be established with %s. Leaving unknown.' % ex)
status_code = u'unknown'
if not show_offline and status_code == 'offline':
continue
yield EmpathyContact(
details[_ATTRIBUTES.get("jid")],
details[_ATTRIBUTES.get("alias")],
_STATUSES.get(status_code),
'', # empathy does not provide resource here AFAIK
valid_account,
contact) | identifier_body |
empathy.py | # -*- coding: UTF-8 -*-
# vim: set noexpandtab ts=8 sw=8:
__kupfer_name__ = _("Empathy")
__kupfer_sources__ = ("ContactsSource", )
__kupfer_actions__ = ("ChangeStatus", 'OpenChat')
__description__ = _("Access to Empathy Contacts")
__version__ = "2010-10-17"
__author__ = "Jakh Daven <tuxcanfly@gmail.com>"
import dbus
import time
from kupfer import icons
from kupfer import plugin_support
from kupfer import pretty
from kupfer.objects import Leaf, Action, Source, AppLeaf
from kupfer.weaklib import dbus_signal_connect_weakly
from kupfer.obj.helplib import PicklingHelperMixin
from kupfer.obj.apps import AppLeafContentMixin
from kupfer.obj.grouping import ToplevelGroupingSource
from kupfer.obj.contacts import ContactLeaf, JabberContact, JABBER_JID_KEY
__kupfer_settings__ = plugin_support.PluginSettings(
{
"key" : "show_offline",
"label": _("Show offline contacts"),
"type": bool,
"value": False,
},
)
plugin_support.check_dbus_connection()
_STATUSES = {
'available': _('Available'),
'away': _('Away'),
'dnd': _('Busy'),
'xa': _('Not Available'),
'hidden': _('Invisible'),
'offline': _('Offline')
}
_ATTRIBUTES = {
'alias': 'org.freedesktop.Telepathy.Connection.Interface.Aliasing/alias',
'presence': 'org.freedesktop.Telepathy.Connection.Interface.SimplePresence/presence',
'contact_caps': 'org.freedesktop.Telepathy.Connection.Interface.ContactCapabilities.DRAFT/caps',
'jid': 'org.freedesktop.Telepathy.Connection/contact-id',
'caps': 'org.freedesktop.Telepathy.Connection.Interface.Capabilities/caps',
}
ACCOUNTMANAGER_PATH = "/org/freedesktop/Telepathy/AccountManager"
ACCOUNTMANAGER_IFACE = "org.freedesktop.Telepathy.AccountManager"
ACCOUNT_IFACE = "org.freedesktop.Telepathy.Account"
CHANNEL_GROUP_IFACE = "org.freedesktop.Telepathy.Channel.Interface.Group"
CONTACT_IFACE = "org.freedesktop.Telepathy.Connection.Interface.Contacts"
SIMPLE_PRESENCE_IFACE = "org.freedesktop.Telepathy.Connection.Interface.SimplePresence"
DBUS_PROPS_IFACE = "org.freedesktop.DBus.Properties"
CHANNELDISPATCHER_IFACE = "org.freedesktop.Telepathy.ChannelDispatcher"
CHANNELDISPATCHER_PATH = "/org/freedesktop/Telepathy/ChannelDispatcher"
CHANNEL_TYPE = "org.freedesktop.Telepathy.Channel.ChannelType"
CHANNEL_TYPE_TEXT = "org.freedesktop.Telepathy.Channel.Type.Text"
CHANNEL_TARGETHANDLE = "org.freedesktop.Telepathy.Channel.TargetHandle"
CHANNEL_TARGETHANDLETYPE = "org.freedesktop.Telepathy.Channel.TargetHandleType"
EMPATHY_CLIENT_IFACE = "org.freedesktop.Telepathy.Client.Empathy"
EMPATHY_ACCOUNT_KEY = "EMPATHY_ACCOUNT"
EMPATHY_CONTACT_ID = "EMPATHY_CONTACT_ID"
def _create_dbus_connection():
try:
sbus = dbus.SessionBus()
proxy_obj = sbus.get_object(ACCOUNTMANAGER_IFACE, ACCOUNTMANAGER_PATH)
dbus_iface = dbus.Interface(proxy_obj, DBUS_PROPS_IFACE)
return dbus_iface
except dbus.DBusException as exc:
pretty.print_exc(__name__)
class EmpathyContact(JabberContact):
def __init__(self, jid, name, status, resources, account, contact_id):
empathy_slots= { EMPATHY_ACCOUNT_KEY: account, EMPATHY_CONTACT_ID: contact_id }
JabberContact.__init__(self, jid, name, status, resources, empathy_slots)
def repr_key(self):
return "".join((self.object[JABBER_JID_KEY], self.object[EMPATHY_ACCOUNT_KEY]))
def get_gicon(self):
return icons.ComposedIconSmall(self.get_icon_name(), "empathy")
class AccountStatus(Leaf):
pass
class OpenChat(Action):
def __init__(self):
Action.__init__(self, _('Open Chat'))
def activate(self, leaf):
bus = dbus.SessionBus()
jid = JABBER_JID_KEY in leaf and leaf[JABBER_JID_KEY]
account = bus.get_object(ACCOUNTMANAGER_IFACE, leaf[EMPATHY_ACCOUNT_KEY])
contact_id = leaf[EMPATHY_CONTACT_ID]
channel_dispatcher_iface = bus.get_object(CHANNELDISPATCHER_IFACE, CHANNELDISPATCHER_PATH)
ticks = dbus.Int64(time.time())
channel_request_params = dbus.Dictionary()
channel_request_params[CHANNEL_TYPE] = dbus.String(CHANNEL_TYPE_TEXT, variant_level=1)
channel_request_params[CHANNEL_TARGETHANDLETYPE] = dbus.UInt32(1, variant_level=1)
channel_request_params[CHANNEL_TARGETHANDLE] = contact_id
message_channel_path = channel_dispatcher_iface.EnsureChannel(account, channel_request_params, ticks, EMPATHY_CLIENT_IFACE)
channel_request = bus.get_object(ACCOUNTMANAGER_IFACE, message_channel_path)
channel_request.Proceed()
def get_icon_name(self):
return 'empathy'
def item_types(self):
yield ContactLeaf
def valid_for_item(self, item):
return EMPATHY_ACCOUNT_KEY in item and item[EMPATHY_ACCOUNT_KEY]
class ChangeStatus(Action):
''' Change global status '''
def __init__(self):
Action.__init__(self, _('Change Global Status To...'))
def activate(self, leaf, iobj):
bus = dbus.SessionBus()
interface = _create_dbus_connection()
for valid_account in interface.Get(ACCOUNTMANAGER_IFACE, "ValidAccounts"):
account = bus.get_object(ACCOUNTMANAGER_IFACE, valid_account)
connection_status = account.Get(ACCOUNT_IFACE, "ConnectionStatus")
if connection_status != 0:
continue
if iobj.object == "offline":
|
else:
connection_path = account.Get(ACCOUNT_IFACE, "Connection")
connection_iface = connection_path.replace("/", ".")[1:]
connection = bus.get_object(connection_iface, connection_path)
simple_presence = dbus.Interface(connection, SIMPLE_PRESENCE_IFACE)
simple_presence.SetPresence(iobj.object, _STATUSES.get(iobj.object))
def item_types(self):
yield AppLeaf
def valid_for_item(self, leaf):
return leaf.get_id() == 'empathy'
def requires_object(self):
return True
def object_types(self):
yield AccountStatus
def object_source(self, for_item=None):
return StatusSource()
class ContactsSource(AppLeafContentMixin, ToplevelGroupingSource,
PicklingHelperMixin):
''' Get contacts from all on-line accounts in Empathy via DBus '''
appleaf_content_id = 'empathy'
def __init__(self, name=_('Empathy Contacts')):
super(ContactsSource, self).__init__(name, "Contacts")
self._version = 2
self.unpickle_finish()
def pickle_prepare(self):
self._contacts = []
def unpickle_finish(self):
self.mark_for_update()
self._contacts = []
def initialize(self):
ToplevelGroupingSource.initialize(self)
def get_items(self):
interface = _create_dbus_connection()
if interface is not None:
self._contacts = list(self._find_all_contacts(interface))
else:
self._contacts = []
return self._contacts
def _find_all_contacts(self, interface):
show_offline = __kupfer_settings__["show_offline"]
bus = dbus.SessionBus()
for valid_account in interface.Get(ACCOUNTMANAGER_IFACE, "ValidAccounts"):
account = bus.get_object(ACCOUNTMANAGER_IFACE, valid_account)
connection_status = account.Get(ACCOUNT_IFACE, "ConnectionStatus")
if connection_status != 0:
continue
connection_path = account.Get(ACCOUNT_IFACE, "Connection")
connection_iface = connection_path.replace("/", ".")[1:]
connection = bus.get_object(connection_iface, connection_path)
channels = connection.ListChannels()
for channel in channels:
contact_group = bus.get_object(connection_iface, channel[0])
try:
contacts = contact_group.Get(CHANNEL_GROUP_IFACE, "Members")
except dbus.exceptions.DBusException, ex:
self.output_info(ex)
contacts = None
if contacts:
contacts = [c for c in contacts]
contact_attributes = connection.Get(CONTACT_IFACE, "ContactAttributeInterfaces")
contact_attributes = [str(a) for a in contact_attributes]
contact_details = connection.GetContactAttributes(contacts, contact_attributes, False)
for contact, details in contact_details.iteritems():
try:
status_code = details[_ATTRIBUTES.get("presence")][1]
except KeyError, ex:
self.output_info('Presence could not be established with %s. Leaving unknown.' % ex)
status_code = u'unknown'
if not show_offline and status_code == 'offline':
continue
yield EmpathyContact(
details[_ATTRIBUTES.get("jid")],
details[_ATTRIBUTES.get("alias")],
_STATUSES.get(status_code),
'', # empathy does not provide resource here AFAIK
valid_account,
contact)
def get_icon_name(self):
return 'empathy'
def provides(self):
yield ContactLeaf
class StatusSource(Source):
def __init__(self):
Source.__init__(self, _("Empathy Account Status"))
def get_items(self):
for status, name in _STATUSES.iteritems():
yield AccountStatus(status, name)
def provides(self):
yield AccountStatus
| false = dbus.Boolean(0, variant_level=1)
account.Set(ACCOUNT_IFACE, "Enabled", false) | conditional_block |
empathy.py | # -*- coding: UTF-8 -*-
# vim: set noexpandtab ts=8 sw=8:
__kupfer_name__ = _("Empathy")
__kupfer_sources__ = ("ContactsSource", )
__kupfer_actions__ = ("ChangeStatus", 'OpenChat')
__description__ = _("Access to Empathy Contacts")
__version__ = "2010-10-17"
__author__ = "Jakh Daven <tuxcanfly@gmail.com>"
import dbus
import time
from kupfer import icons
from kupfer import plugin_support
from kupfer import pretty
from kupfer.objects import Leaf, Action, Source, AppLeaf
from kupfer.weaklib import dbus_signal_connect_weakly
from kupfer.obj.helplib import PicklingHelperMixin
from kupfer.obj.apps import AppLeafContentMixin
from kupfer.obj.grouping import ToplevelGroupingSource
from kupfer.obj.contacts import ContactLeaf, JabberContact, JABBER_JID_KEY
__kupfer_settings__ = plugin_support.PluginSettings(
{
"key" : "show_offline",
"label": _("Show offline contacts"),
"type": bool,
"value": False,
},
)
plugin_support.check_dbus_connection()
_STATUSES = {
'available': _('Available'),
'away': _('Away'),
'dnd': _('Busy'),
'xa': _('Not Available'),
'hidden': _('Invisible'),
'offline': _('Offline')
}
_ATTRIBUTES = {
'alias': 'org.freedesktop.Telepathy.Connection.Interface.Aliasing/alias',
'presence': 'org.freedesktop.Telepathy.Connection.Interface.SimplePresence/presence',
'contact_caps': 'org.freedesktop.Telepathy.Connection.Interface.ContactCapabilities.DRAFT/caps',
'jid': 'org.freedesktop.Telepathy.Connection/contact-id',
'caps': 'org.freedesktop.Telepathy.Connection.Interface.Capabilities/caps',
}
ACCOUNTMANAGER_PATH = "/org/freedesktop/Telepathy/AccountManager"
ACCOUNTMANAGER_IFACE = "org.freedesktop.Telepathy.AccountManager"
ACCOUNT_IFACE = "org.freedesktop.Telepathy.Account"
CHANNEL_GROUP_IFACE = "org.freedesktop.Telepathy.Channel.Interface.Group"
CONTACT_IFACE = "org.freedesktop.Telepathy.Connection.Interface.Contacts"
SIMPLE_PRESENCE_IFACE = "org.freedesktop.Telepathy.Connection.Interface.SimplePresence"
DBUS_PROPS_IFACE = "org.freedesktop.DBus.Properties"
CHANNELDISPATCHER_IFACE = "org.freedesktop.Telepathy.ChannelDispatcher"
CHANNELDISPATCHER_PATH = "/org/freedesktop/Telepathy/ChannelDispatcher"
CHANNEL_TYPE = "org.freedesktop.Telepathy.Channel.ChannelType"
CHANNEL_TYPE_TEXT = "org.freedesktop.Telepathy.Channel.Type.Text"
CHANNEL_TARGETHANDLE = "org.freedesktop.Telepathy.Channel.TargetHandle"
CHANNEL_TARGETHANDLETYPE = "org.freedesktop.Telepathy.Channel.TargetHandleType"
EMPATHY_CLIENT_IFACE = "org.freedesktop.Telepathy.Client.Empathy"
EMPATHY_ACCOUNT_KEY = "EMPATHY_ACCOUNT"
EMPATHY_CONTACT_ID = "EMPATHY_CONTACT_ID"
def _create_dbus_connection():
try:
sbus = dbus.SessionBus()
proxy_obj = sbus.get_object(ACCOUNTMANAGER_IFACE, ACCOUNTMANAGER_PATH)
dbus_iface = dbus.Interface(proxy_obj, DBUS_PROPS_IFACE)
return dbus_iface
except dbus.DBusException as exc:
pretty.print_exc(__name__)
class EmpathyContact(JabberContact):
def __init__(self, jid, name, status, resources, account, contact_id):
empathy_slots= { EMPATHY_ACCOUNT_KEY: account, EMPATHY_CONTACT_ID: contact_id }
JabberContact.__init__(self, jid, name, status, resources, empathy_slots)
def repr_key(self):
return "".join((self.object[JABBER_JID_KEY], self.object[EMPATHY_ACCOUNT_KEY]))
def get_gicon(self):
return icons.ComposedIconSmall(self.get_icon_name(), "empathy")
class AccountStatus(Leaf):
pass
class OpenChat(Action):
def __init__(self):
Action.__init__(self, _('Open Chat'))
def activate(self, leaf):
bus = dbus.SessionBus()
jid = JABBER_JID_KEY in leaf and leaf[JABBER_JID_KEY]
account = bus.get_object(ACCOUNTMANAGER_IFACE, leaf[EMPATHY_ACCOUNT_KEY])
contact_id = leaf[EMPATHY_CONTACT_ID]
channel_dispatcher_iface = bus.get_object(CHANNELDISPATCHER_IFACE, CHANNELDISPATCHER_PATH)
ticks = dbus.Int64(time.time())
channel_request_params = dbus.Dictionary()
channel_request_params[CHANNEL_TYPE] = dbus.String(CHANNEL_TYPE_TEXT, variant_level=1)
channel_request_params[CHANNEL_TARGETHANDLETYPE] = dbus.UInt32(1, variant_level=1)
channel_request_params[CHANNEL_TARGETHANDLE] = contact_id
message_channel_path = channel_dispatcher_iface.EnsureChannel(account, channel_request_params, ticks, EMPATHY_CLIENT_IFACE)
channel_request = bus.get_object(ACCOUNTMANAGER_IFACE, message_channel_path)
channel_request.Proceed()
def get_icon_name(self):
return 'empathy'
def item_types(self):
yield ContactLeaf
def valid_for_item(self, item):
return EMPATHY_ACCOUNT_KEY in item and item[EMPATHY_ACCOUNT_KEY]
class ChangeStatus(Action):
''' Change global status '''
def __init__(self):
Action.__init__(self, _('Change Global Status To...'))
def activate(self, leaf, iobj):
bus = dbus.SessionBus()
interface = _create_dbus_connection()
for valid_account in interface.Get(ACCOUNTMANAGER_IFACE, "ValidAccounts"):
account = bus.get_object(ACCOUNTMANAGER_IFACE, valid_account)
connection_status = account.Get(ACCOUNT_IFACE, "ConnectionStatus")
if connection_status != 0:
continue
if iobj.object == "offline":
false = dbus.Boolean(0, variant_level=1)
account.Set(ACCOUNT_IFACE, "Enabled", false)
else:
connection_path = account.Get(ACCOUNT_IFACE, "Connection")
connection_iface = connection_path.replace("/", ".")[1:]
connection = bus.get_object(connection_iface, connection_path)
simple_presence = dbus.Interface(connection, SIMPLE_PRESENCE_IFACE)
simple_presence.SetPresence(iobj.object, _STATUSES.get(iobj.object))
def item_types(self):
yield AppLeaf
def valid_for_item(self, leaf):
return leaf.get_id() == 'empathy'
def requires_object(self):
return True
def object_types(self):
yield AccountStatus
def object_source(self, for_item=None):
return StatusSource()
class ContactsSource(AppLeafContentMixin, ToplevelGroupingSource,
PicklingHelperMixin):
''' Get contacts from all on-line accounts in Empathy via DBus '''
appleaf_content_id = 'empathy'
def __init__(self, name=_('Empathy Contacts')):
super(ContactsSource, self).__init__(name, "Contacts")
self._version = 2
self.unpickle_finish()
def pickle_prepare(self):
self._contacts = []
def unpickle_finish(self):
self.mark_for_update()
self._contacts = []
def initialize(self):
ToplevelGroupingSource.initialize(self)
def get_items(self):
interface = _create_dbus_connection()
if interface is not None:
self._contacts = list(self._find_all_contacts(interface))
else:
self._contacts = []
return self._contacts
def _find_all_contacts(self, interface):
show_offline = __kupfer_settings__["show_offline"]
bus = dbus.SessionBus()
for valid_account in interface.Get(ACCOUNTMANAGER_IFACE, "ValidAccounts"):
account = bus.get_object(ACCOUNTMANAGER_IFACE, valid_account)
connection_status = account.Get(ACCOUNT_IFACE, "ConnectionStatus")
if connection_status != 0:
continue
connection_path = account.Get(ACCOUNT_IFACE, "Connection")
connection_iface = connection_path.replace("/", ".")[1:]
connection = bus.get_object(connection_iface, connection_path)
channels = connection.ListChannels()
for channel in channels:
contact_group = bus.get_object(connection_iface, channel[0])
try:
contacts = contact_group.Get(CHANNEL_GROUP_IFACE, "Members")
except dbus.exceptions.DBusException, ex:
self.output_info(ex)
contacts = None
if contacts:
contacts = [c for c in contacts]
contact_attributes = connection.Get(CONTACT_IFACE, "ContactAttributeInterfaces")
contact_attributes = [str(a) for a in contact_attributes]
contact_details = connection.GetContactAttributes(contacts, contact_attributes, False)
for contact, details in contact_details.iteritems():
try:
status_code = details[_ATTRIBUTES.get("presence")][1]
except KeyError, ex:
self.output_info('Presence could not be established with %s. Leaving unknown.' % ex)
status_code = u'unknown'
if not show_offline and status_code == 'offline':
continue
yield EmpathyContact(
details[_ATTRIBUTES.get("jid")],
details[_ATTRIBUTES.get("alias")],
_STATUSES.get(status_code),
'', # empathy does not provide resource here AFAIK
valid_account,
contact)
def get_icon_name(self):
return 'empathy'
def provides(self):
yield ContactLeaf
class StatusSource(Source):
|
def get_items(self):
for status, name in _STATUSES.iteritems():
yield AccountStatus(status, name)
def provides(self):
yield AccountStatus | def __init__(self):
Source.__init__(self, _("Empathy Account Status")) | random_line_split |
empathy.py | # -*- coding: UTF-8 -*-
# vim: set noexpandtab ts=8 sw=8:
__kupfer_name__ = _("Empathy")
__kupfer_sources__ = ("ContactsSource", )
__kupfer_actions__ = ("ChangeStatus", 'OpenChat')
__description__ = _("Access to Empathy Contacts")
__version__ = "2010-10-17"
__author__ = "Jakh Daven <tuxcanfly@gmail.com>"
import dbus
import time
from kupfer import icons
from kupfer import plugin_support
from kupfer import pretty
from kupfer.objects import Leaf, Action, Source, AppLeaf
from kupfer.weaklib import dbus_signal_connect_weakly
from kupfer.obj.helplib import PicklingHelperMixin
from kupfer.obj.apps import AppLeafContentMixin
from kupfer.obj.grouping import ToplevelGroupingSource
from kupfer.obj.contacts import ContactLeaf, JabberContact, JABBER_JID_KEY
__kupfer_settings__ = plugin_support.PluginSettings(
{
"key" : "show_offline",
"label": _("Show offline contacts"),
"type": bool,
"value": False,
},
)
plugin_support.check_dbus_connection()
_STATUSES = {
'available': _('Available'),
'away': _('Away'),
'dnd': _('Busy'),
'xa': _('Not Available'),
'hidden': _('Invisible'),
'offline': _('Offline')
}
_ATTRIBUTES = {
'alias': 'org.freedesktop.Telepathy.Connection.Interface.Aliasing/alias',
'presence': 'org.freedesktop.Telepathy.Connection.Interface.SimplePresence/presence',
'contact_caps': 'org.freedesktop.Telepathy.Connection.Interface.ContactCapabilities.DRAFT/caps',
'jid': 'org.freedesktop.Telepathy.Connection/contact-id',
'caps': 'org.freedesktop.Telepathy.Connection.Interface.Capabilities/caps',
}
ACCOUNTMANAGER_PATH = "/org/freedesktop/Telepathy/AccountManager"
ACCOUNTMANAGER_IFACE = "org.freedesktop.Telepathy.AccountManager"
ACCOUNT_IFACE = "org.freedesktop.Telepathy.Account"
CHANNEL_GROUP_IFACE = "org.freedesktop.Telepathy.Channel.Interface.Group"
CONTACT_IFACE = "org.freedesktop.Telepathy.Connection.Interface.Contacts"
SIMPLE_PRESENCE_IFACE = "org.freedesktop.Telepathy.Connection.Interface.SimplePresence"
DBUS_PROPS_IFACE = "org.freedesktop.DBus.Properties"
CHANNELDISPATCHER_IFACE = "org.freedesktop.Telepathy.ChannelDispatcher"
CHANNELDISPATCHER_PATH = "/org/freedesktop/Telepathy/ChannelDispatcher"
CHANNEL_TYPE = "org.freedesktop.Telepathy.Channel.ChannelType"
CHANNEL_TYPE_TEXT = "org.freedesktop.Telepathy.Channel.Type.Text"
CHANNEL_TARGETHANDLE = "org.freedesktop.Telepathy.Channel.TargetHandle"
CHANNEL_TARGETHANDLETYPE = "org.freedesktop.Telepathy.Channel.TargetHandleType"
EMPATHY_CLIENT_IFACE = "org.freedesktop.Telepathy.Client.Empathy"
EMPATHY_ACCOUNT_KEY = "EMPATHY_ACCOUNT"
EMPATHY_CONTACT_ID = "EMPATHY_CONTACT_ID"
def _create_dbus_connection():
try:
sbus = dbus.SessionBus()
proxy_obj = sbus.get_object(ACCOUNTMANAGER_IFACE, ACCOUNTMANAGER_PATH)
dbus_iface = dbus.Interface(proxy_obj, DBUS_PROPS_IFACE)
return dbus_iface
except dbus.DBusException as exc:
pretty.print_exc(__name__)
class EmpathyContact(JabberContact):
def __init__(self, jid, name, status, resources, account, contact_id):
empathy_slots= { EMPATHY_ACCOUNT_KEY: account, EMPATHY_CONTACT_ID: contact_id }
JabberContact.__init__(self, jid, name, status, resources, empathy_slots)
def repr_key(self):
return "".join((self.object[JABBER_JID_KEY], self.object[EMPATHY_ACCOUNT_KEY]))
def get_gicon(self):
return icons.ComposedIconSmall(self.get_icon_name(), "empathy")
class AccountStatus(Leaf):
pass
class OpenChat(Action):
def __init__(self):
Action.__init__(self, _('Open Chat'))
def activate(self, leaf):
bus = dbus.SessionBus()
jid = JABBER_JID_KEY in leaf and leaf[JABBER_JID_KEY]
account = bus.get_object(ACCOUNTMANAGER_IFACE, leaf[EMPATHY_ACCOUNT_KEY])
contact_id = leaf[EMPATHY_CONTACT_ID]
channel_dispatcher_iface = bus.get_object(CHANNELDISPATCHER_IFACE, CHANNELDISPATCHER_PATH)
ticks = dbus.Int64(time.time())
channel_request_params = dbus.Dictionary()
channel_request_params[CHANNEL_TYPE] = dbus.String(CHANNEL_TYPE_TEXT, variant_level=1)
channel_request_params[CHANNEL_TARGETHANDLETYPE] = dbus.UInt32(1, variant_level=1)
channel_request_params[CHANNEL_TARGETHANDLE] = contact_id
message_channel_path = channel_dispatcher_iface.EnsureChannel(account, channel_request_params, ticks, EMPATHY_CLIENT_IFACE)
channel_request = bus.get_object(ACCOUNTMANAGER_IFACE, message_channel_path)
channel_request.Proceed()
def get_icon_name(self):
return 'empathy'
def item_types(self):
yield ContactLeaf
def valid_for_item(self, item):
return EMPATHY_ACCOUNT_KEY in item and item[EMPATHY_ACCOUNT_KEY]
class ChangeStatus(Action):
''' Change global status '''
def | (self):
Action.__init__(self, _('Change Global Status To...'))
def activate(self, leaf, iobj):
bus = dbus.SessionBus()
interface = _create_dbus_connection()
for valid_account in interface.Get(ACCOUNTMANAGER_IFACE, "ValidAccounts"):
account = bus.get_object(ACCOUNTMANAGER_IFACE, valid_account)
connection_status = account.Get(ACCOUNT_IFACE, "ConnectionStatus")
if connection_status != 0:
continue
if iobj.object == "offline":
false = dbus.Boolean(0, variant_level=1)
account.Set(ACCOUNT_IFACE, "Enabled", false)
else:
connection_path = account.Get(ACCOUNT_IFACE, "Connection")
connection_iface = connection_path.replace("/", ".")[1:]
connection = bus.get_object(connection_iface, connection_path)
simple_presence = dbus.Interface(connection, SIMPLE_PRESENCE_IFACE)
simple_presence.SetPresence(iobj.object, _STATUSES.get(iobj.object))
def item_types(self):
yield AppLeaf
def valid_for_item(self, leaf):
return leaf.get_id() == 'empathy'
def requires_object(self):
return True
def object_types(self):
yield AccountStatus
def object_source(self, for_item=None):
return StatusSource()
class ContactsSource(AppLeafContentMixin, ToplevelGroupingSource,
PicklingHelperMixin):
''' Get contacts from all on-line accounts in Empathy via DBus '''
appleaf_content_id = 'empathy'
def __init__(self, name=_('Empathy Contacts')):
super(ContactsSource, self).__init__(name, "Contacts")
self._version = 2
self.unpickle_finish()
def pickle_prepare(self):
self._contacts = []
def unpickle_finish(self):
self.mark_for_update()
self._contacts = []
def initialize(self):
ToplevelGroupingSource.initialize(self)
def get_items(self):
interface = _create_dbus_connection()
if interface is not None:
self._contacts = list(self._find_all_contacts(interface))
else:
self._contacts = []
return self._contacts
def _find_all_contacts(self, interface):
show_offline = __kupfer_settings__["show_offline"]
bus = dbus.SessionBus()
for valid_account in interface.Get(ACCOUNTMANAGER_IFACE, "ValidAccounts"):
account = bus.get_object(ACCOUNTMANAGER_IFACE, valid_account)
connection_status = account.Get(ACCOUNT_IFACE, "ConnectionStatus")
if connection_status != 0:
continue
connection_path = account.Get(ACCOUNT_IFACE, "Connection")
connection_iface = connection_path.replace("/", ".")[1:]
connection = bus.get_object(connection_iface, connection_path)
channels = connection.ListChannels()
for channel in channels:
contact_group = bus.get_object(connection_iface, channel[0])
try:
contacts = contact_group.Get(CHANNEL_GROUP_IFACE, "Members")
except dbus.exceptions.DBusException, ex:
self.output_info(ex)
contacts = None
if contacts:
contacts = [c for c in contacts]
contact_attributes = connection.Get(CONTACT_IFACE, "ContactAttributeInterfaces")
contact_attributes = [str(a) for a in contact_attributes]
contact_details = connection.GetContactAttributes(contacts, contact_attributes, False)
for contact, details in contact_details.iteritems():
try:
status_code = details[_ATTRIBUTES.get("presence")][1]
except KeyError, ex:
self.output_info('Presence could not be established with %s. Leaving unknown.' % ex)
status_code = u'unknown'
if not show_offline and status_code == 'offline':
continue
yield EmpathyContact(
details[_ATTRIBUTES.get("jid")],
details[_ATTRIBUTES.get("alias")],
_STATUSES.get(status_code),
'', # empathy does not provide resource here AFAIK
valid_account,
contact)
def get_icon_name(self):
return 'empathy'
def provides(self):
yield ContactLeaf
class StatusSource(Source):
def __init__(self):
Source.__init__(self, _("Empathy Account Status"))
def get_items(self):
for status, name in _STATUSES.iteritems():
yield AccountStatus(status, name)
def provides(self):
yield AccountStatus
| __init__ | identifier_name |
props.rs | // This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use dbus::arg::IterAppend;
use dbus_tree::{MTSync, MethodErr, PropInfo};
use crate::{
dbus_api::{
filesystem::shared::{self, filesystem_operation},
types::TData,
},
engine::{Engine, Name, Pool},
};
/// Get a filesystem property and place it on the D-Bus. The property is
/// found by means of the getter method which takes a reference to a
/// Filesystem and obtains the property from the filesystem.
fn get_filesystem_property<F, R, E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
getter: F,
) -> Result<(), MethodErr>
where
F: Fn((Name, Name, &<E::Pool as Pool>::Filesystem)) -> Result<R, String>,
R: dbus::arg::Append,
E: Engine,
{
#[allow(clippy::redundant_closure)]
i.append(
filesystem_operation(p.tree, p.path.get_name(), getter)
.map_err(|ref e| MethodErr::failed(e))?,
);
Ok(())
}
/// Get the devnode for an object path.
pub fn get_filesystem_devnode<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(pool_name, fs_name, fs)| {
Ok(shared::fs_devnode_prop::<E>(fs, &pool_name, &fs_name))
})
}
| where
E: Engine,
{
get_filesystem_property(i, p, |(_, fs_name, _)| Ok(shared::fs_name_prop(&fs_name)))
}
/// Get the creation date and time in rfc3339 format.
pub fn get_filesystem_created<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_created_prop::<E>(fs)))
}
/// Get the size of the filesystem in bytes.
pub fn get_filesystem_size<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_size_prop(fs)))
}
/// Get the size of the used portion of the filesystem in bytes.
pub fn get_filesystem_used<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_used_prop::<E>(fs)))
} | pub fn get_filesystem_name<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr> | random_line_split |
props.rs | // This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use dbus::arg::IterAppend;
use dbus_tree::{MTSync, MethodErr, PropInfo};
use crate::{
dbus_api::{
filesystem::shared::{self, filesystem_operation},
types::TData,
},
engine::{Engine, Name, Pool},
};
/// Get a filesystem property and place it on the D-Bus. The property is
/// found by means of the getter method which takes a reference to a
/// Filesystem and obtains the property from the filesystem.
fn get_filesystem_property<F, R, E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
getter: F,
) -> Result<(), MethodErr>
where
F: Fn((Name, Name, &<E::Pool as Pool>::Filesystem)) -> Result<R, String>,
R: dbus::arg::Append,
E: Engine,
{
#[allow(clippy::redundant_closure)]
i.append(
filesystem_operation(p.tree, p.path.get_name(), getter)
.map_err(|ref e| MethodErr::failed(e))?,
);
Ok(())
}
/// Get the devnode for an object path.
pub fn get_filesystem_devnode<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
|
pub fn get_filesystem_name<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, fs_name, _)| Ok(shared::fs_name_prop(&fs_name)))
}
/// Get the creation date and time in rfc3339 format.
pub fn get_filesystem_created<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_created_prop::<E>(fs)))
}
/// Get the size of the filesystem in bytes.
pub fn get_filesystem_size<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_size_prop(fs)))
}
/// Get the size of the used portion of the filesystem in bytes.
pub fn get_filesystem_used<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_used_prop::<E>(fs)))
}
| {
get_filesystem_property(i, p, |(pool_name, fs_name, fs)| {
Ok(shared::fs_devnode_prop::<E>(fs, &pool_name, &fs_name))
})
} | identifier_body |
props.rs | // This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use dbus::arg::IterAppend;
use dbus_tree::{MTSync, MethodErr, PropInfo};
use crate::{
dbus_api::{
filesystem::shared::{self, filesystem_operation},
types::TData,
},
engine::{Engine, Name, Pool},
};
/// Get a filesystem property and place it on the D-Bus. The property is
/// found by means of the getter method which takes a reference to a
/// Filesystem and obtains the property from the filesystem.
fn | <F, R, E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
getter: F,
) -> Result<(), MethodErr>
where
F: Fn((Name, Name, &<E::Pool as Pool>::Filesystem)) -> Result<R, String>,
R: dbus::arg::Append,
E: Engine,
{
#[allow(clippy::redundant_closure)]
i.append(
filesystem_operation(p.tree, p.path.get_name(), getter)
.map_err(|ref e| MethodErr::failed(e))?,
);
Ok(())
}
/// Get the devnode for an object path.
pub fn get_filesystem_devnode<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(pool_name, fs_name, fs)| {
Ok(shared::fs_devnode_prop::<E>(fs, &pool_name, &fs_name))
})
}
pub fn get_filesystem_name<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, fs_name, _)| Ok(shared::fs_name_prop(&fs_name)))
}
/// Get the creation date and time in rfc3339 format.
pub fn get_filesystem_created<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_created_prop::<E>(fs)))
}
/// Get the size of the filesystem in bytes.
pub fn get_filesystem_size<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_size_prop(fs)))
}
/// Get the size of the used portion of the filesystem in bytes.
pub fn get_filesystem_used<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_used_prop::<E>(fs)))
}
| get_filesystem_property | identifier_name |
content.component.ts | import { Component, OnDestroy, OnInit, ViewChild, AfterViewInit } from '@angular/core';
import { RecordService } from '../../common/record.service'
@Component({
selector: 'content-page',
template: require('./content.component.html'),
styles: [require('./content.component.css')]
})
export class ContentComponent {
@ViewChild('snackbar') snackbarRef;
@ViewChild('progressbar') progressRef;
fid: number = 0;
constructor(
private recorder: RecordService
) {
this.fid = 1;
}
get total_frames() |
get frameid(){
return this.recorder.framelist[this.fid-1];
}
updateprogress(fid: number) {
if (fid > this.total_frames) {
this.recorder.submit();
this.snackbarRef.nativeElement.MaterialSnackbar.showSnackbar({
message: `You have finished the test`,
timeout: 1000,
actionText: 'RESTART',
actionHandler: () => {
this.updateprogress(1);
}
});
return;
}
this.fid = fid;
this.progressRef.nativeElement.MaterialProgress.setProgress(100 * this.fid / this.total_frames);
}
choose(id: string) {
this.recorder.put(this.fid, id);
var fid = this.fid;
this.snackbarRef.nativeElement.MaterialSnackbar.showSnackbar({
message: `You have chosen id for ${this.fid}`,
timeout: 1000,
actionText: 'UNDO',
actionHandler: () => {
this.updateprogress(fid);
this.recorder.remove(fid);
}
});
this.updateprogress(fid + 1);
}
}
| {
return this.recorder.framelist.length;
} | identifier_body |
content.component.ts | import { Component, OnDestroy, OnInit, ViewChild, AfterViewInit } from '@angular/core';
import { RecordService } from '../../common/record.service'
@Component({
selector: 'content-page',
template: require('./content.component.html'),
styles: [require('./content.component.css')]
})
export class ContentComponent {
@ViewChild('snackbar') snackbarRef;
@ViewChild('progressbar') progressRef;
fid: number = 0;
constructor( | ) {
this.fid = 1;
}
get total_frames() {
return this.recorder.framelist.length;
}
get frameid(){
return this.recorder.framelist[this.fid-1];
}
updateprogress(fid: number) {
if (fid > this.total_frames) {
this.recorder.submit();
this.snackbarRef.nativeElement.MaterialSnackbar.showSnackbar({
message: `You have finished the test`,
timeout: 1000,
actionText: 'RESTART',
actionHandler: () => {
this.updateprogress(1);
}
});
return;
}
this.fid = fid;
this.progressRef.nativeElement.MaterialProgress.setProgress(100 * this.fid / this.total_frames);
}
choose(id: string) {
this.recorder.put(this.fid, id);
var fid = this.fid;
this.snackbarRef.nativeElement.MaterialSnackbar.showSnackbar({
message: `You have chosen id for ${this.fid}`,
timeout: 1000,
actionText: 'UNDO',
actionHandler: () => {
this.updateprogress(fid);
this.recorder.remove(fid);
}
});
this.updateprogress(fid + 1);
}
} | private recorder: RecordService | random_line_split |
content.component.ts | import { Component, OnDestroy, OnInit, ViewChild, AfterViewInit } from '@angular/core';
import { RecordService } from '../../common/record.service'
@Component({
selector: 'content-page',
template: require('./content.component.html'),
styles: [require('./content.component.css')]
})
export class ContentComponent {
@ViewChild('snackbar') snackbarRef;
@ViewChild('progressbar') progressRef;
fid: number = 0;
constructor(
private recorder: RecordService
) {
this.fid = 1;
}
get total_frames() {
return this.recorder.framelist.length;
}
get frameid(){
return this.recorder.framelist[this.fid-1];
}
updateprogress(fid: number) {
if (fid > this.total_frames) |
this.fid = fid;
this.progressRef.nativeElement.MaterialProgress.setProgress(100 * this.fid / this.total_frames);
}
choose(id: string) {
this.recorder.put(this.fid, id);
var fid = this.fid;
this.snackbarRef.nativeElement.MaterialSnackbar.showSnackbar({
message: `You have chosen id for ${this.fid}`,
timeout: 1000,
actionText: 'UNDO',
actionHandler: () => {
this.updateprogress(fid);
this.recorder.remove(fid);
}
});
this.updateprogress(fid + 1);
}
}
| {
this.recorder.submit();
this.snackbarRef.nativeElement.MaterialSnackbar.showSnackbar({
message: `You have finished the test`,
timeout: 1000,
actionText: 'RESTART',
actionHandler: () => {
this.updateprogress(1);
}
});
return;
} | conditional_block |
content.component.ts | import { Component, OnDestroy, OnInit, ViewChild, AfterViewInit } from '@angular/core';
import { RecordService } from '../../common/record.service'
@Component({
selector: 'content-page',
template: require('./content.component.html'),
styles: [require('./content.component.css')]
})
export class ContentComponent {
@ViewChild('snackbar') snackbarRef;
@ViewChild('progressbar') progressRef;
fid: number = 0;
constructor(
private recorder: RecordService
) {
this.fid = 1;
}
get total_frames() {
return this.recorder.framelist.length;
}
get frameid(){
return this.recorder.framelist[this.fid-1];
}
| (fid: number) {
if (fid > this.total_frames) {
this.recorder.submit();
this.snackbarRef.nativeElement.MaterialSnackbar.showSnackbar({
message: `You have finished the test`,
timeout: 1000,
actionText: 'RESTART',
actionHandler: () => {
this.updateprogress(1);
}
});
return;
}
this.fid = fid;
this.progressRef.nativeElement.MaterialProgress.setProgress(100 * this.fid / this.total_frames);
}
choose(id: string) {
this.recorder.put(this.fid, id);
var fid = this.fid;
this.snackbarRef.nativeElement.MaterialSnackbar.showSnackbar({
message: `You have chosen id for ${this.fid}`,
timeout: 1000,
actionText: 'UNDO',
actionHandler: () => {
this.updateprogress(fid);
this.recorder.remove(fid);
}
});
this.updateprogress(fid + 1);
}
}
| updateprogress | identifier_name |
utils.js | var base58 = require('base58-native');
var cnUtil = require('cryptonote-util');
exports.uid = function () {
var min = 100000000000000;
var max = 999999999999999;
var id = Math.floor(Math.random() * (max - min + 1)) + min;
return id.toString();
};
exports.ringBuffer = function (maxSize) {
var data = [];
var cursor = 0;
var isFull = false;
return {
append: function (x) {
if (isFull) {
data[cursor] = x;
cursor = (cursor + 1) % maxSize;
} else |
},
avg: function (plusOne) {
var sum = data.reduce(function (a, b) {
return a + b
}, plusOne || 0);
return sum / ((isFull ? maxSize : cursor) + (plusOne ? 1 : 0));
},
size: function () {
return isFull ? maxSize : cursor;
},
clear: function () {
data = [];
cursor = 0;
isFull = false;
}
};
};
exports.varIntEncode = function (n) {
};
| {
data.push(x);
cursor++;
if (data.length === maxSize) {
cursor = 0;
isFull = true;
}
} | conditional_block |
utils.js | var base58 = require('base58-native');
var cnUtil = require('cryptonote-util');
exports.uid = function () {
var min = 100000000000000;
var max = 999999999999999;
var id = Math.floor(Math.random() * (max - min + 1)) + min;
return id.toString();
};
exports.ringBuffer = function (maxSize) {
var data = [];
var cursor = 0;
var isFull = false;
return {
append: function (x) {
if (isFull) {
data[cursor] = x;
cursor = (cursor + 1) % maxSize;
} else {
data.push(x);
cursor++;
if (data.length === maxSize) {
cursor = 0;
isFull = true;
}
}
},
avg: function (plusOne) {
var sum = data.reduce(function (a, b) {
return a + b
}, plusOne || 0);
return sum / ((isFull ? maxSize : cursor) + (plusOne ? 1 : 0));
},
size: function () {
return isFull ? maxSize : cursor;
},
clear: function () {
data = [];
cursor = 0; | };
exports.varIntEncode = function (n) {
}; | isFull = false;
}
}; | random_line_split |
settings.py | # Django settings for ibistu_serverV2_webserver project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('admin', 'c0710204@gmail.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'admin.db', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and | # calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '_tnl@7cyqlgnwe!@2ptc56)15+mzpxk4uz!c+xy8#b(w^%0c-2'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ibistu_serverV2_webserver.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'ibistu_serverV2_webserver.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
'django.contrib.admindocs',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
} | random_line_split | |
tests.py | #
# Test-Related Pages
#
import pscheduler
from pschedulerapiserver import application
from flask import request
from .dbcursor import dbcursor_query
from .json import *
from .response import *
#
# Tests
#
# All tests
@application.route("/tests", methods=['GET'])
def tests():
return json_query("SELECT json FROM test"
" WHERE available ORDER BY name", [])
# Test <name>
@application.route("/tests/<name>", methods=['GET'])
def tests_name(name):
return json_query("SELECT json FROM test"
" WHERE available AND name = %s",
[name], single=True)
# Derive a spec from command line arguments in 'arg'
@application.route("/tests/<name>/spec", methods=['GET'])
def tests_name_spec(name):
cursor = dbcursor_query("SELECT EXISTS (SELECT * FROM test"
" WHERE available AND name = %s)",
[ name ])
exists = cursor.fetchone()[0]
cursor.close()
if not exists:
return not_found()
try:
args = arg_json('args')
except ValueError as ex:
return bad_request("JSON passed to 'args': %s " % (str(ex)))
status, stdout, stderr = pscheduler.run_program(
[ 'pscheduler', 'internal', 'invoke', 'test', name, 'cli-to-spec' ],
stdin = pscheduler.json_dump(args),
timeout=5
)
if status != 0:
return bad_request(stderr)
# The extra parse here makes 'pretty' work.
returned_json = pscheduler.json_load(stdout)
return ok_json(returned_json, sanitize=False)
# Test spec validation
@application.route("/tests/<name>/spec/is-valid", methods=['GET'])
def tests_name_spec_is_valid(name):
cursor = dbcursor_query(
"SELECT EXISTS"
" (SELECT * FROM test WHERE available AND name = %s)",
[name])
exists = cursor.fetchone()[0]
cursor.close()
if not exists:
return not_found()
spec = request.args.get('spec')
if spec is None:
return bad_request("No test spec provided")
try:
returncode, stdout, stderr = pscheduler.run_program(
["pscheduler", "internal", "invoke", "test",
name, "spec-is-valid"],
stdin=spec)
if returncode != 0:
return error("Unable to validate test spec: %s" % (stderr))
validate_json = pscheduler.json_load(stdout, max_schema=1)
return ok_json(validate_json)
except Exception as ex:
return error("Unable to validate test spec: %s" % (str(ex)))
| def tests_name_tools(name):
# TODO: Should probably 404 if the test doesn't exist.
# TODO: Is this used anywhere?
expanded = is_expanded()
cursor = dbcursor_query("""
SELECT
tool.name,
tool.json
FROM
tool
JOIN tool_test ON tool_test.tool = tool.id
JOIN test ON test.id = tool_test.test
WHERE
tool.available
AND test.name = %s
""", [name])
result = []
for row in cursor:
url = root_url('tools/' + row[0])
if not expanded:
result.append(url)
continue
row[1]['href'] = url
result.append(row[1])
cursor.close()
return json_response(result)
# Participants in a test spec
@application.route("/tests/<name>/participants", methods=['GET'])
def tests_name_participants(name):
spec = request.args.get('spec')
if spec is None:
return bad_request("No test spec provided")
try:
returncode, stdout, stderr = pscheduler.run_program(
[ "pscheduler", "internal", "invoke", "test", name,
"participants"],
stdin = spec,
)
except KeyError:
return bad_request("Invalid spec")
except Exception as ex:
return bad_request(ex)
if returncode != 0:
return bad_request(stderr)
# If this fails because of bad JSON, an exception will be thrown,
# caught and logged.
return json_response(pscheduler.json_load(stdout, max_schema=1)) | # Tools that can carry out test <name>
@application.route("/tests/<name>/tools", methods=['GET']) | random_line_split |
tests.py | #
# Test-Related Pages
#
import pscheduler
from pschedulerapiserver import application
from flask import request
from .dbcursor import dbcursor_query
from .json import *
from .response import *
#
# Tests
#
# All tests
@application.route("/tests", methods=['GET'])
def tests():
return json_query("SELECT json FROM test"
" WHERE available ORDER BY name", [])
# Test <name>
@application.route("/tests/<name>", methods=['GET'])
def tests_name(name):
return json_query("SELECT json FROM test"
" WHERE available AND name = %s",
[name], single=True)
# Derive a spec from command line arguments in 'arg'
@application.route("/tests/<name>/spec", methods=['GET'])
def tests_name_spec(name):
cursor = dbcursor_query("SELECT EXISTS (SELECT * FROM test"
" WHERE available AND name = %s)",
[ name ])
exists = cursor.fetchone()[0]
cursor.close()
if not exists:
return not_found()
try:
args = arg_json('args')
except ValueError as ex:
return bad_request("JSON passed to 'args': %s " % (str(ex)))
status, stdout, stderr = pscheduler.run_program(
[ 'pscheduler', 'internal', 'invoke', 'test', name, 'cli-to-spec' ],
stdin = pscheduler.json_dump(args),
timeout=5
)
if status != 0:
return bad_request(stderr)
# The extra parse here makes 'pretty' work.
returned_json = pscheduler.json_load(stdout)
return ok_json(returned_json, sanitize=False)
# Test spec validation
@application.route("/tests/<name>/spec/is-valid", methods=['GET'])
def tests_name_spec_is_valid(name):
cursor = dbcursor_query(
"SELECT EXISTS"
" (SELECT * FROM test WHERE available AND name = %s)",
[name])
exists = cursor.fetchone()[0]
cursor.close()
if not exists:
return not_found()
spec = request.args.get('spec')
if spec is None:
return bad_request("No test spec provided")
try:
returncode, stdout, stderr = pscheduler.run_program(
["pscheduler", "internal", "invoke", "test",
name, "spec-is-valid"],
stdin=spec)
if returncode != 0:
return error("Unable to validate test spec: %s" % (stderr))
validate_json = pscheduler.json_load(stdout, max_schema=1)
return ok_json(validate_json)
except Exception as ex:
return error("Unable to validate test spec: %s" % (str(ex)))
# Tools that can carry out test <name>
@application.route("/tests/<name>/tools", methods=['GET'])
def tests_name_tools(name):
# TODO: Should probably 404 if the test doesn't exist.
# TODO: Is this used anywhere?
expanded = is_expanded()
cursor = dbcursor_query("""
SELECT
tool.name,
tool.json
FROM
tool
JOIN tool_test ON tool_test.tool = tool.id
JOIN test ON test.id = tool_test.test
WHERE
tool.available
AND test.name = %s
""", [name])
result = []
for row in cursor:
url = root_url('tools/' + row[0])
if not expanded:
result.append(url)
continue
row[1]['href'] = url
result.append(row[1])
cursor.close()
return json_response(result)
# Participants in a test spec
@application.route("/tests/<name>/participants", methods=['GET'])
def tests_name_participants(name):
spec = request.args.get('spec')
if spec is None:
|
try:
returncode, stdout, stderr = pscheduler.run_program(
[ "pscheduler", "internal", "invoke", "test", name,
"participants"],
stdin = spec,
)
except KeyError:
return bad_request("Invalid spec")
except Exception as ex:
return bad_request(ex)
if returncode != 0:
return bad_request(stderr)
# If this fails because of bad JSON, an exception will be thrown,
# caught and logged.
return json_response(pscheduler.json_load(stdout, max_schema=1))
| return bad_request("No test spec provided") | conditional_block |
tests.py | #
# Test-Related Pages
#
import pscheduler
from pschedulerapiserver import application
from flask import request
from .dbcursor import dbcursor_query
from .json import *
from .response import *
#
# Tests
#
# All tests
@application.route("/tests", methods=['GET'])
def | ():
return json_query("SELECT json FROM test"
" WHERE available ORDER BY name", [])
# Test <name>
@application.route("/tests/<name>", methods=['GET'])
def tests_name(name):
return json_query("SELECT json FROM test"
" WHERE available AND name = %s",
[name], single=True)
# Derive a spec from command line arguments in 'arg'
@application.route("/tests/<name>/spec", methods=['GET'])
def tests_name_spec(name):
cursor = dbcursor_query("SELECT EXISTS (SELECT * FROM test"
" WHERE available AND name = %s)",
[ name ])
exists = cursor.fetchone()[0]
cursor.close()
if not exists:
return not_found()
try:
args = arg_json('args')
except ValueError as ex:
return bad_request("JSON passed to 'args': %s " % (str(ex)))
status, stdout, stderr = pscheduler.run_program(
[ 'pscheduler', 'internal', 'invoke', 'test', name, 'cli-to-spec' ],
stdin = pscheduler.json_dump(args),
timeout=5
)
if status != 0:
return bad_request(stderr)
# The extra parse here makes 'pretty' work.
returned_json = pscheduler.json_load(stdout)
return ok_json(returned_json, sanitize=False)
# Test spec validation
@application.route("/tests/<name>/spec/is-valid", methods=['GET'])
def tests_name_spec_is_valid(name):
cursor = dbcursor_query(
"SELECT EXISTS"
" (SELECT * FROM test WHERE available AND name = %s)",
[name])
exists = cursor.fetchone()[0]
cursor.close()
if not exists:
return not_found()
spec = request.args.get('spec')
if spec is None:
return bad_request("No test spec provided")
try:
returncode, stdout, stderr = pscheduler.run_program(
["pscheduler", "internal", "invoke", "test",
name, "spec-is-valid"],
stdin=spec)
if returncode != 0:
return error("Unable to validate test spec: %s" % (stderr))
validate_json = pscheduler.json_load(stdout, max_schema=1)
return ok_json(validate_json)
except Exception as ex:
return error("Unable to validate test spec: %s" % (str(ex)))
# Tools that can carry out test <name>
@application.route("/tests/<name>/tools", methods=['GET'])
def tests_name_tools(name):
# TODO: Should probably 404 if the test doesn't exist.
# TODO: Is this used anywhere?
expanded = is_expanded()
cursor = dbcursor_query("""
SELECT
tool.name,
tool.json
FROM
tool
JOIN tool_test ON tool_test.tool = tool.id
JOIN test ON test.id = tool_test.test
WHERE
tool.available
AND test.name = %s
""", [name])
result = []
for row in cursor:
url = root_url('tools/' + row[0])
if not expanded:
result.append(url)
continue
row[1]['href'] = url
result.append(row[1])
cursor.close()
return json_response(result)
# Participants in a test spec
@application.route("/tests/<name>/participants", methods=['GET'])
def tests_name_participants(name):
spec = request.args.get('spec')
if spec is None:
return bad_request("No test spec provided")
try:
returncode, stdout, stderr = pscheduler.run_program(
[ "pscheduler", "internal", "invoke", "test", name,
"participants"],
stdin = spec,
)
except KeyError:
return bad_request("Invalid spec")
except Exception as ex:
return bad_request(ex)
if returncode != 0:
return bad_request(stderr)
# If this fails because of bad JSON, an exception will be thrown,
# caught and logged.
return json_response(pscheduler.json_load(stdout, max_schema=1))
| tests | identifier_name |
tests.py | #
# Test-Related Pages
#
import pscheduler
from pschedulerapiserver import application
from flask import request
from .dbcursor import dbcursor_query
from .json import *
from .response import *
#
# Tests
#
# All tests
@application.route("/tests", methods=['GET'])
def tests():
|
# Test <name>
@application.route("/tests/<name>", methods=['GET'])
def tests_name(name):
return json_query("SELECT json FROM test"
" WHERE available AND name = %s",
[name], single=True)
# Derive a spec from command line arguments in 'arg'
@application.route("/tests/<name>/spec", methods=['GET'])
def tests_name_spec(name):
cursor = dbcursor_query("SELECT EXISTS (SELECT * FROM test"
" WHERE available AND name = %s)",
[ name ])
exists = cursor.fetchone()[0]
cursor.close()
if not exists:
return not_found()
try:
args = arg_json('args')
except ValueError as ex:
return bad_request("JSON passed to 'args': %s " % (str(ex)))
status, stdout, stderr = pscheduler.run_program(
[ 'pscheduler', 'internal', 'invoke', 'test', name, 'cli-to-spec' ],
stdin = pscheduler.json_dump(args),
timeout=5
)
if status != 0:
return bad_request(stderr)
# The extra parse here makes 'pretty' work.
returned_json = pscheduler.json_load(stdout)
return ok_json(returned_json, sanitize=False)
# Test spec validation
@application.route("/tests/<name>/spec/is-valid", methods=['GET'])
def tests_name_spec_is_valid(name):
cursor = dbcursor_query(
"SELECT EXISTS"
" (SELECT * FROM test WHERE available AND name = %s)",
[name])
exists = cursor.fetchone()[0]
cursor.close()
if not exists:
return not_found()
spec = request.args.get('spec')
if spec is None:
return bad_request("No test spec provided")
try:
returncode, stdout, stderr = pscheduler.run_program(
["pscheduler", "internal", "invoke", "test",
name, "spec-is-valid"],
stdin=spec)
if returncode != 0:
return error("Unable to validate test spec: %s" % (stderr))
validate_json = pscheduler.json_load(stdout, max_schema=1)
return ok_json(validate_json)
except Exception as ex:
return error("Unable to validate test spec: %s" % (str(ex)))
# Tools that can carry out test <name>
@application.route("/tests/<name>/tools", methods=['GET'])
def tests_name_tools(name):
# TODO: Should probably 404 if the test doesn't exist.
# TODO: Is this used anywhere?
expanded = is_expanded()
cursor = dbcursor_query("""
SELECT
tool.name,
tool.json
FROM
tool
JOIN tool_test ON tool_test.tool = tool.id
JOIN test ON test.id = tool_test.test
WHERE
tool.available
AND test.name = %s
""", [name])
result = []
for row in cursor:
url = root_url('tools/' + row[0])
if not expanded:
result.append(url)
continue
row[1]['href'] = url
result.append(row[1])
cursor.close()
return json_response(result)
# Participants in a test spec
@application.route("/tests/<name>/participants", methods=['GET'])
def tests_name_participants(name):
spec = request.args.get('spec')
if spec is None:
return bad_request("No test spec provided")
try:
returncode, stdout, stderr = pscheduler.run_program(
[ "pscheduler", "internal", "invoke", "test", name,
"participants"],
stdin = spec,
)
except KeyError:
return bad_request("Invalid spec")
except Exception as ex:
return bad_request(ex)
if returncode != 0:
return bad_request(stderr)
# If this fails because of bad JSON, an exception will be thrown,
# caught and logged.
return json_response(pscheduler.json_load(stdout, max_schema=1))
| return json_query("SELECT json FROM test"
" WHERE available ORDER BY name", []) | identifier_body |
engine.rs | //! A solver for dataflow problems.
use std::borrow::BorrowMut;
use std::ffi::OsString;
use std::path::PathBuf;
use rustc_ast as ast;
use rustc_data_structures::work_queue::WorkQueue;
use rustc_graphviz as dot;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::{self, traversal, BasicBlock};
use rustc_middle::mir::{create_dump_file, dump_enabled};
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::{sym, Symbol};
use super::fmt::DebugWithContext;
use super::graphviz;
use super::{
visit_results, Analysis, Direction, GenKill, GenKillAnalysis, GenKillSet, JoinSemiLattice,
ResultsCursor, ResultsVisitor,
};
/// A dataflow analysis that has converged to fixpoint.
pub struct Results<'tcx, A>
where
A: Analysis<'tcx>,
{
pub analysis: A,
pub(super) entry_sets: IndexVec<BasicBlock, A::Domain>,
}
impl<A> Results<'tcx, A>
where
A: Analysis<'tcx>,
{
/// Creates a `ResultsCursor` that can inspect these `Results`.
pub fn into_results_cursor(self, body: &'mir mir::Body<'tcx>) -> ResultsCursor<'mir, 'tcx, A> {
ResultsCursor::new(body, self)
}
/// Gets the dataflow state for the given block.
pub fn entry_set_for_block(&self, block: BasicBlock) -> &A::Domain {
&self.entry_sets[block]
}
pub fn visit_with(
&self,
body: &'mir mir::Body<'tcx>,
blocks: impl IntoIterator<Item = BasicBlock>,
vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = A::Domain>,
) {
visit_results(body, blocks, self, vis)
}
pub fn visit_reachable_with(
&self,
body: &'mir mir::Body<'tcx>,
vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = A::Domain>,
) {
let blocks = mir::traversal::reachable(body);
visit_results(body, blocks.map(|(bb, _)| bb), self, vis)
}
}
/// A solver for dataflow problems.
pub struct Engine<'a, 'tcx, A>
where
A: Analysis<'tcx>,
{
tcx: TyCtxt<'tcx>,
body: &'a mir::Body<'tcx>,
dead_unwinds: Option<&'a BitSet<BasicBlock>>,
entry_sets: IndexVec<BasicBlock, A::Domain>,
pass_name: Option<&'static str>,
analysis: A,
/// Cached, cumulative transfer functions for each block.
//
// FIXME(ecstaticmorse): This boxed `Fn` trait object is invoked inside a tight loop for
// gen/kill problems on cyclic CFGs. This is not ideal, but it doesn't seem to degrade
// performance in practice. I've tried a few ways to avoid this, but they have downsides. See
// the message for the commit that added this FIXME for more information.
apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
}
impl<A, D, T> Engine<'a, 'tcx, A>
where
A: GenKillAnalysis<'tcx, Idx = T, Domain = D>,
D: Clone + JoinSemiLattice + GenKill<T> + BorrowMut<BitSet<T>>,
T: Idx,
{
/// Creates a new `Engine` to solve a gen-kill dataflow problem.
pub fn new_gen_kill(tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, analysis: A) -> Self {
// If there are no back-edges in the control-flow graph, we only ever need to apply the
// transfer function for each block exactly once (assuming that we process blocks in RPO).
//
// In this case, there's no need to compute the block transfer functions ahead of time.
if !body.is_cfg_cyclic() {
return Self::new(tcx, body, analysis, None);
}
// Otherwise, compute and store the cumulative transfer function for each block.
let identity = GenKillSet::identity(analysis.bottom_value(body).borrow().domain_size());
let mut trans_for_block = IndexVec::from_elem(identity, body.basic_blocks());
for (block, block_data) in body.basic_blocks().iter_enumerated() {
let trans = &mut trans_for_block[block];
A::Direction::gen_kill_effects_in_block(&analysis, trans, block, block_data);
}
let apply_trans = Box::new(move |bb: BasicBlock, state: &mut A::Domain| {
trans_for_block[bb].apply(state.borrow_mut());
});
Self::new(tcx, body, analysis, Some(apply_trans as Box<_>))
}
}
impl<A, D> Engine<'a, 'tcx, A>
where
A: Analysis<'tcx, Domain = D>,
D: Clone + JoinSemiLattice,
{
/// Creates a new `Engine` to solve a dataflow problem with an arbitrary transfer
/// function.
///
/// Gen-kill problems should use `new_gen_kill`, which will coalesce transfer functions for
/// better performance.
pub fn new_generic(tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, analysis: A) -> Self {
Self::new(tcx, body, analysis, None)
}
fn new(
tcx: TyCtxt<'tcx>,
body: &'a mir::Body<'tcx>,
analysis: A,
apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
) -> Self {
let bottom_value = analysis.bottom_value(body);
let mut entry_sets = IndexVec::from_elem(bottom_value.clone(), body.basic_blocks());
analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);
if A::Direction::is_backward() && entry_sets[mir::START_BLOCK] != bottom_value {
bug!("`initialize_start_block` is not yet supported for backward dataflow analyses");
}
Engine {
analysis,
tcx,
body,
dead_unwinds: None,
pass_name: None,
entry_sets,
apply_trans_for_block,
}
}
/// Signals that we do not want dataflow state to propagate across unwind edges for these
/// `BasicBlock`s.
///
/// You must take care that `dead_unwinds` does not contain a `BasicBlock` that *can* actually
/// unwind during execution. Otherwise, your dataflow results will not be correct.
pub fn dead_unwinds(mut self, dead_unwinds: &'a BitSet<BasicBlock>) -> Self {
self.dead_unwinds = Some(dead_unwinds);
self
}
/// Adds an identifier to the graphviz output for this particular run of a dataflow analysis.
///
/// Some analyses are run multiple times in the compilation pipeline. Give them a `pass_name`
/// to differentiate them. Otherwise, only the results for the latest run will be saved.
pub fn pass_name(mut self, name: &'static str) -> Self |
/// Computes the fixpoint for this dataflow problem and returns it.
pub fn iterate_to_fixpoint(self) -> Results<'tcx, A>
where
A::Domain: DebugWithContext<A>,
{
let Engine {
analysis,
body,
dead_unwinds,
mut entry_sets,
tcx,
apply_trans_for_block,
pass_name,
..
} = self;
let mut dirty_queue: WorkQueue<BasicBlock> =
WorkQueue::with_none(body.basic_blocks().len());
if A::Direction::is_forward() {
for (bb, _) in traversal::reverse_postorder(body) {
dirty_queue.insert(bb);
}
} else {
// Reverse post-order on the reverse CFG may generate a better iteration order for
// backward dataflow analyses, but probably not enough to matter.
for (bb, _) in traversal::postorder(body) {
dirty_queue.insert(bb);
}
}
// `state` is not actually used between iterations;
// this is just an optimization to avoid reallocating
// every iteration.
let mut state = analysis.bottom_value(body);
while let Some(bb) = dirty_queue.pop() {
let bb_data = &body[bb];
// Set the state to the entry state of the block.
// This is equivalent to `state = entry_sets[bb].clone()`,
// but it saves an allocation, thus improving compile times.
state.clone_from(&entry_sets[bb]);
// Apply the block transfer function, using the cached one if it exists.
match &apply_trans_for_block {
Some(apply) => apply(bb, &mut state),
None => A::Direction::apply_effects_in_block(&analysis, &mut state, bb, bb_data),
}
A::Direction::join_state_into_successors_of(
&analysis,
tcx,
body,
dead_unwinds,
&mut state,
(bb, bb_data),
|target: BasicBlock, state: &A::Domain| {
let set_changed = entry_sets[target].join(state);
if set_changed {
dirty_queue.insert(target);
}
},
);
}
let results = Results { analysis, entry_sets };
let res = write_graphviz_results(tcx, &body, &results, pass_name);
if let Err(e) = res {
error!("Failed to write graphviz dataflow results: {}", e);
}
results
}
}
// Graphviz
/// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
/// `rustc_mir` attributes.
fn write_graphviz_results<A>(
tcx: TyCtxt<'tcx>,
body: &mir::Body<'tcx>,
results: &Results<'tcx, A>,
pass_name: Option<&'static str>,
) -> std::io::Result<()>
where
A: Analysis<'tcx>,
A::Domain: DebugWithContext<A>,
{
use std::fs;
use std::io::{self, Write};
let def_id = body.source.def_id();
let attrs = match RustcMirAttrs::parse(tcx, def_id) {
Ok(attrs) => attrs,
// Invalid `rustc_mir` attrs are reported in `RustcMirAttrs::parse`
Err(()) => return Ok(()),
};
let mut file = match attrs.output_path(A::NAME) {
Some(path) => {
debug!("printing dataflow results for {:?} to {}", def_id, path.display());
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
io::BufWriter::new(fs::File::create(&path)?)
}
None if tcx.sess.opts.debugging_opts.dump_mir_dataflow
&& dump_enabled(tcx, A::NAME, def_id) =>
{
create_dump_file(
tcx,
".dot",
None,
A::NAME,
&pass_name.unwrap_or("-----"),
body.source,
)?
}
_ => return Ok(()),
};
let style = match attrs.formatter {
Some(sym::two_phase) => graphviz::OutputStyle::BeforeAndAfter,
_ => graphviz::OutputStyle::AfterOnly,
};
let mut buf = Vec::new();
let graphviz = graphviz::Formatter::new(body, results, style);
let mut render_opts =
vec![dot::RenderOption::Fontname(tcx.sess.opts.debugging_opts.graphviz_font.clone())];
if tcx.sess.opts.debugging_opts.graphviz_dark_mode {
render_opts.push(dot::RenderOption::DarkTheme);
}
dot::render_opts(&graphviz, &mut buf, &render_opts)?;
file.write_all(&buf)?;
Ok(())
}
#[derive(Default)]
struct RustcMirAttrs {
basename_and_suffix: Option<PathBuf>,
formatter: Option<Symbol>,
}
impl RustcMirAttrs {
fn parse(tcx: TyCtxt<'tcx>, def_id: DefId) -> Result<Self, ()> {
let attrs = tcx.get_attrs(def_id);
let mut result = Ok(());
let mut ret = RustcMirAttrs::default();
let rustc_mir_attrs = attrs
.iter()
.filter(|attr| attr.has_name(sym::rustc_mir))
.flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter()));
for attr in rustc_mir_attrs {
let attr_result = if attr.has_name(sym::borrowck_graphviz_postflow) {
Self::set_field(&mut ret.basename_and_suffix, tcx, &attr, |s| {
let path = PathBuf::from(s.to_string());
match path.file_name() {
Some(_) => Ok(path),
None => {
tcx.sess.span_err(attr.span(), "path must end in a filename");
Err(())
}
}
})
} else if attr.has_name(sym::borrowck_graphviz_format) {
Self::set_field(&mut ret.formatter, tcx, &attr, |s| match s {
sym::gen_kill | sym::two_phase => Ok(s),
_ => {
tcx.sess.span_err(attr.span(), "unknown formatter");
Err(())
}
})
} else {
Ok(())
};
result = result.and(attr_result);
}
result.map(|()| ret)
}
fn set_field<T>(
field: &mut Option<T>,
tcx: TyCtxt<'tcx>,
attr: &ast::NestedMetaItem,
mapper: impl FnOnce(Symbol) -> Result<T, ()>,
) -> Result<(), ()> {
if field.is_some() {
tcx.sess
.span_err(attr.span(), &format!("duplicate values for `{}`", attr.name_or_empty()));
return Err(());
}
if let Some(s) = attr.value_str() {
*field = Some(mapper(s)?);
Ok(())
} else {
tcx.sess
.span_err(attr.span(), &format!("`{}` requires an argument", attr.name_or_empty()));
Err(())
}
}
/// Returns the path where dataflow results should be written, or `None`
/// `borrowck_graphviz_postflow` was not specified.
///
/// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
///
/// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
fn output_path(&self, analysis_name: &str) -> Option<PathBuf> {
let mut ret = self.basename_and_suffix.as_ref().cloned()?;
let suffix = ret.file_name().unwrap(); // Checked when parsing attrs
let mut file_name: OsString = analysis_name.into();
file_name.push("_");
file_name.push(suffix);
ret.set_file_name(file_name);
Some(ret)
}
}
| {
self.pass_name = Some(name);
self
} | identifier_body |
engine.rs | //! A solver for dataflow problems.
use std::borrow::BorrowMut;
use std::ffi::OsString;
use std::path::PathBuf;
use rustc_ast as ast;
use rustc_data_structures::work_queue::WorkQueue;
use rustc_graphviz as dot;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::{self, traversal, BasicBlock};
use rustc_middle::mir::{create_dump_file, dump_enabled};
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::{sym, Symbol};
use super::fmt::DebugWithContext;
use super::graphviz;
use super::{
visit_results, Analysis, Direction, GenKill, GenKillAnalysis, GenKillSet, JoinSemiLattice,
ResultsCursor, ResultsVisitor,
};
/// A dataflow analysis that has converged to fixpoint.
pub struct Results<'tcx, A>
where
A: Analysis<'tcx>,
{
pub analysis: A,
pub(super) entry_sets: IndexVec<BasicBlock, A::Domain>,
}
impl<A> Results<'tcx, A>
where
A: Analysis<'tcx>,
{
/// Creates a `ResultsCursor` that can inspect these `Results`.
pub fn into_results_cursor(self, body: &'mir mir::Body<'tcx>) -> ResultsCursor<'mir, 'tcx, A> {
ResultsCursor::new(body, self)
}
/// Gets the dataflow state for the given block.
pub fn entry_set_for_block(&self, block: BasicBlock) -> &A::Domain {
&self.entry_sets[block]
}
pub fn visit_with(
&self,
body: &'mir mir::Body<'tcx>,
blocks: impl IntoIterator<Item = BasicBlock>,
vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = A::Domain>,
) {
visit_results(body, blocks, self, vis)
}
pub fn visit_reachable_with(
&self,
body: &'mir mir::Body<'tcx>,
vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = A::Domain>,
) {
let blocks = mir::traversal::reachable(body);
visit_results(body, blocks.map(|(bb, _)| bb), self, vis)
}
}
/// A solver for dataflow problems.
pub struct Engine<'a, 'tcx, A>
where
A: Analysis<'tcx>,
{
tcx: TyCtxt<'tcx>,
body: &'a mir::Body<'tcx>,
dead_unwinds: Option<&'a BitSet<BasicBlock>>,
entry_sets: IndexVec<BasicBlock, A::Domain>,
pass_name: Option<&'static str>,
analysis: A,
/// Cached, cumulative transfer functions for each block.
//
// FIXME(ecstaticmorse): This boxed `Fn` trait object is invoked inside a tight loop for
// gen/kill problems on cyclic CFGs. This is not ideal, but it doesn't seem to degrade
// performance in practice. I've tried a few ways to avoid this, but they have downsides. See
// the message for the commit that added this FIXME for more information.
apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
}
impl<A, D, T> Engine<'a, 'tcx, A>
where
A: GenKillAnalysis<'tcx, Idx = T, Domain = D>,
D: Clone + JoinSemiLattice + GenKill<T> + BorrowMut<BitSet<T>>,
T: Idx,
{
/// Creates a new `Engine` to solve a gen-kill dataflow problem.
pub fn new_gen_kill(tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, analysis: A) -> Self {
// If there are no back-edges in the control-flow graph, we only ever need to apply the
// transfer function for each block exactly once (assuming that we process blocks in RPO).
//
// In this case, there's no need to compute the block transfer functions ahead of time.
if !body.is_cfg_cyclic() {
return Self::new(tcx, body, analysis, None);
}
// Otherwise, compute and store the cumulative transfer function for each block.
let identity = GenKillSet::identity(analysis.bottom_value(body).borrow().domain_size());
let mut trans_for_block = IndexVec::from_elem(identity, body.basic_blocks());
for (block, block_data) in body.basic_blocks().iter_enumerated() {
let trans = &mut trans_for_block[block];
A::Direction::gen_kill_effects_in_block(&analysis, trans, block, block_data);
}
let apply_trans = Box::new(move |bb: BasicBlock, state: &mut A::Domain| {
trans_for_block[bb].apply(state.borrow_mut());
});
Self::new(tcx, body, analysis, Some(apply_trans as Box<_>))
}
}
impl<A, D> Engine<'a, 'tcx, A>
where
A: Analysis<'tcx, Domain = D>,
D: Clone + JoinSemiLattice,
{
/// Creates a new `Engine` to solve a dataflow problem with an arbitrary transfer
/// function.
///
/// Gen-kill problems should use `new_gen_kill`, which will coalesce transfer functions for
/// better performance.
pub fn new_generic(tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, analysis: A) -> Self {
Self::new(tcx, body, analysis, None)
}
fn new(
tcx: TyCtxt<'tcx>,
body: &'a mir::Body<'tcx>,
analysis: A,
apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
) -> Self {
let bottom_value = analysis.bottom_value(body);
let mut entry_sets = IndexVec::from_elem(bottom_value.clone(), body.basic_blocks());
analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);
if A::Direction::is_backward() && entry_sets[mir::START_BLOCK] != bottom_value {
bug!("`initialize_start_block` is not yet supported for backward dataflow analyses");
}
Engine {
analysis,
tcx,
body,
dead_unwinds: None,
pass_name: None,
entry_sets,
apply_trans_for_block,
}
}
/// Signals that we do not want dataflow state to propagate across unwind edges for these
/// `BasicBlock`s.
///
/// You must take care that `dead_unwinds` does not contain a `BasicBlock` that *can* actually
/// unwind during execution. Otherwise, your dataflow results will not be correct.
pub fn dead_unwinds(mut self, dead_unwinds: &'a BitSet<BasicBlock>) -> Self {
self.dead_unwinds = Some(dead_unwinds);
self
}
/// Adds an identifier to the graphviz output for this particular run of a dataflow analysis.
///
/// Some analyses are run multiple times in the compilation pipeline. Give them a `pass_name`
/// to differentiate them. Otherwise, only the results for the latest run will be saved.
pub fn pass_name(mut self, name: &'static str) -> Self {
self.pass_name = Some(name);
self
}
/// Computes the fixpoint for this dataflow problem and returns it.
pub fn | (self) -> Results<'tcx, A>
where
A::Domain: DebugWithContext<A>,
{
let Engine {
analysis,
body,
dead_unwinds,
mut entry_sets,
tcx,
apply_trans_for_block,
pass_name,
..
} = self;
let mut dirty_queue: WorkQueue<BasicBlock> =
WorkQueue::with_none(body.basic_blocks().len());
if A::Direction::is_forward() {
for (bb, _) in traversal::reverse_postorder(body) {
dirty_queue.insert(bb);
}
} else {
// Reverse post-order on the reverse CFG may generate a better iteration order for
// backward dataflow analyses, but probably not enough to matter.
for (bb, _) in traversal::postorder(body) {
dirty_queue.insert(bb);
}
}
// `state` is not actually used between iterations;
// this is just an optimization to avoid reallocating
// every iteration.
let mut state = analysis.bottom_value(body);
while let Some(bb) = dirty_queue.pop() {
let bb_data = &body[bb];
// Set the state to the entry state of the block.
// This is equivalent to `state = entry_sets[bb].clone()`,
// but it saves an allocation, thus improving compile times.
state.clone_from(&entry_sets[bb]);
// Apply the block transfer function, using the cached one if it exists.
match &apply_trans_for_block {
Some(apply) => apply(bb, &mut state),
None => A::Direction::apply_effects_in_block(&analysis, &mut state, bb, bb_data),
}
A::Direction::join_state_into_successors_of(
&analysis,
tcx,
body,
dead_unwinds,
&mut state,
(bb, bb_data),
|target: BasicBlock, state: &A::Domain| {
let set_changed = entry_sets[target].join(state);
if set_changed {
dirty_queue.insert(target);
}
},
);
}
let results = Results { analysis, entry_sets };
let res = write_graphviz_results(tcx, &body, &results, pass_name);
if let Err(e) = res {
error!("Failed to write graphviz dataflow results: {}", e);
}
results
}
}
// Graphviz
/// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
/// `rustc_mir` attributes.
fn write_graphviz_results<A>(
tcx: TyCtxt<'tcx>,
body: &mir::Body<'tcx>,
results: &Results<'tcx, A>,
pass_name: Option<&'static str>,
) -> std::io::Result<()>
where
A: Analysis<'tcx>,
A::Domain: DebugWithContext<A>,
{
use std::fs;
use std::io::{self, Write};
let def_id = body.source.def_id();
let attrs = match RustcMirAttrs::parse(tcx, def_id) {
Ok(attrs) => attrs,
// Invalid `rustc_mir` attrs are reported in `RustcMirAttrs::parse`
Err(()) => return Ok(()),
};
let mut file = match attrs.output_path(A::NAME) {
Some(path) => {
debug!("printing dataflow results for {:?} to {}", def_id, path.display());
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
io::BufWriter::new(fs::File::create(&path)?)
}
None if tcx.sess.opts.debugging_opts.dump_mir_dataflow
&& dump_enabled(tcx, A::NAME, def_id) =>
{
create_dump_file(
tcx,
".dot",
None,
A::NAME,
&pass_name.unwrap_or("-----"),
body.source,
)?
}
_ => return Ok(()),
};
let style = match attrs.formatter {
Some(sym::two_phase) => graphviz::OutputStyle::BeforeAndAfter,
_ => graphviz::OutputStyle::AfterOnly,
};
let mut buf = Vec::new();
let graphviz = graphviz::Formatter::new(body, results, style);
let mut render_opts =
vec![dot::RenderOption::Fontname(tcx.sess.opts.debugging_opts.graphviz_font.clone())];
if tcx.sess.opts.debugging_opts.graphviz_dark_mode {
render_opts.push(dot::RenderOption::DarkTheme);
}
dot::render_opts(&graphviz, &mut buf, &render_opts)?;
file.write_all(&buf)?;
Ok(())
}
#[derive(Default)]
struct RustcMirAttrs {
basename_and_suffix: Option<PathBuf>,
formatter: Option<Symbol>,
}
impl RustcMirAttrs {
fn parse(tcx: TyCtxt<'tcx>, def_id: DefId) -> Result<Self, ()> {
let attrs = tcx.get_attrs(def_id);
let mut result = Ok(());
let mut ret = RustcMirAttrs::default();
let rustc_mir_attrs = attrs
.iter()
.filter(|attr| attr.has_name(sym::rustc_mir))
.flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter()));
for attr in rustc_mir_attrs {
let attr_result = if attr.has_name(sym::borrowck_graphviz_postflow) {
Self::set_field(&mut ret.basename_and_suffix, tcx, &attr, |s| {
let path = PathBuf::from(s.to_string());
match path.file_name() {
Some(_) => Ok(path),
None => {
tcx.sess.span_err(attr.span(), "path must end in a filename");
Err(())
}
}
})
} else if attr.has_name(sym::borrowck_graphviz_format) {
Self::set_field(&mut ret.formatter, tcx, &attr, |s| match s {
sym::gen_kill | sym::two_phase => Ok(s),
_ => {
tcx.sess.span_err(attr.span(), "unknown formatter");
Err(())
}
})
} else {
Ok(())
};
result = result.and(attr_result);
}
result.map(|()| ret)
}
fn set_field<T>(
field: &mut Option<T>,
tcx: TyCtxt<'tcx>,
attr: &ast::NestedMetaItem,
mapper: impl FnOnce(Symbol) -> Result<T, ()>,
) -> Result<(), ()> {
if field.is_some() {
tcx.sess
.span_err(attr.span(), &format!("duplicate values for `{}`", attr.name_or_empty()));
return Err(());
}
if let Some(s) = attr.value_str() {
*field = Some(mapper(s)?);
Ok(())
} else {
tcx.sess
.span_err(attr.span(), &format!("`{}` requires an argument", attr.name_or_empty()));
Err(())
}
}
/// Returns the path where dataflow results should be written, or `None`
/// `borrowck_graphviz_postflow` was not specified.
///
/// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
///
/// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
fn output_path(&self, analysis_name: &str) -> Option<PathBuf> {
let mut ret = self.basename_and_suffix.as_ref().cloned()?;
let suffix = ret.file_name().unwrap(); // Checked when parsing attrs
let mut file_name: OsString = analysis_name.into();
file_name.push("_");
file_name.push(suffix);
ret.set_file_name(file_name);
Some(ret)
}
}
| iterate_to_fixpoint | identifier_name |
engine.rs | //! A solver for dataflow problems.
use std::borrow::BorrowMut;
use std::ffi::OsString;
use std::path::PathBuf;
use rustc_ast as ast;
use rustc_data_structures::work_queue::WorkQueue;
use rustc_graphviz as dot;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::{self, traversal, BasicBlock};
use rustc_middle::mir::{create_dump_file, dump_enabled};
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::{sym, Symbol};
use super::fmt::DebugWithContext;
use super::graphviz;
use super::{
visit_results, Analysis, Direction, GenKill, GenKillAnalysis, GenKillSet, JoinSemiLattice,
ResultsCursor, ResultsVisitor,
};
/// A dataflow analysis that has converged to fixpoint.
pub struct Results<'tcx, A>
where
A: Analysis<'tcx>,
{
pub analysis: A,
pub(super) entry_sets: IndexVec<BasicBlock, A::Domain>,
}
impl<A> Results<'tcx, A>
where
A: Analysis<'tcx>,
{
/// Creates a `ResultsCursor` that can inspect these `Results`.
pub fn into_results_cursor(self, body: &'mir mir::Body<'tcx>) -> ResultsCursor<'mir, 'tcx, A> {
ResultsCursor::new(body, self)
}
/// Gets the dataflow state for the given block.
pub fn entry_set_for_block(&self, block: BasicBlock) -> &A::Domain {
&self.entry_sets[block]
}
pub fn visit_with(
&self,
body: &'mir mir::Body<'tcx>,
blocks: impl IntoIterator<Item = BasicBlock>,
vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = A::Domain>,
) {
visit_results(body, blocks, self, vis)
}
pub fn visit_reachable_with(
&self,
body: &'mir mir::Body<'tcx>,
vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = A::Domain>,
) {
let blocks = mir::traversal::reachable(body);
visit_results(body, blocks.map(|(bb, _)| bb), self, vis)
}
}
/// A solver for dataflow problems.
pub struct Engine<'a, 'tcx, A>
where
A: Analysis<'tcx>,
{
tcx: TyCtxt<'tcx>,
body: &'a mir::Body<'tcx>,
dead_unwinds: Option<&'a BitSet<BasicBlock>>,
entry_sets: IndexVec<BasicBlock, A::Domain>,
pass_name: Option<&'static str>,
analysis: A,
/// Cached, cumulative transfer functions for each block.
//
// FIXME(ecstaticmorse): This boxed `Fn` trait object is invoked inside a tight loop for
// gen/kill problems on cyclic CFGs. This is not ideal, but it doesn't seem to degrade
// performance in practice. I've tried a few ways to avoid this, but they have downsides. See
// the message for the commit that added this FIXME for more information.
apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
}
impl<A, D, T> Engine<'a, 'tcx, A>
where
A: GenKillAnalysis<'tcx, Idx = T, Domain = D>,
D: Clone + JoinSemiLattice + GenKill<T> + BorrowMut<BitSet<T>>,
T: Idx,
{
/// Creates a new `Engine` to solve a gen-kill dataflow problem.
pub fn new_gen_kill(tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, analysis: A) -> Self {
// If there are no back-edges in the control-flow graph, we only ever need to apply the
// transfer function for each block exactly once (assuming that we process blocks in RPO).
//
// In this case, there's no need to compute the block transfer functions ahead of time.
if !body.is_cfg_cyclic() {
return Self::new(tcx, body, analysis, None);
}
// Otherwise, compute and store the cumulative transfer function for each block.
let identity = GenKillSet::identity(analysis.bottom_value(body).borrow().domain_size());
let mut trans_for_block = IndexVec::from_elem(identity, body.basic_blocks());
for (block, block_data) in body.basic_blocks().iter_enumerated() {
let trans = &mut trans_for_block[block];
A::Direction::gen_kill_effects_in_block(&analysis, trans, block, block_data);
}
let apply_trans = Box::new(move |bb: BasicBlock, state: &mut A::Domain| {
trans_for_block[bb].apply(state.borrow_mut());
});
Self::new(tcx, body, analysis, Some(apply_trans as Box<_>))
}
}
impl<A, D> Engine<'a, 'tcx, A>
where
A: Analysis<'tcx, Domain = D>,
D: Clone + JoinSemiLattice,
{
/// Creates a new `Engine` to solve a dataflow problem with an arbitrary transfer
/// function.
///
/// Gen-kill problems should use `new_gen_kill`, which will coalesce transfer functions for
/// better performance.
pub fn new_generic(tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, analysis: A) -> Self {
Self::new(tcx, body, analysis, None)
}
fn new(
tcx: TyCtxt<'tcx>,
body: &'a mir::Body<'tcx>,
analysis: A,
apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
) -> Self {
let bottom_value = analysis.bottom_value(body);
let mut entry_sets = IndexVec::from_elem(bottom_value.clone(), body.basic_blocks());
analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);
if A::Direction::is_backward() && entry_sets[mir::START_BLOCK] != bottom_value {
bug!("`initialize_start_block` is not yet supported for backward dataflow analyses");
}
Engine {
analysis,
tcx,
body,
dead_unwinds: None,
pass_name: None,
entry_sets,
apply_trans_for_block,
}
}
/// Signals that we do not want dataflow state to propagate across unwind edges for these
/// `BasicBlock`s.
///
/// You must take care that `dead_unwinds` does not contain a `BasicBlock` that *can* actually
/// unwind during execution. Otherwise, your dataflow results will not be correct.
pub fn dead_unwinds(mut self, dead_unwinds: &'a BitSet<BasicBlock>) -> Self {
self.dead_unwinds = Some(dead_unwinds);
self
}
/// Adds an identifier to the graphviz output for this particular run of a dataflow analysis.
///
/// Some analyses are run multiple times in the compilation pipeline. Give them a `pass_name`
/// to differentiate them. Otherwise, only the results for the latest run will be saved.
pub fn pass_name(mut self, name: &'static str) -> Self {
self.pass_name = Some(name);
self
}
/// Computes the fixpoint for this dataflow problem and returns it.
pub fn iterate_to_fixpoint(self) -> Results<'tcx, A>
where
A::Domain: DebugWithContext<A>,
{
let Engine {
analysis,
body,
dead_unwinds,
mut entry_sets,
tcx,
apply_trans_for_block,
pass_name,
..
} = self;
let mut dirty_queue: WorkQueue<BasicBlock> =
WorkQueue::with_none(body.basic_blocks().len());
if A::Direction::is_forward() {
for (bb, _) in traversal::reverse_postorder(body) {
dirty_queue.insert(bb);
}
} else {
// Reverse post-order on the reverse CFG may generate a better iteration order for
// backward dataflow analyses, but probably not enough to matter.
for (bb, _) in traversal::postorder(body) {
dirty_queue.insert(bb);
}
}
// `state` is not actually used between iterations;
// this is just an optimization to avoid reallocating
// every iteration.
let mut state = analysis.bottom_value(body);
while let Some(bb) = dirty_queue.pop() {
let bb_data = &body[bb];
// Set the state to the entry state of the block.
// This is equivalent to `state = entry_sets[bb].clone()`,
// but it saves an allocation, thus improving compile times.
state.clone_from(&entry_sets[bb]);
// Apply the block transfer function, using the cached one if it exists.
match &apply_trans_for_block {
Some(apply) => apply(bb, &mut state),
None => A::Direction::apply_effects_in_block(&analysis, &mut state, bb, bb_data),
}
A::Direction::join_state_into_successors_of(
&analysis,
tcx,
body,
dead_unwinds,
&mut state,
(bb, bb_data),
|target: BasicBlock, state: &A::Domain| {
let set_changed = entry_sets[target].join(state);
if set_changed {
dirty_queue.insert(target);
}
},
);
}
let results = Results { analysis, entry_sets };
let res = write_graphviz_results(tcx, &body, &results, pass_name);
if let Err(e) = res {
error!("Failed to write graphviz dataflow results: {}", e);
}
results
}
}
// Graphviz
/// Writes a DOT file containing the results of a dataflow analysis if the user requested it via | tcx: TyCtxt<'tcx>,
body: &mir::Body<'tcx>,
results: &Results<'tcx, A>,
pass_name: Option<&'static str>,
) -> std::io::Result<()>
where
A: Analysis<'tcx>,
A::Domain: DebugWithContext<A>,
{
use std::fs;
use std::io::{self, Write};
let def_id = body.source.def_id();
let attrs = match RustcMirAttrs::parse(tcx, def_id) {
Ok(attrs) => attrs,
// Invalid `rustc_mir` attrs are reported in `RustcMirAttrs::parse`
Err(()) => return Ok(()),
};
let mut file = match attrs.output_path(A::NAME) {
Some(path) => {
debug!("printing dataflow results for {:?} to {}", def_id, path.display());
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
io::BufWriter::new(fs::File::create(&path)?)
}
None if tcx.sess.opts.debugging_opts.dump_mir_dataflow
&& dump_enabled(tcx, A::NAME, def_id) =>
{
create_dump_file(
tcx,
".dot",
None,
A::NAME,
&pass_name.unwrap_or("-----"),
body.source,
)?
}
_ => return Ok(()),
};
let style = match attrs.formatter {
Some(sym::two_phase) => graphviz::OutputStyle::BeforeAndAfter,
_ => graphviz::OutputStyle::AfterOnly,
};
let mut buf = Vec::new();
let graphviz = graphviz::Formatter::new(body, results, style);
let mut render_opts =
vec![dot::RenderOption::Fontname(tcx.sess.opts.debugging_opts.graphviz_font.clone())];
if tcx.sess.opts.debugging_opts.graphviz_dark_mode {
render_opts.push(dot::RenderOption::DarkTheme);
}
dot::render_opts(&graphviz, &mut buf, &render_opts)?;
file.write_all(&buf)?;
Ok(())
}
#[derive(Default)]
struct RustcMirAttrs {
basename_and_suffix: Option<PathBuf>,
formatter: Option<Symbol>,
}
impl RustcMirAttrs {
fn parse(tcx: TyCtxt<'tcx>, def_id: DefId) -> Result<Self, ()> {
let attrs = tcx.get_attrs(def_id);
let mut result = Ok(());
let mut ret = RustcMirAttrs::default();
let rustc_mir_attrs = attrs
.iter()
.filter(|attr| attr.has_name(sym::rustc_mir))
.flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter()));
for attr in rustc_mir_attrs {
let attr_result = if attr.has_name(sym::borrowck_graphviz_postflow) {
Self::set_field(&mut ret.basename_and_suffix, tcx, &attr, |s| {
let path = PathBuf::from(s.to_string());
match path.file_name() {
Some(_) => Ok(path),
None => {
tcx.sess.span_err(attr.span(), "path must end in a filename");
Err(())
}
}
})
} else if attr.has_name(sym::borrowck_graphviz_format) {
Self::set_field(&mut ret.formatter, tcx, &attr, |s| match s {
sym::gen_kill | sym::two_phase => Ok(s),
_ => {
tcx.sess.span_err(attr.span(), "unknown formatter");
Err(())
}
})
} else {
Ok(())
};
result = result.and(attr_result);
}
result.map(|()| ret)
}
fn set_field<T>(
field: &mut Option<T>,
tcx: TyCtxt<'tcx>,
attr: &ast::NestedMetaItem,
mapper: impl FnOnce(Symbol) -> Result<T, ()>,
) -> Result<(), ()> {
if field.is_some() {
tcx.sess
.span_err(attr.span(), &format!("duplicate values for `{}`", attr.name_or_empty()));
return Err(());
}
if let Some(s) = attr.value_str() {
*field = Some(mapper(s)?);
Ok(())
} else {
tcx.sess
.span_err(attr.span(), &format!("`{}` requires an argument", attr.name_or_empty()));
Err(())
}
}
/// Returns the path where dataflow results should be written, or `None`
/// `borrowck_graphviz_postflow` was not specified.
///
/// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
///
/// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
fn output_path(&self, analysis_name: &str) -> Option<PathBuf> {
let mut ret = self.basename_and_suffix.as_ref().cloned()?;
let suffix = ret.file_name().unwrap(); // Checked when parsing attrs
let mut file_name: OsString = analysis_name.into();
file_name.push("_");
file_name.push(suffix);
ret.set_file_name(file_name);
Some(ret)
}
} | /// `rustc_mir` attributes.
fn write_graphviz_results<A>( | random_line_split |
length-test.js | /**
* Copyright 2015, Yahoo! Inc.
* Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
*/
import Ember from 'ember';
import {
moduleFor, test
}
from 'ember-qunit';
var options, validator, message;
var set = Ember.set;
moduleFor('validator:length', 'Unit | Validator | length', {
needs: ['validator:messages'],
setup: function() {
validator = this.subject();
}
});
test('no options', function(assert) {
assert.expect(1);
message = validator.validate();
assert.equal(message, true);
});
test('allow blank', function(assert) {
assert.expect(2);
options = {
allowBlank: true,
min: 5
};
set(validator, 'options', options);
message = validator.validate();
assert.equal(message, true);
message = validator.validate('test');
assert.equal(message, 'This field is too short (minimum is 5 characters)');
});
test('is', function(assert) {
assert.expect(2);
options = { | is: 4
};
set(validator, 'options', options);
message = validator.validate('testing');
assert.equal(message, 'This field is the wrong length (should be 4 characters)');
message = validator.validate('test');
assert.equal(message, true);
});
test('min', function(assert) {
assert.expect(2);
options = {
min: 5
};
set(validator, 'options', options);
message = validator.validate('test');
assert.equal(message, 'This field is too short (minimum is 5 characters)');
message = validator.validate('testing');
assert.equal(message, true);
});
test('max', function(assert) {
assert.expect(2);
options = {
max: 5
};
set(validator, 'options', options);
message = validator.validate('testing');
assert.equal(message, 'This field is too long (maximum is 5 characters)');
message = validator.validate('test');
assert.equal(message, true);
});
test('message function', function(assert) {
assert.expect(1);
options = {
max: 5,
message: function(type, options, value) {
return "is too long brosef. It like cant be more than like %@ characters";
}
};
set(validator, 'options', options);
message = validator.validate('testing');
assert.equal(message, 'This field is too long brosef. It like cant be more than like 5 characters');
}); | random_line_split | |
as_unsigned_mut.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
// pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
// }
// macro_rules! impl_int_slice {
// ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u8, i8 }
type U = u8;
type S = i8;
type T = S;
#[test]
fn | () {
let slice: &mut [T] = &mut [0];
{
let as_unsigned_mut: &mut [U] = slice.as_unsigned_mut();
as_unsigned_mut[0] = 0xff;
}
assert_eq!(slice, &mut[-1]);
}
}
| as_unsigned_mut_test1 | identifier_name |
as_unsigned_mut.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
// pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
// }
// macro_rules! impl_int_slice {
// ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u8, i8 }
type U = u8;
type S = i8;
type T = S;
#[test]
fn as_unsigned_mut_test1() |
}
| {
let slice: &mut [T] = &mut [0];
{
let as_unsigned_mut: &mut [U] = slice.as_unsigned_mut();
as_unsigned_mut[0] = 0xff;
}
assert_eq!(slice, &mut[-1]);
} | identifier_body |
as_unsigned_mut.rs | #![feature(core)]
extern crate core; | // pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
// }
// macro_rules! impl_int_slice {
// ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u8, i8 }
type U = u8;
type S = i8;
type T = S;
#[test]
fn as_unsigned_mut_test1() {
let slice: &mut [T] = &mut [0];
{
let as_unsigned_mut: &mut [U] = slice.as_unsigned_mut();
as_unsigned_mut[0] = 0xff;
}
assert_eq!(slice, &mut[-1]);
}
} |
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
| random_line_split |
mod.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Method lookup: the secret sauce of Rust. See `README.md`.
use astconv::AstConv;
use check::{FnCtxt};
use check::vtable;
use check::vtable::select_new_fcx_obligations;
use middle::def;
use middle::privacy::{AllPublic, DependsOn, LastPrivate, LastMod};
use middle::subst;
use middle::traits;
use middle::ty::*;
use middle::ty;
use middle::infer;
use util::ppaux::Repr;
use std::rc::Rc;
use syntax::ast::{DefId};
use syntax::ast;
use syntax::codemap::Span;
pub use self::MethodError::*;
pub use self::CandidateSource::*;
pub use self::suggest::{report_error, AllTraitsVec};
mod confirm;
mod probe;
mod suggest;
pub enum MethodError {
// Did not find an applicable method, but we did find various
// static methods that may apply, as well as a list of
// not-in-scope traits which may work.
NoMatch(Vec<CandidateSource>, Vec<ast::DefId>),
// Multiple methods might apply.
Ambiguity(Vec<CandidateSource>),
// Using a `Fn`/`FnMut`/etc method on a raw closure type before we have inferred its kind.
ClosureAmbiguity(/* DefId of fn trait */ ast::DefId),
}
// A pared down enum describing just the places from which a method
// candidate can arise. Used for error reporting only.
#[derive(Copy, PartialOrd, Ord, PartialEq, Eq)]
pub enum CandidateSource {
ImplSource(ast::DefId),
TraitSource(/* trait id */ ast::DefId),
}
type MethodIndex = uint; // just for doc purposes
/// Determines whether the type `self_ty` supports a method name `method_name` or not.
pub fn exists<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
method_name: ast::Name,
self_ty: Ty<'tcx>,
call_expr_id: ast::NodeId)
-> bool
{
let mode = probe::Mode::MethodCall;
match probe::probe(fcx, span, mode, method_name, self_ty, call_expr_id) {
Ok(..) => true,
Err(NoMatch(..)) => false,
Err(Ambiguity(..)) => true,
Err(ClosureAmbiguity(..)) => true,
}
}
/// Performs method lookup. If lookup is successful, it will return the callee and store an
/// appropriate adjustment for the self-expr. In some cases it may report an error (e.g., invoking
/// the `drop` method).
///
/// # Arguments
///
/// Given a method call like `foo.bar::<T1,...Tn>(...)`:
///
/// * `fcx`: the surrounding `FnCtxt` (!)
/// * `span`: the span for the method call
/// * `method_name`: the name of the method being called (`bar`)
/// * `self_ty`: the (unadjusted) type of the self expression (`foo`)
/// * `supplied_method_types`: the explicit method type parameters, if any (`T1..Tn`)
/// * `self_expr`: the self expression (`foo`)
pub fn lookup<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
method_name: ast::Name,
self_ty: Ty<'tcx>,
supplied_method_types: Vec<Ty<'tcx>>,
call_expr: &'tcx ast::Expr,
self_expr: &'tcx ast::Expr)
-> Result<MethodCallee<'tcx>, MethodError>
{
debug!("lookup(method_name={}, self_ty={}, call_expr={}, self_expr={})",
method_name.repr(fcx.tcx()),
self_ty.repr(fcx.tcx()),
call_expr.repr(fcx.tcx()),
self_expr.repr(fcx.tcx()));
let mode = probe::Mode::MethodCall;
let self_ty = fcx.infcx().resolve_type_vars_if_possible(&self_ty);
let pick = try!(probe::probe(fcx, span, mode, method_name, self_ty, call_expr.id));
Ok(confirm::confirm(fcx, span, self_expr, call_expr, self_ty, pick, supplied_method_types))
}
pub fn lookup_in_trait<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
self_expr: Option<&ast::Expr>,
m_name: ast::Name,
trait_def_id: DefId,
self_ty: Ty<'tcx>,
opt_input_types: Option<Vec<Ty<'tcx>>>)
-> Option<MethodCallee<'tcx>>
{
lookup_in_trait_adjusted(fcx, span, self_expr, m_name, trait_def_id,
ty::AutoDerefRef { autoderefs: 0, autoref: None },
self_ty, opt_input_types)
}
/// `lookup_in_trait_adjusted` is used for overloaded operators. It does a very narrow slice of
/// what the normal probe/confirm path does. In particular, it doesn't really do any probing: it
/// simply constructs an obligation for a particular trait with the given self-type and checks
/// whether that trait is implemented.
///
/// FIXME(#18741) -- It seems likely that we can consolidate some of this code with the other
/// method-lookup code. In particular, autoderef on index is basically identical to autoderef with
/// normal probes, except that the test also looks for built-in indexing. Also, the second half of
/// this method is basically the same as confirmation.
pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
self_expr: Option<&ast::Expr>,
m_name: ast::Name,
trait_def_id: DefId,
autoderefref: ty::AutoDerefRef<'tcx>,
self_ty: Ty<'tcx>,
opt_input_types: Option<Vec<Ty<'tcx>>>)
-> Option<MethodCallee<'tcx>>
{
debug!("lookup_in_trait_adjusted(self_ty={}, self_expr={}, m_name={}, trait_def_id={})",
self_ty.repr(fcx.tcx()),
self_expr.repr(fcx.tcx()),
m_name.repr(fcx.tcx()),
trait_def_id.repr(fcx.tcx()));
let trait_def = ty::lookup_trait_def(fcx.tcx(), trait_def_id);
let expected_number_of_input_types = trait_def.generics.types.len(subst::TypeSpace);
let input_types = match opt_input_types {
Some(input_types) => {
assert_eq!(expected_number_of_input_types, input_types.len());
input_types
}
None => {
fcx.inh.infcx.next_ty_vars(expected_number_of_input_types)
}
};
assert_eq!(trait_def.generics.types.len(subst::FnSpace), 0);
assert!(trait_def.generics.regions.is_empty());
// Construct a trait-reference `self_ty : Trait<input_tys>`
let substs = subst::Substs::new_trait(input_types, Vec::new(), self_ty);
let trait_ref = Rc::new(ty::TraitRef::new(trait_def_id, fcx.tcx().mk_substs(substs)));
// Construct an obligation
let poly_trait_ref = trait_ref.to_poly_trait_ref();
let obligation = traits::Obligation::misc(span,
fcx.body_id,
poly_trait_ref.as_predicate());
// Now we want to know if this can be matched
let mut selcx = traits::SelectionContext::new(fcx.infcx(), fcx);
if !selcx.evaluate_obligation(&obligation) {
debug!("--> Cannot match obligation");
return None; // Cannot be matched, no such method resolution is possible.
}
// Trait must have a method named `m_name` and it should not have
// type parameters or early-bound regions.
let tcx = fcx.tcx();
let (method_num, method_ty) = trait_method(tcx, trait_def_id, m_name).unwrap();
assert_eq!(method_ty.generics.types.len(subst::FnSpace), 0);
assert_eq!(method_ty.generics.regions.len(subst::FnSpace), 0);
debug!("lookup_in_trait_adjusted: method_num={} method_ty={}",
method_num, method_ty.repr(fcx.tcx()));
// Instantiate late-bound regions and substitute the trait
// parameters into the method type to get the actual method type.
//
// NB: Instantiate late-bound regions first so that
// `instantiate_type_scheme` can normalize associated types that
// may reference those regions.
let fn_sig = fcx.infcx().replace_late_bound_regions_with_fresh_var(span,
infer::FnCall,
&method_ty.fty.sig).0;
let fn_sig = fcx.instantiate_type_scheme(span, trait_ref.substs, &fn_sig);
let transformed_self_ty = fn_sig.inputs[0];
let fty = ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(ty::BareFnTy {
sig: ty::Binder(fn_sig),
unsafety: method_ty.fty.unsafety,
abi: method_ty.fty.abi.clone(),
}));
debug!("lookup_in_trait_adjusted: matched method fty={} obligation={}",
fty.repr(fcx.tcx()),
obligation.repr(fcx.tcx()));
// Register obligations for the parameters. This will include the
// `Self` parameter, which in turn has a bound of the main trait,
// so this also effectively registers `obligation` as well. (We
// used to register `obligation` explicitly, but that resulted in
// double error messages being reported.)
//
// Note that as the method comes from a trait, it should not have
// any late-bound regions appearing in its bounds.
let method_bounds = fcx.instantiate_bounds(span, trait_ref.substs, &method_ty.predicates);
assert!(!method_bounds.has_escaping_regions());
fcx.add_obligations_for_parameters(
traits::ObligationCause::misc(span, fcx.body_id),
&method_bounds);
// FIXME(#18653) -- Try to resolve obligations, giving us more
// typing information, which can sometimes be needed to avoid
// pathological region inference failures.
vtable::select_new_fcx_obligations(fcx);
// Insert any adjustments needed (always an autoref of some mutability).
match self_expr {
None => { }
Some(self_expr) => {
debug!("lookup_in_trait_adjusted: inserting adjustment if needed \
(self-id={}, base adjustment={:?}, explicit_self={:?})",
self_expr.id, autoderefref, method_ty.explicit_self);
match method_ty.explicit_self {
ty::ByValueExplicitSelfCategory => {
// Trait method is fn(self), no transformation needed.
if !autoderefref.is_identity() {
fcx.write_adjustment(
self_expr.id,
span,
ty::AdjustDerefRef(autoderefref));
}
}
ty::ByReferenceExplicitSelfCategory(..) => {
// Trait method is fn(&self) or fn(&mut self), need an
// autoref. Pull the region etc out of the type of first argument.
match transformed_self_ty.sty {
ty::ty_rptr(region, ty::mt { mutbl, ty: _ }) => {
let ty::AutoDerefRef { autoderefs, autoref } = autoderefref;
let autoref = autoref.map(|r| box r);
fcx.write_adjustment(
self_expr.id,
span,
ty::AdjustDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs,
autoref: Some(ty::AutoPtr(*region, mutbl, autoref))
}));
}
_ => {
fcx.tcx().sess.span_bug(
span,
&format!(
"trait method is &self but first arg is: {}",
transformed_self_ty.repr(fcx.tcx())));
}
}
}
_ => {
fcx.tcx().sess.span_bug(
span,
&format!(
"unexpected explicit self type in operator method: {:?}",
method_ty.explicit_self));
}
}
}
}
let callee = MethodCallee {
origin: MethodTypeParam(MethodParam{trait_ref: trait_ref.clone(),
method_num: method_num,
impl_def_id: None}),
ty: fty,
substs: trait_ref.substs.clone()
};
debug!("callee = {}", callee.repr(fcx.tcx()));
Some(callee)
}
pub fn | <'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
method_name: ast::Name,
self_ty: Ty<'tcx>,
expr_id: ast::NodeId)
-> Result<(def::Def, LastPrivate), MethodError>
{
let mode = probe::Mode::Path;
let pick = try!(probe::probe(fcx, span, mode, method_name, self_ty, expr_id));
let def_id = pick.method_ty.def_id;
let mut lp = LastMod(AllPublic);
let provenance = match pick.kind {
probe::InherentImplPick(impl_def_id) => {
if pick.method_ty.vis != ast::Public {
lp = LastMod(DependsOn(def_id));
}
def::FromImpl(impl_def_id)
}
_ => def::FromTrait(pick.method_ty.container.id())
};
Ok((def::DefMethod(def_id, provenance), lp))
}
/// Find method with name `method_name` defined in `trait_def_id` and return it, along with its
/// index (or `None`, if no such method).
fn trait_method<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId,
method_name: ast::Name)
-> Option<(uint, Rc<ty::Method<'tcx>>)>
{
let trait_items = ty::trait_items(tcx, trait_def_id);
trait_items
.iter()
.enumerate()
.find(|&(_, ref item)| item.name() == method_name)
.and_then(|(idx, item)| item.as_opt_method().map(|m| (idx, m)))
}
fn impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
impl_def_id: ast::DefId,
method_name: ast::Name)
-> Option<Rc<ty::Method<'tcx>>>
{
let impl_items = tcx.impl_items.borrow();
let impl_items = impl_items.get(&impl_def_id).unwrap();
impl_items
.iter()
.map(|&did| ty::impl_or_trait_item(tcx, did.def_id()))
.find(|m| m.name() == method_name)
.and_then(|item| item.as_opt_method())
}
| resolve_ufcs | identifier_name |
mod.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Method lookup: the secret sauce of Rust. See `README.md`.
use astconv::AstConv;
use check::{FnCtxt};
use check::vtable;
use check::vtable::select_new_fcx_obligations;
use middle::def;
use middle::privacy::{AllPublic, DependsOn, LastPrivate, LastMod};
use middle::subst;
use middle::traits;
use middle::ty::*;
use middle::ty;
use middle::infer;
use util::ppaux::Repr;
use std::rc::Rc;
use syntax::ast::{DefId};
use syntax::ast;
use syntax::codemap::Span;
pub use self::MethodError::*;
pub use self::CandidateSource::*;
pub use self::suggest::{report_error, AllTraitsVec};
mod confirm;
mod probe;
mod suggest;
pub enum MethodError {
// Did not find an applicable method, but we did find various
// static methods that may apply, as well as a list of
// not-in-scope traits which may work.
NoMatch(Vec<CandidateSource>, Vec<ast::DefId>),
// Multiple methods might apply.
Ambiguity(Vec<CandidateSource>),
// Using a `Fn`/`FnMut`/etc method on a raw closure type before we have inferred its kind.
ClosureAmbiguity(/* DefId of fn trait */ ast::DefId),
}
// A pared down enum describing just the places from which a method
// candidate can arise. Used for error reporting only.
#[derive(Copy, PartialOrd, Ord, PartialEq, Eq)]
pub enum CandidateSource {
ImplSource(ast::DefId),
TraitSource(/* trait id */ ast::DefId),
}
type MethodIndex = uint; // just for doc purposes
/// Determines whether the type `self_ty` supports a method name `method_name` or not.
pub fn exists<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
method_name: ast::Name,
self_ty: Ty<'tcx>,
call_expr_id: ast::NodeId)
-> bool | Ok(..) => true,
Err(NoMatch(..)) => false,
Err(Ambiguity(..)) => true,
Err(ClosureAmbiguity(..)) => true,
}
}
/// Performs method lookup. If lookup is successful, it will return the callee and store an
/// appropriate adjustment for the self-expr. In some cases it may report an error (e.g., invoking
/// the `drop` method).
///
/// # Arguments
///
/// Given a method call like `foo.bar::<T1,...Tn>(...)`:
///
/// * `fcx`: the surrounding `FnCtxt` (!)
/// * `span`: the span for the method call
/// * `method_name`: the name of the method being called (`bar`)
/// * `self_ty`: the (unadjusted) type of the self expression (`foo`)
/// * `supplied_method_types`: the explicit method type parameters, if any (`T1..Tn`)
/// * `self_expr`: the self expression (`foo`)
pub fn lookup<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
method_name: ast::Name,
self_ty: Ty<'tcx>,
supplied_method_types: Vec<Ty<'tcx>>,
call_expr: &'tcx ast::Expr,
self_expr: &'tcx ast::Expr)
-> Result<MethodCallee<'tcx>, MethodError>
{
debug!("lookup(method_name={}, self_ty={}, call_expr={}, self_expr={})",
method_name.repr(fcx.tcx()),
self_ty.repr(fcx.tcx()),
call_expr.repr(fcx.tcx()),
self_expr.repr(fcx.tcx()));
let mode = probe::Mode::MethodCall;
let self_ty = fcx.infcx().resolve_type_vars_if_possible(&self_ty);
let pick = try!(probe::probe(fcx, span, mode, method_name, self_ty, call_expr.id));
Ok(confirm::confirm(fcx, span, self_expr, call_expr, self_ty, pick, supplied_method_types))
}
pub fn lookup_in_trait<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
self_expr: Option<&ast::Expr>,
m_name: ast::Name,
trait_def_id: DefId,
self_ty: Ty<'tcx>,
opt_input_types: Option<Vec<Ty<'tcx>>>)
-> Option<MethodCallee<'tcx>>
{
lookup_in_trait_adjusted(fcx, span, self_expr, m_name, trait_def_id,
ty::AutoDerefRef { autoderefs: 0, autoref: None },
self_ty, opt_input_types)
}
/// `lookup_in_trait_adjusted` is used for overloaded operators. It does a very narrow slice of
/// what the normal probe/confirm path does. In particular, it doesn't really do any probing: it
/// simply constructs an obligation for a particular trait with the given self-type and checks
/// whether that trait is implemented.
///
/// FIXME(#18741) -- It seems likely that we can consolidate some of this code with the other
/// method-lookup code. In particular, autoderef on index is basically identical to autoderef with
/// normal probes, except that the test also looks for built-in indexing. Also, the second half of
/// this method is basically the same as confirmation.
pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
self_expr: Option<&ast::Expr>,
m_name: ast::Name,
trait_def_id: DefId,
autoderefref: ty::AutoDerefRef<'tcx>,
self_ty: Ty<'tcx>,
opt_input_types: Option<Vec<Ty<'tcx>>>)
-> Option<MethodCallee<'tcx>>
{
debug!("lookup_in_trait_adjusted(self_ty={}, self_expr={}, m_name={}, trait_def_id={})",
self_ty.repr(fcx.tcx()),
self_expr.repr(fcx.tcx()),
m_name.repr(fcx.tcx()),
trait_def_id.repr(fcx.tcx()));
let trait_def = ty::lookup_trait_def(fcx.tcx(), trait_def_id);
let expected_number_of_input_types = trait_def.generics.types.len(subst::TypeSpace);
let input_types = match opt_input_types {
Some(input_types) => {
assert_eq!(expected_number_of_input_types, input_types.len());
input_types
}
None => {
fcx.inh.infcx.next_ty_vars(expected_number_of_input_types)
}
};
assert_eq!(trait_def.generics.types.len(subst::FnSpace), 0);
assert!(trait_def.generics.regions.is_empty());
// Construct a trait-reference `self_ty : Trait<input_tys>`
let substs = subst::Substs::new_trait(input_types, Vec::new(), self_ty);
let trait_ref = Rc::new(ty::TraitRef::new(trait_def_id, fcx.tcx().mk_substs(substs)));
// Construct an obligation
let poly_trait_ref = trait_ref.to_poly_trait_ref();
let obligation = traits::Obligation::misc(span,
fcx.body_id,
poly_trait_ref.as_predicate());
// Now we want to know if this can be matched
let mut selcx = traits::SelectionContext::new(fcx.infcx(), fcx);
if !selcx.evaluate_obligation(&obligation) {
debug!("--> Cannot match obligation");
return None; // Cannot be matched, no such method resolution is possible.
}
// Trait must have a method named `m_name` and it should not have
// type parameters or early-bound regions.
let tcx = fcx.tcx();
let (method_num, method_ty) = trait_method(tcx, trait_def_id, m_name).unwrap();
assert_eq!(method_ty.generics.types.len(subst::FnSpace), 0);
assert_eq!(method_ty.generics.regions.len(subst::FnSpace), 0);
debug!("lookup_in_trait_adjusted: method_num={} method_ty={}",
method_num, method_ty.repr(fcx.tcx()));
// Instantiate late-bound regions and substitute the trait
// parameters into the method type to get the actual method type.
//
// NB: Instantiate late-bound regions first so that
// `instantiate_type_scheme` can normalize associated types that
// may reference those regions.
let fn_sig = fcx.infcx().replace_late_bound_regions_with_fresh_var(span,
infer::FnCall,
&method_ty.fty.sig).0;
let fn_sig = fcx.instantiate_type_scheme(span, trait_ref.substs, &fn_sig);
let transformed_self_ty = fn_sig.inputs[0];
let fty = ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(ty::BareFnTy {
sig: ty::Binder(fn_sig),
unsafety: method_ty.fty.unsafety,
abi: method_ty.fty.abi.clone(),
}));
debug!("lookup_in_trait_adjusted: matched method fty={} obligation={}",
fty.repr(fcx.tcx()),
obligation.repr(fcx.tcx()));
// Register obligations for the parameters. This will include the
// `Self` parameter, which in turn has a bound of the main trait,
// so this also effectively registers `obligation` as well. (We
// used to register `obligation` explicitly, but that resulted in
// double error messages being reported.)
//
// Note that as the method comes from a trait, it should not have
// any late-bound regions appearing in its bounds.
let method_bounds = fcx.instantiate_bounds(span, trait_ref.substs, &method_ty.predicates);
assert!(!method_bounds.has_escaping_regions());
fcx.add_obligations_for_parameters(
traits::ObligationCause::misc(span, fcx.body_id),
&method_bounds);
// FIXME(#18653) -- Try to resolve obligations, giving us more
// typing information, which can sometimes be needed to avoid
// pathological region inference failures.
vtable::select_new_fcx_obligations(fcx);
// Insert any adjustments needed (always an autoref of some mutability).
match self_expr {
None => { }
Some(self_expr) => {
debug!("lookup_in_trait_adjusted: inserting adjustment if needed \
(self-id={}, base adjustment={:?}, explicit_self={:?})",
self_expr.id, autoderefref, method_ty.explicit_self);
match method_ty.explicit_self {
ty::ByValueExplicitSelfCategory => {
// Trait method is fn(self), no transformation needed.
if !autoderefref.is_identity() {
fcx.write_adjustment(
self_expr.id,
span,
ty::AdjustDerefRef(autoderefref));
}
}
ty::ByReferenceExplicitSelfCategory(..) => {
// Trait method is fn(&self) or fn(&mut self), need an
// autoref. Pull the region etc out of the type of first argument.
match transformed_self_ty.sty {
ty::ty_rptr(region, ty::mt { mutbl, ty: _ }) => {
let ty::AutoDerefRef { autoderefs, autoref } = autoderefref;
let autoref = autoref.map(|r| box r);
fcx.write_adjustment(
self_expr.id,
span,
ty::AdjustDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs,
autoref: Some(ty::AutoPtr(*region, mutbl, autoref))
}));
}
_ => {
fcx.tcx().sess.span_bug(
span,
&format!(
"trait method is &self but first arg is: {}",
transformed_self_ty.repr(fcx.tcx())));
}
}
}
_ => {
fcx.tcx().sess.span_bug(
span,
&format!(
"unexpected explicit self type in operator method: {:?}",
method_ty.explicit_self));
}
}
}
}
let callee = MethodCallee {
origin: MethodTypeParam(MethodParam{trait_ref: trait_ref.clone(),
method_num: method_num,
impl_def_id: None}),
ty: fty,
substs: trait_ref.substs.clone()
};
debug!("callee = {}", callee.repr(fcx.tcx()));
Some(callee)
}
pub fn resolve_ufcs<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
method_name: ast::Name,
self_ty: Ty<'tcx>,
expr_id: ast::NodeId)
-> Result<(def::Def, LastPrivate), MethodError>
{
let mode = probe::Mode::Path;
let pick = try!(probe::probe(fcx, span, mode, method_name, self_ty, expr_id));
let def_id = pick.method_ty.def_id;
let mut lp = LastMod(AllPublic);
let provenance = match pick.kind {
probe::InherentImplPick(impl_def_id) => {
if pick.method_ty.vis != ast::Public {
lp = LastMod(DependsOn(def_id));
}
def::FromImpl(impl_def_id)
}
_ => def::FromTrait(pick.method_ty.container.id())
};
Ok((def::DefMethod(def_id, provenance), lp))
}
/// Find method with name `method_name` defined in `trait_def_id` and return it, along with its
/// index (or `None`, if no such method).
fn trait_method<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId,
method_name: ast::Name)
-> Option<(uint, Rc<ty::Method<'tcx>>)>
{
let trait_items = ty::trait_items(tcx, trait_def_id);
trait_items
.iter()
.enumerate()
.find(|&(_, ref item)| item.name() == method_name)
.and_then(|(idx, item)| item.as_opt_method().map(|m| (idx, m)))
}
fn impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
impl_def_id: ast::DefId,
method_name: ast::Name)
-> Option<Rc<ty::Method<'tcx>>>
{
let impl_items = tcx.impl_items.borrow();
let impl_items = impl_items.get(&impl_def_id).unwrap();
impl_items
.iter()
.map(|&did| ty::impl_or_trait_item(tcx, did.def_id()))
.find(|m| m.name() == method_name)
.and_then(|item| item.as_opt_method())
} | {
let mode = probe::Mode::MethodCall;
match probe::probe(fcx, span, mode, method_name, self_ty, call_expr_id) { | random_line_split |
test-fs-open-close.js | // Copyright IBM Corp. 2014. All Rights Reserved.
// Node module: async-tracker
// This file is licensed under the MIT License.
// License text available at https://opensource.org/licenses/MIT
var assert = require('assert');
require('../index.js');
var fs = require('fs');
var util = require('util');
var cnt = 0;
var Listener = function() {
var evtName = asyncTracker.events.fs.open;
this.deferredCreated = {};
this.invokeDeferred = {};
this.deferredReleased = {};
this.deferredCreated[evtName] = function(fName, fId, args) {
assert.equal(cnt, 0);
cnt += 1;
};
this.deferredCreated['default'] = function(fName, fId, args) {
assert.equal(cnt, 4);
cnt += 1;
};
this.invokeDeferred[evtName] = function(fName, fId, next) {
assert.equal(cnt, 2);
cnt += 1;
next();
};
this.invokeDeferred['default'] = function(fName, fId, next) {
assert.equal(cnt, 6);
cnt += 1;
next();
};
this.deferredReleased[evtName] = function(fName, fId) {
assert.equal(cnt, 5);
cnt += 1;
};
this.deferredReleased['default'] = function(fName, fId) {
assert.equal(cnt, 7);
cnt += 1;
};
this.objectCreated = function(obj) {
assert.equal(cnt, 1);
cnt += 1;
};
this.objectReleased = function(obj) {
assert.equal(cnt, 3);
cnt += 1;
};
};
var listener = new Listener();
asyncTracker.addListener(listener, 'listener');
function closeCallback() |
function openCallback(err, fd) {
fs.close(fd, closeCallback);
}
fs.open(__filename, 'r', openCallback);
asyncTracker.removeListener('listener'); | {
} | identifier_body |
test-fs-open-close.js | // Copyright IBM Corp. 2014. All Rights Reserved.
// Node module: async-tracker
// This file is licensed under the MIT License.
// License text available at https://opensource.org/licenses/MIT
var assert = require('assert');
require('../index.js');
var fs = require('fs');
var util = require('util');
var cnt = 0;
var Listener = function() {
var evtName = asyncTracker.events.fs.open;
this.deferredCreated = {};
this.invokeDeferred = {};
this.deferredReleased = {};
this.deferredCreated[evtName] = function(fName, fId, args) {
assert.equal(cnt, 0);
cnt += 1;
};
this.deferredCreated['default'] = function(fName, fId, args) {
assert.equal(cnt, 4);
cnt += 1;
};
this.invokeDeferred[evtName] = function(fName, fId, next) {
assert.equal(cnt, 2);
cnt += 1;
next();
};
this.invokeDeferred['default'] = function(fName, fId, next) {
assert.equal(cnt, 6);
cnt += 1;
next();
};
this.deferredReleased[evtName] = function(fName, fId) {
assert.equal(cnt, 5);
cnt += 1;
};
this.deferredReleased['default'] = function(fName, fId) {
assert.equal(cnt, 7);
cnt += 1;
};
this.objectCreated = function(obj) {
assert.equal(cnt, 1);
cnt += 1;
};
this.objectReleased = function(obj) {
assert.equal(cnt, 3);
cnt += 1;
};
};
var listener = new Listener();
asyncTracker.addListener(listener, 'listener');
function closeCallback() {
}
function | (err, fd) {
fs.close(fd, closeCallback);
}
fs.open(__filename, 'r', openCallback);
asyncTracker.removeListener('listener'); | openCallback | identifier_name |
test-fs-open-close.js | // Copyright IBM Corp. 2014. All Rights Reserved.
// Node module: async-tracker
// This file is licensed under the MIT License.
// License text available at https://opensource.org/licenses/MIT
var assert = require('assert');
require('../index.js');
var fs = require('fs');
var util = require('util');
var cnt = 0;
var Listener = function() {
var evtName = asyncTracker.events.fs.open;
| this.deferredReleased = {};
this.deferredCreated[evtName] = function(fName, fId, args) {
assert.equal(cnt, 0);
cnt += 1;
};
this.deferredCreated['default'] = function(fName, fId, args) {
assert.equal(cnt, 4);
cnt += 1;
};
this.invokeDeferred[evtName] = function(fName, fId, next) {
assert.equal(cnt, 2);
cnt += 1;
next();
};
this.invokeDeferred['default'] = function(fName, fId, next) {
assert.equal(cnt, 6);
cnt += 1;
next();
};
this.deferredReleased[evtName] = function(fName, fId) {
assert.equal(cnt, 5);
cnt += 1;
};
this.deferredReleased['default'] = function(fName, fId) {
assert.equal(cnt, 7);
cnt += 1;
};
this.objectCreated = function(obj) {
assert.equal(cnt, 1);
cnt += 1;
};
this.objectReleased = function(obj) {
assert.equal(cnt, 3);
cnt += 1;
};
};
var listener = new Listener();
asyncTracker.addListener(listener, 'listener');
function closeCallback() {
}
function openCallback(err, fd) {
fs.close(fd, closeCallback);
}
fs.open(__filename, 'r', openCallback);
asyncTracker.removeListener('listener'); | this.deferredCreated = {};
this.invokeDeferred = {}; | random_line_split |
setup.py | #!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
from codecs import open
from setuptools import setup
try:
from azure_bdist_wheel import cmdclass
except ImportError:
from distutils import log as logger
logger.warn("Wheel is not available, disabling bdist_wheel hook")
cmdclass = {}
VERSION = "2.0.17+dev"
# If we have source, validate that our version numbers match
# This should prevent uploading releases with mismatched versions.
try:
with open('azure/cli/__init__.py', 'r', encoding='utf-8') as f:
content = f.read()
except OSError:
pass
else:
|
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License',
]
DEPENDENCIES = [
'azure-cli-acr',
'azure-cli-acs',
'azure-cli-appservice',
'azure-cli-batch',
'azure-cli-backup',
'azure-cli-billing',
'azure-cli-cdn',
'azure-cli-cloud',
'azure-cli-cognitiveservices',
'azure-cli-component',
'azure-cli-container',
'azure-cli-configure',
'azure-cli-consumption',
'azure-cli-core',
'azure-cli-cosmosdb',
'azure-cli-dla',
'azure-cli-dls',
'azure-cli-eventgrid',
'azure-cli-extension',
'azure-cli-feedback',
'azure-cli-find',
'azure-cli-interactive',
'azure-cli-iot',
'azure-cli-keyvault',
'azure-cli-lab',
'azure-cli-monitor',
'azure-cli-network',
'azure-cli-nspkg',
'azure-cli-profile',
'azure-cli-rdbms',
'azure-cli-redis',
'azure-cli-resource',
'azure-cli-role',
'azure-cli-sql',
'azure-cli-storage',
'azure-cli-vm',
'azure-cli-servicefabric'
]
with open('README.rst', 'r', encoding='utf-8') as f:
README = f.read()
with open('HISTORY.rst', 'r', encoding='utf-8') as f:
HISTORY = f.read()
setup(
name='azure-cli',
version=VERSION,
description='Microsoft Azure Command-Line Tools',
long_description=README + '\n\n' + HISTORY,
license='MIT',
author='Microsoft Corporation',
author_email='azpycli@microsoft.com',
url='https://github.com/Azure/azure-cli',
zip_safe=False,
classifiers=CLASSIFIERS,
scripts=[
'az',
'az.completion.sh',
'az.bat',
],
packages=[
'azure',
'azure.cli',
],
install_requires=DEPENDENCIES,
cmdclass=cmdclass
)
| import re
import sys
m = re.search(r'__version__\s*=\s*[\'"](.+?)[\'"]', content)
if not m:
print('Could not find __version__ in azure/cli/__init__.py')
sys.exit(1)
if m.group(1) != VERSION:
print('Expected __version__ = "{}"; found "{}"'.format(VERSION, m.group(1)))
sys.exit(1) | conditional_block |
setup.py | #!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
from codecs import open
from setuptools import setup
try:
from azure_bdist_wheel import cmdclass
except ImportError:
from distutils import log as logger
logger.warn("Wheel is not available, disabling bdist_wheel hook")
cmdclass = {}
VERSION = "2.0.17+dev"
# If we have source, validate that our version numbers match
# This should prevent uploading releases with mismatched versions.
try:
with open('azure/cli/__init__.py', 'r', encoding='utf-8') as f:
content = f.read()
except OSError:
pass
else:
import re
import sys
m = re.search(r'__version__\s*=\s*[\'"](.+?)[\'"]', content)
if not m:
print('Could not find __version__ in azure/cli/__init__.py')
sys.exit(1)
if m.group(1) != VERSION:
print('Expected __version__ = "{}"; found "{}"'.format(VERSION, m.group(1)))
sys.exit(1)
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators', | 'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License',
]
DEPENDENCIES = [
'azure-cli-acr',
'azure-cli-acs',
'azure-cli-appservice',
'azure-cli-batch',
'azure-cli-backup',
'azure-cli-billing',
'azure-cli-cdn',
'azure-cli-cloud',
'azure-cli-cognitiveservices',
'azure-cli-component',
'azure-cli-container',
'azure-cli-configure',
'azure-cli-consumption',
'azure-cli-core',
'azure-cli-cosmosdb',
'azure-cli-dla',
'azure-cli-dls',
'azure-cli-eventgrid',
'azure-cli-extension',
'azure-cli-feedback',
'azure-cli-find',
'azure-cli-interactive',
'azure-cli-iot',
'azure-cli-keyvault',
'azure-cli-lab',
'azure-cli-monitor',
'azure-cli-network',
'azure-cli-nspkg',
'azure-cli-profile',
'azure-cli-rdbms',
'azure-cli-redis',
'azure-cli-resource',
'azure-cli-role',
'azure-cli-sql',
'azure-cli-storage',
'azure-cli-vm',
'azure-cli-servicefabric'
]
with open('README.rst', 'r', encoding='utf-8') as f:
README = f.read()
with open('HISTORY.rst', 'r', encoding='utf-8') as f:
HISTORY = f.read()
setup(
name='azure-cli',
version=VERSION,
description='Microsoft Azure Command-Line Tools',
long_description=README + '\n\n' + HISTORY,
license='MIT',
author='Microsoft Corporation',
author_email='azpycli@microsoft.com',
url='https://github.com/Azure/azure-cli',
zip_safe=False,
classifiers=CLASSIFIERS,
scripts=[
'az',
'az.completion.sh',
'az.bat',
],
packages=[
'azure',
'azure.cli',
],
install_requires=DEPENDENCIES,
cmdclass=cmdclass
) | random_line_split | |
snippetedMessages.js | /* global SnippetedMessages */
Template.snippetedMessages.helpers({
hasMessages() {
return SnippetedMessages.find({ snippeted:true, rid: this.rid }, { sort: { ts: -1 } }).count() > 0;
},
messages() {
return SnippetedMessages.find({ snippeted: true, rid: this.rid }, { sort: { ts: -1 } });
},
| () {
return _.extend(this, { customClass: 'snippeted' });
},
hasMore() {
return Template.instance().hasMore.get();
}
});
Template.snippetedMessages.onCreated(function() {
this.hasMore = new ReactiveVar(true);
this.limit = new ReactiveVar(50);
const self = this;
this.autorun(function() {
const data = Template.currentData();
self.subscribe('snippetedMessages', data.rid, self.limit.get(), function() {
if (SnippetedMessages.find({ snippeted: true, rid: data.rid }).count() < self.limit.get()) {
return self.hasMore.set(false);
}
});
});
});
| message | identifier_name |
snippetedMessages.js | /* global SnippetedMessages */
Template.snippetedMessages.helpers({
hasMessages() {
return SnippetedMessages.find({ snippeted:true, rid: this.rid }, { sort: { ts: -1 } }).count() > 0;
},
messages() {
return SnippetedMessages.find({ snippeted: true, rid: this.rid }, { sort: { ts: -1 } });
},
message() {
return _.extend(this, { customClass: 'snippeted' });
},
hasMore() |
});
Template.snippetedMessages.onCreated(function() {
this.hasMore = new ReactiveVar(true);
this.limit = new ReactiveVar(50);
const self = this;
this.autorun(function() {
const data = Template.currentData();
self.subscribe('snippetedMessages', data.rid, self.limit.get(), function() {
if (SnippetedMessages.find({ snippeted: true, rid: data.rid }).count() < self.limit.get()) {
return self.hasMore.set(false);
}
});
});
});
| {
return Template.instance().hasMore.get();
} | identifier_body |
snippetedMessages.js | /* global SnippetedMessages */
Template.snippetedMessages.helpers({
hasMessages() {
return SnippetedMessages.find({ snippeted:true, rid: this.rid }, { sort: { ts: -1 } }).count() > 0;
},
messages() {
return SnippetedMessages.find({ snippeted: true, rid: this.rid }, { sort: { ts: -1 } });
},
message() {
return _.extend(this, { customClass: 'snippeted' });
},
hasMore() {
return Template.instance().hasMore.get();
}
});
Template.snippetedMessages.onCreated(function() {
this.hasMore = new ReactiveVar(true);
this.limit = new ReactiveVar(50);
const self = this;
this.autorun(function() {
const data = Template.currentData();
self.subscribe('snippetedMessages', data.rid, self.limit.get(), function() {
if (SnippetedMessages.find({ snippeted: true, rid: data.rid }).count() < self.limit.get()) |
});
});
});
| {
return self.hasMore.set(false);
} | conditional_block |
snippetedMessages.js | /* global SnippetedMessages */
Template.snippetedMessages.helpers({
hasMessages() {
return SnippetedMessages.find({ snippeted:true, rid: this.rid }, { sort: { ts: -1 } }).count() > 0;
},
messages() {
return SnippetedMessages.find({ snippeted: true, rid: this.rid }, { sort: { ts: -1 } });
},
message() {
return _.extend(this, { customClass: 'snippeted' });
}, | }
});
Template.snippetedMessages.onCreated(function() {
this.hasMore = new ReactiveVar(true);
this.limit = new ReactiveVar(50);
const self = this;
this.autorun(function() {
const data = Template.currentData();
self.subscribe('snippetedMessages', data.rid, self.limit.get(), function() {
if (SnippetedMessages.find({ snippeted: true, rid: data.rid }).count() < self.limit.get()) {
return self.hasMore.set(false);
}
});
});
}); | hasMore() {
return Template.instance().hasMore.get(); | random_line_split |
lastfm.py | """
Simplish interface to parts of the Last.fm API.
Has a built-in lock to stop requests happening at more than one per second.
"""
import os
import time
# Import [c]ElementTree
try:
import cElementTree as ET
except:
import elementtree.ElementTree as ET
# The directory to use to cache downloaded xml
cachedir = "/var/tmp/lastfm/"
# The lockfile to use to stop too many requests.
lockfile = os.path.join(cachedir, "netlock")
if not os.path.exists(lockfile):
open(lockfile, "w")
# The Last.fm API url
lastfm_api_url = "http://ws.audioscrobbler.com/1.0/%s"
# Create the cache dir if we can
try:
os.makedirs(cachedir)
except OSError:
pass
def fetch(path, delay=1.0):
"""Fetches the given path from the Last.fm API,
using a lock file to make sure that there's no more than 1 request a second.
Will block until the URL is downloaded."""
# Wait for the lock to pass
lockvalue = open(lockfile).read().strip()
while os.path.exists(lockfile) and lockvalue and (time.time() - float(lockvalue) < delay):
time.sleep(0.001)
# Write our own lock
fo = open(lockfile, "w")
fo.write(str(time.time()))
fo.close()
# Get the requested file
import urllib
return urllib.urlopen(lastfm_api_url % path)
def available_weeks(username):
"""Returns a list of integer tuples, representing the
available weeks for charts."""
# Get and parse the XML
root = ET.fromstring(fetch("user/%s/weeklychartlist.xml" % username).read())
# Check the type
assert root.tag == "weeklychartlist", "This is not a Weekly Chart List"
# For each week, get the times
weeks = []
for tag in root.findall("chart"):
start = float(tag.attrib['from'])
end = float(tag.attrib['to'])
weeks.append((start, end))
return weeks
def track_chart(username, start, end):
"""Retrieves the track chart for a single week.
The data is returned as an ordered list of (trackname, artist, plays) tuples.
Implements caching of the XML."""
# Get the XML if it doesn't already exist
filename = os.path.join(cachedir, "trackchart-%s-%s-%s.xml" % (username, start, end))
if not os.path.exists(filename):
fo = fetch("user/%s/weeklytrackchart.xml?from=%i&to=%i" % (username, start, end))
import shutil
shutil.copyfileobj(fo, open(filename, "w"))
# Load and parse the XML
tree = ET.parse(filename)
root = tree.getroot()
# Check the type
assert root.tag == "weeklytrackchart", "This is not a Weekly Chart List"
# Now, loop over the tracks
tracks = []
for tag in root.findall("track"):
artist_tag = tag.find("artist")
artist_name = artist_tag.text
name = tag.find("name").text
plays = int(tag.find("playcount").text)
tracks.append((name, artist_name, plays))
return tracks
def artist_chart(username, start, end):
"""Retrieves the track chart for a single week.
The data is returned as an ordered list of (artist, plays) tuples.
Implements caching of the XML."""
# We use the track data as it might already be cached
tracks = track_chart(username, start, end)
artists = {}
for track, artist, plays in tracks:
artists[artist] = artists.get(artist, 0) + plays
artists = artists.items()
artists.sort(key=lambda (x,y):y)
artists.reverse()
return artists
def artist_range_chart(username, start, end, callback=lambda x:x, dated=False):
"""Like artist_chart, but aggregates over several weeks' charts into a list of values."""
weeks = available_weeks(username)
artist_totals = {}
artists = {}
matching_weeks = [(week_start, week_end) for week_start, week_end in weeks if (week_end > start) and (week_start < end)]
i = 0
for week_start, week_end in matching_weeks:
for artist, plays in artist_chart(username, week_start, week_end):
artist_totals[artist] = artist_totals.get(artist, 0) + plays
i += 1.0 / len(matching_weeks)
callback(i/2.0)
artists = dict([(artist, []) for artist in artist_totals])
for week_start, week_end in matching_weeks:
plays = dict(artist_chart(username, week_start, week_end))
for artist in artists:
if dated:
artists[artist].append((week_start, plays.get(artist, 0)))
else:
|
i += 1.0 / len(matching_weeks)
callback(i/2.0)
artists = artists.items()
artists.sort(key=lambda (x,y):y)
artists.reverse()
return artists | artists[artist].append(plays.get(artist, 0)) | conditional_block |
lastfm.py | """
Simplish interface to parts of the Last.fm API.
Has a built-in lock to stop requests happening at more than one per second.
"""
import os
import time
# Import [c]ElementTree
try:
import cElementTree as ET
except:
import elementtree.ElementTree as ET
# The directory to use to cache downloaded xml
cachedir = "/var/tmp/lastfm/"
# The lockfile to use to stop too many requests.
lockfile = os.path.join(cachedir, "netlock")
if not os.path.exists(lockfile):
open(lockfile, "w")
# The Last.fm API url
lastfm_api_url = "http://ws.audioscrobbler.com/1.0/%s"
# Create the cache dir if we can
try:
os.makedirs(cachedir)
except OSError:
pass
def fetch(path, delay=1.0):
"""Fetches the given path from the Last.fm API,
using a lock file to make sure that there's no more than 1 request a second.
Will block until the URL is downloaded."""
# Wait for the lock to pass
lockvalue = open(lockfile).read().strip()
while os.path.exists(lockfile) and lockvalue and (time.time() - float(lockvalue) < delay):
time.sleep(0.001)
# Write our own lock
fo = open(lockfile, "w")
fo.write(str(time.time()))
fo.close()
# Get the requested file
import urllib
return urllib.urlopen(lastfm_api_url % path)
def available_weeks(username):
"""Returns a list of integer tuples, representing the
available weeks for charts."""
# Get and parse the XML
root = ET.fromstring(fetch("user/%s/weeklychartlist.xml" % username).read())
# Check the type
assert root.tag == "weeklychartlist", "This is not a Weekly Chart List"
# For each week, get the times
weeks = []
for tag in root.findall("chart"):
start = float(tag.attrib['from'])
end = float(tag.attrib['to'])
weeks.append((start, end))
return weeks
def track_chart(username, start, end):
"""Retrieves the track chart for a single week.
The data is returned as an ordered list of (trackname, artist, plays) tuples.
Implements caching of the XML."""
# Get the XML if it doesn't already exist
filename = os.path.join(cachedir, "trackchart-%s-%s-%s.xml" % (username, start, end))
if not os.path.exists(filename):
fo = fetch("user/%s/weeklytrackchart.xml?from=%i&to=%i" % (username, start, end))
import shutil
shutil.copyfileobj(fo, open(filename, "w"))
# Load and parse the XML
tree = ET.parse(filename)
root = tree.getroot()
# Check the type
assert root.tag == "weeklytrackchart", "This is not a Weekly Chart List"
# Now, loop over the tracks
tracks = []
for tag in root.findall("track"):
artist_tag = tag.find("artist")
artist_name = artist_tag.text
name = tag.find("name").text | return tracks
def artist_chart(username, start, end):
"""Retrieves the track chart for a single week.
The data is returned as an ordered list of (artist, plays) tuples.
Implements caching of the XML."""
# We use the track data as it might already be cached
tracks = track_chart(username, start, end)
artists = {}
for track, artist, plays in tracks:
artists[artist] = artists.get(artist, 0) + plays
artists = artists.items()
artists.sort(key=lambda (x,y):y)
artists.reverse()
return artists
def artist_range_chart(username, start, end, callback=lambda x:x, dated=False):
"""Like artist_chart, but aggregates over several weeks' charts into a list of values."""
weeks = available_weeks(username)
artist_totals = {}
artists = {}
matching_weeks = [(week_start, week_end) for week_start, week_end in weeks if (week_end > start) and (week_start < end)]
i = 0
for week_start, week_end in matching_weeks:
for artist, plays in artist_chart(username, week_start, week_end):
artist_totals[artist] = artist_totals.get(artist, 0) + plays
i += 1.0 / len(matching_weeks)
callback(i/2.0)
artists = dict([(artist, []) for artist in artist_totals])
for week_start, week_end in matching_weeks:
plays = dict(artist_chart(username, week_start, week_end))
for artist in artists:
if dated:
artists[artist].append((week_start, plays.get(artist, 0)))
else:
artists[artist].append(plays.get(artist, 0))
i += 1.0 / len(matching_weeks)
callback(i/2.0)
artists = artists.items()
artists.sort(key=lambda (x,y):y)
artists.reverse()
return artists | plays = int(tag.find("playcount").text)
tracks.append((name, artist_name, plays))
| random_line_split |
lastfm.py | """
Simplish interface to parts of the Last.fm API.
Has a built-in lock to stop requests happening at more than one per second.
"""
import os
import time
# Import [c]ElementTree
try:
import cElementTree as ET
except:
import elementtree.ElementTree as ET
# The directory to use to cache downloaded xml
cachedir = "/var/tmp/lastfm/"
# The lockfile to use to stop too many requests.
lockfile = os.path.join(cachedir, "netlock")
if not os.path.exists(lockfile):
open(lockfile, "w")
# The Last.fm API url
lastfm_api_url = "http://ws.audioscrobbler.com/1.0/%s"
# Create the cache dir if we can
try:
os.makedirs(cachedir)
except OSError:
pass
def fetch(path, delay=1.0):
"""Fetches the given path from the Last.fm API,
using a lock file to make sure that there's no more than 1 request a second.
Will block until the URL is downloaded."""
# Wait for the lock to pass
lockvalue = open(lockfile).read().strip()
while os.path.exists(lockfile) and lockvalue and (time.time() - float(lockvalue) < delay):
time.sleep(0.001)
# Write our own lock
fo = open(lockfile, "w")
fo.write(str(time.time()))
fo.close()
# Get the requested file
import urllib
return urllib.urlopen(lastfm_api_url % path)
def available_weeks(username):
"""Returns a list of integer tuples, representing the
available weeks for charts."""
# Get and parse the XML
root = ET.fromstring(fetch("user/%s/weeklychartlist.xml" % username).read())
# Check the type
assert root.tag == "weeklychartlist", "This is not a Weekly Chart List"
# For each week, get the times
weeks = []
for tag in root.findall("chart"):
start = float(tag.attrib['from'])
end = float(tag.attrib['to'])
weeks.append((start, end))
return weeks
def track_chart(username, start, end):
"""Retrieves the track chart for a single week.
The data is returned as an ordered list of (trackname, artist, plays) tuples.
Implements caching of the XML."""
# Get the XML if it doesn't already exist
filename = os.path.join(cachedir, "trackchart-%s-%s-%s.xml" % (username, start, end))
if not os.path.exists(filename):
fo = fetch("user/%s/weeklytrackchart.xml?from=%i&to=%i" % (username, start, end))
import shutil
shutil.copyfileobj(fo, open(filename, "w"))
# Load and parse the XML
tree = ET.parse(filename)
root = tree.getroot()
# Check the type
assert root.tag == "weeklytrackchart", "This is not a Weekly Chart List"
# Now, loop over the tracks
tracks = []
for tag in root.findall("track"):
artist_tag = tag.find("artist")
artist_name = artist_tag.text
name = tag.find("name").text
plays = int(tag.find("playcount").text)
tracks.append((name, artist_name, plays))
return tracks
def artist_chart(username, start, end):
"""Retrieves the track chart for a single week.
The data is returned as an ordered list of (artist, plays) tuples.
Implements caching of the XML."""
# We use the track data as it might already be cached
tracks = track_chart(username, start, end)
artists = {}
for track, artist, plays in tracks:
artists[artist] = artists.get(artist, 0) + plays
artists = artists.items()
artists.sort(key=lambda (x,y):y)
artists.reverse()
return artists
def | (username, start, end, callback=lambda x:x, dated=False):
"""Like artist_chart, but aggregates over several weeks' charts into a list of values."""
weeks = available_weeks(username)
artist_totals = {}
artists = {}
matching_weeks = [(week_start, week_end) for week_start, week_end in weeks if (week_end > start) and (week_start < end)]
i = 0
for week_start, week_end in matching_weeks:
for artist, plays in artist_chart(username, week_start, week_end):
artist_totals[artist] = artist_totals.get(artist, 0) + plays
i += 1.0 / len(matching_weeks)
callback(i/2.0)
artists = dict([(artist, []) for artist in artist_totals])
for week_start, week_end in matching_weeks:
plays = dict(artist_chart(username, week_start, week_end))
for artist in artists:
if dated:
artists[artist].append((week_start, plays.get(artist, 0)))
else:
artists[artist].append(plays.get(artist, 0))
i += 1.0 / len(matching_weeks)
callback(i/2.0)
artists = artists.items()
artists.sort(key=lambda (x,y):y)
artists.reverse()
return artists | artist_range_chart | identifier_name |
lastfm.py | """
Simplish interface to parts of the Last.fm API.
Has a built-in lock to stop requests happening at more than one per second.
"""
import os
import time
# Import [c]ElementTree
try:
import cElementTree as ET
except:
import elementtree.ElementTree as ET
# The directory to use to cache downloaded xml
cachedir = "/var/tmp/lastfm/"
# The lockfile to use to stop too many requests.
lockfile = os.path.join(cachedir, "netlock")
if not os.path.exists(lockfile):
open(lockfile, "w")
# The Last.fm API url
lastfm_api_url = "http://ws.audioscrobbler.com/1.0/%s"
# Create the cache dir if we can
try:
os.makedirs(cachedir)
except OSError:
pass
def fetch(path, delay=1.0):
"""Fetches the given path from the Last.fm API,
using a lock file to make sure that there's no more than 1 request a second.
Will block until the URL is downloaded."""
# Wait for the lock to pass
lockvalue = open(lockfile).read().strip()
while os.path.exists(lockfile) and lockvalue and (time.time() - float(lockvalue) < delay):
time.sleep(0.001)
# Write our own lock
fo = open(lockfile, "w")
fo.write(str(time.time()))
fo.close()
# Get the requested file
import urllib
return urllib.urlopen(lastfm_api_url % path)
def available_weeks(username):
|
def track_chart(username, start, end):
"""Retrieves the track chart for a single week.
The data is returned as an ordered list of (trackname, artist, plays) tuples.
Implements caching of the XML."""
# Get the XML if it doesn't already exist
filename = os.path.join(cachedir, "trackchart-%s-%s-%s.xml" % (username, start, end))
if not os.path.exists(filename):
fo = fetch("user/%s/weeklytrackchart.xml?from=%i&to=%i" % (username, start, end))
import shutil
shutil.copyfileobj(fo, open(filename, "w"))
# Load and parse the XML
tree = ET.parse(filename)
root = tree.getroot()
# Check the type
assert root.tag == "weeklytrackchart", "This is not a Weekly Chart List"
# Now, loop over the tracks
tracks = []
for tag in root.findall("track"):
artist_tag = tag.find("artist")
artist_name = artist_tag.text
name = tag.find("name").text
plays = int(tag.find("playcount").text)
tracks.append((name, artist_name, plays))
return tracks
def artist_chart(username, start, end):
"""Retrieves the track chart for a single week.
The data is returned as an ordered list of (artist, plays) tuples.
Implements caching of the XML."""
# We use the track data as it might already be cached
tracks = track_chart(username, start, end)
artists = {}
for track, artist, plays in tracks:
artists[artist] = artists.get(artist, 0) + plays
artists = artists.items()
artists.sort(key=lambda (x,y):y)
artists.reverse()
return artists
def artist_range_chart(username, start, end, callback=lambda x:x, dated=False):
"""Like artist_chart, but aggregates over several weeks' charts into a list of values."""
weeks = available_weeks(username)
artist_totals = {}
artists = {}
matching_weeks = [(week_start, week_end) for week_start, week_end in weeks if (week_end > start) and (week_start < end)]
i = 0
for week_start, week_end in matching_weeks:
for artist, plays in artist_chart(username, week_start, week_end):
artist_totals[artist] = artist_totals.get(artist, 0) + plays
i += 1.0 / len(matching_weeks)
callback(i/2.0)
artists = dict([(artist, []) for artist in artist_totals])
for week_start, week_end in matching_weeks:
plays = dict(artist_chart(username, week_start, week_end))
for artist in artists:
if dated:
artists[artist].append((week_start, plays.get(artist, 0)))
else:
artists[artist].append(plays.get(artist, 0))
i += 1.0 / len(matching_weeks)
callback(i/2.0)
artists = artists.items()
artists.sort(key=lambda (x,y):y)
artists.reverse()
return artists | """Returns a list of integer tuples, representing the
available weeks for charts."""
# Get and parse the XML
root = ET.fromstring(fetch("user/%s/weeklychartlist.xml" % username).read())
# Check the type
assert root.tag == "weeklychartlist", "This is not a Weekly Chart List"
# For each week, get the times
weeks = []
for tag in root.findall("chart"):
start = float(tag.attrib['from'])
end = float(tag.attrib['to'])
weeks.append((start, end))
return weeks | identifier_body |
prewitt.rs | use rgb::*;
use imgref::*;
use loop9::{Triple,loop9};
pub trait ToGray {
fn to_gray(self) -> i16;
}
impl ToGray for RGBA8 {
fn to_gray(self) -> i16 {
self.rgb().to_gray()
}
}
impl ToGray for RGB8 {
fn to_gray(self) -> i16 {
let px = self.map(|c| c as i16);
px.r + px.g + px.g + px.b
}
}
pub fn prewitt_squared_img<T: ToGray + Copy>(input: ImgRef<'_, T>) -> ImgVec<u16> {
let gray: Vec<_> = input.pixels().map(|px|px.to_gray()).collect();
let gray = Img::new(gray, input.width(), input.height());
let mut prew = Vec::with_capacity(gray.width() * gray.height()); | }
#[inline]
pub fn prewitt_squared3<T: Into<i16>>(top: Triple<T>, mid: Triple<T>, bot: Triple<T>) -> u16 {
prewitt_squared(top.prev, top.curr, top.next, mid.prev, mid.next, bot.prev, bot.curr, bot.next)
}
#[inline]
pub fn prewitt_squared<T: Into<i16>>(top_prev: T, top_curr: T, top_next: T, mid_prev: T, mid_next: T, bot_prev: T, bot_curr: T, bot_next: T) -> u16 {
let top_prev = top_prev.into();
let top_curr = top_curr.into();
let top_next = top_next.into();
let mid_prev = mid_prev.into();
let mid_next = mid_next.into();
let bot_prev = bot_prev.into();
let bot_curr = bot_curr.into();
let bot_next = bot_next.into();
let gx = (
top_next - top_prev +
mid_next - mid_prev +
bot_next - bot_prev) as i32;
let gy = (
bot_prev + bot_curr + bot_next -
top_prev - top_curr - top_next) as i32;
((gx*gx + gy*gy) / 256) as u16
} | loop9(gray.as_ref(), 0,0, gray.width(), gray.height(), |_x,_y,top,mid,bot|{
prew.push(prewitt_squared3(top, mid, bot));
});
ImgVec::new(prew, gray.width(), gray.height()) | random_line_split |
prewitt.rs | use rgb::*;
use imgref::*;
use loop9::{Triple,loop9};
pub trait ToGray {
fn to_gray(self) -> i16;
}
impl ToGray for RGBA8 {
fn to_gray(self) -> i16 {
self.rgb().to_gray()
}
}
impl ToGray for RGB8 {
fn | (self) -> i16 {
let px = self.map(|c| c as i16);
px.r + px.g + px.g + px.b
}
}
pub fn prewitt_squared_img<T: ToGray + Copy>(input: ImgRef<'_, T>) -> ImgVec<u16> {
let gray: Vec<_> = input.pixels().map(|px|px.to_gray()).collect();
let gray = Img::new(gray, input.width(), input.height());
let mut prew = Vec::with_capacity(gray.width() * gray.height());
loop9(gray.as_ref(), 0,0, gray.width(), gray.height(), |_x,_y,top,mid,bot|{
prew.push(prewitt_squared3(top, mid, bot));
});
ImgVec::new(prew, gray.width(), gray.height())
}
#[inline]
pub fn prewitt_squared3<T: Into<i16>>(top: Triple<T>, mid: Triple<T>, bot: Triple<T>) -> u16 {
prewitt_squared(top.prev, top.curr, top.next, mid.prev, mid.next, bot.prev, bot.curr, bot.next)
}
#[inline]
pub fn prewitt_squared<T: Into<i16>>(top_prev: T, top_curr: T, top_next: T, mid_prev: T, mid_next: T, bot_prev: T, bot_curr: T, bot_next: T) -> u16 {
let top_prev = top_prev.into();
let top_curr = top_curr.into();
let top_next = top_next.into();
let mid_prev = mid_prev.into();
let mid_next = mid_next.into();
let bot_prev = bot_prev.into();
let bot_curr = bot_curr.into();
let bot_next = bot_next.into();
let gx = (
top_next - top_prev +
mid_next - mid_prev +
bot_next - bot_prev) as i32;
let gy = (
bot_prev + bot_curr + bot_next -
top_prev - top_curr - top_next) as i32;
((gx*gx + gy*gy) / 256) as u16
}
| to_gray | identifier_name |
prewitt.rs | use rgb::*;
use imgref::*;
use loop9::{Triple,loop9};
pub trait ToGray {
fn to_gray(self) -> i16;
}
impl ToGray for RGBA8 {
fn to_gray(self) -> i16 {
self.rgb().to_gray()
}
}
impl ToGray for RGB8 {
fn to_gray(self) -> i16 {
let px = self.map(|c| c as i16);
px.r + px.g + px.g + px.b
}
}
pub fn prewitt_squared_img<T: ToGray + Copy>(input: ImgRef<'_, T>) -> ImgVec<u16> {
let gray: Vec<_> = input.pixels().map(|px|px.to_gray()).collect();
let gray = Img::new(gray, input.width(), input.height());
let mut prew = Vec::with_capacity(gray.width() * gray.height());
loop9(gray.as_ref(), 0,0, gray.width(), gray.height(), |_x,_y,top,mid,bot|{
prew.push(prewitt_squared3(top, mid, bot));
});
ImgVec::new(prew, gray.width(), gray.height())
}
#[inline]
pub fn prewitt_squared3<T: Into<i16>>(top: Triple<T>, mid: Triple<T>, bot: Triple<T>) -> u16 |
#[inline]
pub fn prewitt_squared<T: Into<i16>>(top_prev: T, top_curr: T, top_next: T, mid_prev: T, mid_next: T, bot_prev: T, bot_curr: T, bot_next: T) -> u16 {
let top_prev = top_prev.into();
let top_curr = top_curr.into();
let top_next = top_next.into();
let mid_prev = mid_prev.into();
let mid_next = mid_next.into();
let bot_prev = bot_prev.into();
let bot_curr = bot_curr.into();
let bot_next = bot_next.into();
let gx = (
top_next - top_prev +
mid_next - mid_prev +
bot_next - bot_prev) as i32;
let gy = (
bot_prev + bot_curr + bot_next -
top_prev - top_curr - top_next) as i32;
((gx*gx + gy*gy) / 256) as u16
}
| {
prewitt_squared(top.prev, top.curr, top.next, mid.prev, mid.next, bot.prev, bot.curr, bot.next)
} | identifier_body |
data_class.py | # DATA class v1.0 written by HR,JB@KIT 2011, 2016
'''
DATA exchange class, also holds global variables for thread management.
Generalized version based on TIP.
'''
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import time
import os
from threading import Lock
import numpy as np
from qkit.storage import store as hdf_lib
from qkit.gui.plot import plot as qviewkit
class DATA(object):
'''
DATA class. Controls all access to parameter values and stored data.
'''
class LOCALHOST(object):
def __init__(self,config):
self.name = config.get('LOCALHOST','name')
self.ip = config.get('LOCALHOST','ip')
self.port = config.getint('LOCALHOST','port')
class REMOTEHOST(object):
def __init__(self,config):
self.name = config.get('REMOTEHOST','name')
self.ip = config.get('REMOTEHOST','ip')
self.port = config.getint('REMOTEHOST','port')
class PARAMETER(object):
def __init__(self,config,p_index,p_attr):
'''
Initialize parameter attributes, mostly taken from the config file.
'''
self.p_index = p_index
self.name = config.get(str(p_attr),'name')
self.interval = config.getfloat(str(p_attr),'interval')
self.data_request_object = lambda: 0 #this is written by server_main.py
self.value = 0
self.timestamp = 0
self.next_schedule = time.time() + self.interval
self.logging = bool(int(config.get(str(p_attr),'logging')))
self.log_path = config.get(str(p_attr),'log_path')
self.log_lock = Lock()
self.url_timestamps = None
print("Parameter %s loaded."%str(self.name))
def get_all(self):
'''
Get all parameter attributes.
'''
with Lock():
return {
"parameter": self.p_index,
"name":self.name,
"interval":self.interval,
"value":self.value,
"timestamp":self.timestamp,
"next_schedule":self.next_schedule,
}
def set_interval(self,interval):
'''
Setup the scheduling, which corresponds to the value request interval.
'''
interval = float(interval)
if interval == 0:
interval = 120*365*24*3600 #120 years
self.next_schedule += -self.interval+interval #update next schedule
self.interval = interval
def get_last_value(self):
with Lock():
return self.value
def get_timestamp(self):
with Lock():
return self.timestamp
def get_history(self,range,nchunks=100):
'''
Read out the h5 file.
- range: history data range
- nchunks: number of data points to be returned
'''
if self.url_timestamps != None:
try:
with self.log_lock:
timestamps = np.array(self.hf[self.url_timestamps])
values = np.array(self.hf[self.url_values])
data_points_requested_mask = np.where(time.time()-timestamps < range*3600)
timestamps_requested = timestamps[data_points_requested_mask]
data_points_requested = values[data_points_requested_mask]
#return only nchunks data points
if len(data_points_requested) > nchunks:
timestamp_chunks = np.array(np.split(timestamps_requested[:(len(timestamps_requested)-len(timestamps_requested)%nchunks)],nchunks))
timestamps = timestamp_chunks[:,int(0.5*len(timestamps_requested)/nchunks)]
data_chunks = np.array(np.split(data_points_requested[:(len(data_points_requested)-len(data_points_requested)%nchunks)],nchunks))
#calculate medians and return them instead of the mean (due to runaways in the log file)
medians = np.sort(data_chunks,axis=-1)[:,int(0.5*len(data_points_requested)/nchunks)]
return [timestamps,medians]
else:
return [timestamps_requested,data_points_requested]
except KeyError: #AttributeError, NameError:
print('Error opening h5 log file.')
return [0]
else:
|
def store_value(self,value):
'''
Store method, used by the worker.
'''
with Lock():
try:
self.value = float(value)
except ValueError:
print('type cast error, ignoring')
self.timestamp = time.time()
if self.logging:
self.append_to_log()
def create_logfile(self):
print('Create new log file for parameter %s.'%self.name)
self.fname = os.path.join(self.log_path,self.name.replace(' ','_')+time.strftime('%d%m%Y%H%M%S')+'.h5')
#print self.fname
self.hf = hdf_lib.Data(self.fname, mode='a')
self.hdf_t = self.hf.add_coordinate('timestamps')
self.hdf_v = self.hf.add_value_vector('values', x = self.hdf_t)
self.url_timestamps = '/entry/data0/timestamps'
self.url_values = '/entry/data0/values'
view = self.hf.add_view('data_vs_time', x = self.hdf_t, y = self.hdf_v) #fit
def append_to_log(self):
with self.log_lock:
self.hdf_t.append(float(self.get_timestamp()))
self.hdf_v.append(float(self.get_last_value()))
def close_logfile(self):
self.hf.close_file()
def schedule(self):
'''
Specifiy whether the parameter is to be updated,
typicalled called in each worker iteration.
Returns True if new parameter value needs to be read.
'''
if time.time() > self.next_schedule:
while time.time() > self.next_schedule:
self.next_schedule += self.interval
return True
else:
return False
def set_schedule(self):
self.next_schedule = time.time()
return True
def __init__(self,config):
'''
Reads the cfg file and instanciates all parameters accordingly.
'''
self.wants_abort = False
self.debug = True
self.cycle_time = config.getfloat('worker','cycle_time')
p_instances = config.get('parameters','p').split(",") #parameter instance names
#print(p_instances)
self.parameters = [self.PARAMETER(config,i,p) for i,p in enumerate(p_instances)] #instanciate parameter array
for i,p_i in enumerate(p_instances): #create human readable aliases, such that objects are accessible from clients according to the seetings.cfg entry in []
setattr(self,str(p_i),self.parameters[i])
self.localhost = self.LOCALHOST(config)
#self.remotehost = self.REMOTEHOST(config)
self.ctrl_lock = Lock()
def atexit(self):
self.set_wants_abort()
def get_wants_abort(self):
with self.ctrl_lock:
return self.wants_abort
def set_wants_abort(self):
with self.ctrl_lock:
self.wants_abort = True
if __name__ == "__main__":
DATA = DATA()
| return [0] | conditional_block |
data_class.py | # DATA class v1.0 written by HR,JB@KIT 2011, 2016
'''
DATA exchange class, also holds global variables for thread management.
Generalized version based on TIP.
'''
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import time
import os
from threading import Lock
import numpy as np
from qkit.storage import store as hdf_lib
from qkit.gui.plot import plot as qviewkit
class DATA(object):
'''
DATA class. Controls all access to parameter values and stored data.
'''
class LOCALHOST(object):
def __init__(self,config):
self.name = config.get('LOCALHOST','name')
self.ip = config.get('LOCALHOST','ip')
self.port = config.getint('LOCALHOST','port')
class REMOTEHOST(object):
def __init__(self,config):
self.name = config.get('REMOTEHOST','name')
self.ip = config.get('REMOTEHOST','ip')
self.port = config.getint('REMOTEHOST','port')
class PARAMETER(object):
def __init__(self,config,p_index,p_attr):
'''
Initialize parameter attributes, mostly taken from the config file.
'''
self.p_index = p_index
self.name = config.get(str(p_attr),'name')
self.interval = config.getfloat(str(p_attr),'interval')
self.data_request_object = lambda: 0 #this is written by server_main.py
self.value = 0
self.timestamp = 0
self.next_schedule = time.time() + self.interval
self.logging = bool(int(config.get(str(p_attr),'logging')))
self.log_path = config.get(str(p_attr),'log_path')
self.log_lock = Lock()
self.url_timestamps = None
print("Parameter %s loaded."%str(self.name))
def get_all(self):
'''
Get all parameter attributes.
'''
with Lock():
return {
"parameter": self.p_index,
"name":self.name,
"interval":self.interval,
"value":self.value,
"timestamp":self.timestamp,
"next_schedule":self.next_schedule,
}
def set_interval(self,interval):
'''
Setup the scheduling, which corresponds to the value request interval.
'''
interval = float(interval)
if interval == 0:
interval = 120*365*24*3600 #120 years
self.next_schedule += -self.interval+interval #update next schedule
self.interval = interval
def get_last_value(self):
with Lock():
return self.value
def get_timestamp(self): | Read out the h5 file.
- range: history data range
- nchunks: number of data points to be returned
'''
if self.url_timestamps != None:
try:
with self.log_lock:
timestamps = np.array(self.hf[self.url_timestamps])
values = np.array(self.hf[self.url_values])
data_points_requested_mask = np.where(time.time()-timestamps < range*3600)
timestamps_requested = timestamps[data_points_requested_mask]
data_points_requested = values[data_points_requested_mask]
#return only nchunks data points
if len(data_points_requested) > nchunks:
timestamp_chunks = np.array(np.split(timestamps_requested[:(len(timestamps_requested)-len(timestamps_requested)%nchunks)],nchunks))
timestamps = timestamp_chunks[:,int(0.5*len(timestamps_requested)/nchunks)]
data_chunks = np.array(np.split(data_points_requested[:(len(data_points_requested)-len(data_points_requested)%nchunks)],nchunks))
#calculate medians and return them instead of the mean (due to runaways in the log file)
medians = np.sort(data_chunks,axis=-1)[:,int(0.5*len(data_points_requested)/nchunks)]
return [timestamps,medians]
else:
return [timestamps_requested,data_points_requested]
except KeyError: #AttributeError, NameError:
print('Error opening h5 log file.')
return [0]
else:
return [0]
def store_value(self,value):
'''
Store method, used by the worker.
'''
with Lock():
try:
self.value = float(value)
except ValueError:
print('type cast error, ignoring')
self.timestamp = time.time()
if self.logging:
self.append_to_log()
def create_logfile(self):
print('Create new log file for parameter %s.'%self.name)
self.fname = os.path.join(self.log_path,self.name.replace(' ','_')+time.strftime('%d%m%Y%H%M%S')+'.h5')
#print self.fname
self.hf = hdf_lib.Data(self.fname, mode='a')
self.hdf_t = self.hf.add_coordinate('timestamps')
self.hdf_v = self.hf.add_value_vector('values', x = self.hdf_t)
self.url_timestamps = '/entry/data0/timestamps'
self.url_values = '/entry/data0/values'
view = self.hf.add_view('data_vs_time', x = self.hdf_t, y = self.hdf_v) #fit
def append_to_log(self):
with self.log_lock:
self.hdf_t.append(float(self.get_timestamp()))
self.hdf_v.append(float(self.get_last_value()))
def close_logfile(self):
self.hf.close_file()
def schedule(self):
'''
Specifiy whether the parameter is to be updated,
typicalled called in each worker iteration.
Returns True if new parameter value needs to be read.
'''
if time.time() > self.next_schedule:
while time.time() > self.next_schedule:
self.next_schedule += self.interval
return True
else:
return False
def set_schedule(self):
self.next_schedule = time.time()
return True
def __init__(self,config):
'''
Reads the cfg file and instanciates all parameters accordingly.
'''
self.wants_abort = False
self.debug = True
self.cycle_time = config.getfloat('worker','cycle_time')
p_instances = config.get('parameters','p').split(",") #parameter instance names
#print(p_instances)
self.parameters = [self.PARAMETER(config,i,p) for i,p in enumerate(p_instances)] #instanciate parameter array
for i,p_i in enumerate(p_instances): #create human readable aliases, such that objects are accessible from clients according to the seetings.cfg entry in []
setattr(self,str(p_i),self.parameters[i])
self.localhost = self.LOCALHOST(config)
#self.remotehost = self.REMOTEHOST(config)
self.ctrl_lock = Lock()
def atexit(self):
self.set_wants_abort()
def get_wants_abort(self):
with self.ctrl_lock:
return self.wants_abort
def set_wants_abort(self):
with self.ctrl_lock:
self.wants_abort = True
if __name__ == "__main__":
DATA = DATA() | with Lock():
return self.timestamp
def get_history(self,range,nchunks=100):
''' | random_line_split |
data_class.py | # DATA class v1.0 written by HR,JB@KIT 2011, 2016
'''
DATA exchange class, also holds global variables for thread management.
Generalized version based on TIP.
'''
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import time
import os
from threading import Lock
import numpy as np
from qkit.storage import store as hdf_lib
from qkit.gui.plot import plot as qviewkit
class DATA(object):
'''
DATA class. Controls all access to parameter values and stored data.
'''
class LOCALHOST(object):
def __init__(self,config):
self.name = config.get('LOCALHOST','name')
self.ip = config.get('LOCALHOST','ip')
self.port = config.getint('LOCALHOST','port')
class REMOTEHOST(object):
def __init__(self,config):
self.name = config.get('REMOTEHOST','name')
self.ip = config.get('REMOTEHOST','ip')
self.port = config.getint('REMOTEHOST','port')
class PARAMETER(object):
def __init__(self,config,p_index,p_attr):
'''
Initialize parameter attributes, mostly taken from the config file.
'''
self.p_index = p_index
self.name = config.get(str(p_attr),'name')
self.interval = config.getfloat(str(p_attr),'interval')
self.data_request_object = lambda: 0 #this is written by server_main.py
self.value = 0
self.timestamp = 0
self.next_schedule = time.time() + self.interval
self.logging = bool(int(config.get(str(p_attr),'logging')))
self.log_path = config.get(str(p_attr),'log_path')
self.log_lock = Lock()
self.url_timestamps = None
print("Parameter %s loaded."%str(self.name))
def get_all(self):
'''
Get all parameter attributes.
'''
with Lock():
return {
"parameter": self.p_index,
"name":self.name,
"interval":self.interval,
"value":self.value,
"timestamp":self.timestamp,
"next_schedule":self.next_schedule,
}
def set_interval(self,interval):
'''
Setup the scheduling, which corresponds to the value request interval.
'''
interval = float(interval)
if interval == 0:
interval = 120*365*24*3600 #120 years
self.next_schedule += -self.interval+interval #update next schedule
self.interval = interval
def get_last_value(self):
with Lock():
return self.value
def get_timestamp(self):
with Lock():
return self.timestamp
def get_history(self,range,nchunks=100):
'''
Read out the h5 file.
- range: history data range
- nchunks: number of data points to be returned
'''
if self.url_timestamps != None:
try:
with self.log_lock:
timestamps = np.array(self.hf[self.url_timestamps])
values = np.array(self.hf[self.url_values])
data_points_requested_mask = np.where(time.time()-timestamps < range*3600)
timestamps_requested = timestamps[data_points_requested_mask]
data_points_requested = values[data_points_requested_mask]
#return only nchunks data points
if len(data_points_requested) > nchunks:
timestamp_chunks = np.array(np.split(timestamps_requested[:(len(timestamps_requested)-len(timestamps_requested)%nchunks)],nchunks))
timestamps = timestamp_chunks[:,int(0.5*len(timestamps_requested)/nchunks)]
data_chunks = np.array(np.split(data_points_requested[:(len(data_points_requested)-len(data_points_requested)%nchunks)],nchunks))
#calculate medians and return them instead of the mean (due to runaways in the log file)
medians = np.sort(data_chunks,axis=-1)[:,int(0.5*len(data_points_requested)/nchunks)]
return [timestamps,medians]
else:
return [timestamps_requested,data_points_requested]
except KeyError: #AttributeError, NameError:
print('Error opening h5 log file.')
return [0]
else:
return [0]
def store_value(self,value):
'''
Store method, used by the worker.
'''
with Lock():
try:
self.value = float(value)
except ValueError:
print('type cast error, ignoring')
self.timestamp = time.time()
if self.logging:
self.append_to_log()
def create_logfile(self):
print('Create new log file for parameter %s.'%self.name)
self.fname = os.path.join(self.log_path,self.name.replace(' ','_')+time.strftime('%d%m%Y%H%M%S')+'.h5')
#print self.fname
self.hf = hdf_lib.Data(self.fname, mode='a')
self.hdf_t = self.hf.add_coordinate('timestamps')
self.hdf_v = self.hf.add_value_vector('values', x = self.hdf_t)
self.url_timestamps = '/entry/data0/timestamps'
self.url_values = '/entry/data0/values'
view = self.hf.add_view('data_vs_time', x = self.hdf_t, y = self.hdf_v) #fit
def append_to_log(self):
with self.log_lock:
self.hdf_t.append(float(self.get_timestamp()))
self.hdf_v.append(float(self.get_last_value()))
def | (self):
self.hf.close_file()
def schedule(self):
'''
Specifiy whether the parameter is to be updated,
typicalled called in each worker iteration.
Returns True if new parameter value needs to be read.
'''
if time.time() > self.next_schedule:
while time.time() > self.next_schedule:
self.next_schedule += self.interval
return True
else:
return False
def set_schedule(self):
self.next_schedule = time.time()
return True
def __init__(self,config):
'''
Reads the cfg file and instanciates all parameters accordingly.
'''
self.wants_abort = False
self.debug = True
self.cycle_time = config.getfloat('worker','cycle_time')
p_instances = config.get('parameters','p').split(",") #parameter instance names
#print(p_instances)
self.parameters = [self.PARAMETER(config,i,p) for i,p in enumerate(p_instances)] #instanciate parameter array
for i,p_i in enumerate(p_instances): #create human readable aliases, such that objects are accessible from clients according to the seetings.cfg entry in []
setattr(self,str(p_i),self.parameters[i])
self.localhost = self.LOCALHOST(config)
#self.remotehost = self.REMOTEHOST(config)
self.ctrl_lock = Lock()
def atexit(self):
self.set_wants_abort()
def get_wants_abort(self):
with self.ctrl_lock:
return self.wants_abort
def set_wants_abort(self):
with self.ctrl_lock:
self.wants_abort = True
if __name__ == "__main__":
DATA = DATA()
| close_logfile | identifier_name |
data_class.py | # DATA class v1.0 written by HR,JB@KIT 2011, 2016
'''
DATA exchange class, also holds global variables for thread management.
Generalized version based on TIP.
'''
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import time
import os
from threading import Lock
import numpy as np
from qkit.storage import store as hdf_lib
from qkit.gui.plot import plot as qviewkit
class DATA(object):
'''
DATA class. Controls all access to parameter values and stored data.
'''
class LOCALHOST(object):
def __init__(self,config):
self.name = config.get('LOCALHOST','name')
self.ip = config.get('LOCALHOST','ip')
self.port = config.getint('LOCALHOST','port')
class REMOTEHOST(object):
def __init__(self,config):
|
class PARAMETER(object):
def __init__(self,config,p_index,p_attr):
'''
Initialize parameter attributes, mostly taken from the config file.
'''
self.p_index = p_index
self.name = config.get(str(p_attr),'name')
self.interval = config.getfloat(str(p_attr),'interval')
self.data_request_object = lambda: 0 #this is written by server_main.py
self.value = 0
self.timestamp = 0
self.next_schedule = time.time() + self.interval
self.logging = bool(int(config.get(str(p_attr),'logging')))
self.log_path = config.get(str(p_attr),'log_path')
self.log_lock = Lock()
self.url_timestamps = None
print("Parameter %s loaded."%str(self.name))
def get_all(self):
'''
Get all parameter attributes.
'''
with Lock():
return {
"parameter": self.p_index,
"name":self.name,
"interval":self.interval,
"value":self.value,
"timestamp":self.timestamp,
"next_schedule":self.next_schedule,
}
def set_interval(self,interval):
'''
Setup the scheduling, which corresponds to the value request interval.
'''
interval = float(interval)
if interval == 0:
interval = 120*365*24*3600 #120 years
self.next_schedule += -self.interval+interval #update next schedule
self.interval = interval
def get_last_value(self):
with Lock():
return self.value
def get_timestamp(self):
with Lock():
return self.timestamp
def get_history(self,range,nchunks=100):
'''
Read out the h5 file.
- range: history data range
- nchunks: number of data points to be returned
'''
if self.url_timestamps != None:
try:
with self.log_lock:
timestamps = np.array(self.hf[self.url_timestamps])
values = np.array(self.hf[self.url_values])
data_points_requested_mask = np.where(time.time()-timestamps < range*3600)
timestamps_requested = timestamps[data_points_requested_mask]
data_points_requested = values[data_points_requested_mask]
#return only nchunks data points
if len(data_points_requested) > nchunks:
timestamp_chunks = np.array(np.split(timestamps_requested[:(len(timestamps_requested)-len(timestamps_requested)%nchunks)],nchunks))
timestamps = timestamp_chunks[:,int(0.5*len(timestamps_requested)/nchunks)]
data_chunks = np.array(np.split(data_points_requested[:(len(data_points_requested)-len(data_points_requested)%nchunks)],nchunks))
#calculate medians and return them instead of the mean (due to runaways in the log file)
medians = np.sort(data_chunks,axis=-1)[:,int(0.5*len(data_points_requested)/nchunks)]
return [timestamps,medians]
else:
return [timestamps_requested,data_points_requested]
except KeyError: #AttributeError, NameError:
print('Error opening h5 log file.')
return [0]
else:
return [0]
def store_value(self,value):
'''
Store method, used by the worker.
'''
with Lock():
try:
self.value = float(value)
except ValueError:
print('type cast error, ignoring')
self.timestamp = time.time()
if self.logging:
self.append_to_log()
def create_logfile(self):
print('Create new log file for parameter %s.'%self.name)
self.fname = os.path.join(self.log_path,self.name.replace(' ','_')+time.strftime('%d%m%Y%H%M%S')+'.h5')
#print self.fname
self.hf = hdf_lib.Data(self.fname, mode='a')
self.hdf_t = self.hf.add_coordinate('timestamps')
self.hdf_v = self.hf.add_value_vector('values', x = self.hdf_t)
self.url_timestamps = '/entry/data0/timestamps'
self.url_values = '/entry/data0/values'
view = self.hf.add_view('data_vs_time', x = self.hdf_t, y = self.hdf_v) #fit
def append_to_log(self):
with self.log_lock:
self.hdf_t.append(float(self.get_timestamp()))
self.hdf_v.append(float(self.get_last_value()))
def close_logfile(self):
self.hf.close_file()
def schedule(self):
'''
Specifiy whether the parameter is to be updated,
typicalled called in each worker iteration.
Returns True if new parameter value needs to be read.
'''
if time.time() > self.next_schedule:
while time.time() > self.next_schedule:
self.next_schedule += self.interval
return True
else:
return False
def set_schedule(self):
self.next_schedule = time.time()
return True
def __init__(self,config):
'''
Reads the cfg file and instanciates all parameters accordingly.
'''
self.wants_abort = False
self.debug = True
self.cycle_time = config.getfloat('worker','cycle_time')
p_instances = config.get('parameters','p').split(",") #parameter instance names
#print(p_instances)
self.parameters = [self.PARAMETER(config,i,p) for i,p in enumerate(p_instances)] #instanciate parameter array
for i,p_i in enumerate(p_instances): #create human readable aliases, such that objects are accessible from clients according to the seetings.cfg entry in []
setattr(self,str(p_i),self.parameters[i])
self.localhost = self.LOCALHOST(config)
#self.remotehost = self.REMOTEHOST(config)
self.ctrl_lock = Lock()
def atexit(self):
self.set_wants_abort()
def get_wants_abort(self):
with self.ctrl_lock:
return self.wants_abort
def set_wants_abort(self):
with self.ctrl_lock:
self.wants_abort = True
if __name__ == "__main__":
DATA = DATA()
| self.name = config.get('REMOTEHOST','name')
self.ip = config.get('REMOTEHOST','ip')
self.port = config.getint('REMOTEHOST','port') | identifier_body |
MVCSBundle.ts | // ------------------------------------------------------------------------------
// Copyright (c) 2016 Goodgame Studios. All Rights Reserved.
//
// NOTICE: You are permitted to use, modify, and distribute this file
// in accordance with the terms of the license agreement accompanying it.
// ------------------------------------------------------------------------------
import { IBundle } from "../../framework/api/IBundle";
import { IContext } from "../../framework/api/IContext";
import { LogLevel } from "../../framework/api/LogLevel";
import { DirectCommandMapExtension } from "../../extensions/directCommandMap/DirectCommandMapExtension";
import { InjectableLoggerExtension } from "../../extensions/enhancedLogging/InjectableLoggerExtension";
import { ConsoleLoggingExtension } from "../../extensions/enhancedLogging/ConsoleLoggingExtension";
import { EventCommandMapExtension } from "../../extensions/eventCommandMap/EventCommandMapExtension";
import { EventDispatcherExtension } from "../../extensions/eventDispatcher/EventDispatcherExtension";
import { LocalEventMapExtension } from "../../extensions/localEventMap/LocalEventMapExtension";
/**
* For that Classic Robotlegs flavour
*
* <p>This bundle installs a number of extensions commonly used
* in typical Robotlegs applications and modules.</p>
*/
export class MVCSBundle implements IBundle {
/*============================================================================*/
/* Public Functions */
/*============================================================================*/
/**
* @inheritDoc
*/
public | (context: IContext): void {
context.install(
ConsoleLoggingExtension,
InjectableLoggerExtension,
EventDispatcherExtension,
DirectCommandMapExtension,
EventCommandMapExtension,
LocalEventMapExtension
);
}
}
| extend | identifier_name |
MVCSBundle.ts | // ------------------------------------------------------------------------------
// Copyright (c) 2016 Goodgame Studios. All Rights Reserved.
//
// NOTICE: You are permitted to use, modify, and distribute this file
// in accordance with the terms of the license agreement accompanying it.
// ------------------------------------------------------------------------------
import { IBundle } from "../../framework/api/IBundle";
import { IContext } from "../../framework/api/IContext";
import { LogLevel } from "../../framework/api/LogLevel";
import { DirectCommandMapExtension } from "../../extensions/directCommandMap/DirectCommandMapExtension";
import { InjectableLoggerExtension } from "../../extensions/enhancedLogging/InjectableLoggerExtension";
import { ConsoleLoggingExtension } from "../../extensions/enhancedLogging/ConsoleLoggingExtension";
import { EventCommandMapExtension } from "../../extensions/eventCommandMap/EventCommandMapExtension";
import { EventDispatcherExtension } from "../../extensions/eventDispatcher/EventDispatcherExtension";
import { LocalEventMapExtension } from "../../extensions/localEventMap/LocalEventMapExtension";
/**
* For that Classic Robotlegs flavour
*
* <p>This bundle installs a number of extensions commonly used
* in typical Robotlegs applications and modules.</p>
*/
export class MVCSBundle implements IBundle {
/*============================================================================*/
/* Public Functions */
/*============================================================================*/
/**
* @inheritDoc
*/
public extend(context: IContext): void |
}
| {
context.install(
ConsoleLoggingExtension,
InjectableLoggerExtension,
EventDispatcherExtension,
DirectCommandMapExtension,
EventCommandMapExtension,
LocalEventMapExtension
);
} | identifier_body |
MVCSBundle.ts | // ------------------------------------------------------------------------------
// Copyright (c) 2016 Goodgame Studios. All Rights Reserved.
//
// NOTICE: You are permitted to use, modify, and distribute this file
// in accordance with the terms of the license agreement accompanying it.
// ------------------------------------------------------------------------------
import { IBundle } from "../../framework/api/IBundle";
import { IContext } from "../../framework/api/IContext";
import { LogLevel } from "../../framework/api/LogLevel";
import { DirectCommandMapExtension } from "../../extensions/directCommandMap/DirectCommandMapExtension";
import { InjectableLoggerExtension } from "../../extensions/enhancedLogging/InjectableLoggerExtension";
import { ConsoleLoggingExtension } from "../../extensions/enhancedLogging/ConsoleLoggingExtension";
import { EventCommandMapExtension } from "../../extensions/eventCommandMap/EventCommandMapExtension";
import { EventDispatcherExtension } from "../../extensions/eventDispatcher/EventDispatcherExtension";
import { LocalEventMapExtension } from "../../extensions/localEventMap/LocalEventMapExtension";
/**
* For that Classic Robotlegs flavour
*
* <p>This bundle installs a number of extensions commonly used
* in typical Robotlegs applications and modules.</p>
*/
export class MVCSBundle implements IBundle {
| /**
* @inheritDoc
*/
public extend(context: IContext): void {
context.install(
ConsoleLoggingExtension,
InjectableLoggerExtension,
EventDispatcherExtension,
DirectCommandMapExtension,
EventCommandMapExtension,
LocalEventMapExtension
);
}
} | /*============================================================================*/
/* Public Functions */
/*============================================================================*/
| random_line_split |
snmp-decode.rs | extern crate asn1_cereal;
extern crate argparse;
extern crate serde;
extern crate serde_json;
use asn1_cereal::{tag, byte};
use asn1_cereal::ber::stream;
// SNMP ASN.1 Definition
// https://tools.ietf.org/html/rfc1157#page-30
type ObjectIdentifier = u64;
type NetworkAddress = u64;
type ObjectName = String;
struct Message {
version: i32,
community: String,
data: PDUs,
}
enum PDUs {
get_request(GetRequest),
get_next_request(GetNextRequest),
get_response(GetResponse),
set_request(SetRequest),
trap(TrapPDU),
}
|
struct GetNextRequest(PDU);
struct GetResponse(PDU);
struct SetRequest(PDU);
struct PDU {
request_id: i32,
error_status: i32,
error_index: i32,
variable_bindings: VarBindList,
}
struct TrapPDU {
enterprise: ObjectIdentifier,
agent_addr: NetworkAddress,
generic_trap: i32,
specific_trap: i32,
time_stamp: TimeTicks,
variable_bindings: VarBindList,
}
struct VarBind {
name: ObjectName,
value: ObjectSyntax,
}
type VarBindList = Vec<VarBind>;
use std::io;
use std::io::Read;
use std::fs;
use std::path::Path;
use std::collections::BTreeMap;
use argparse::{ArgumentParser, StoreTrue, StoreOption};
use serde_json::value::Value;
use serde_json::ser::to_string_pretty;
fn main() {
let opts = parse_args();
let path = Path::new(opts.file.as_ref().unwrap());
if !path.is_file() {
panic!("Supplied file does not exist");
}
// Create a buffered reader from the file.
let reader = io::BufReader::new(fs::File::open(path).unwrap()).bytes();
}
struct ProgOpts {
file: Option<String>,
verbose: bool,
}
fn parse_args() -> ProgOpts {
let mut opts = ProgOpts {
file: None,
verbose: false,
};
{
let mut ap = ArgumentParser::new();
ap.set_description("Decode ASN.1 files");
ap.refer(&mut opts.verbose)
.add_option(&["-v", "--verbose"], StoreTrue, "Verbose output");
ap.refer(&mut opts.file)
.add_argument("file", StoreOption, "ASN.1 file to decode");
ap.parse_args_or_exit();
}
opts
} | struct GetRequest(PDU); | random_line_split |
snmp-decode.rs | extern crate asn1_cereal;
extern crate argparse;
extern crate serde;
extern crate serde_json;
use asn1_cereal::{tag, byte};
use asn1_cereal::ber::stream;
// SNMP ASN.1 Definition
// https://tools.ietf.org/html/rfc1157#page-30
type ObjectIdentifier = u64;
type NetworkAddress = u64;
type ObjectName = String;
struct Message {
version: i32,
community: String,
data: PDUs,
}
enum PDUs {
get_request(GetRequest),
get_next_request(GetNextRequest),
get_response(GetResponse),
set_request(SetRequest),
trap(TrapPDU),
}
struct GetRequest(PDU);
struct GetNextRequest(PDU);
struct GetResponse(PDU);
struct SetRequest(PDU);
struct PDU {
request_id: i32,
error_status: i32,
error_index: i32,
variable_bindings: VarBindList,
}
struct TrapPDU {
enterprise: ObjectIdentifier,
agent_addr: NetworkAddress,
generic_trap: i32,
specific_trap: i32,
time_stamp: TimeTicks,
variable_bindings: VarBindList,
}
struct VarBind {
name: ObjectName,
value: ObjectSyntax,
}
type VarBindList = Vec<VarBind>;
use std::io;
use std::io::Read;
use std::fs;
use std::path::Path;
use std::collections::BTreeMap;
use argparse::{ArgumentParser, StoreTrue, StoreOption};
use serde_json::value::Value;
use serde_json::ser::to_string_pretty;
fn main() {
let opts = parse_args();
let path = Path::new(opts.file.as_ref().unwrap());
if !path.is_file() |
// Create a buffered reader from the file.
let reader = io::BufReader::new(fs::File::open(path).unwrap()).bytes();
}
struct ProgOpts {
file: Option<String>,
verbose: bool,
}
fn parse_args() -> ProgOpts {
let mut opts = ProgOpts {
file: None,
verbose: false,
};
{
let mut ap = ArgumentParser::new();
ap.set_description("Decode ASN.1 files");
ap.refer(&mut opts.verbose)
.add_option(&["-v", "--verbose"], StoreTrue, "Verbose output");
ap.refer(&mut opts.file)
.add_argument("file", StoreOption, "ASN.1 file to decode");
ap.parse_args_or_exit();
}
opts
}
| {
panic!("Supplied file does not exist");
} | conditional_block |
snmp-decode.rs | extern crate asn1_cereal;
extern crate argparse;
extern crate serde;
extern crate serde_json;
use asn1_cereal::{tag, byte};
use asn1_cereal::ber::stream;
// SNMP ASN.1 Definition
// https://tools.ietf.org/html/rfc1157#page-30
type ObjectIdentifier = u64;
type NetworkAddress = u64;
type ObjectName = String;
struct Message {
version: i32,
community: String,
data: PDUs,
}
enum PDUs {
get_request(GetRequest),
get_next_request(GetNextRequest),
get_response(GetResponse),
set_request(SetRequest),
trap(TrapPDU),
}
struct GetRequest(PDU);
struct GetNextRequest(PDU);
struct | (PDU);
struct SetRequest(PDU);
struct PDU {
request_id: i32,
error_status: i32,
error_index: i32,
variable_bindings: VarBindList,
}
struct TrapPDU {
enterprise: ObjectIdentifier,
agent_addr: NetworkAddress,
generic_trap: i32,
specific_trap: i32,
time_stamp: TimeTicks,
variable_bindings: VarBindList,
}
struct VarBind {
name: ObjectName,
value: ObjectSyntax,
}
type VarBindList = Vec<VarBind>;
use std::io;
use std::io::Read;
use std::fs;
use std::path::Path;
use std::collections::BTreeMap;
use argparse::{ArgumentParser, StoreTrue, StoreOption};
use serde_json::value::Value;
use serde_json::ser::to_string_pretty;
fn main() {
let opts = parse_args();
let path = Path::new(opts.file.as_ref().unwrap());
if !path.is_file() {
panic!("Supplied file does not exist");
}
// Create a buffered reader from the file.
let reader = io::BufReader::new(fs::File::open(path).unwrap()).bytes();
}
struct ProgOpts {
file: Option<String>,
verbose: bool,
}
fn parse_args() -> ProgOpts {
let mut opts = ProgOpts {
file: None,
verbose: false,
};
{
let mut ap = ArgumentParser::new();
ap.set_description("Decode ASN.1 files");
ap.refer(&mut opts.verbose)
.add_option(&["-v", "--verbose"], StoreTrue, "Verbose output");
ap.refer(&mut opts.file)
.add_argument("file", StoreOption, "ASN.1 file to decode");
ap.parse_args_or_exit();
}
opts
}
| GetResponse | identifier_name |
xla_test.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Definition of XLA test case."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import random
import re
import numpy as np
from tensorflow.contrib.compiler import jit
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
FLAGS = flags.FLAGS
flags.DEFINE_string('test_device', None,
'Tensorflow device on which to place operators under test')
flags.DEFINE_string('types', None, 'Types to test. Comma-separated list.')
flags.DEFINE_string('disabled_manifest', None,
'Path to a file with a list of tests that should not run.')
class XLATestCase(test.TestCase):
"""XLA test cases are parameterized test cases."""
def __init__(self, method_name='runTest'):
super(XLATestCase, self).__init__(method_name)
self.device = FLAGS.test_device
self.has_custom_call = (self.device == 'XLA_CPU')
self._all_tf_types = set([
dtypes.as_dtype(types_pb2.DataType.Value(name))
for name in FLAGS.types.split(',')
])
self.int_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_integer
])
self._float_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_floating
])
self.complex_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_complex
])
self._numeric_tf_types = set(
self.int_tf_types | self._float_tf_types | self.complex_tf_types)
self._all_types = set(
[dtype.as_numpy_dtype for dtype in self._all_tf_types])
self.int_types = set([dtype.as_numpy_dtype for dtype in self.int_tf_types])
self._float_types = set(
[dtype.as_numpy_dtype for dtype in self._float_tf_types])
self.complex_types = set([
dtype.as_numpy_dtype for dtype in self.complex_tf_types
])
self._numeric_types = set(
self.int_types | self._float_types | self.complex_types)
# Parse the manifest file, if any, into a regex identifying tests to
# disable
self.disabled_regex = None
self._method_types_filter = dict()
# TODO(xpan): Make it text proto if it doesn't scale.
# Each line of the manifest file specifies an entry. The entry can be
# 1) TestNameRegex // E.g. CumprodTest.* Or
# 2) TestName TypeName // E.g. AdamOptimizerTest.testSharing DT_BFLOAT16
# The 1) disables the entire test. While 2) only filter some numeric types
# so that they are not used in those tests.
if FLAGS.disabled_manifest is not None:
comments_re = re.compile('#.*$')
manifest_file = open(FLAGS.disabled_manifest, 'r')
disabled_tests = []
disabled_method_types = []
for l in manifest_file.read().splitlines():
entry = comments_re.sub('', l).strip().split(' ')
if len(entry) == 1:
disabled_tests.append(entry[0])
elif len(entry) == 2:
disabled_method_types.append(
(entry[0], entry[1].strip().split(',')))
else:
raise ValueError('Bad entry in manifest file.')
self.disabled_regex = re.compile('|'.join(disabled_tests))
for method, types in disabled_method_types:
self._method_types_filter[method] = set([
dtypes.as_dtype(types_pb2.DataType.Value(name)).as_numpy_dtype
for name in types])
manifest_file.close()
@property
def all_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._all_tf_types - tf_types
@property
def float_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_types - self._method_types_filter.get(name, set())
@property
def float_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_tf_types - self._method_types_filter.get(name, set())
@property
def numeric_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._numeric_tf_types - tf_types
@property
def numeric_types(self):
|
@property
def all_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._all_types - self._method_types_filter.get(name, set())
def setUp(self):
super(XLATestCase, self).setUp()
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
if self.disabled_regex is not None and self.disabled_regex.match(name):
logging.info('Disabled test case: %s', name)
self.skipTest('{} is disabled by manifest.'.format(name))
return
logging.info('Start test case: %s', name)
random.seed(random_seed.DEFAULT_GRAPH_SEED)
np.random.seed(random_seed.DEFAULT_GRAPH_SEED)
def tearDown(self):
super(XLATestCase, self).tearDown()
logging.info('End test case: %s', self._testMethodName)
@contextlib.contextmanager
def test_session(self):
"""Custom implementation of test_session() for XLA tests.
We override the standard Tensorflow test_session() since it is too
specific to CPU and GPU tests. In particular, we want to disable soft
placement and explicitly assign ops to devices under test.
Yields:
A session to use when running a test case.
"""
graph = ops.Graph()
with session.Session(graph=graph) as sess, graph.as_default():
yield sess
@contextlib.contextmanager
def test_scope(self):
"""Test scope that runs tests on a Tensorflow/XLA device.
Uses a compilation_scope() to mark operators to compile.
Yields:
A scope to apply to the operators under test.
"""
with ops.device('device:{}:0'.format(self.device)):
yield
def Benchmark(tf_bench,
builder_fn,
use_xla_jit,
device,
separate_compiled_gradients=False):
"""Build a graph and run benchmarks against it, with or without XLA.
Args:
tf_bench: An instance of tf.test.Benchmark, used to run the benchmark.
builder_fn: A function that builds a graph when invoked, and returns
(name, fetches), where name is the name of the test, and fetches
is a list of tensors to fetch as output.
use_xla_jit: If true compile with the XLA JIT, otherwise use regular TF.
device: The tensorflow device to run on, e.g. "cpu", "gpu".
separate_compiled_gradients: If true put each gradient subgraph into a
separate compilation scope. This gives fine-grained control over which
portions of the graph will be compiled as a single unit. Compiling
gradients separately may yield better performance for some graphs.
The scope is named based on the scope of the forward computation as well
as the name of the gradients. As a result, the gradients will be compiled
in a scope that is separate from both the forward computation, and from
other gradients.
"""
with ops.Graph().as_default():
name = None
targets = []
with ops.device(device):
fetches = []
jit_scope = jit.experimental_jit_scope
with jit_scope(
compile_ops=use_xla_jit,
separate_compiled_gradients=separate_compiled_gradients):
name, fetches = builder_fn()
# We only want to benchmark the operations themselves, and not the data
# transfer of the result(s). Non-compiled identity ops ensure XLA
# doesn't know we're dropping the results, otherwise it might compile
# away the entire computation.
for fetch in fetches:
targets.append(array_ops.identity(fetch).op)
config = config_pb2.ConfigProto(allow_soft_placement=True)
with session.Session(config=config) as sess:
sess.run(variables.global_variables_initializer())
xla = 'xla_' if use_xla_jit else ''
tf_bench.run_op_benchmark(
sess, targets, name='%s_%s%s' % (name, xla, device))
| name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._numeric_types - self._method_types_filter.get(name, set()) | identifier_body |
xla_test.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Definition of XLA test case."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import random
import re
import numpy as np
from tensorflow.contrib.compiler import jit
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
FLAGS = flags.FLAGS
flags.DEFINE_string('test_device', None,
'Tensorflow device on which to place operators under test')
flags.DEFINE_string('types', None, 'Types to test. Comma-separated list.')
flags.DEFINE_string('disabled_manifest', None,
'Path to a file with a list of tests that should not run.')
class XLATestCase(test.TestCase):
"""XLA test cases are parameterized test cases."""
def __init__(self, method_name='runTest'):
super(XLATestCase, self).__init__(method_name)
self.device = FLAGS.test_device
self.has_custom_call = (self.device == 'XLA_CPU')
self._all_tf_types = set([
dtypes.as_dtype(types_pb2.DataType.Value(name))
for name in FLAGS.types.split(',')
])
self.int_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_integer
])
self._float_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_floating
])
self.complex_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_complex
])
self._numeric_tf_types = set(
self.int_tf_types | self._float_tf_types | self.complex_tf_types)
self._all_types = set(
[dtype.as_numpy_dtype for dtype in self._all_tf_types])
self.int_types = set([dtype.as_numpy_dtype for dtype in self.int_tf_types])
self._float_types = set(
[dtype.as_numpy_dtype for dtype in self._float_tf_types])
self.complex_types = set([
dtype.as_numpy_dtype for dtype in self.complex_tf_types
])
self._numeric_types = set(
self.int_types | self._float_types | self.complex_types)
# Parse the manifest file, if any, into a regex identifying tests to
# disable
self.disabled_regex = None
self._method_types_filter = dict()
# TODO(xpan): Make it text proto if it doesn't scale.
# Each line of the manifest file specifies an entry. The entry can be
# 1) TestNameRegex // E.g. CumprodTest.* Or
# 2) TestName TypeName // E.g. AdamOptimizerTest.testSharing DT_BFLOAT16
# The 1) disables the entire test. While 2) only filter some numeric types
# so that they are not used in those tests.
if FLAGS.disabled_manifest is not None:
comments_re = re.compile('#.*$')
manifest_file = open(FLAGS.disabled_manifest, 'r')
disabled_tests = []
disabled_method_types = []
for l in manifest_file.read().splitlines():
entry = comments_re.sub('', l).strip().split(' ')
if len(entry) == 1:
|
elif len(entry) == 2:
disabled_method_types.append(
(entry[0], entry[1].strip().split(',')))
else:
raise ValueError('Bad entry in manifest file.')
self.disabled_regex = re.compile('|'.join(disabled_tests))
for method, types in disabled_method_types:
self._method_types_filter[method] = set([
dtypes.as_dtype(types_pb2.DataType.Value(name)).as_numpy_dtype
for name in types])
manifest_file.close()
@property
def all_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._all_tf_types - tf_types
@property
def float_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_types - self._method_types_filter.get(name, set())
@property
def float_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_tf_types - self._method_types_filter.get(name, set())
@property
def numeric_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._numeric_tf_types - tf_types
@property
def numeric_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._numeric_types - self._method_types_filter.get(name, set())
@property
def all_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._all_types - self._method_types_filter.get(name, set())
def setUp(self):
super(XLATestCase, self).setUp()
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
if self.disabled_regex is not None and self.disabled_regex.match(name):
logging.info('Disabled test case: %s', name)
self.skipTest('{} is disabled by manifest.'.format(name))
return
logging.info('Start test case: %s', name)
random.seed(random_seed.DEFAULT_GRAPH_SEED)
np.random.seed(random_seed.DEFAULT_GRAPH_SEED)
def tearDown(self):
super(XLATestCase, self).tearDown()
logging.info('End test case: %s', self._testMethodName)
@contextlib.contextmanager
def test_session(self):
"""Custom implementation of test_session() for XLA tests.
We override the standard Tensorflow test_session() since it is too
specific to CPU and GPU tests. In particular, we want to disable soft
placement and explicitly assign ops to devices under test.
Yields:
A session to use when running a test case.
"""
graph = ops.Graph()
with session.Session(graph=graph) as sess, graph.as_default():
yield sess
@contextlib.contextmanager
def test_scope(self):
"""Test scope that runs tests on a Tensorflow/XLA device.
Uses a compilation_scope() to mark operators to compile.
Yields:
A scope to apply to the operators under test.
"""
with ops.device('device:{}:0'.format(self.device)):
yield
def Benchmark(tf_bench,
builder_fn,
use_xla_jit,
device,
separate_compiled_gradients=False):
"""Build a graph and run benchmarks against it, with or without XLA.
Args:
tf_bench: An instance of tf.test.Benchmark, used to run the benchmark.
builder_fn: A function that builds a graph when invoked, and returns
(name, fetches), where name is the name of the test, and fetches
is a list of tensors to fetch as output.
use_xla_jit: If true compile with the XLA JIT, otherwise use regular TF.
device: The tensorflow device to run on, e.g. "cpu", "gpu".
separate_compiled_gradients: If true put each gradient subgraph into a
separate compilation scope. This gives fine-grained control over which
portions of the graph will be compiled as a single unit. Compiling
gradients separately may yield better performance for some graphs.
The scope is named based on the scope of the forward computation as well
as the name of the gradients. As a result, the gradients will be compiled
in a scope that is separate from both the forward computation, and from
other gradients.
"""
with ops.Graph().as_default():
name = None
targets = []
with ops.device(device):
fetches = []
jit_scope = jit.experimental_jit_scope
with jit_scope(
compile_ops=use_xla_jit,
separate_compiled_gradients=separate_compiled_gradients):
name, fetches = builder_fn()
# We only want to benchmark the operations themselves, and not the data
# transfer of the result(s). Non-compiled identity ops ensure XLA
# doesn't know we're dropping the results, otherwise it might compile
# away the entire computation.
for fetch in fetches:
targets.append(array_ops.identity(fetch).op)
config = config_pb2.ConfigProto(allow_soft_placement=True)
with session.Session(config=config) as sess:
sess.run(variables.global_variables_initializer())
xla = 'xla_' if use_xla_jit else ''
tf_bench.run_op_benchmark(
sess, targets, name='%s_%s%s' % (name, xla, device))
| disabled_tests.append(entry[0]) | conditional_block |
xla_test.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Definition of XLA test case."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import random
import re
import numpy as np
from tensorflow.contrib.compiler import jit
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
FLAGS = flags.FLAGS
flags.DEFINE_string('test_device', None,
'Tensorflow device on which to place operators under test')
flags.DEFINE_string('types', None, 'Types to test. Comma-separated list.')
flags.DEFINE_string('disabled_manifest', None,
'Path to a file with a list of tests that should not run.')
class XLATestCase(test.TestCase):
"""XLA test cases are parameterized test cases."""
def __init__(self, method_name='runTest'):
super(XLATestCase, self).__init__(method_name)
self.device = FLAGS.test_device
self.has_custom_call = (self.device == 'XLA_CPU')
self._all_tf_types = set([
dtypes.as_dtype(types_pb2.DataType.Value(name))
for name in FLAGS.types.split(',')
])
self.int_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_integer
])
self._float_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_floating
])
self.complex_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_complex
])
self._numeric_tf_types = set(
self.int_tf_types | self._float_tf_types | self.complex_tf_types)
self._all_types = set(
[dtype.as_numpy_dtype for dtype in self._all_tf_types])
self.int_types = set([dtype.as_numpy_dtype for dtype in self.int_tf_types])
self._float_types = set(
[dtype.as_numpy_dtype for dtype in self._float_tf_types])
self.complex_types = set([
dtype.as_numpy_dtype for dtype in self.complex_tf_types
])
self._numeric_types = set(
self.int_types | self._float_types | self.complex_types)
# Parse the manifest file, if any, into a regex identifying tests to
# disable
self.disabled_regex = None
self._method_types_filter = dict()
# TODO(xpan): Make it text proto if it doesn't scale.
# Each line of the manifest file specifies an entry. The entry can be
# 1) TestNameRegex // E.g. CumprodTest.* Or
# 2) TestName TypeName // E.g. AdamOptimizerTest.testSharing DT_BFLOAT16
# The 1) disables the entire test. While 2) only filter some numeric types
# so that they are not used in those tests.
if FLAGS.disabled_manifest is not None:
comments_re = re.compile('#.*$')
manifest_file = open(FLAGS.disabled_manifest, 'r')
disabled_tests = []
disabled_method_types = []
for l in manifest_file.read().splitlines():
entry = comments_re.sub('', l).strip().split(' ')
if len(entry) == 1:
disabled_tests.append(entry[0])
elif len(entry) == 2:
disabled_method_types.append(
(entry[0], entry[1].strip().split(',')))
else:
raise ValueError('Bad entry in manifest file.')
self.disabled_regex = re.compile('|'.join(disabled_tests))
for method, types in disabled_method_types:
self._method_types_filter[method] = set([
dtypes.as_dtype(types_pb2.DataType.Value(name)).as_numpy_dtype
for name in types])
manifest_file.close()
@property
def all_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._all_tf_types - tf_types
@property
def | (self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_types - self._method_types_filter.get(name, set())
@property
def float_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_tf_types - self._method_types_filter.get(name, set())
@property
def numeric_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._numeric_tf_types - tf_types
@property
def numeric_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._numeric_types - self._method_types_filter.get(name, set())
@property
def all_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._all_types - self._method_types_filter.get(name, set())
def setUp(self):
super(XLATestCase, self).setUp()
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
if self.disabled_regex is not None and self.disabled_regex.match(name):
logging.info('Disabled test case: %s', name)
self.skipTest('{} is disabled by manifest.'.format(name))
return
logging.info('Start test case: %s', name)
random.seed(random_seed.DEFAULT_GRAPH_SEED)
np.random.seed(random_seed.DEFAULT_GRAPH_SEED)
def tearDown(self):
super(XLATestCase, self).tearDown()
logging.info('End test case: %s', self._testMethodName)
@contextlib.contextmanager
def test_session(self):
"""Custom implementation of test_session() for XLA tests.
We override the standard Tensorflow test_session() since it is too
specific to CPU and GPU tests. In particular, we want to disable soft
placement and explicitly assign ops to devices under test.
Yields:
A session to use when running a test case.
"""
graph = ops.Graph()
with session.Session(graph=graph) as sess, graph.as_default():
yield sess
@contextlib.contextmanager
def test_scope(self):
"""Test scope that runs tests on a Tensorflow/XLA device.
Uses a compilation_scope() to mark operators to compile.
Yields:
A scope to apply to the operators under test.
"""
with ops.device('device:{}:0'.format(self.device)):
yield
def Benchmark(tf_bench,
builder_fn,
use_xla_jit,
device,
separate_compiled_gradients=False):
"""Build a graph and run benchmarks against it, with or without XLA.
Args:
tf_bench: An instance of tf.test.Benchmark, used to run the benchmark.
builder_fn: A function that builds a graph when invoked, and returns
(name, fetches), where name is the name of the test, and fetches
is a list of tensors to fetch as output.
use_xla_jit: If true compile with the XLA JIT, otherwise use regular TF.
device: The tensorflow device to run on, e.g. "cpu", "gpu".
separate_compiled_gradients: If true put each gradient subgraph into a
separate compilation scope. This gives fine-grained control over which
portions of the graph will be compiled as a single unit. Compiling
gradients separately may yield better performance for some graphs.
The scope is named based on the scope of the forward computation as well
as the name of the gradients. As a result, the gradients will be compiled
in a scope that is separate from both the forward computation, and from
other gradients.
"""
with ops.Graph().as_default():
name = None
targets = []
with ops.device(device):
fetches = []
jit_scope = jit.experimental_jit_scope
with jit_scope(
compile_ops=use_xla_jit,
separate_compiled_gradients=separate_compiled_gradients):
name, fetches = builder_fn()
# We only want to benchmark the operations themselves, and not the data
# transfer of the result(s). Non-compiled identity ops ensure XLA
# doesn't know we're dropping the results, otherwise it might compile
# away the entire computation.
for fetch in fetches:
targets.append(array_ops.identity(fetch).op)
config = config_pb2.ConfigProto(allow_soft_placement=True)
with session.Session(config=config) as sess:
sess.run(variables.global_variables_initializer())
xla = 'xla_' if use_xla_jit else ''
tf_bench.run_op_benchmark(
sess, targets, name='%s_%s%s' % (name, xla, device))
| float_types | identifier_name |
xla_test.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Definition of XLA test case."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import random
import re
import numpy as np
from tensorflow.contrib.compiler import jit
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
FLAGS = flags.FLAGS
flags.DEFINE_string('test_device', None,
'Tensorflow device on which to place operators under test')
flags.DEFINE_string('types', None, 'Types to test. Comma-separated list.')
flags.DEFINE_string('disabled_manifest', None,
'Path to a file with a list of tests that should not run.')
class XLATestCase(test.TestCase):
"""XLA test cases are parameterized test cases."""
def __init__(self, method_name='runTest'):
super(XLATestCase, self).__init__(method_name)
self.device = FLAGS.test_device
self.has_custom_call = (self.device == 'XLA_CPU')
self._all_tf_types = set([
dtypes.as_dtype(types_pb2.DataType.Value(name))
for name in FLAGS.types.split(',')
])
self.int_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_integer
])
self._float_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_floating
])
self.complex_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_complex
])
self._numeric_tf_types = set(
self.int_tf_types | self._float_tf_types | self.complex_tf_types)
self._all_types = set(
[dtype.as_numpy_dtype for dtype in self._all_tf_types])
self.int_types = set([dtype.as_numpy_dtype for dtype in self.int_tf_types])
self._float_types = set(
[dtype.as_numpy_dtype for dtype in self._float_tf_types])
self.complex_types = set([
dtype.as_numpy_dtype for dtype in self.complex_tf_types
])
self._numeric_types = set(
self.int_types | self._float_types | self.complex_types)
# Parse the manifest file, if any, into a regex identifying tests to
# disable
self.disabled_regex = None
self._method_types_filter = dict()
# TODO(xpan): Make it text proto if it doesn't scale.
# Each line of the manifest file specifies an entry. The entry can be
# 1) TestNameRegex // E.g. CumprodTest.* Or
# 2) TestName TypeName // E.g. AdamOptimizerTest.testSharing DT_BFLOAT16
# The 1) disables the entire test. While 2) only filter some numeric types
# so that they are not used in those tests.
if FLAGS.disabled_manifest is not None:
comments_re = re.compile('#.*$')
manifest_file = open(FLAGS.disabled_manifest, 'r')
disabled_tests = []
disabled_method_types = []
for l in manifest_file.read().splitlines():
entry = comments_re.sub('', l).strip().split(' ')
if len(entry) == 1:
disabled_tests.append(entry[0])
elif len(entry) == 2:
disabled_method_types.append(
(entry[0], entry[1].strip().split(',')))
else:
raise ValueError('Bad entry in manifest file.')
self.disabled_regex = re.compile('|'.join(disabled_tests))
for method, types in disabled_method_types:
self._method_types_filter[method] = set([
dtypes.as_dtype(types_pb2.DataType.Value(name)).as_numpy_dtype
for name in types])
manifest_file.close()
@property
def all_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._all_tf_types - tf_types
@property
def float_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_types - self._method_types_filter.get(name, set())
@property
def float_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_tf_types - self._method_types_filter.get(name, set())
@property
def numeric_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._numeric_tf_types - tf_types
@property
def numeric_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._numeric_types - self._method_types_filter.get(name, set())
@property
def all_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._all_types - self._method_types_filter.get(name, set())
def setUp(self):
super(XLATestCase, self).setUp()
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
if self.disabled_regex is not None and self.disabled_regex.match(name):
logging.info('Disabled test case: %s', name)
self.skipTest('{} is disabled by manifest.'.format(name))
return
logging.info('Start test case: %s', name)
random.seed(random_seed.DEFAULT_GRAPH_SEED)
np.random.seed(random_seed.DEFAULT_GRAPH_SEED)
def tearDown(self):
super(XLATestCase, self).tearDown()
logging.info('End test case: %s', self._testMethodName)
@contextlib.contextmanager
def test_session(self):
"""Custom implementation of test_session() for XLA tests.
We override the standard Tensorflow test_session() since it is too
specific to CPU and GPU tests. In particular, we want to disable soft
placement and explicitly assign ops to devices under test.
Yields:
A session to use when running a test case.
"""
graph = ops.Graph()
with session.Session(graph=graph) as sess, graph.as_default():
yield sess
@contextlib.contextmanager
def test_scope(self):
"""Test scope that runs tests on a Tensorflow/XLA device.
Uses a compilation_scope() to mark operators to compile.
Yields:
A scope to apply to the operators under test.
"""
with ops.device('device:{}:0'.format(self.device)):
yield
def Benchmark(tf_bench, | device,
separate_compiled_gradients=False):
"""Build a graph and run benchmarks against it, with or without XLA.
Args:
tf_bench: An instance of tf.test.Benchmark, used to run the benchmark.
builder_fn: A function that builds a graph when invoked, and returns
(name, fetches), where name is the name of the test, and fetches
is a list of tensors to fetch as output.
use_xla_jit: If true compile with the XLA JIT, otherwise use regular TF.
device: The tensorflow device to run on, e.g. "cpu", "gpu".
separate_compiled_gradients: If true put each gradient subgraph into a
separate compilation scope. This gives fine-grained control over which
portions of the graph will be compiled as a single unit. Compiling
gradients separately may yield better performance for some graphs.
The scope is named based on the scope of the forward computation as well
as the name of the gradients. As a result, the gradients will be compiled
in a scope that is separate from both the forward computation, and from
other gradients.
"""
with ops.Graph().as_default():
name = None
targets = []
with ops.device(device):
fetches = []
jit_scope = jit.experimental_jit_scope
with jit_scope(
compile_ops=use_xla_jit,
separate_compiled_gradients=separate_compiled_gradients):
name, fetches = builder_fn()
# We only want to benchmark the operations themselves, and not the data
# transfer of the result(s). Non-compiled identity ops ensure XLA
# doesn't know we're dropping the results, otherwise it might compile
# away the entire computation.
for fetch in fetches:
targets.append(array_ops.identity(fetch).op)
config = config_pb2.ConfigProto(allow_soft_placement=True)
with session.Session(config=config) as sess:
sess.run(variables.global_variables_initializer())
xla = 'xla_' if use_xla_jit else ''
tf_bench.run_op_benchmark(
sess, targets, name='%s_%s%s' % (name, xla, device)) | builder_fn,
use_xla_jit, | random_line_split |
edit_role.rs | use std::collections::HashMap;
use bytes::buf::Buf;
use reqwest::Url;
use tokio::{fs::File, io::AsyncReadExt};
use crate::http::{AttachmentType, Http};
use crate::internal::prelude::*;
use crate::model::{guild::Role, Permissions};
/// A builder to create or edit a [`Role`] for use via a number of model methods.
///
/// These are:
///
/// - [`PartialGuild::create_role`]
/// - [`Guild::create_role`]
/// - [`Guild::edit_role`]
/// - [`GuildId::create_role`]
/// - [`GuildId::edit_role`]
/// - [`Role::edit`]
///
/// Defaults are provided for each parameter on role creation.
///
/// # Examples
///
/// Create a hoisted, mentionable role named `"a test role"`:
///
/// ```rust,no_run
/// # use serenity::{model::id::{ChannelId, GuildId}, http::Http};
/// # use std::sync::Arc;
/// #
/// # let http = Arc::new(Http::default());
/// # let (channel_id, guild_id) = (ChannelId(1), GuildId(2));
/// #
/// // assuming a `channel_id` and `guild_id` has been bound
///
/// let role = guild_id.create_role(&http, |r| r.hoist(true).mentionable(true).name("a test role"));
/// ```
///
/// [`PartialGuild::create_role`]: crate::model::guild::PartialGuild::create_role
/// [`Guild::create_role`]: crate::model::guild::Guild::create_role
/// [`Guild::edit_role`]: crate::model::guild::Guild::edit_role
/// [`GuildId::create_role`]: crate::model::id::GuildId::create_role
/// [`GuildId::edit_role`]: crate::model::id::GuildId::edit_role
#[derive(Clone, Debug, Default)]
pub struct EditRole(pub HashMap<&'static str, Value>);
impl EditRole {
/// Creates a new builder with the values of the given [`Role`].
pub fn new(role: &Role) -> Self {
let mut map = HashMap::with_capacity(9);
#[cfg(feature = "utils")]
{
map.insert("color", Value::Number(Number::from(role.colour.0)));
}
#[cfg(not(feature = "utils"))]
{
map.insert("color", Value::Number(Number::from(role.colour)));
}
map.insert("hoist", Value::Bool(role.hoist));
map.insert("managed", Value::Bool(role.managed));
map.insert("mentionable", Value::Bool(role.mentionable));
map.insert("name", Value::String(role.name.clone()));
map.insert("permissions", Value::Number(Number::from(role.permissions.bits())));
map.insert("position", Value::Number(Number::from(role.position)));
if let Some(unicode_emoji) = &role.unicode_emoji {
map.insert("unicode_emoji", Value::String(unicode_emoji.clone()));
}
if let Some(icon) = &role.icon {
map.insert("icon", Value::String(icon.clone()));
}
EditRole(map)
}
/// Sets the colour of the role.
pub fn colour(&mut self, colour: u64) -> &mut Self {
self.0.insert("color", Value::Number(Number::from(colour)));
self
}
/// Whether or not to hoist the role above lower-positioned role in the user
/// list.
pub fn hoist(&mut self, hoist: bool) -> &mut Self {
self.0.insert("hoist", Value::Bool(hoist));
self
}
/// Whether or not to make the role mentionable, notifying its users.
pub fn mentionable(&mut self, mentionable: bool) -> &mut Self {
self.0.insert("mentionable", Value::Bool(mentionable));
self
}
/// The name of the role to set.
pub fn name<S: ToString>(&mut self, name: S) -> &mut Self {
self.0.insert("name", Value::String(name.to_string()));
self
}
/// The set of permissions to assign the role.
pub fn permissions(&mut self, permissions: Permissions) -> &mut Self {
self.0.insert("permissions", Value::Number(Number::from(permissions.bits())));
self
}
/// The position to assign the role in the role list. This correlates to the
/// role's position in the user list.
pub fn | (&mut self, position: u8) -> &mut Self {
self.0.insert("position", Value::Number(Number::from(position)));
self
}
/// The unicode emoji to set as the role image.
pub fn unicode_emoji<S: ToString>(&mut self, unicode_emoji: S) -> &mut Self {
self.0.remove("icon");
self.0.insert("unicode_emoji", Value::String(unicode_emoji.to_string()));
self
}
/// The image to set as the role icon.
///
/// # Errors
///
/// May error if the icon is a URL and the HTTP request fails, or if the icon is a file
/// on a path that doesn't exist.
pub async fn icon<'a>(
&mut self,
http: impl AsRef<Http>,
icon: impl Into<AttachmentType<'a>>,
) -> Result<&mut Self> {
let icon = match icon.into() {
AttachmentType::Bytes {
data,
filename: _,
} => "data:image/png;base64,".to_string() + &base64::encode(&data.into_owned()),
AttachmentType::File {
file,
filename: _,
} => {
let mut buf = Vec::new();
file.try_clone().await?.read_to_end(&mut buf).await?;
"data:image/png;base64,".to_string() + &base64::encode(&buf)
},
AttachmentType::Path(path) => {
let mut file = File::open(path).await?;
let mut buf = vec![];
file.read_to_end(&mut buf).await?;
"data:image/png;base64,".to_string() + &base64::encode(&buf)
},
AttachmentType::Image(url) => {
let url = Url::parse(url).map_err(|_| Error::Url(url.to_string()))?;
let response = http.as_ref().client.get(url).send().await?;
let mut bytes = response.bytes().await?;
let mut picture: Vec<u8> = vec![0; bytes.len()];
bytes.copy_to_slice(&mut picture[..]);
"data:image/png;base64,".to_string() + &base64::encode(&picture)
},
};
self.0.remove("unicode_emoji");
self.0.insert("icon", Value::String(icon));
Ok(self)
}
}
| position | identifier_name |
edit_role.rs | use std::collections::HashMap;
use bytes::buf::Buf;
use reqwest::Url;
use tokio::{fs::File, io::AsyncReadExt};
use crate::http::{AttachmentType, Http};
use crate::internal::prelude::*;
use crate::model::{guild::Role, Permissions};
/// A builder to create or edit a [`Role`] for use via a number of model methods.
///
/// These are:
///
/// - [`PartialGuild::create_role`]
/// - [`Guild::create_role`]
/// - [`Guild::edit_role`]
/// - [`GuildId::create_role`]
/// - [`GuildId::edit_role`]
/// - [`Role::edit`]
///
/// Defaults are provided for each parameter on role creation.
///
/// # Examples
///
/// Create a hoisted, mentionable role named `"a test role"`:
///
/// ```rust,no_run
/// # use serenity::{model::id::{ChannelId, GuildId}, http::Http};
/// # use std::sync::Arc;
/// #
/// # let http = Arc::new(Http::default());
/// # let (channel_id, guild_id) = (ChannelId(1), GuildId(2));
/// #
/// // assuming a `channel_id` and `guild_id` has been bound
///
/// let role = guild_id.create_role(&http, |r| r.hoist(true).mentionable(true).name("a test role"));
/// ```
///
/// [`PartialGuild::create_role`]: crate::model::guild::PartialGuild::create_role
/// [`Guild::create_role`]: crate::model::guild::Guild::create_role
/// [`Guild::edit_role`]: crate::model::guild::Guild::edit_role
/// [`GuildId::create_role`]: crate::model::id::GuildId::create_role
/// [`GuildId::edit_role`]: crate::model::id::GuildId::edit_role
#[derive(Clone, Debug, Default)]
pub struct EditRole(pub HashMap<&'static str, Value>);
impl EditRole {
/// Creates a new builder with the values of the given [`Role`].
pub fn new(role: &Role) -> Self {
let mut map = HashMap::with_capacity(9);
#[cfg(feature = "utils")]
{
map.insert("color", Value::Number(Number::from(role.colour.0)));
}
#[cfg(not(feature = "utils"))]
{
map.insert("color", Value::Number(Number::from(role.colour)));
}
map.insert("hoist", Value::Bool(role.hoist));
map.insert("managed", Value::Bool(role.managed));
map.insert("mentionable", Value::Bool(role.mentionable));
map.insert("name", Value::String(role.name.clone()));
map.insert("permissions", Value::Number(Number::from(role.permissions.bits())));
map.insert("position", Value::Number(Number::from(role.position)));
if let Some(unicode_emoji) = &role.unicode_emoji {
map.insert("unicode_emoji", Value::String(unicode_emoji.clone()));
}
if let Some(icon) = &role.icon |
EditRole(map)
}
/// Sets the colour of the role.
pub fn colour(&mut self, colour: u64) -> &mut Self {
self.0.insert("color", Value::Number(Number::from(colour)));
self
}
/// Whether or not to hoist the role above lower-positioned role in the user
/// list.
pub fn hoist(&mut self, hoist: bool) -> &mut Self {
self.0.insert("hoist", Value::Bool(hoist));
self
}
/// Whether or not to make the role mentionable, notifying its users.
pub fn mentionable(&mut self, mentionable: bool) -> &mut Self {
self.0.insert("mentionable", Value::Bool(mentionable));
self
}
/// The name of the role to set.
pub fn name<S: ToString>(&mut self, name: S) -> &mut Self {
self.0.insert("name", Value::String(name.to_string()));
self
}
/// The set of permissions to assign the role.
pub fn permissions(&mut self, permissions: Permissions) -> &mut Self {
self.0.insert("permissions", Value::Number(Number::from(permissions.bits())));
self
}
/// The position to assign the role in the role list. This correlates to the
/// role's position in the user list.
pub fn position(&mut self, position: u8) -> &mut Self {
self.0.insert("position", Value::Number(Number::from(position)));
self
}
/// The unicode emoji to set as the role image.
pub fn unicode_emoji<S: ToString>(&mut self, unicode_emoji: S) -> &mut Self {
self.0.remove("icon");
self.0.insert("unicode_emoji", Value::String(unicode_emoji.to_string()));
self
}
/// The image to set as the role icon.
///
/// # Errors
///
/// May error if the icon is a URL and the HTTP request fails, or if the icon is a file
/// on a path that doesn't exist.
pub async fn icon<'a>(
&mut self,
http: impl AsRef<Http>,
icon: impl Into<AttachmentType<'a>>,
) -> Result<&mut Self> {
let icon = match icon.into() {
AttachmentType::Bytes {
data,
filename: _,
} => "data:image/png;base64,".to_string() + &base64::encode(&data.into_owned()),
AttachmentType::File {
file,
filename: _,
} => {
let mut buf = Vec::new();
file.try_clone().await?.read_to_end(&mut buf).await?;
"data:image/png;base64,".to_string() + &base64::encode(&buf)
},
AttachmentType::Path(path) => {
let mut file = File::open(path).await?;
let mut buf = vec![];
file.read_to_end(&mut buf).await?;
"data:image/png;base64,".to_string() + &base64::encode(&buf)
},
AttachmentType::Image(url) => {
let url = Url::parse(url).map_err(|_| Error::Url(url.to_string()))?;
let response = http.as_ref().client.get(url).send().await?;
let mut bytes = response.bytes().await?;
let mut picture: Vec<u8> = vec![0; bytes.len()];
bytes.copy_to_slice(&mut picture[..]);
"data:image/png;base64,".to_string() + &base64::encode(&picture)
},
};
self.0.remove("unicode_emoji");
self.0.insert("icon", Value::String(icon));
Ok(self)
}
}
| {
map.insert("icon", Value::String(icon.clone()));
} | conditional_block |
edit_role.rs | use std::collections::HashMap;
use bytes::buf::Buf;
use reqwest::Url;
use tokio::{fs::File, io::AsyncReadExt};
use crate::http::{AttachmentType, Http};
use crate::internal::prelude::*;
use crate::model::{guild::Role, Permissions};
/// A builder to create or edit a [`Role`] for use via a number of model methods.
///
/// These are:
///
/// - [`PartialGuild::create_role`]
/// - [`Guild::create_role`]
/// - [`Guild::edit_role`]
/// - [`GuildId::create_role`]
/// - [`GuildId::edit_role`]
/// - [`Role::edit`]
///
/// Defaults are provided for each parameter on role creation.
///
/// # Examples
///
/// Create a hoisted, mentionable role named `"a test role"`:
///
/// ```rust,no_run
/// # use serenity::{model::id::{ChannelId, GuildId}, http::Http};
/// # use std::sync::Arc;
/// #
/// # let http = Arc::new(Http::default());
/// # let (channel_id, guild_id) = (ChannelId(1), GuildId(2));
/// #
/// // assuming a `channel_id` and `guild_id` has been bound
///
/// let role = guild_id.create_role(&http, |r| r.hoist(true).mentionable(true).name("a test role"));
/// ```
///
/// [`PartialGuild::create_role`]: crate::model::guild::PartialGuild::create_role
/// [`Guild::create_role`]: crate::model::guild::Guild::create_role
/// [`Guild::edit_role`]: crate::model::guild::Guild::edit_role
/// [`GuildId::create_role`]: crate::model::id::GuildId::create_role
/// [`GuildId::edit_role`]: crate::model::id::GuildId::edit_role
#[derive(Clone, Debug, Default)]
pub struct EditRole(pub HashMap<&'static str, Value>);
impl EditRole {
/// Creates a new builder with the values of the given [`Role`].
pub fn new(role: &Role) -> Self {
let mut map = HashMap::with_capacity(9);
#[cfg(feature = "utils")]
{
map.insert("color", Value::Number(Number::from(role.colour.0)));
}
#[cfg(not(feature = "utils"))]
{
map.insert("color", Value::Number(Number::from(role.colour)));
}
map.insert("hoist", Value::Bool(role.hoist));
map.insert("managed", Value::Bool(role.managed));
map.insert("mentionable", Value::Bool(role.mentionable));
map.insert("name", Value::String(role.name.clone()));
map.insert("permissions", Value::Number(Number::from(role.permissions.bits())));
map.insert("position", Value::Number(Number::from(role.position)));
if let Some(unicode_emoji) = &role.unicode_emoji {
map.insert("unicode_emoji", Value::String(unicode_emoji.clone()));
}
if let Some(icon) = &role.icon {
map.insert("icon", Value::String(icon.clone()));
}
EditRole(map)
}
/// Sets the colour of the role.
pub fn colour(&mut self, colour: u64) -> &mut Self {
self.0.insert("color", Value::Number(Number::from(colour)));
self
}
/// Whether or not to hoist the role above lower-positioned role in the user
/// list.
pub fn hoist(&mut self, hoist: bool) -> &mut Self {
self.0.insert("hoist", Value::Bool(hoist));
self
}
/// Whether or not to make the role mentionable, notifying its users.
pub fn mentionable(&mut self, mentionable: bool) -> &mut Self {
self.0.insert("mentionable", Value::Bool(mentionable));
self
}
/// The name of the role to set.
pub fn name<S: ToString>(&mut self, name: S) -> &mut Self |
/// The set of permissions to assign the role.
pub fn permissions(&mut self, permissions: Permissions) -> &mut Self {
self.0.insert("permissions", Value::Number(Number::from(permissions.bits())));
self
}
/// The position to assign the role in the role list. This correlates to the
/// role's position in the user list.
pub fn position(&mut self, position: u8) -> &mut Self {
self.0.insert("position", Value::Number(Number::from(position)));
self
}
/// The unicode emoji to set as the role image.
pub fn unicode_emoji<S: ToString>(&mut self, unicode_emoji: S) -> &mut Self {
self.0.remove("icon");
self.0.insert("unicode_emoji", Value::String(unicode_emoji.to_string()));
self
}
/// The image to set as the role icon.
///
/// # Errors
///
/// May error if the icon is a URL and the HTTP request fails, or if the icon is a file
/// on a path that doesn't exist.
pub async fn icon<'a>(
&mut self,
http: impl AsRef<Http>,
icon: impl Into<AttachmentType<'a>>,
) -> Result<&mut Self> {
let icon = match icon.into() {
AttachmentType::Bytes {
data,
filename: _,
} => "data:image/png;base64,".to_string() + &base64::encode(&data.into_owned()),
AttachmentType::File {
file,
filename: _,
} => {
let mut buf = Vec::new();
file.try_clone().await?.read_to_end(&mut buf).await?;
"data:image/png;base64,".to_string() + &base64::encode(&buf)
},
AttachmentType::Path(path) => {
let mut file = File::open(path).await?;
let mut buf = vec![];
file.read_to_end(&mut buf).await?;
"data:image/png;base64,".to_string() + &base64::encode(&buf)
},
AttachmentType::Image(url) => {
let url = Url::parse(url).map_err(|_| Error::Url(url.to_string()))?;
let response = http.as_ref().client.get(url).send().await?;
let mut bytes = response.bytes().await?;
let mut picture: Vec<u8> = vec![0; bytes.len()];
bytes.copy_to_slice(&mut picture[..]);
"data:image/png;base64,".to_string() + &base64::encode(&picture)
},
};
self.0.remove("unicode_emoji");
self.0.insert("icon", Value::String(icon));
Ok(self)
}
}
| {
self.0.insert("name", Value::String(name.to_string()));
self
} | identifier_body |
edit_role.rs | use std::collections::HashMap;
use bytes::buf::Buf;
use reqwest::Url;
use tokio::{fs::File, io::AsyncReadExt};
use crate::http::{AttachmentType, Http};
use crate::internal::prelude::*;
use crate::model::{guild::Role, Permissions};
/// A builder to create or edit a [`Role`] for use via a number of model methods.
///
/// These are:
///
/// - [`PartialGuild::create_role`]
/// - [`Guild::create_role`]
/// - [`Guild::edit_role`]
/// - [`GuildId::create_role`]
/// - [`GuildId::edit_role`]
/// - [`Role::edit`]
///
/// Defaults are provided for each parameter on role creation.
///
/// # Examples
///
/// Create a hoisted, mentionable role named `"a test role"`:
///
/// ```rust,no_run
/// # use serenity::{model::id::{ChannelId, GuildId}, http::Http};
/// # use std::sync::Arc;
/// #
/// # let http = Arc::new(Http::default());
/// # let (channel_id, guild_id) = (ChannelId(1), GuildId(2));
/// #
/// // assuming a `channel_id` and `guild_id` has been bound
///
/// let role = guild_id.create_role(&http, |r| r.hoist(true).mentionable(true).name("a test role"));
/// ```
///
/// [`PartialGuild::create_role`]: crate::model::guild::PartialGuild::create_role
/// [`Guild::create_role`]: crate::model::guild::Guild::create_role
/// [`Guild::edit_role`]: crate::model::guild::Guild::edit_role
/// [`GuildId::create_role`]: crate::model::id::GuildId::create_role
/// [`GuildId::edit_role`]: crate::model::id::GuildId::edit_role
#[derive(Clone, Debug, Default)]
pub struct EditRole(pub HashMap<&'static str, Value>);
impl EditRole {
/// Creates a new builder with the values of the given [`Role`].
pub fn new(role: &Role) -> Self {
let mut map = HashMap::with_capacity(9);
#[cfg(feature = "utils")]
{
map.insert("color", Value::Number(Number::from(role.colour.0)));
}
#[cfg(not(feature = "utils"))]
{
map.insert("color", Value::Number(Number::from(role.colour)));
}
map.insert("hoist", Value::Bool(role.hoist));
map.insert("managed", Value::Bool(role.managed));
map.insert("mentionable", Value::Bool(role.mentionable));
map.insert("name", Value::String(role.name.clone()));
map.insert("permissions", Value::Number(Number::from(role.permissions.bits())));
map.insert("position", Value::Number(Number::from(role.position)));
if let Some(unicode_emoji) = &role.unicode_emoji {
map.insert("unicode_emoji", Value::String(unicode_emoji.clone()));
}
if let Some(icon) = &role.icon {
map.insert("icon", Value::String(icon.clone()));
}
EditRole(map)
}
/// Sets the colour of the role.
pub fn colour(&mut self, colour: u64) -> &mut Self {
self.0.insert("color", Value::Number(Number::from(colour)));
self
}
/// Whether or not to hoist the role above lower-positioned role in the user
/// list.
pub fn hoist(&mut self, hoist: bool) -> &mut Self {
self.0.insert("hoist", Value::Bool(hoist));
self
}
/// Whether or not to make the role mentionable, notifying its users.
pub fn mentionable(&mut self, mentionable: bool) -> &mut Self {
self.0.insert("mentionable", Value::Bool(mentionable));
self
}
/// The name of the role to set.
pub fn name<S: ToString>(&mut self, name: S) -> &mut Self {
self.0.insert("name", Value::String(name.to_string()));
self
} | self.0.insert("permissions", Value::Number(Number::from(permissions.bits())));
self
}
/// The position to assign the role in the role list. This correlates to the
/// role's position in the user list.
pub fn position(&mut self, position: u8) -> &mut Self {
self.0.insert("position", Value::Number(Number::from(position)));
self
}
/// The unicode emoji to set as the role image.
pub fn unicode_emoji<S: ToString>(&mut self, unicode_emoji: S) -> &mut Self {
self.0.remove("icon");
self.0.insert("unicode_emoji", Value::String(unicode_emoji.to_string()));
self
}
/// The image to set as the role icon.
///
/// # Errors
///
/// May error if the icon is a URL and the HTTP request fails, or if the icon is a file
/// on a path that doesn't exist.
pub async fn icon<'a>(
&mut self,
http: impl AsRef<Http>,
icon: impl Into<AttachmentType<'a>>,
) -> Result<&mut Self> {
let icon = match icon.into() {
AttachmentType::Bytes {
data,
filename: _,
} => "data:image/png;base64,".to_string() + &base64::encode(&data.into_owned()),
AttachmentType::File {
file,
filename: _,
} => {
let mut buf = Vec::new();
file.try_clone().await?.read_to_end(&mut buf).await?;
"data:image/png;base64,".to_string() + &base64::encode(&buf)
},
AttachmentType::Path(path) => {
let mut file = File::open(path).await?;
let mut buf = vec![];
file.read_to_end(&mut buf).await?;
"data:image/png;base64,".to_string() + &base64::encode(&buf)
},
AttachmentType::Image(url) => {
let url = Url::parse(url).map_err(|_| Error::Url(url.to_string()))?;
let response = http.as_ref().client.get(url).send().await?;
let mut bytes = response.bytes().await?;
let mut picture: Vec<u8> = vec![0; bytes.len()];
bytes.copy_to_slice(&mut picture[..]);
"data:image/png;base64,".to_string() + &base64::encode(&picture)
},
};
self.0.remove("unicode_emoji");
self.0.insert("icon", Value::String(icon));
Ok(self)
}
} |
/// The set of permissions to assign the role.
pub fn permissions(&mut self, permissions: Permissions) -> &mut Self { | random_line_split |
source-map.d.ts | // Type definitions for source-map v0.1.38
// Project: https://github.com/mozilla/source-map
// Definitions by: Morten Houston Ludvigsen <https://github.com/MortenHoustonLudvigsen>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
declare module SourceMap {
interface StartOfSourceMap {
file?: string;
sourceRoot?: string;
}
interface RawSourceMap extends StartOfSourceMap {
version: string;
sources: Array<string>;
names: Array<string>;
sourcesContent?: string;
mappings: string;
}
interface Position {
line: number;
column: number;
bias?: number;
}
interface MappedPosition extends Position {
source: string;
name?: string;
}
interface MappingItem {
source: string;
generatedLine: number;
generatedColumn: number;
originalLine: number;
originalColumn: number;
name: string;
}
interface Mapping {
generated: Position;
original: Position;
source: string;
name?: string;
}
interface CodeWithSourceMap {
code: string;
map: SourceMapGenerator;
}
class SourceMapConsumer {
public static GENERATED_ORDER: number;
public static ORIGINAL_ORDER: number;
public static GREATEST_LOWER_BOUND: number;
public static LEAST_UPPER_BOUND: number;
constructor(rawSourceMap: RawSourceMap);
public originalPositionFor(generatedPosition: Position): MappedPosition;
public generatedPositionFor(originalPosition: MappedPosition): Position;
public sourceContentFor(source: string): string;
public eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void;
}
class So |
constructor(startOfSourceMap?: StartOfSourceMap);
public static fromSourceMap(sourceMapConsumer: SourceMapConsumer): SourceMapGenerator;
public addMapping(mapping: Mapping): void;
public setSourceContent(sourceFile: string, sourceContent: string): void;
public applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void;
public toString(): string;
}
class SourceNode {
constructor();
constructor(line: number, column: number, source: string);
constructor(line: number, column: number, source: string, chunk?: string, name?: string);
public static fromStringWithSourceMap(code: string, sourceMapConsumer: SourceMapConsumer, relativePath?: string): SourceNode;
public add(chunk: string): void;
public prepend(chunk: string): void;
public setSourceContent(sourceFile: string, sourceContent: string): void;
public walk(fn: (chunk: string, mapping: MappedPosition) => void): void;
public walkSourceContents(fn: (file: string, content: string) => void): void;
public join(sep: string): SourceNode;
public replaceRight(pattern: string, replacement: string): SourceNode;
public toString(): string;
public toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap;
}
}
declare module 'source-map' {
export = SourceMap;
}
| urceMapGenerator { | identifier_name |
source-map.d.ts | // Type definitions for source-map v0.1.38
// Project: https://github.com/mozilla/source-map
// Definitions by: Morten Houston Ludvigsen <https://github.com/MortenHoustonLudvigsen>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
declare module SourceMap {
interface StartOfSourceMap {
file?: string;
sourceRoot?: string;
}
interface RawSourceMap extends StartOfSourceMap {
version: string;
sources: Array<string>;
names: Array<string>;
sourcesContent?: string;
mappings: string;
}
interface Position {
line: number;
column: number;
bias?: number;
}
interface MappedPosition extends Position {
source: string;
name?: string;
}
interface MappingItem {
source: string;
generatedLine: number;
generatedColumn: number;
originalLine: number;
originalColumn: number;
name: string;
}
interface Mapping {
generated: Position;
original: Position;
source: string;
name?: string;
}
interface CodeWithSourceMap {
code: string;
map: SourceMapGenerator;
}
class SourceMapConsumer {
public static GENERATED_ORDER: number;
public static ORIGINAL_ORDER: number;
public static GREATEST_LOWER_BOUND: number;
public static LEAST_UPPER_BOUND: number;
constructor(rawSourceMap: RawSourceMap);
public originalPositionFor(generatedPosition: Position): MappedPosition;
public generatedPositionFor(originalPosition: MappedPosition): Position;
public sourceContentFor(source: string): string;
public eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void;
}
class SourceMapGenerator {
constructor(startOfSourceMap?: StartOfSourceMap); | public toString(): string;
}
class SourceNode {
constructor();
constructor(line: number, column: number, source: string);
constructor(line: number, column: number, source: string, chunk?: string, name?: string);
public static fromStringWithSourceMap(code: string, sourceMapConsumer: SourceMapConsumer, relativePath?: string): SourceNode;
public add(chunk: string): void;
public prepend(chunk: string): void;
public setSourceContent(sourceFile: string, sourceContent: string): void;
public walk(fn: (chunk: string, mapping: MappedPosition) => void): void;
public walkSourceContents(fn: (file: string, content: string) => void): void;
public join(sep: string): SourceNode;
public replaceRight(pattern: string, replacement: string): SourceNode;
public toString(): string;
public toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap;
}
}
declare module 'source-map' {
export = SourceMap;
} | public static fromSourceMap(sourceMapConsumer: SourceMapConsumer): SourceMapGenerator;
public addMapping(mapping: Mapping): void;
public setSourceContent(sourceFile: string, sourceContent: string): void;
public applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void; | random_line_split |
types.ts | export type L10nFormat = 'language' | 'language-script' | 'language-region' | 'language-script-region';
export interface L10nProvider {
/**
* The name of the provider.
*/
name: string;
/** | * Options to pass the loader.
*/
options?: any;
}
export interface L10nLocale {
/**
* language[-script][-region][-extension]
* Where:
* - language: ISO 639 two-letter or three-letter code
* - script: ISO 15924 four-letter script code
* - region: ISO 3166 two-letter, uppercase code
* - extension: 'u' (Unicode) extensions
*/
language: string;
/**
* Alternative language to translate dates.
*/
dateLanguage?: string;
/**
* Alternative language to translate numbers.
*/
numberLanguage?: string;
/**
* ISO 4217 three-letter code.
*/
currency?: string;
/**
* Time zone name from the IANA time zone database.
*/
timeZone?: string;
/**
* Key value pairs of unit identifiers.
*/
units?: { [key: string]: string }
}
export interface L10nSchema {
locale: L10nLocale;
/**
* Language direction.
*/
dir?: 'ltr' | 'rtl';
text?: string;
}
export interface L10nDateTimeFormatOptions extends Intl.DateTimeFormatOptions {
/**
* The date formatting style.
*/
dateStyle?: 'full' | 'long' | 'medium' | 'short';
/**
* The time formatting style.
*/
timeStyle?: 'full' | 'long' | 'medium' | 'short';
}
export interface L10nNumberFormatOptions extends Intl.NumberFormatOptions {
/**
* The digits formatting.
*/
digits?: string;
} | * The asset of the provider.
*/
asset: any;
/** | random_line_split |
bugtracker.py | from djblets.cache.backend import cache_memoize
class BugTracker(object):
"""An interface to a bug tracker.
BugTracker subclasses are used to enable interaction with different
bug trackers. | 'status' keys.
This is cached for 60 seconds to reduce the number of queries to the
bug trackers and make things seem fast after the first infobox load,
but is still a short enough time to give relatively fresh data.
"""
return cache_memoize(self.make_bug_cache_key(repository, bug_id),
lambda: self.get_bug_info_uncached(repository,
bug_id),
expiration=60)
def get_bug_info_uncached(self, repository, bug_id):
"""Get the information for the specified bug (implementation).
This should be implemented by subclasses, and should return a
dictionary with 'summary', 'description', and 'status' keys.
If any of those are unsupported by the given bug tracker, the unknown
values should be given as an empty string.
"""
return {
'summary': '',
'description': '',
'status': '',
}
def make_bug_cache_key(self, repository, bug_id):
"""Returns a key to use when caching fetched bug information."""
return 'repository-%s-bug-%s' % (repository.pk, bug_id) | """
def get_bug_info(self, repository, bug_id):
"""Get the information for the specified bug.
This should return a dictionary with 'summary', 'description', and | random_line_split |
bugtracker.py | from djblets.cache.backend import cache_memoize
class BugTracker(object):
"""An interface to a bug tracker.
BugTracker subclasses are used to enable interaction with different
bug trackers.
"""
def | (self, repository, bug_id):
"""Get the information for the specified bug.
This should return a dictionary with 'summary', 'description', and
'status' keys.
This is cached for 60 seconds to reduce the number of queries to the
bug trackers and make things seem fast after the first infobox load,
but is still a short enough time to give relatively fresh data.
"""
return cache_memoize(self.make_bug_cache_key(repository, bug_id),
lambda: self.get_bug_info_uncached(repository,
bug_id),
expiration=60)
def get_bug_info_uncached(self, repository, bug_id):
"""Get the information for the specified bug (implementation).
This should be implemented by subclasses, and should return a
dictionary with 'summary', 'description', and 'status' keys.
If any of those are unsupported by the given bug tracker, the unknown
values should be given as an empty string.
"""
return {
'summary': '',
'description': '',
'status': '',
}
def make_bug_cache_key(self, repository, bug_id):
"""Returns a key to use when caching fetched bug information."""
return 'repository-%s-bug-%s' % (repository.pk, bug_id)
| get_bug_info | identifier_name |
bugtracker.py | from djblets.cache.backend import cache_memoize
class BugTracker(object):
"""An interface to a bug tracker.
BugTracker subclasses are used to enable interaction with different
bug trackers.
"""
def get_bug_info(self, repository, bug_id):
"""Get the information for the specified bug.
This should return a dictionary with 'summary', 'description', and
'status' keys.
This is cached for 60 seconds to reduce the number of queries to the
bug trackers and make things seem fast after the first infobox load,
but is still a short enough time to give relatively fresh data.
"""
return cache_memoize(self.make_bug_cache_key(repository, bug_id),
lambda: self.get_bug_info_uncached(repository,
bug_id),
expiration=60)
def get_bug_info_uncached(self, repository, bug_id):
|
def make_bug_cache_key(self, repository, bug_id):
"""Returns a key to use when caching fetched bug information."""
return 'repository-%s-bug-%s' % (repository.pk, bug_id)
| """Get the information for the specified bug (implementation).
This should be implemented by subclasses, and should return a
dictionary with 'summary', 'description', and 'status' keys.
If any of those are unsupported by the given bug tracker, the unknown
values should be given as an empty string.
"""
return {
'summary': '',
'description': '',
'status': '',
} | identifier_body |
mod.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
pub use self::builder::BlockFlowDisplayListBuilding;
pub use self::builder::BorderPaintingMode;
pub use self::builder::DisplayListBuildState;
pub use self::builder::FlexFlowDisplayListBuilding;
pub use self::builder::IndexableText;
pub use self::builder::InlineFlowDisplayListBuilding;
pub use self::builder::ListItemFlowDisplayListBuilding; | mod background;
mod builder;
mod conversions;
pub mod items;
mod webrender_helpers; | pub use self::builder::StackingContextCollectionFlags;
pub use self::builder::StackingContextCollectionState;
pub use self::conversions::ToLayout;
pub use self::webrender_helpers::WebRenderDisplayListConverter;
| random_line_split |
EditAssignmentDetails.js | /*
* Copyright (C) 2012 - present Instructure, Inc.
*
* This file is part of Canvas.
*
* Canvas is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation, version 3 of the License.
*
* Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import I18n from 'i18n!calendar'
import $ from 'jquery'
import moment from 'moment'
import natcompare from '../util/natcompare'
import commonEventFactory from './commonEventFactory'
import ValidatedFormView from '../views/ValidatedFormView'
import SisValidationHelper from '../util/SisValidationHelper'
import editAssignmentTemplate from 'jst/calendar/editAssignment'
import editAssignmentOverrideTemplate from 'jst/calendar/editAssignmentOverride'
import wrapper from 'jst/EmptyDialogFormWrapper'
import genericSelectOptionsTemplate from 'jst/calendar/genericSelectOptions'
import datePickerFormat from 'jsx/shared/helpers/datePickerFormat'
import {showFlashAlert} from 'jsx/shared/FlashAlert'
import withinMomentDates from 'jsx/shared/helpers/momentDateHelper'
import 'jquery.instructure_date_and_time'
import 'jquery.instructure_forms'
import 'jquery.instructure_misc_helpers'
import './fcMomentHandlebarsHelpers'
export default class EditAssignmentDetailsRewrite extends ValidatedFormView {
initialize(selector, event, contextChangeCB, closeCB) {
this.event = event
this.contextChangeCB = contextChangeCB
this.closeCB = closeCB
super.initialize({
title: this.event.title,
contexts: this.event.possibleContexts(),
date: this.event.startDate(),
postToSISEnabled: ENV.POST_TO_SIS,
postToSISName: ENV.SIS_NAME,
postToSIS:
this.event.eventType === 'assignment' ? this.event.assignment.post_to_sis : undefined,
datePickerFormat: this.event.allDay ? 'medium_with_weekday' : 'full_with_weekday'
})
this.currentContextInfo = null
if (this.event.override) {
this.template = editAssignmentOverrideTemplate
}
$(selector).append(this.render().el)
this.setupTimeAndDatePickers()
this.$el.find('select.context_id').triggerHandler('change', false)
if (this.model == null) {
this.model = this.generateNewEvent()
}
if (!this.event.isNewEvent()) {
this.$el.find('.context_select').hide()
this.$el.attr('method', 'PUT')
return this.$el.attr(
'action',
$.replaceTags(this.event.contextInfo.assignment_url, 'id', this.event.object.id)
)
}
}
setContext(newContext) {
this.$el
.find('select.context_id')
.val(newContext)
.triggerHandler('change', false)
}
contextInfoForCode(code) {
return this.event.possibleContexts().find(context => context.asset_string === code)
}
| () {
this.$el.find('select.context_id').change()
if (this.event.assignment && this.event.assignment.assignment_group_id) {
return this.$el
.find('.assignment_group_select .assignment_group')
.val(this.event.assignment.assignment_group_id)
}
}
moreOptions(jsEvent) {
jsEvent.preventDefault()
const pieces = $(jsEvent.target)
.attr('href')
.split('#')
const data = this.$el.getFormData({object_name: 'assignment'})
const params = {}
if (data.name) {
params.title = data.name
}
if (data.due_at && this.$el.find('.datetime_field').data('unfudged-date')) {
params.due_at = this.$el
.find('.datetime_field')
.data('unfudged-date')
.toISOString()
}
if (data.assignment_group_id) {
params.assignment_group_id = data.assignment_group_id
}
params.return_to = window.location.href
pieces[0] += `?${$.param(params)}`
return (window.location.href = pieces.join('#'))
}
contextChange(jsEvent, propagate) {
if (this.ignoreContextChange) return
const context = $(jsEvent.target).val()
this.currentContextInfo = this.contextInfoForCode(context)
this.event.contextInfo = this.currentContextInfo
if (this.currentContextInfo == null) return
if (propagate !== false) this.contextChangeCB(context)
// TODO: support adding a new assignment group from this select box
const assignmentGroupsSelectOptionsInfo = {
collection: this.currentContextInfo.assignment_groups.sort(natcompare.byKey('name'))
}
this.$el
.find('.assignment_group')
.html(genericSelectOptionsTemplate(assignmentGroupsSelectOptionsInfo))
// Update the edit and more options links with the new context
this.$el.attr('action', this.currentContextInfo.create_assignment_url)
const moreOptionsUrl = this.event.assignment
? `${this.event.assignment.html_url}/edit`
: this.currentContextInfo.new_assignment_url
return this.$el.find('.more_options_link').attr('href', moreOptionsUrl)
}
generateNewEvent() {
return commonEventFactory({}, [])
}
submitAssignment(e) {
e.preventDefault()
const data = this.getFormData()
this.disableWhileLoadingOpts = {buttons: ['.save_assignment']}
if (data.assignment != null) {
return this.submitRegularAssignment(e, data.assignment)
} else {
return this.submitOverride(e, data.assignment_override)
}
}
unfudgedDate(date) {
const unfudged = $.unfudgeDateForProfileTimezone(date)
if (unfudged) {
return unfudged.toISOString()
} else {
return ''
}
}
getFormData() {
const data = super.getFormData(...arguments)
if (data.assignment != null) {
data.assignment.due_at = this.unfudgedDate(data.assignment.due_at)
} else {
data.assignment_override.due_at = this.unfudgedDate(data.assignment_override.due_at)
}
return data
}
submitRegularAssignment(event, data) {
data.due_at = this.unfudgedDate(data.due_at)
if (this.event.isNewEvent()) {
data.context_code = $(this.$el)
.find('.context_id')
.val()
this.model = commonEventFactory(data, this.event.possibleContexts())
return this.submit(event)
} else {
this.event.title = data.title
this.event.start = data.due_at // fudged
this.model = this.event
return this.submit(event)
}
}
submitOverride(event, data) {
this.event.start = data.due_at // fudged
data.due_at = this.unfudgedDate(data.due_at)
this.model = this.event
return this.submit(event)
}
onSaveSuccess() {
return this.closeCB()
}
onSaveFail(xhr) {
let resp
if ((resp = JSON.parse(xhr.responseText))) {
showFlashAlert({message: resp.error, err: null, type: 'error'})
}
this.closeCB()
this.disableWhileLoadingOpts = {}
return super.onSaveFail(xhr)
}
validateBeforeSave(data, errors) {
if (data.assignment != null) {
data = data.assignment
errors = this._validateTitle(data, errors)
} else {
data = data.assignment_override
}
errors = this._validateDueDate(data, errors)
return errors
}
_validateTitle(data, errors) {
const post_to_sis = data.post_to_sis === '1'
let max_name_length = 256
const max_name_length_required = ENV.MAX_NAME_LENGTH_REQUIRED_FOR_ACCOUNT
if (post_to_sis && max_name_length_required) {
max_name_length = ENV.MAX_NAME_LENGTH
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
maxNameLength: max_name_length,
name: data.name,
maxNameLengthRequired: max_name_length_required
})
if (!data.name || $.trim(data.name.toString()).length === 0) {
errors['assignment[name]'] = [{message: I18n.t('name_is_required', 'Name is required!')}]
} else if (validationHelper.nameTooLong()) {
errors['assignment[name]'] = [
{
message: I18n.t('Name is too long, must be under %{length} characters', {
length: max_name_length + 1
})
}
]
}
return errors
}
_validateDueDate(data, errors) {
let dueDate
if (
this.event.eventType === 'assignment' &&
this.event.assignment.unlock_at &&
this.event.assignment.lock_at
) {
const startDate = moment(this.event.assignment.unlock_at)
const endDate = moment(this.event.assignment.lock_at)
dueDate = moment(this.event.start)
if (!withinMomentDates(dueDate, startDate, endDate)) {
const rangeErrorMessage = I18n.t(
'Assignment has a locked date. Due date cannot be set outside of locked date range.'
)
errors.lock_range = [{message: rangeErrorMessage}]
showFlashAlert({
message: rangeErrorMessage,
err: null,
type: 'error'
})
}
}
const post_to_sis = data.post_to_sis === '1'
if (!post_to_sis) {
return errors
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
dueDateRequired: ENV.DUE_DATE_REQUIRED_FOR_ACCOUNT,
dueDate: data.due_at
})
const error_tag = data.name != null ? 'assignment[due_at]' : 'assignment_override[due_at]'
if (validationHelper.dueDateMissing()) {
errors[error_tag] = [{message: I18n.t('Due Date is required!')}]
}
return errors
}
setupTimeAndDatePickers() {
const $field = this.$el.find('.datetime_field')
return $field.datetime_field({
datepicker: {
dateFormat: datePickerFormat(
this.event.allDay
? I18n.t('#date.formats.medium_with_weekday')
: I18n.t('#date.formats.full_with_weekday')
)
}
})
}
}
EditAssignmentDetailsRewrite.prototype.defaults = {
width: 440,
height: 384
}
EditAssignmentDetailsRewrite.prototype.events = {
...EditAssignmentDetailsRewrite.prototype.events,
'click .save_assignment': 'submitAssignment',
'click .more_options_link': 'moreOptions',
'change .context_id': 'contextChange'
}
EditAssignmentDetailsRewrite.prototype.template = editAssignmentTemplate
EditAssignmentDetailsRewrite.prototype.wrapper = wrapper
EditAssignmentDetailsRewrite.optionProperty('assignmentGroup')
| activate | identifier_name |
EditAssignmentDetails.js | /*
* Copyright (C) 2012 - present Instructure, Inc.
*
* This file is part of Canvas.
*
* Canvas is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation, version 3 of the License.
*
* Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import I18n from 'i18n!calendar'
import $ from 'jquery'
import moment from 'moment'
import natcompare from '../util/natcompare'
import commonEventFactory from './commonEventFactory'
import ValidatedFormView from '../views/ValidatedFormView'
import SisValidationHelper from '../util/SisValidationHelper'
import editAssignmentTemplate from 'jst/calendar/editAssignment'
import editAssignmentOverrideTemplate from 'jst/calendar/editAssignmentOverride'
import wrapper from 'jst/EmptyDialogFormWrapper'
import genericSelectOptionsTemplate from 'jst/calendar/genericSelectOptions'
import datePickerFormat from 'jsx/shared/helpers/datePickerFormat'
import {showFlashAlert} from 'jsx/shared/FlashAlert'
import withinMomentDates from 'jsx/shared/helpers/momentDateHelper'
import 'jquery.instructure_date_and_time'
import 'jquery.instructure_forms'
import 'jquery.instructure_misc_helpers'
import './fcMomentHandlebarsHelpers'
export default class EditAssignmentDetailsRewrite extends ValidatedFormView {
initialize(selector, event, contextChangeCB, closeCB) {
this.event = event
this.contextChangeCB = contextChangeCB
this.closeCB = closeCB
super.initialize({
title: this.event.title,
contexts: this.event.possibleContexts(),
date: this.event.startDate(),
postToSISEnabled: ENV.POST_TO_SIS,
postToSISName: ENV.SIS_NAME,
postToSIS:
this.event.eventType === 'assignment' ? this.event.assignment.post_to_sis : undefined,
datePickerFormat: this.event.allDay ? 'medium_with_weekday' : 'full_with_weekday'
})
this.currentContextInfo = null
if (this.event.override) {
this.template = editAssignmentOverrideTemplate
}
$(selector).append(this.render().el)
this.setupTimeAndDatePickers()
this.$el.find('select.context_id').triggerHandler('change', false)
if (this.model == null) {
this.model = this.generateNewEvent()
}
if (!this.event.isNewEvent()) {
this.$el.find('.context_select').hide()
this.$el.attr('method', 'PUT')
return this.$el.attr(
'action',
$.replaceTags(this.event.contextInfo.assignment_url, 'id', this.event.object.id)
)
}
}
setContext(newContext) {
this.$el
.find('select.context_id')
.val(newContext)
.triggerHandler('change', false)
}
contextInfoForCode(code) {
return this.event.possibleContexts().find(context => context.asset_string === code)
}
activate() {
this.$el.find('select.context_id').change()
if (this.event.assignment && this.event.assignment.assignment_group_id) {
return this.$el
.find('.assignment_group_select .assignment_group')
.val(this.event.assignment.assignment_group_id)
}
}
moreOptions(jsEvent) {
jsEvent.preventDefault()
const pieces = $(jsEvent.target)
.attr('href')
.split('#')
const data = this.$el.getFormData({object_name: 'assignment'})
const params = {}
if (data.name) {
params.title = data.name
}
if (data.due_at && this.$el.find('.datetime_field').data('unfudged-date')) {
params.due_at = this.$el
.find('.datetime_field')
.data('unfudged-date')
.toISOString()
}
if (data.assignment_group_id) {
params.assignment_group_id = data.assignment_group_id
}
params.return_to = window.location.href
pieces[0] += `?${$.param(params)}`
return (window.location.href = pieces.join('#'))
}
contextChange(jsEvent, propagate) {
if (this.ignoreContextChange) return
const context = $(jsEvent.target).val()
this.currentContextInfo = this.contextInfoForCode(context)
this.event.contextInfo = this.currentContextInfo
if (this.currentContextInfo == null) return
if (propagate !== false) this.contextChangeCB(context)
// TODO: support adding a new assignment group from this select box
const assignmentGroupsSelectOptionsInfo = {
collection: this.currentContextInfo.assignment_groups.sort(natcompare.byKey('name'))
}
this.$el
.find('.assignment_group')
.html(genericSelectOptionsTemplate(assignmentGroupsSelectOptionsInfo))
// Update the edit and more options links with the new context
this.$el.attr('action', this.currentContextInfo.create_assignment_url)
const moreOptionsUrl = this.event.assignment
? `${this.event.assignment.html_url}/edit`
: this.currentContextInfo.new_assignment_url
return this.$el.find('.more_options_link').attr('href', moreOptionsUrl)
}
generateNewEvent() {
return commonEventFactory({}, [])
}
submitAssignment(e) {
e.preventDefault()
const data = this.getFormData()
this.disableWhileLoadingOpts = {buttons: ['.save_assignment']}
if (data.assignment != null) {
return this.submitRegularAssignment(e, data.assignment)
} else |
}
unfudgedDate(date) {
const unfudged = $.unfudgeDateForProfileTimezone(date)
if (unfudged) {
return unfudged.toISOString()
} else {
return ''
}
}
getFormData() {
const data = super.getFormData(...arguments)
if (data.assignment != null) {
data.assignment.due_at = this.unfudgedDate(data.assignment.due_at)
} else {
data.assignment_override.due_at = this.unfudgedDate(data.assignment_override.due_at)
}
return data
}
submitRegularAssignment(event, data) {
data.due_at = this.unfudgedDate(data.due_at)
if (this.event.isNewEvent()) {
data.context_code = $(this.$el)
.find('.context_id')
.val()
this.model = commonEventFactory(data, this.event.possibleContexts())
return this.submit(event)
} else {
this.event.title = data.title
this.event.start = data.due_at // fudged
this.model = this.event
return this.submit(event)
}
}
submitOverride(event, data) {
this.event.start = data.due_at // fudged
data.due_at = this.unfudgedDate(data.due_at)
this.model = this.event
return this.submit(event)
}
onSaveSuccess() {
return this.closeCB()
}
onSaveFail(xhr) {
let resp
if ((resp = JSON.parse(xhr.responseText))) {
showFlashAlert({message: resp.error, err: null, type: 'error'})
}
this.closeCB()
this.disableWhileLoadingOpts = {}
return super.onSaveFail(xhr)
}
validateBeforeSave(data, errors) {
if (data.assignment != null) {
data = data.assignment
errors = this._validateTitle(data, errors)
} else {
data = data.assignment_override
}
errors = this._validateDueDate(data, errors)
return errors
}
_validateTitle(data, errors) {
const post_to_sis = data.post_to_sis === '1'
let max_name_length = 256
const max_name_length_required = ENV.MAX_NAME_LENGTH_REQUIRED_FOR_ACCOUNT
if (post_to_sis && max_name_length_required) {
max_name_length = ENV.MAX_NAME_LENGTH
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
maxNameLength: max_name_length,
name: data.name,
maxNameLengthRequired: max_name_length_required
})
if (!data.name || $.trim(data.name.toString()).length === 0) {
errors['assignment[name]'] = [{message: I18n.t('name_is_required', 'Name is required!')}]
} else if (validationHelper.nameTooLong()) {
errors['assignment[name]'] = [
{
message: I18n.t('Name is too long, must be under %{length} characters', {
length: max_name_length + 1
})
}
]
}
return errors
}
_validateDueDate(data, errors) {
let dueDate
if (
this.event.eventType === 'assignment' &&
this.event.assignment.unlock_at &&
this.event.assignment.lock_at
) {
const startDate = moment(this.event.assignment.unlock_at)
const endDate = moment(this.event.assignment.lock_at)
dueDate = moment(this.event.start)
if (!withinMomentDates(dueDate, startDate, endDate)) {
const rangeErrorMessage = I18n.t(
'Assignment has a locked date. Due date cannot be set outside of locked date range.'
)
errors.lock_range = [{message: rangeErrorMessage}]
showFlashAlert({
message: rangeErrorMessage,
err: null,
type: 'error'
})
}
}
const post_to_sis = data.post_to_sis === '1'
if (!post_to_sis) {
return errors
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
dueDateRequired: ENV.DUE_DATE_REQUIRED_FOR_ACCOUNT,
dueDate: data.due_at
})
const error_tag = data.name != null ? 'assignment[due_at]' : 'assignment_override[due_at]'
if (validationHelper.dueDateMissing()) {
errors[error_tag] = [{message: I18n.t('Due Date is required!')}]
}
return errors
}
setupTimeAndDatePickers() {
const $field = this.$el.find('.datetime_field')
return $field.datetime_field({
datepicker: {
dateFormat: datePickerFormat(
this.event.allDay
? I18n.t('#date.formats.medium_with_weekday')
: I18n.t('#date.formats.full_with_weekday')
)
}
})
}
}
EditAssignmentDetailsRewrite.prototype.defaults = {
width: 440,
height: 384
}
EditAssignmentDetailsRewrite.prototype.events = {
...EditAssignmentDetailsRewrite.prototype.events,
'click .save_assignment': 'submitAssignment',
'click .more_options_link': 'moreOptions',
'change .context_id': 'contextChange'
}
EditAssignmentDetailsRewrite.prototype.template = editAssignmentTemplate
EditAssignmentDetailsRewrite.prototype.wrapper = wrapper
EditAssignmentDetailsRewrite.optionProperty('assignmentGroup')
| {
return this.submitOverride(e, data.assignment_override)
} | conditional_block |
EditAssignmentDetails.js | /*
* Copyright (C) 2012 - present Instructure, Inc.
*
* This file is part of Canvas.
*
* Canvas is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation, version 3 of the License.
*
* Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import I18n from 'i18n!calendar'
import $ from 'jquery'
import moment from 'moment'
import natcompare from '../util/natcompare'
import commonEventFactory from './commonEventFactory'
import ValidatedFormView from '../views/ValidatedFormView'
import SisValidationHelper from '../util/SisValidationHelper'
import editAssignmentTemplate from 'jst/calendar/editAssignment'
import editAssignmentOverrideTemplate from 'jst/calendar/editAssignmentOverride'
import wrapper from 'jst/EmptyDialogFormWrapper'
import genericSelectOptionsTemplate from 'jst/calendar/genericSelectOptions'
import datePickerFormat from 'jsx/shared/helpers/datePickerFormat'
import {showFlashAlert} from 'jsx/shared/FlashAlert'
import withinMomentDates from 'jsx/shared/helpers/momentDateHelper'
import 'jquery.instructure_date_and_time'
import 'jquery.instructure_forms'
import 'jquery.instructure_misc_helpers'
import './fcMomentHandlebarsHelpers'
export default class EditAssignmentDetailsRewrite extends ValidatedFormView {
initialize(selector, event, contextChangeCB, closeCB) {
this.event = event
this.contextChangeCB = contextChangeCB
this.closeCB = closeCB
super.initialize({
title: this.event.title,
contexts: this.event.possibleContexts(),
date: this.event.startDate(),
postToSISEnabled: ENV.POST_TO_SIS,
postToSISName: ENV.SIS_NAME,
postToSIS:
this.event.eventType === 'assignment' ? this.event.assignment.post_to_sis : undefined,
datePickerFormat: this.event.allDay ? 'medium_with_weekday' : 'full_with_weekday'
})
this.currentContextInfo = null
if (this.event.override) {
this.template = editAssignmentOverrideTemplate
}
$(selector).append(this.render().el)
this.setupTimeAndDatePickers()
this.$el.find('select.context_id').triggerHandler('change', false)
if (this.model == null) {
this.model = this.generateNewEvent()
}
if (!this.event.isNewEvent()) {
this.$el.find('.context_select').hide()
this.$el.attr('method', 'PUT')
return this.$el.attr(
'action',
$.replaceTags(this.event.contextInfo.assignment_url, 'id', this.event.object.id)
)
}
}
setContext(newContext) {
this.$el
.find('select.context_id')
.val(newContext)
.triggerHandler('change', false)
}
| return this.event.possibleContexts().find(context => context.asset_string === code)
}
activate() {
this.$el.find('select.context_id').change()
if (this.event.assignment && this.event.assignment.assignment_group_id) {
return this.$el
.find('.assignment_group_select .assignment_group')
.val(this.event.assignment.assignment_group_id)
}
}
moreOptions(jsEvent) {
jsEvent.preventDefault()
const pieces = $(jsEvent.target)
.attr('href')
.split('#')
const data = this.$el.getFormData({object_name: 'assignment'})
const params = {}
if (data.name) {
params.title = data.name
}
if (data.due_at && this.$el.find('.datetime_field').data('unfudged-date')) {
params.due_at = this.$el
.find('.datetime_field')
.data('unfudged-date')
.toISOString()
}
if (data.assignment_group_id) {
params.assignment_group_id = data.assignment_group_id
}
params.return_to = window.location.href
pieces[0] += `?${$.param(params)}`
return (window.location.href = pieces.join('#'))
}
contextChange(jsEvent, propagate) {
if (this.ignoreContextChange) return
const context = $(jsEvent.target).val()
this.currentContextInfo = this.contextInfoForCode(context)
this.event.contextInfo = this.currentContextInfo
if (this.currentContextInfo == null) return
if (propagate !== false) this.contextChangeCB(context)
// TODO: support adding a new assignment group from this select box
const assignmentGroupsSelectOptionsInfo = {
collection: this.currentContextInfo.assignment_groups.sort(natcompare.byKey('name'))
}
this.$el
.find('.assignment_group')
.html(genericSelectOptionsTemplate(assignmentGroupsSelectOptionsInfo))
// Update the edit and more options links with the new context
this.$el.attr('action', this.currentContextInfo.create_assignment_url)
const moreOptionsUrl = this.event.assignment
? `${this.event.assignment.html_url}/edit`
: this.currentContextInfo.new_assignment_url
return this.$el.find('.more_options_link').attr('href', moreOptionsUrl)
}
generateNewEvent() {
return commonEventFactory({}, [])
}
submitAssignment(e) {
e.preventDefault()
const data = this.getFormData()
this.disableWhileLoadingOpts = {buttons: ['.save_assignment']}
if (data.assignment != null) {
return this.submitRegularAssignment(e, data.assignment)
} else {
return this.submitOverride(e, data.assignment_override)
}
}
unfudgedDate(date) {
const unfudged = $.unfudgeDateForProfileTimezone(date)
if (unfudged) {
return unfudged.toISOString()
} else {
return ''
}
}
getFormData() {
const data = super.getFormData(...arguments)
if (data.assignment != null) {
data.assignment.due_at = this.unfudgedDate(data.assignment.due_at)
} else {
data.assignment_override.due_at = this.unfudgedDate(data.assignment_override.due_at)
}
return data
}
submitRegularAssignment(event, data) {
data.due_at = this.unfudgedDate(data.due_at)
if (this.event.isNewEvent()) {
data.context_code = $(this.$el)
.find('.context_id')
.val()
this.model = commonEventFactory(data, this.event.possibleContexts())
return this.submit(event)
} else {
this.event.title = data.title
this.event.start = data.due_at // fudged
this.model = this.event
return this.submit(event)
}
}
submitOverride(event, data) {
this.event.start = data.due_at // fudged
data.due_at = this.unfudgedDate(data.due_at)
this.model = this.event
return this.submit(event)
}
onSaveSuccess() {
return this.closeCB()
}
onSaveFail(xhr) {
let resp
if ((resp = JSON.parse(xhr.responseText))) {
showFlashAlert({message: resp.error, err: null, type: 'error'})
}
this.closeCB()
this.disableWhileLoadingOpts = {}
return super.onSaveFail(xhr)
}
validateBeforeSave(data, errors) {
if (data.assignment != null) {
data = data.assignment
errors = this._validateTitle(data, errors)
} else {
data = data.assignment_override
}
errors = this._validateDueDate(data, errors)
return errors
}
_validateTitle(data, errors) {
const post_to_sis = data.post_to_sis === '1'
let max_name_length = 256
const max_name_length_required = ENV.MAX_NAME_LENGTH_REQUIRED_FOR_ACCOUNT
if (post_to_sis && max_name_length_required) {
max_name_length = ENV.MAX_NAME_LENGTH
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
maxNameLength: max_name_length,
name: data.name,
maxNameLengthRequired: max_name_length_required
})
if (!data.name || $.trim(data.name.toString()).length === 0) {
errors['assignment[name]'] = [{message: I18n.t('name_is_required', 'Name is required!')}]
} else if (validationHelper.nameTooLong()) {
errors['assignment[name]'] = [
{
message: I18n.t('Name is too long, must be under %{length} characters', {
length: max_name_length + 1
})
}
]
}
return errors
}
_validateDueDate(data, errors) {
let dueDate
if (
this.event.eventType === 'assignment' &&
this.event.assignment.unlock_at &&
this.event.assignment.lock_at
) {
const startDate = moment(this.event.assignment.unlock_at)
const endDate = moment(this.event.assignment.lock_at)
dueDate = moment(this.event.start)
if (!withinMomentDates(dueDate, startDate, endDate)) {
const rangeErrorMessage = I18n.t(
'Assignment has a locked date. Due date cannot be set outside of locked date range.'
)
errors.lock_range = [{message: rangeErrorMessage}]
showFlashAlert({
message: rangeErrorMessage,
err: null,
type: 'error'
})
}
}
const post_to_sis = data.post_to_sis === '1'
if (!post_to_sis) {
return errors
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
dueDateRequired: ENV.DUE_DATE_REQUIRED_FOR_ACCOUNT,
dueDate: data.due_at
})
const error_tag = data.name != null ? 'assignment[due_at]' : 'assignment_override[due_at]'
if (validationHelper.dueDateMissing()) {
errors[error_tag] = [{message: I18n.t('Due Date is required!')}]
}
return errors
}
setupTimeAndDatePickers() {
const $field = this.$el.find('.datetime_field')
return $field.datetime_field({
datepicker: {
dateFormat: datePickerFormat(
this.event.allDay
? I18n.t('#date.formats.medium_with_weekday')
: I18n.t('#date.formats.full_with_weekday')
)
}
})
}
}
EditAssignmentDetailsRewrite.prototype.defaults = {
width: 440,
height: 384
}
EditAssignmentDetailsRewrite.prototype.events = {
...EditAssignmentDetailsRewrite.prototype.events,
'click .save_assignment': 'submitAssignment',
'click .more_options_link': 'moreOptions',
'change .context_id': 'contextChange'
}
EditAssignmentDetailsRewrite.prototype.template = editAssignmentTemplate
EditAssignmentDetailsRewrite.prototype.wrapper = wrapper
EditAssignmentDetailsRewrite.optionProperty('assignmentGroup') | contextInfoForCode(code) { | random_line_split |
EditAssignmentDetails.js | /*
* Copyright (C) 2012 - present Instructure, Inc.
*
* This file is part of Canvas.
*
* Canvas is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation, version 3 of the License.
*
* Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import I18n from 'i18n!calendar'
import $ from 'jquery'
import moment from 'moment'
import natcompare from '../util/natcompare'
import commonEventFactory from './commonEventFactory'
import ValidatedFormView from '../views/ValidatedFormView'
import SisValidationHelper from '../util/SisValidationHelper'
import editAssignmentTemplate from 'jst/calendar/editAssignment'
import editAssignmentOverrideTemplate from 'jst/calendar/editAssignmentOverride'
import wrapper from 'jst/EmptyDialogFormWrapper'
import genericSelectOptionsTemplate from 'jst/calendar/genericSelectOptions'
import datePickerFormat from 'jsx/shared/helpers/datePickerFormat'
import {showFlashAlert} from 'jsx/shared/FlashAlert'
import withinMomentDates from 'jsx/shared/helpers/momentDateHelper'
import 'jquery.instructure_date_and_time'
import 'jquery.instructure_forms'
import 'jquery.instructure_misc_helpers'
import './fcMomentHandlebarsHelpers'
export default class EditAssignmentDetailsRewrite extends ValidatedFormView {
initialize(selector, event, contextChangeCB, closeCB) {
this.event = event
this.contextChangeCB = contextChangeCB
this.closeCB = closeCB
super.initialize({
title: this.event.title,
contexts: this.event.possibleContexts(),
date: this.event.startDate(),
postToSISEnabled: ENV.POST_TO_SIS,
postToSISName: ENV.SIS_NAME,
postToSIS:
this.event.eventType === 'assignment' ? this.event.assignment.post_to_sis : undefined,
datePickerFormat: this.event.allDay ? 'medium_with_weekday' : 'full_with_weekday'
})
this.currentContextInfo = null
if (this.event.override) {
this.template = editAssignmentOverrideTemplate
}
$(selector).append(this.render().el)
this.setupTimeAndDatePickers()
this.$el.find('select.context_id').triggerHandler('change', false)
if (this.model == null) {
this.model = this.generateNewEvent()
}
if (!this.event.isNewEvent()) {
this.$el.find('.context_select').hide()
this.$el.attr('method', 'PUT')
return this.$el.attr(
'action',
$.replaceTags(this.event.contextInfo.assignment_url, 'id', this.event.object.id)
)
}
}
setContext(newContext) {
this.$el
.find('select.context_id')
.val(newContext)
.triggerHandler('change', false)
}
contextInfoForCode(code) {
return this.event.possibleContexts().find(context => context.asset_string === code)
}
activate() {
this.$el.find('select.context_id').change()
if (this.event.assignment && this.event.assignment.assignment_group_id) {
return this.$el
.find('.assignment_group_select .assignment_group')
.val(this.event.assignment.assignment_group_id)
}
}
moreOptions(jsEvent) {
jsEvent.preventDefault()
const pieces = $(jsEvent.target)
.attr('href')
.split('#')
const data = this.$el.getFormData({object_name: 'assignment'})
const params = {}
if (data.name) {
params.title = data.name
}
if (data.due_at && this.$el.find('.datetime_field').data('unfudged-date')) {
params.due_at = this.$el
.find('.datetime_field')
.data('unfudged-date')
.toISOString()
}
if (data.assignment_group_id) {
params.assignment_group_id = data.assignment_group_id
}
params.return_to = window.location.href
pieces[0] += `?${$.param(params)}`
return (window.location.href = pieces.join('#'))
}
contextChange(jsEvent, propagate) {
if (this.ignoreContextChange) return
const context = $(jsEvent.target).val()
this.currentContextInfo = this.contextInfoForCode(context)
this.event.contextInfo = this.currentContextInfo
if (this.currentContextInfo == null) return
if (propagate !== false) this.contextChangeCB(context)
// TODO: support adding a new assignment group from this select box
const assignmentGroupsSelectOptionsInfo = {
collection: this.currentContextInfo.assignment_groups.sort(natcompare.byKey('name'))
}
this.$el
.find('.assignment_group')
.html(genericSelectOptionsTemplate(assignmentGroupsSelectOptionsInfo))
// Update the edit and more options links with the new context
this.$el.attr('action', this.currentContextInfo.create_assignment_url)
const moreOptionsUrl = this.event.assignment
? `${this.event.assignment.html_url}/edit`
: this.currentContextInfo.new_assignment_url
return this.$el.find('.more_options_link').attr('href', moreOptionsUrl)
}
generateNewEvent() {
return commonEventFactory({}, [])
}
submitAssignment(e) {
e.preventDefault()
const data = this.getFormData()
this.disableWhileLoadingOpts = {buttons: ['.save_assignment']}
if (data.assignment != null) {
return this.submitRegularAssignment(e, data.assignment)
} else {
return this.submitOverride(e, data.assignment_override)
}
}
unfudgedDate(date) {
const unfudged = $.unfudgeDateForProfileTimezone(date)
if (unfudged) {
return unfudged.toISOString()
} else {
return ''
}
}
getFormData() {
const data = super.getFormData(...arguments)
if (data.assignment != null) {
data.assignment.due_at = this.unfudgedDate(data.assignment.due_at)
} else {
data.assignment_override.due_at = this.unfudgedDate(data.assignment_override.due_at)
}
return data
}
submitRegularAssignment(event, data) {
data.due_at = this.unfudgedDate(data.due_at)
if (this.event.isNewEvent()) {
data.context_code = $(this.$el)
.find('.context_id')
.val()
this.model = commonEventFactory(data, this.event.possibleContexts())
return this.submit(event)
} else {
this.event.title = data.title
this.event.start = data.due_at // fudged
this.model = this.event
return this.submit(event)
}
}
submitOverride(event, data) {
this.event.start = data.due_at // fudged
data.due_at = this.unfudgedDate(data.due_at)
this.model = this.event
return this.submit(event)
}
onSaveSuccess() |
onSaveFail(xhr) {
let resp
if ((resp = JSON.parse(xhr.responseText))) {
showFlashAlert({message: resp.error, err: null, type: 'error'})
}
this.closeCB()
this.disableWhileLoadingOpts = {}
return super.onSaveFail(xhr)
}
validateBeforeSave(data, errors) {
if (data.assignment != null) {
data = data.assignment
errors = this._validateTitle(data, errors)
} else {
data = data.assignment_override
}
errors = this._validateDueDate(data, errors)
return errors
}
_validateTitle(data, errors) {
const post_to_sis = data.post_to_sis === '1'
let max_name_length = 256
const max_name_length_required = ENV.MAX_NAME_LENGTH_REQUIRED_FOR_ACCOUNT
if (post_to_sis && max_name_length_required) {
max_name_length = ENV.MAX_NAME_LENGTH
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
maxNameLength: max_name_length,
name: data.name,
maxNameLengthRequired: max_name_length_required
})
if (!data.name || $.trim(data.name.toString()).length === 0) {
errors['assignment[name]'] = [{message: I18n.t('name_is_required', 'Name is required!')}]
} else if (validationHelper.nameTooLong()) {
errors['assignment[name]'] = [
{
message: I18n.t('Name is too long, must be under %{length} characters', {
length: max_name_length + 1
})
}
]
}
return errors
}
_validateDueDate(data, errors) {
let dueDate
if (
this.event.eventType === 'assignment' &&
this.event.assignment.unlock_at &&
this.event.assignment.lock_at
) {
const startDate = moment(this.event.assignment.unlock_at)
const endDate = moment(this.event.assignment.lock_at)
dueDate = moment(this.event.start)
if (!withinMomentDates(dueDate, startDate, endDate)) {
const rangeErrorMessage = I18n.t(
'Assignment has a locked date. Due date cannot be set outside of locked date range.'
)
errors.lock_range = [{message: rangeErrorMessage}]
showFlashAlert({
message: rangeErrorMessage,
err: null,
type: 'error'
})
}
}
const post_to_sis = data.post_to_sis === '1'
if (!post_to_sis) {
return errors
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
dueDateRequired: ENV.DUE_DATE_REQUIRED_FOR_ACCOUNT,
dueDate: data.due_at
})
const error_tag = data.name != null ? 'assignment[due_at]' : 'assignment_override[due_at]'
if (validationHelper.dueDateMissing()) {
errors[error_tag] = [{message: I18n.t('Due Date is required!')}]
}
return errors
}
setupTimeAndDatePickers() {
const $field = this.$el.find('.datetime_field')
return $field.datetime_field({
datepicker: {
dateFormat: datePickerFormat(
this.event.allDay
? I18n.t('#date.formats.medium_with_weekday')
: I18n.t('#date.formats.full_with_weekday')
)
}
})
}
}
EditAssignmentDetailsRewrite.prototype.defaults = {
width: 440,
height: 384
}
EditAssignmentDetailsRewrite.prototype.events = {
...EditAssignmentDetailsRewrite.prototype.events,
'click .save_assignment': 'submitAssignment',
'click .more_options_link': 'moreOptions',
'change .context_id': 'contextChange'
}
EditAssignmentDetailsRewrite.prototype.template = editAssignmentTemplate
EditAssignmentDetailsRewrite.prototype.wrapper = wrapper
EditAssignmentDetailsRewrite.optionProperty('assignmentGroup')
| {
return this.closeCB()
} | identifier_body |
font_face.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The [`@font-face`][ff] at-rule.
//!
//! [ff]: https://drafts.csswg.org/css-fonts/#at-font-face-rule
#![deny(missing_docs)]
#[cfg(feature = "gecko")]
use computed_values::{font_style, font_weight, font_stretch};
use computed_values::font_family::FamilyName;
use cssparser::{AtRuleParser, DeclarationListParser, DeclarationParser, Parser};
#[cfg(feature = "gecko")] use cssparser::UnicodeRange;
use parser::{ParserContext, log_css_error, Parse};
use std::fmt;
use std::iter;
use style_traits::{ToCss, OneOrMoreCommaSeparated};
use values::specified::url::SpecifiedUrl;
/// A source for a font-face rule.
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub enum Source {
/// A `url()` source.
Url(UrlSource),
/// A `local()` source.
Local(FamilyName),
}
impl ToCss for Source {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
match *self {
Source::Url(ref url) => {
try!(dest.write_str("url(\""));
try!(url.to_css(dest));
},
Source::Local(ref family) => {
try!(dest.write_str("local(\""));
try!(family.to_css(dest));
},
}
dest.write_str("\")")
}
}
impl OneOrMoreCommaSeparated for Source {}
/// A `UrlSource` represents a font-face source that has been specified with a
/// `url()` function.
///
/// https://drafts.csswg.org/css-fonts/#src-desc
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct UrlSource {
/// The specified url.
pub url: SpecifiedUrl,
/// The format hints specified with the `format()` function.
pub format_hints: Vec<String>,
}
impl ToCss for UrlSource {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
dest.write_str(self.url.as_str())
}
}
/// Parse the block inside a `@font-face` rule.
///
/// Note that the prelude parsing code lives in the `stylesheets` module.
pub fn parse_font_face_block(context: &ParserContext, input: &mut Parser)
-> Result<FontFaceRule, ()> {
let mut rule = FontFaceRule::initial();
{
let parser = FontFaceRuleParser {
context: context,
rule: &mut rule,
missing: MissingDescriptors::new(),
};
let mut iter = DeclarationListParser::new(input, parser);
while let Some(declaration) = iter.next() {
if let Err(range) = declaration {
let pos = range.start;
let message = format!("Unsupported @font-face descriptor declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, context);
}
}
if iter.parser.missing.any() {
return Err(())
}
}
Ok(rule)
}
/// A list of effective sources that we send over through IPC to the font cache.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct EffectiveSources(Vec<Source>);
impl FontFaceRule {
/// Returns the list of effective sources for that font-face, that is the
/// sources which don't list any format hint, or the ones which list at
/// least "truetype" or "opentype".
pub fn effective_sources(&self) -> EffectiveSources {
EffectiveSources(self.sources.iter().rev().filter(|source| {
if let Source::Url(ref url_source) = **source | else {
true
}
}).cloned().collect())
}
}
impl iter::Iterator for EffectiveSources {
type Item = Source;
fn next(&mut self) -> Option<Source> {
self.0.pop()
}
}
struct FontFaceRuleParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
rule: &'a mut FontFaceRule,
missing: MissingDescriptors,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for FontFaceRuleParser<'a, 'b> {
type Prelude = ();
type AtRule = ();
}
impl Parse for Source {
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Source, ()> {
if input.try(|input| input.expect_function_matching("local")).is_ok() {
return input.parse_nested_block(|input| {
FamilyName::parse(context, input)
}).map(Source::Local)
}
let url = SpecifiedUrl::parse(context, input)?;
// Parsing optional format()
let format_hints = if input.try(|input| input.expect_function_matching("format")).is_ok() {
input.parse_nested_block(|input| {
input.parse_comma_separated(|input| {
Ok(input.expect_string()?.into_owned())
})
})?
} else {
vec![]
};
Ok(Source::Url(UrlSource {
url: url,
format_hints: format_hints,
}))
}
}
macro_rules! font_face_descriptors {
(
mandatory descriptors = [
$( #[$m_doc: meta] $m_name: tt $m_ident: ident: $m_ty: ty = $m_initial: expr, )*
]
optional descriptors = [
$( #[$o_doc: meta] $o_name: tt $o_ident: ident: $o_ty: ty = $o_initial: expr, )*
]
) => {
/// A `@font-face` rule.
///
/// https://drafts.csswg.org/css-fonts/#font-face-rule
#[derive(Debug, PartialEq, Eq)]
pub struct FontFaceRule {
$(
#[$m_doc]
pub $m_ident: $m_ty,
)*
$(
#[$o_doc]
pub $o_ident: $o_ty,
)*
}
struct MissingDescriptors {
$(
$m_ident: bool,
)*
}
impl MissingDescriptors {
fn new() -> Self {
MissingDescriptors {
$(
$m_ident: true,
)*
}
}
fn any(&self) -> bool {
$(
self.$m_ident
)||*
}
}
impl FontFaceRule {
fn initial() -> Self {
FontFaceRule {
$(
$m_ident: $m_initial,
)*
$(
$o_ident: $o_initial,
)*
}
}
}
impl ToCss for FontFaceRule {
// Serialization of FontFaceRule is not specced.
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
dest.write_str("@font-face {\n")?;
$(
dest.write_str(concat!(" ", $m_name, ": "))?;
ToCss::to_css(&self.$m_ident, dest)?;
dest.write_str(";\n")?;
)*
$(
// Because of parse_font_face_block,
// this condition is always true for "src" and "font-family".
// But it can be false for other descriptors.
if self.$o_ident != $o_initial {
dest.write_str(concat!(" ", $o_name, ": "))?;
ToCss::to_css(&self.$o_ident, dest)?;
dest.write_str(";\n")?;
}
)*
dest.write_str("}")
}
}
impl<'a, 'b> DeclarationParser for FontFaceRuleParser<'a, 'b> {
type Declaration = ();
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<(), ()> {
match_ignore_ascii_case! { name,
$(
$m_name => {
self.rule.$m_ident = Parse::parse(self.context, input)?;
self.missing.$m_ident = false
},
)*
$(
$o_name => self.rule.$o_ident = Parse::parse(self.context, input)?,
)*
_ => return Err(())
}
Ok(())
}
}
}
}
/// css-name rust_identifier: Type = initial_value,
#[cfg(feature = "gecko")]
font_face_descriptors! {
mandatory descriptors = [
/// The name of this font face
"font-family" family: FamilyName = FamilyName(atom!("")),
/// The alternative sources for this font face.
"src" sources: Vec<Source> = Vec::new(),
]
optional descriptors = [
/// The style of this font face
"font-style" style: font_style::T = font_style::T::normal,
/// The weight of this font face
"font-weight" weight: font_weight::T = font_weight::T::Weight400 /* normal */,
/// The stretch of this font face
"font-stretch" stretch: font_stretch::T = font_stretch::T::normal,
/// The ranges of code points outside of which this font face should not be used.
"unicode-range" unicode_range: Vec<UnicodeRange> = vec![
UnicodeRange { start: 0, end: 0x10FFFF }
],
]
}
#[cfg(feature = "servo")]
font_face_descriptors! {
mandatory descriptors = [
/// The name of this font face
"font-family" family: FamilyName = FamilyName(atom!("")),
/// The alternative sources for this font face.
"src" sources: Vec<Source> = Vec::new(),
]
optional descriptors = [
]
}
| {
let hints = &url_source.format_hints;
// We support only opentype fonts and truetype is an alias for
// that format. Sources without format hints need to be
// downloaded in case we support them.
hints.is_empty() || hints.iter().any(|hint| {
hint == "truetype" || hint == "opentype" || hint == "woff"
})
} | conditional_block |
font_face.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The [`@font-face`][ff] at-rule.
//!
//! [ff]: https://drafts.csswg.org/css-fonts/#at-font-face-rule
#![deny(missing_docs)]
#[cfg(feature = "gecko")]
use computed_values::{font_style, font_weight, font_stretch};
use computed_values::font_family::FamilyName;
use cssparser::{AtRuleParser, DeclarationListParser, DeclarationParser, Parser};
#[cfg(feature = "gecko")] use cssparser::UnicodeRange;
use parser::{ParserContext, log_css_error, Parse};
use std::fmt;
use std::iter;
use style_traits::{ToCss, OneOrMoreCommaSeparated};
use values::specified::url::SpecifiedUrl;
/// A source for a font-face rule.
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub enum Source {
/// A `url()` source.
Url(UrlSource),
/// A `local()` source.
Local(FamilyName),
}
impl ToCss for Source {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
match *self {
Source::Url(ref url) => {
try!(dest.write_str("url(\""));
try!(url.to_css(dest));
},
Source::Local(ref family) => {
try!(dest.write_str("local(\""));
try!(family.to_css(dest));
},
}
dest.write_str("\")")
}
}
impl OneOrMoreCommaSeparated for Source {}
/// A `UrlSource` represents a font-face source that has been specified with a
/// `url()` function.
///
/// https://drafts.csswg.org/css-fonts/#src-desc
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct | {
/// The specified url.
pub url: SpecifiedUrl,
/// The format hints specified with the `format()` function.
pub format_hints: Vec<String>,
}
impl ToCss for UrlSource {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
dest.write_str(self.url.as_str())
}
}
/// Parse the block inside a `@font-face` rule.
///
/// Note that the prelude parsing code lives in the `stylesheets` module.
pub fn parse_font_face_block(context: &ParserContext, input: &mut Parser)
-> Result<FontFaceRule, ()> {
let mut rule = FontFaceRule::initial();
{
let parser = FontFaceRuleParser {
context: context,
rule: &mut rule,
missing: MissingDescriptors::new(),
};
let mut iter = DeclarationListParser::new(input, parser);
while let Some(declaration) = iter.next() {
if let Err(range) = declaration {
let pos = range.start;
let message = format!("Unsupported @font-face descriptor declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, context);
}
}
if iter.parser.missing.any() {
return Err(())
}
}
Ok(rule)
}
/// A list of effective sources that we send over through IPC to the font cache.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct EffectiveSources(Vec<Source>);
impl FontFaceRule {
/// Returns the list of effective sources for that font-face, that is the
/// sources which don't list any format hint, or the ones which list at
/// least "truetype" or "opentype".
pub fn effective_sources(&self) -> EffectiveSources {
EffectiveSources(self.sources.iter().rev().filter(|source| {
if let Source::Url(ref url_source) = **source {
let hints = &url_source.format_hints;
// We support only opentype fonts and truetype is an alias for
// that format. Sources without format hints need to be
// downloaded in case we support them.
hints.is_empty() || hints.iter().any(|hint| {
hint == "truetype" || hint == "opentype" || hint == "woff"
})
} else {
true
}
}).cloned().collect())
}
}
impl iter::Iterator for EffectiveSources {
type Item = Source;
fn next(&mut self) -> Option<Source> {
self.0.pop()
}
}
struct FontFaceRuleParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
rule: &'a mut FontFaceRule,
missing: MissingDescriptors,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for FontFaceRuleParser<'a, 'b> {
type Prelude = ();
type AtRule = ();
}
impl Parse for Source {
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Source, ()> {
if input.try(|input| input.expect_function_matching("local")).is_ok() {
return input.parse_nested_block(|input| {
FamilyName::parse(context, input)
}).map(Source::Local)
}
let url = SpecifiedUrl::parse(context, input)?;
// Parsing optional format()
let format_hints = if input.try(|input| input.expect_function_matching("format")).is_ok() {
input.parse_nested_block(|input| {
input.parse_comma_separated(|input| {
Ok(input.expect_string()?.into_owned())
})
})?
} else {
vec![]
};
Ok(Source::Url(UrlSource {
url: url,
format_hints: format_hints,
}))
}
}
macro_rules! font_face_descriptors {
(
mandatory descriptors = [
$( #[$m_doc: meta] $m_name: tt $m_ident: ident: $m_ty: ty = $m_initial: expr, )*
]
optional descriptors = [
$( #[$o_doc: meta] $o_name: tt $o_ident: ident: $o_ty: ty = $o_initial: expr, )*
]
) => {
/// A `@font-face` rule.
///
/// https://drafts.csswg.org/css-fonts/#font-face-rule
#[derive(Debug, PartialEq, Eq)]
pub struct FontFaceRule {
$(
#[$m_doc]
pub $m_ident: $m_ty,
)*
$(
#[$o_doc]
pub $o_ident: $o_ty,
)*
}
struct MissingDescriptors {
$(
$m_ident: bool,
)*
}
impl MissingDescriptors {
fn new() -> Self {
MissingDescriptors {
$(
$m_ident: true,
)*
}
}
fn any(&self) -> bool {
$(
self.$m_ident
)||*
}
}
impl FontFaceRule {
fn initial() -> Self {
FontFaceRule {
$(
$m_ident: $m_initial,
)*
$(
$o_ident: $o_initial,
)*
}
}
}
impl ToCss for FontFaceRule {
// Serialization of FontFaceRule is not specced.
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
dest.write_str("@font-face {\n")?;
$(
dest.write_str(concat!(" ", $m_name, ": "))?;
ToCss::to_css(&self.$m_ident, dest)?;
dest.write_str(";\n")?;
)*
$(
// Because of parse_font_face_block,
// this condition is always true for "src" and "font-family".
// But it can be false for other descriptors.
if self.$o_ident != $o_initial {
dest.write_str(concat!(" ", $o_name, ": "))?;
ToCss::to_css(&self.$o_ident, dest)?;
dest.write_str(";\n")?;
}
)*
dest.write_str("}")
}
}
impl<'a, 'b> DeclarationParser for FontFaceRuleParser<'a, 'b> {
type Declaration = ();
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<(), ()> {
match_ignore_ascii_case! { name,
$(
$m_name => {
self.rule.$m_ident = Parse::parse(self.context, input)?;
self.missing.$m_ident = false
},
)*
$(
$o_name => self.rule.$o_ident = Parse::parse(self.context, input)?,
)*
_ => return Err(())
}
Ok(())
}
}
}
}
/// css-name rust_identifier: Type = initial_value,
#[cfg(feature = "gecko")]
font_face_descriptors! {
mandatory descriptors = [
/// The name of this font face
"font-family" family: FamilyName = FamilyName(atom!("")),
/// The alternative sources for this font face.
"src" sources: Vec<Source> = Vec::new(),
]
optional descriptors = [
/// The style of this font face
"font-style" style: font_style::T = font_style::T::normal,
/// The weight of this font face
"font-weight" weight: font_weight::T = font_weight::T::Weight400 /* normal */,
/// The stretch of this font face
"font-stretch" stretch: font_stretch::T = font_stretch::T::normal,
/// The ranges of code points outside of which this font face should not be used.
"unicode-range" unicode_range: Vec<UnicodeRange> = vec![
UnicodeRange { start: 0, end: 0x10FFFF }
],
]
}
#[cfg(feature = "servo")]
font_face_descriptors! {
mandatory descriptors = [
/// The name of this font face
"font-family" family: FamilyName = FamilyName(atom!("")),
/// The alternative sources for this font face.
"src" sources: Vec<Source> = Vec::new(),
]
optional descriptors = [
]
}
| UrlSource | identifier_name |
font_face.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The [`@font-face`][ff] at-rule.
//!
//! [ff]: https://drafts.csswg.org/css-fonts/#at-font-face-rule
#![deny(missing_docs)]
#[cfg(feature = "gecko")]
use computed_values::{font_style, font_weight, font_stretch};
use computed_values::font_family::FamilyName;
use cssparser::{AtRuleParser, DeclarationListParser, DeclarationParser, Parser};
#[cfg(feature = "gecko")] use cssparser::UnicodeRange;
use parser::{ParserContext, log_css_error, Parse};
use std::fmt;
use std::iter;
use style_traits::{ToCss, OneOrMoreCommaSeparated};
use values::specified::url::SpecifiedUrl;
/// A source for a font-face rule.
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub enum Source {
/// A `url()` source.
Url(UrlSource),
/// A `local()` source.
Local(FamilyName),
}
impl ToCss for Source {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
match *self {
Source::Url(ref url) => {
try!(dest.write_str("url(\""));
try!(url.to_css(dest));
},
Source::Local(ref family) => {
try!(dest.write_str("local(\""));
try!(family.to_css(dest));
},
}
dest.write_str("\")")
}
}
impl OneOrMoreCommaSeparated for Source {}
/// A `UrlSource` represents a font-face source that has been specified with a
/// `url()` function.
///
/// https://drafts.csswg.org/css-fonts/#src-desc
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct UrlSource {
/// The specified url.
pub url: SpecifiedUrl,
/// The format hints specified with the `format()` function.
pub format_hints: Vec<String>,
}
impl ToCss for UrlSource {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
dest.write_str(self.url.as_str())
}
}
/// Parse the block inside a `@font-face` rule.
///
/// Note that the prelude parsing code lives in the `stylesheets` module.
pub fn parse_font_face_block(context: &ParserContext, input: &mut Parser)
-> Result<FontFaceRule, ()> {
let mut rule = FontFaceRule::initial();
{
let parser = FontFaceRuleParser {
context: context,
rule: &mut rule,
missing: MissingDescriptors::new(),
};
let mut iter = DeclarationListParser::new(input, parser);
while let Some(declaration) = iter.next() {
if let Err(range) = declaration {
let pos = range.start;
let message = format!("Unsupported @font-face descriptor declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, context);
}
}
if iter.parser.missing.any() {
return Err(())
}
}
Ok(rule)
}
/// A list of effective sources that we send over through IPC to the font cache.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct EffectiveSources(Vec<Source>);
impl FontFaceRule {
/// Returns the list of effective sources for that font-face, that is the
/// sources which don't list any format hint, or the ones which list at
/// least "truetype" or "opentype".
pub fn effective_sources(&self) -> EffectiveSources {
EffectiveSources(self.sources.iter().rev().filter(|source| {
if let Source::Url(ref url_source) = **source {
let hints = &url_source.format_hints;
// We support only opentype fonts and truetype is an alias for
// that format. Sources without format hints need to be
// downloaded in case we support them.
hints.is_empty() || hints.iter().any(|hint| {
hint == "truetype" || hint == "opentype" || hint == "woff"
})
} else {
true
}
}).cloned().collect())
}
}
impl iter::Iterator for EffectiveSources {
type Item = Source;
fn next(&mut self) -> Option<Source> {
self.0.pop()
}
}
struct FontFaceRuleParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
rule: &'a mut FontFaceRule,
missing: MissingDescriptors,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for FontFaceRuleParser<'a, 'b> {
type Prelude = ();
type AtRule = ();
}
impl Parse for Source {
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Source, ()> |
}
macro_rules! font_face_descriptors {
(
mandatory descriptors = [
$( #[$m_doc: meta] $m_name: tt $m_ident: ident: $m_ty: ty = $m_initial: expr, )*
]
optional descriptors = [
$( #[$o_doc: meta] $o_name: tt $o_ident: ident: $o_ty: ty = $o_initial: expr, )*
]
) => {
/// A `@font-face` rule.
///
/// https://drafts.csswg.org/css-fonts/#font-face-rule
#[derive(Debug, PartialEq, Eq)]
pub struct FontFaceRule {
$(
#[$m_doc]
pub $m_ident: $m_ty,
)*
$(
#[$o_doc]
pub $o_ident: $o_ty,
)*
}
struct MissingDescriptors {
$(
$m_ident: bool,
)*
}
impl MissingDescriptors {
fn new() -> Self {
MissingDescriptors {
$(
$m_ident: true,
)*
}
}
fn any(&self) -> bool {
$(
self.$m_ident
)||*
}
}
impl FontFaceRule {
fn initial() -> Self {
FontFaceRule {
$(
$m_ident: $m_initial,
)*
$(
$o_ident: $o_initial,
)*
}
}
}
impl ToCss for FontFaceRule {
// Serialization of FontFaceRule is not specced.
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
dest.write_str("@font-face {\n")?;
$(
dest.write_str(concat!(" ", $m_name, ": "))?;
ToCss::to_css(&self.$m_ident, dest)?;
dest.write_str(";\n")?;
)*
$(
// Because of parse_font_face_block,
// this condition is always true for "src" and "font-family".
// But it can be false for other descriptors.
if self.$o_ident != $o_initial {
dest.write_str(concat!(" ", $o_name, ": "))?;
ToCss::to_css(&self.$o_ident, dest)?;
dest.write_str(";\n")?;
}
)*
dest.write_str("}")
}
}
impl<'a, 'b> DeclarationParser for FontFaceRuleParser<'a, 'b> {
type Declaration = ();
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<(), ()> {
match_ignore_ascii_case! { name,
$(
$m_name => {
self.rule.$m_ident = Parse::parse(self.context, input)?;
self.missing.$m_ident = false
},
)*
$(
$o_name => self.rule.$o_ident = Parse::parse(self.context, input)?,
)*
_ => return Err(())
}
Ok(())
}
}
}
}
/// css-name rust_identifier: Type = initial_value,
#[cfg(feature = "gecko")]
font_face_descriptors! {
mandatory descriptors = [
/// The name of this font face
"font-family" family: FamilyName = FamilyName(atom!("")),
/// The alternative sources for this font face.
"src" sources: Vec<Source> = Vec::new(),
]
optional descriptors = [
/// The style of this font face
"font-style" style: font_style::T = font_style::T::normal,
/// The weight of this font face
"font-weight" weight: font_weight::T = font_weight::T::Weight400 /* normal */,
/// The stretch of this font face
"font-stretch" stretch: font_stretch::T = font_stretch::T::normal,
/// The ranges of code points outside of which this font face should not be used.
"unicode-range" unicode_range: Vec<UnicodeRange> = vec![
UnicodeRange { start: 0, end: 0x10FFFF }
],
]
}
#[cfg(feature = "servo")]
font_face_descriptors! {
mandatory descriptors = [
/// The name of this font face
"font-family" family: FamilyName = FamilyName(atom!("")),
/// The alternative sources for this font face.
"src" sources: Vec<Source> = Vec::new(),
]
optional descriptors = [
]
}
| {
if input.try(|input| input.expect_function_matching("local")).is_ok() {
return input.parse_nested_block(|input| {
FamilyName::parse(context, input)
}).map(Source::Local)
}
let url = SpecifiedUrl::parse(context, input)?;
// Parsing optional format()
let format_hints = if input.try(|input| input.expect_function_matching("format")).is_ok() {
input.parse_nested_block(|input| {
input.parse_comma_separated(|input| {
Ok(input.expect_string()?.into_owned())
})
})?
} else {
vec![]
};
Ok(Source::Url(UrlSource {
url: url,
format_hints: format_hints,
}))
} | identifier_body |
font_face.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The [`@font-face`][ff] at-rule.
//!
//! [ff]: https://drafts.csswg.org/css-fonts/#at-font-face-rule
#![deny(missing_docs)]
#[cfg(feature = "gecko")]
use computed_values::{font_style, font_weight, font_stretch};
use computed_values::font_family::FamilyName;
use cssparser::{AtRuleParser, DeclarationListParser, DeclarationParser, Parser};
#[cfg(feature = "gecko")] use cssparser::UnicodeRange;
use parser::{ParserContext, log_css_error, Parse};
use std::fmt;
use std::iter;
use style_traits::{ToCss, OneOrMoreCommaSeparated};
use values::specified::url::SpecifiedUrl;
/// A source for a font-face rule.
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub enum Source {
/// A `url()` source.
Url(UrlSource),
/// A `local()` source.
Local(FamilyName),
}
impl ToCss for Source {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
match *self {
Source::Url(ref url) => {
try!(dest.write_str("url(\""));
try!(url.to_css(dest));
},
Source::Local(ref family) => {
try!(dest.write_str("local(\""));
try!(family.to_css(dest));
},
}
dest.write_str("\")")
}
}
impl OneOrMoreCommaSeparated for Source {}
/// A `UrlSource` represents a font-face source that has been specified with a
/// `url()` function.
///
/// https://drafts.csswg.org/css-fonts/#src-desc
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct UrlSource {
/// The specified url.
pub url: SpecifiedUrl,
/// The format hints specified with the `format()` function.
pub format_hints: Vec<String>,
}
impl ToCss for UrlSource {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
dest.write_str(self.url.as_str())
}
}
/// Parse the block inside a `@font-face` rule.
///
/// Note that the prelude parsing code lives in the `stylesheets` module.
pub fn parse_font_face_block(context: &ParserContext, input: &mut Parser)
-> Result<FontFaceRule, ()> {
let mut rule = FontFaceRule::initial();
{
let parser = FontFaceRuleParser {
context: context,
rule: &mut rule,
missing: MissingDescriptors::new(),
};
let mut iter = DeclarationListParser::new(input, parser);
while let Some(declaration) = iter.next() {
if let Err(range) = declaration {
let pos = range.start;
let message = format!("Unsupported @font-face descriptor declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, context);
}
}
if iter.parser.missing.any() {
return Err(())
}
}
Ok(rule)
}
/// A list of effective sources that we send over through IPC to the font cache.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct EffectiveSources(Vec<Source>);
impl FontFaceRule {
/// Returns the list of effective sources for that font-face, that is the
/// sources which don't list any format hint, or the ones which list at
/// least "truetype" or "opentype".
pub fn effective_sources(&self) -> EffectiveSources {
EffectiveSources(self.sources.iter().rev().filter(|source| {
if let Source::Url(ref url_source) = **source {
let hints = &url_source.format_hints;
// We support only opentype fonts and truetype is an alias for
// that format. Sources without format hints need to be
// downloaded in case we support them.
hints.is_empty() || hints.iter().any(|hint| {
hint == "truetype" || hint == "opentype" || hint == "woff"
})
} else {
true
}
}).cloned().collect())
}
}
impl iter::Iterator for EffectiveSources {
type Item = Source;
fn next(&mut self) -> Option<Source> {
self.0.pop()
}
}
struct FontFaceRuleParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
rule: &'a mut FontFaceRule,
missing: MissingDescriptors,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for FontFaceRuleParser<'a, 'b> {
type Prelude = ();
type AtRule = ();
}
impl Parse for Source {
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Source, ()> {
if input.try(|input| input.expect_function_matching("local")).is_ok() {
return input.parse_nested_block(|input| {
FamilyName::parse(context, input)
}).map(Source::Local)
}
let url = SpecifiedUrl::parse(context, input)?;
// Parsing optional format()
let format_hints = if input.try(|input| input.expect_function_matching("format")).is_ok() {
input.parse_nested_block(|input| {
input.parse_comma_separated(|input| {
Ok(input.expect_string()?.into_owned())
})
})?
} else {
vec![]
};
Ok(Source::Url(UrlSource {
url: url,
format_hints: format_hints,
}))
}
}
macro_rules! font_face_descriptors {
(
mandatory descriptors = [
$( #[$m_doc: meta] $m_name: tt $m_ident: ident: $m_ty: ty = $m_initial: expr, )*
]
optional descriptors = [
$( #[$o_doc: meta] $o_name: tt $o_ident: ident: $o_ty: ty = $o_initial: expr, )*
]
) => {
/// A `@font-face` rule.
///
/// https://drafts.csswg.org/css-fonts/#font-face-rule
#[derive(Debug, PartialEq, Eq)]
pub struct FontFaceRule {
$(
#[$m_doc]
pub $m_ident: $m_ty,
)*
$(
#[$o_doc]
pub $o_ident: $o_ty,
)*
}
struct MissingDescriptors {
$(
$m_ident: bool,
)*
}
impl MissingDescriptors {
fn new() -> Self {
MissingDescriptors {
$(
$m_ident: true,
)*
}
}
fn any(&self) -> bool {
$( | }
}
impl FontFaceRule {
fn initial() -> Self {
FontFaceRule {
$(
$m_ident: $m_initial,
)*
$(
$o_ident: $o_initial,
)*
}
}
}
impl ToCss for FontFaceRule {
// Serialization of FontFaceRule is not specced.
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
dest.write_str("@font-face {\n")?;
$(
dest.write_str(concat!(" ", $m_name, ": "))?;
ToCss::to_css(&self.$m_ident, dest)?;
dest.write_str(";\n")?;
)*
$(
// Because of parse_font_face_block,
// this condition is always true for "src" and "font-family".
// But it can be false for other descriptors.
if self.$o_ident != $o_initial {
dest.write_str(concat!(" ", $o_name, ": "))?;
ToCss::to_css(&self.$o_ident, dest)?;
dest.write_str(";\n")?;
}
)*
dest.write_str("}")
}
}
impl<'a, 'b> DeclarationParser for FontFaceRuleParser<'a, 'b> {
type Declaration = ();
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<(), ()> {
match_ignore_ascii_case! { name,
$(
$m_name => {
self.rule.$m_ident = Parse::parse(self.context, input)?;
self.missing.$m_ident = false
},
)*
$(
$o_name => self.rule.$o_ident = Parse::parse(self.context, input)?,
)*
_ => return Err(())
}
Ok(())
}
}
}
}
/// css-name rust_identifier: Type = initial_value,
#[cfg(feature = "gecko")]
font_face_descriptors! {
mandatory descriptors = [
/// The name of this font face
"font-family" family: FamilyName = FamilyName(atom!("")),
/// The alternative sources for this font face.
"src" sources: Vec<Source> = Vec::new(),
]
optional descriptors = [
/// The style of this font face
"font-style" style: font_style::T = font_style::T::normal,
/// The weight of this font face
"font-weight" weight: font_weight::T = font_weight::T::Weight400 /* normal */,
/// The stretch of this font face
"font-stretch" stretch: font_stretch::T = font_stretch::T::normal,
/// The ranges of code points outside of which this font face should not be used.
"unicode-range" unicode_range: Vec<UnicodeRange> = vec![
UnicodeRange { start: 0, end: 0x10FFFF }
],
]
}
#[cfg(feature = "servo")]
font_face_descriptors! {
mandatory descriptors = [
/// The name of this font face
"font-family" family: FamilyName = FamilyName(atom!("")),
/// The alternative sources for this font face.
"src" sources: Vec<Source> = Vec::new(),
]
optional descriptors = [
]
} | self.$m_ident
)||* | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.