file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
tilemap.rs |
extern crate gl;
extern crate nalgebra;
use gl::types::*;
use nalgebra::na::{Mat4};
use nalgebra::na;
use std::mem;
use std::ptr;
use super::engine;
use super::shader;
use super::math;
//static CHUNK_SIZE : u8 = 10;
pub struct TilemapChunk
{
shader :shader::ShaderProgram,
vao : u32,
vbo_vertices : u32,
vbo_indices: u32,
vbo_tileid : u32,
indices_count : u32
//save model matrix
//hold ref/owned to TilemapChunkData logical part?
// tile_texture_atlas
// tile_texture_atlas_normal? <- normal map for tiles?
}
impl TilemapChunk
{
pub fn new() -> TilemapChunk
{
TilemapChunk {
shader: shader::ShaderProgram::new(),
vao: 0,
vbo_vertices: 0,
vbo_indices: 0,
indices_count: 0,
vbo_tileid: 0, //vbo for uv(texture) coordinates?
}
}
// tile_count_x: how many tiles on the x axis
// tile_count_y: how many tiles on the y axis
pub fn setup(&mut self, tile_count_x: u32, tile_count_y: u32)
{
//create dummy tile layout
let mut tilemap_chunk_vertices : Vec<GLfloat> = Vec::new();
let mut tilemap_chunk_indices : Vec<GLuint> = Vec::new();
//create the grid vertices
//create tile plane vertices
for i in range(0u32, (tile_count_x+1)*(tile_count_y+1))
{
let x = i % (tile_count_x+1); //first this counts up (column)
let y = i / (tile_count_x+1); //then this counts up (row)
tilemap_chunk_vertices.push(0.0+x as f32);
tilemap_chunk_vertices.push(0.0+y as f32);
//println!("vertex[{}]: {}, {}", i, x, y);
//calculate indices for the triangles
//indices are related to vertex indices not the vector index
//where each vertex has 2 entries
if x < tile_count_x
&& y < tile_count_y
{
let index_of = |x :u32, y:u32| x + (y * (tile_count_x+1));
//requires 2 triangles per tile (quad)
tilemap_chunk_indices.push(i); //index of (x,y)
tilemap_chunk_indices.push(index_of(x+1,y));
tilemap_chunk_indices.push(index_of(x, y+1));
//println!("\ttriangle_one: {}", tilemap_chunk_indices.slice_from(tilemap_chunk_indices.len()-3));
tilemap_chunk_indices.push(index_of(x, y+1));
tilemap_chunk_indices.push(index_of(x+1,y));
tilemap_chunk_indices.push(index_of(x+1, y+1));
//println!("\ttriangle_two: {}", tilemap_chunk_indices.slice_from(tilemap_chunk_indices.len()-3));
}
}
self.indices_count = tilemap_chunk_indices.len() as u32;
//println!("tilemap::setup() Count of vertices: {}", tilemap_chunk_vertices.len()/2); //x,y so /2
//println!("tilemap::setup() Count of indices: {}", self.indices_count);
//println!("tilemap::setup() vertices: {}", tilemap_chunk_vertices);
//TODO shader config elsewhere?
self.shader.add_shader_file("./data/client/shader/tilemap.vs.glsl", gl::VERTEX_SHADER);
self.shader.add_shader_file("./data/client/shader/tilemap.fs.glsl", gl::FRAGMENT_SHADER);
self.shader.set_fragment_name("fragColor"); //required before linking
self.shader.link_program();
self.shader.use_program();
unsafe
{
// Create Vertex Array Object
gl::GenVertexArrays(1, &mut self.vao);
gl::BindVertexArray(self.vao);
// Create a Vertex Buffer Object and copy the vertex data to it
gl::GenBuffers(1, &mut self.vbo_vertices);
gl::BindBuffer(gl::ARRAY_BUFFER, self.vbo_vertices);
gl::BufferData(gl::ARRAY_BUFFER,
(tilemap_chunk_vertices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
tilemap_chunk_vertices.as_ptr() as *const GLvoid,
gl::STATIC_DRAW);
// Specify the layout of the vertex data
let vertex_attr = self.shader.get_attrib("my_vertex");
gl::EnableVertexAttribArray(vertex_attr as GLuint);
gl::VertexAttribPointer(vertex_attr as GLuint, 2, gl::FLOAT,
gl::FALSE as GLboolean, 0, ptr::null());
//vertex indices
gl::GenBuffers(1, &mut self.vbo_indices);
gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, self.vbo_indices);
gl::BufferData(gl::ELEMENT_ARRAY_BUFFER, (tilemap_chunk_indices.len() * mem::size_of::<GLuint>()) as GLsizeiptr,
tilemap_chunk_indices.as_ptr() as *const GLvoid, gl::STATIC_DRAW);
//bind uniform
//disable all?:
//glBindVertexArray(0);
//glDisableVertexAttribArray
//gl::BindBuffer(*, 0) ? for booth?
}
}
/*
fn set_program_variable_vbo(&self, name: &str)
{
//in
}
*/
//move to shader?
fn set_program_uniform_mat4(&self, name: &str, m: &Mat4<f32>)
|
}
impl engine::Drawable for TilemapChunk
{
fn draw(&self, rc: &engine::RenderContext)
{
//use shader
self.shader.use_program();
let mut model : Mat4<f32> = na::zero();
math::set_identity(&mut model);
//set uniform
//let mvp = /*rc.projm **/ rc.view;
let mvp = rc.projm * rc.view * model;
self.set_program_uniform_mat4("mvp", &mvp);
//bind vao
gl::BindVertexArray(self.vao);
//render
//gl::DrawArrays(gl::TRIANGLES, 0, self.indices_count as i32);
//with indices DrawElements must be used
unsafe {
gl::DrawElements(gl::TRIANGLE_STRIP, self.indices_count as i32, gl::UNSIGNED_INT, ptr::null());
}
//GL_TRIANGLE_STRIP ?
//disable all
}
}
| {
//self.shader
let id = self.shader.get_uniform(name);
unsafe {
gl::UniformMatrix4fv(id, 1, gl::FALSE as u8, mem::transmute(m));
}
} | identifier_body |
tilemap.rs |
extern crate gl;
extern crate nalgebra;
use gl::types::*;
use nalgebra::na::{Mat4};
use nalgebra::na;
use std::mem;
use std::ptr;
use super::engine;
use super::shader;
use super::math;
//static CHUNK_SIZE : u8 = 10;
pub struct |
{
shader :shader::ShaderProgram,
vao : u32,
vbo_vertices : u32,
vbo_indices: u32,
vbo_tileid : u32,
indices_count : u32
//save model matrix
//hold ref/owned to TilemapChunkData logical part?
// tile_texture_atlas
// tile_texture_atlas_normal? <- normal map for tiles?
}
impl TilemapChunk
{
pub fn new() -> TilemapChunk
{
TilemapChunk {
shader: shader::ShaderProgram::new(),
vao: 0,
vbo_vertices: 0,
vbo_indices: 0,
indices_count: 0,
vbo_tileid: 0, //vbo for uv(texture) coordinates?
}
}
// tile_count_x: how many tiles on the x axis
// tile_count_y: how many tiles on the y axis
pub fn setup(&mut self, tile_count_x: u32, tile_count_y: u32)
{
//create dummy tile layout
let mut tilemap_chunk_vertices : Vec<GLfloat> = Vec::new();
let mut tilemap_chunk_indices : Vec<GLuint> = Vec::new();
//create the grid vertices
//create tile plane vertices
for i in range(0u32, (tile_count_x+1)*(tile_count_y+1))
{
let x = i % (tile_count_x+1); //first this counts up (column)
let y = i / (tile_count_x+1); //then this counts up (row)
tilemap_chunk_vertices.push(0.0+x as f32);
tilemap_chunk_vertices.push(0.0+y as f32);
//println!("vertex[{}]: {}, {}", i, x, y);
//calculate indices for the triangles
//indices are related to vertex indices not the vector index
//where each vertex has 2 entries
if x < tile_count_x
&& y < tile_count_y
{
let index_of = |x :u32, y:u32| x + (y * (tile_count_x+1));
//requires 2 triangles per tile (quad)
tilemap_chunk_indices.push(i); //index of (x,y)
tilemap_chunk_indices.push(index_of(x+1,y));
tilemap_chunk_indices.push(index_of(x, y+1));
//println!("\ttriangle_one: {}", tilemap_chunk_indices.slice_from(tilemap_chunk_indices.len()-3));
tilemap_chunk_indices.push(index_of(x, y+1));
tilemap_chunk_indices.push(index_of(x+1,y));
tilemap_chunk_indices.push(index_of(x+1, y+1));
//println!("\ttriangle_two: {}", tilemap_chunk_indices.slice_from(tilemap_chunk_indices.len()-3));
}
}
self.indices_count = tilemap_chunk_indices.len() as u32;
//println!("tilemap::setup() Count of vertices: {}", tilemap_chunk_vertices.len()/2); //x,y so /2
//println!("tilemap::setup() Count of indices: {}", self.indices_count);
//println!("tilemap::setup() vertices: {}", tilemap_chunk_vertices);
//TODO shader config elsewhere?
self.shader.add_shader_file("./data/client/shader/tilemap.vs.glsl", gl::VERTEX_SHADER);
self.shader.add_shader_file("./data/client/shader/tilemap.fs.glsl", gl::FRAGMENT_SHADER);
self.shader.set_fragment_name("fragColor"); //required before linking
self.shader.link_program();
self.shader.use_program();
unsafe
{
// Create Vertex Array Object
gl::GenVertexArrays(1, &mut self.vao);
gl::BindVertexArray(self.vao);
// Create a Vertex Buffer Object and copy the vertex data to it
gl::GenBuffers(1, &mut self.vbo_vertices);
gl::BindBuffer(gl::ARRAY_BUFFER, self.vbo_vertices);
gl::BufferData(gl::ARRAY_BUFFER,
(tilemap_chunk_vertices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
tilemap_chunk_vertices.as_ptr() as *const GLvoid,
gl::STATIC_DRAW);
// Specify the layout of the vertex data
let vertex_attr = self.shader.get_attrib("my_vertex");
gl::EnableVertexAttribArray(vertex_attr as GLuint);
gl::VertexAttribPointer(vertex_attr as GLuint, 2, gl::FLOAT,
gl::FALSE as GLboolean, 0, ptr::null());
//vertex indices
gl::GenBuffers(1, &mut self.vbo_indices);
gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, self.vbo_indices);
gl::BufferData(gl::ELEMENT_ARRAY_BUFFER, (tilemap_chunk_indices.len() * mem::size_of::<GLuint>()) as GLsizeiptr,
tilemap_chunk_indices.as_ptr() as *const GLvoid, gl::STATIC_DRAW);
//bind uniform
//disable all?:
//glBindVertexArray(0);
//glDisableVertexAttribArray
//gl::BindBuffer(*, 0) ? for booth?
}
}
/*
fn set_program_variable_vbo(&self, name: &str)
{
//in
}
*/
//move to shader?
fn set_program_uniform_mat4(&self, name: &str, m: &Mat4<f32>)
{
//self.shader
let id = self.shader.get_uniform(name);
unsafe {
gl::UniformMatrix4fv(id, 1, gl::FALSE as u8, mem::transmute(m));
}
}
}
impl engine::Drawable for TilemapChunk
{
fn draw(&self, rc: &engine::RenderContext)
{
//use shader
self.shader.use_program();
let mut model : Mat4<f32> = na::zero();
math::set_identity(&mut model);
//set uniform
//let mvp = /*rc.projm **/ rc.view;
let mvp = rc.projm * rc.view * model;
self.set_program_uniform_mat4("mvp", &mvp);
//bind vao
gl::BindVertexArray(self.vao);
//render
//gl::DrawArrays(gl::TRIANGLES, 0, self.indices_count as i32);
//with indices DrawElements must be used
unsafe {
gl::DrawElements(gl::TRIANGLE_STRIP, self.indices_count as i32, gl::UNSIGNED_INT, ptr::null());
}
//GL_TRIANGLE_STRIP ?
//disable all
}
}
| TilemapChunk | identifier_name |
key-is-lower-than-zero.js | // Copyright (C) 2016 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
esid: sec-integer-indexed-exotic-objects-hasproperty-p
description: Return false if P's value is < 0
info: |
9.4.5.2 [[HasProperty]](P)
...
3. If Type(P) is String, then
a. Let numericIndex be ! CanonicalNumericIndexString(P).
b. If numericIndex is not undefined, then
...
iii. If ! IsValidIntegerIndex(O, numericIndex) is false, return false.
...
includes: [testBigIntTypedArray.js]
features: [align-detached-buffer-semantics-with-web-reality, BigInt, Reflect, TypedArray]
---*/
| // Prevents false positives using OrdinaryHasProperty
TypedArray.prototype[-1] = "test262";
testWithBigIntTypedArrayConstructors(function(TA) {
var sample = new TA(1);
assert.sameValue(Reflect.has(sample, "-1"), false, 'Reflect.has(sample, "-1") must return false');
}); | random_line_split | |
actions.js | /**
* This file specifies any system Action permission keys that are used by the
* apps in this Module.
* | * Action Keys are assigned to a user in the system by the Roles & Permission
* system. An Action Key is a unique string usually specified in the following
* format: "[application].[subapp].[verb]" which represents permission to
* perform [verb] for the [subapp] portion of the [application].
*
* [verbs] can be anything, but CRUD terms are preferred [ create, read, update, destroy]
*
* eg
* "adcore.permissions.admin" : does the user have permission to administrate
* permissions on the system?
*
* or perhaps you want more fine grain control:
* "adcore.permissions.user.create" : can user add permissions to a user?
* "adcore.permissions.user.destroy" : can user remove permissions from a user
* "adcore.permissions.roles.create" : can a user create roles in the system
* "adcore.permissions.roles.destroy" : can user remove a role?
* ...
*
*
*/
module.exports = {
language_code: "en",
actions: [
{
action_key: "opsportal.view",
action_description: "Allows the user to access the opsportal."
},
{
action_key: "opsportal.rbac.view",
action_description:
"Allows the user to access the Roles And Permissions Tool."
},
{
action_key: "opsportal.opnavedit.view",
action_description:
"Allows the user to edit the Ops Portal Navigation."
}
]
}; | random_line_split | |
param_tcp_rxbufsize_8k.py | from trex_astf_lib.api import *
# IPV6 tunable example
#
# ipv6.src_msb
# ipv6.dst_msb
# ipv6.enable
#
class Prof1():
|
def register():
return Prof1()
| def __init__(self):
pass
def get_profile(self, **kwargs):
# ip generator
ip_gen_c = ASTFIPGenDist(ip_range=["16.0.0.0", "16.0.0.255"], distribution="seq")
ip_gen_s = ASTFIPGenDist(ip_range=["48.0.0.0", "48.0.255.255"], distribution="seq")
ip_gen = ASTFIPGen(glob=ASTFIPGenGlobal(ip_offset="1.0.0.0"),
dist_client=ip_gen_c,
dist_server=ip_gen_s)
c_glob_info = ASTFGlobalInfo()
c_glob_info.tcp.rxbufsize = 8*1024
c_glob_info.tcp.txbufsize = 8*1024
s_glob_info = ASTFGlobalInfo()
s_glob_info.tcp.rxbufsize = 8*1024
s_glob_info.tcp.txbufsize = 8*1024
return ASTFProfile(default_ip_gen=ip_gen,
# Defaults affects all files
default_c_glob_info=c_glob_info,
default_s_glob_info=s_glob_info,
cap_list=[
ASTFCapInfo(file="../avl/delay_10_http_browsing_0.pcap", cps=1)
]
) | identifier_body |
param_tcp_rxbufsize_8k.py | from trex_astf_lib.api import *
# IPV6 tunable example
#
# ipv6.src_msb
# ipv6.dst_msb
# ipv6.enable
#
class Prof1():
def __init__(self):
pass
def get_profile(self, **kwargs):
# ip generator
ip_gen_c = ASTFIPGenDist(ip_range=["16.0.0.0", "16.0.0.255"], distribution="seq")
ip_gen_s = ASTFIPGenDist(ip_range=["48.0.0.0", "48.0.255.255"], distribution="seq")
ip_gen = ASTFIPGen(glob=ASTFIPGenGlobal(ip_offset="1.0.0.0"),
dist_client=ip_gen_c,
dist_server=ip_gen_s)
c_glob_info = ASTFGlobalInfo()
c_glob_info.tcp.rxbufsize = 8*1024
c_glob_info.tcp.txbufsize = 8*1024
s_glob_info = ASTFGlobalInfo()
s_glob_info.tcp.rxbufsize = 8*1024
s_glob_info.tcp.txbufsize = 8*1024
return ASTFProfile(default_ip_gen=ip_gen,
# Defaults affects all files
default_c_glob_info=c_glob_info,
default_s_glob_info=s_glob_info,
cap_list=[
ASTFCapInfo(file="../avl/delay_10_http_browsing_0.pcap", cps=1)
]
)
def | ():
return Prof1()
| register | identifier_name |
param_tcp_rxbufsize_8k.py | from trex_astf_lib.api import *
# IPV6 tunable example
#
# ipv6.src_msb
# ipv6.dst_msb
# ipv6.enable
#
class Prof1():
def __init__(self):
pass
def get_profile(self, **kwargs):
# ip generator
ip_gen_c = ASTFIPGenDist(ip_range=["16.0.0.0", "16.0.0.255"], distribution="seq")
ip_gen_s = ASTFIPGenDist(ip_range=["48.0.0.0", "48.0.255.255"], distribution="seq")
ip_gen = ASTFIPGen(glob=ASTFIPGenGlobal(ip_offset="1.0.0.0"),
dist_client=ip_gen_c,
dist_server=ip_gen_s)
c_glob_info = ASTFGlobalInfo()
c_glob_info.tcp.rxbufsize = 8*1024
c_glob_info.tcp.txbufsize = 8*1024
s_glob_info = ASTFGlobalInfo()
s_glob_info.tcp.rxbufsize = 8*1024
s_glob_info.tcp.txbufsize = 8*1024
return ASTFProfile(default_ip_gen=ip_gen,
# Defaults affects all files
default_c_glob_info=c_glob_info,
default_s_glob_info=s_glob_info,
cap_list=[
ASTFCapInfo(file="../avl/delay_10_http_browsing_0.pcap", cps=1)
]
)
| def register():
return Prof1() | random_line_split | |
projects.py | # -*- coding: utf-8 -*-
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#import logging
from astakos.im.models import Resource
from synnefo_admin.admin.resources.projects.utils import get_project_quota_category
from .common import AdminTestCase
class TestAdminProjects(AdminTestCase):
"""Test suite for project-related tests."""
def test_quota(self):
| """Test if project quota are measured properly."""
# Get the reported description of the resource.
resource = Resource.objects.get(name=u"σέρβις1.ρίσορς11")
desc = resource.report_desc
# Get the member and project quota.
member_quota = get_project_quota_category(self.project, "member")
project_quota = get_project_quota_category(self.project, "limit")
# Compare them to the ones in the application.
self.assertEqual(member_quota, [(desc, '512')])
self.assertEqual(project_quota, [(desc, '1024')])
| identifier_body | |
projects.py | # -*- coding: utf-8 -*-
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#import logging
from astakos.im.models import Resource
from synnefo_admin.admin.resources.projects.utils import get_project_quota_category
from .common import AdminTestCase
class TestAdminProjects(AdminTestCase):
"""Test suite for project-related tests."""
def | (self):
"""Test if project quota are measured properly."""
# Get the reported description of the resource.
resource = Resource.objects.get(name=u"σέρβις1.ρίσορς11")
desc = resource.report_desc
# Get the member and project quota.
member_quota = get_project_quota_category(self.project, "member")
project_quota = get_project_quota_category(self.project, "limit")
# Compare them to the ones in the application.
self.assertEqual(member_quota, [(desc, '512')])
self.assertEqual(project_quota, [(desc, '1024')])
| test_quota | identifier_name |
projects.py | # -*- coding: utf-8 -*-
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#import logging
from astakos.im.models import Resource
from synnefo_admin.admin.resources.projects.utils import get_project_quota_category
from .common import AdminTestCase
class TestAdminProjects(AdminTestCase):
"""Test suite for project-related tests."""
def test_quota(self):
"""Test if project quota are measured properly."""
# Get the reported description of the resource.
resource = Resource.objects.get(name=u"σέρβις1.ρίσορς11")
desc = resource.report_desc
# Get the member and project quota.
member_quota = get_project_quota_category(self.project, "member")
project_quota = get_project_quota_category(self.project, "limit")
# Compare them to the ones in the application.
self.assertEqual(member_quota, [(desc, '512')])
self.assertEqual(project_quota, [(desc, '1024')]) | # (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of | random_line_split |
test_cc2_tensor.py | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
from nose.plugins.attrib import attr
from nose.tools import nottest
import numpy as np
from neon.util.testing import assert_tensor_equal
@attr('cuda')
class TestGPUTensor(object):
def setup(self):
from neon.backends.cc2 import GPUTensor
self.gpt = GPUTensor
def test_empty_creation(self):
tns = self.gpt([])
expected_shape = (0, )
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_1d_creation(self):
tns = self.gpt([1, 2, 3, 4])
expected_shape = (4, )
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_2d_creation(self):
tns = self.gpt([[1, 2], [3, 4]])
expected_shape = (2, 2)
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_2d_ndarray_creation(self):
tns = self.gpt(np.array([[1.5, 2.5], [3.3, 9.2],
[0.111111, 5]]))
assert tns.shape == (3, 2)
@nottest # TODO: add >2 dimension support to cudanet
def test_higher_dim_creation(self):
shapes = ((1, 1, 1), (1, 2, 3, 4), (1, 2, 3, 4, 5, 6, 7))
for shape in shapes:
tns = self.gpt(np.empty(shape))
assert tns.shape == shape
def test_str(self):
tns = self.gpt([[1, 2], [3, 4]])
assert str(tns) == "[[ 1. 2.]\n [ 3. 4.]]"
def test_scalar_slicing(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns[1, 0]
assert res.shape == (1, 1)
assert_tensor_equal(res, self.gpt([[3]]))
def test_range_slicing(self):
|
@nottest # TODO: add scalar assignment to self.gpt class
def test_scalar_slice_assignment(self):
tns = self.gpt([[1, 2], [3, 4]])
tns[1, 0] = 9
assert_tensor_equal(tns, self.gpt([[1, 2], [9, 4]]))
def test_asnumpyarray(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns.asnumpyarray()
assert isinstance(res, np.ndarray)
assert_tensor_equal(res, np.array([[1, 2], [3, 4]]))
@nottest # TODO: fix this for self.gpt
def test_transpose(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns.transpose()
assert_tensor_equal(res, self.gpt([[1, 3], [2, 4]]))
def test_fill(self):
tns = self.gpt([[1, 2], [3, 4]])
tns.fill(-9.5)
assert_tensor_equal(tns, self.gpt([[-9.5, -9.5], [-9.5, -9.5]]))
| tns = self.gpt([[1, 2], [3, 4]])
res = tns[0:2, 0]
assert res.shape == (2, 1)
assert_tensor_equal(res, self.gpt([1, 3])) | identifier_body |
test_cc2_tensor.py | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
from nose.plugins.attrib import attr
from nose.tools import nottest
import numpy as np
from neon.util.testing import assert_tensor_equal
@attr('cuda')
class TestGPUTensor(object):
def setup(self):
from neon.backends.cc2 import GPUTensor
self.gpt = GPUTensor
def test_empty_creation(self):
tns = self.gpt([])
expected_shape = (0, )
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_1d_creation(self):
tns = self.gpt([1, 2, 3, 4])
expected_shape = (4, )
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_2d_creation(self):
tns = self.gpt([[1, 2], [3, 4]])
expected_shape = (2, 2)
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_2d_ndarray_creation(self):
tns = self.gpt(np.array([[1.5, 2.5], [3.3, 9.2],
[0.111111, 5]]))
assert tns.shape == (3, 2)
@nottest # TODO: add >2 dimension support to cudanet
def | (self):
shapes = ((1, 1, 1), (1, 2, 3, 4), (1, 2, 3, 4, 5, 6, 7))
for shape in shapes:
tns = self.gpt(np.empty(shape))
assert tns.shape == shape
def test_str(self):
tns = self.gpt([[1, 2], [3, 4]])
assert str(tns) == "[[ 1. 2.]\n [ 3. 4.]]"
def test_scalar_slicing(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns[1, 0]
assert res.shape == (1, 1)
assert_tensor_equal(res, self.gpt([[3]]))
def test_range_slicing(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns[0:2, 0]
assert res.shape == (2, 1)
assert_tensor_equal(res, self.gpt([1, 3]))
@nottest # TODO: add scalar assignment to self.gpt class
def test_scalar_slice_assignment(self):
tns = self.gpt([[1, 2], [3, 4]])
tns[1, 0] = 9
assert_tensor_equal(tns, self.gpt([[1, 2], [9, 4]]))
def test_asnumpyarray(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns.asnumpyarray()
assert isinstance(res, np.ndarray)
assert_tensor_equal(res, np.array([[1, 2], [3, 4]]))
@nottest # TODO: fix this for self.gpt
def test_transpose(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns.transpose()
assert_tensor_equal(res, self.gpt([[1, 3], [2, 4]]))
def test_fill(self):
tns = self.gpt([[1, 2], [3, 4]])
tns.fill(-9.5)
assert_tensor_equal(tns, self.gpt([[-9.5, -9.5], [-9.5, -9.5]]))
| test_higher_dim_creation | identifier_name |
test_cc2_tensor.py | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
from nose.plugins.attrib import attr
from nose.tools import nottest
import numpy as np
from neon.util.testing import assert_tensor_equal
@attr('cuda')
class TestGPUTensor(object):
def setup(self):
from neon.backends.cc2 import GPUTensor
self.gpt = GPUTensor
def test_empty_creation(self):
tns = self.gpt([])
expected_shape = (0, )
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_1d_creation(self):
tns = self.gpt([1, 2, 3, 4])
expected_shape = (4, )
while len(expected_shape) < tns._min_dims:
|
assert tns.shape == expected_shape
def test_2d_creation(self):
tns = self.gpt([[1, 2], [3, 4]])
expected_shape = (2, 2)
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_2d_ndarray_creation(self):
tns = self.gpt(np.array([[1.5, 2.5], [3.3, 9.2],
[0.111111, 5]]))
assert tns.shape == (3, 2)
@nottest # TODO: add >2 dimension support to cudanet
def test_higher_dim_creation(self):
shapes = ((1, 1, 1), (1, 2, 3, 4), (1, 2, 3, 4, 5, 6, 7))
for shape in shapes:
tns = self.gpt(np.empty(shape))
assert tns.shape == shape
def test_str(self):
tns = self.gpt([[1, 2], [3, 4]])
assert str(tns) == "[[ 1. 2.]\n [ 3. 4.]]"
def test_scalar_slicing(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns[1, 0]
assert res.shape == (1, 1)
assert_tensor_equal(res, self.gpt([[3]]))
def test_range_slicing(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns[0:2, 0]
assert res.shape == (2, 1)
assert_tensor_equal(res, self.gpt([1, 3]))
@nottest # TODO: add scalar assignment to self.gpt class
def test_scalar_slice_assignment(self):
tns = self.gpt([[1, 2], [3, 4]])
tns[1, 0] = 9
assert_tensor_equal(tns, self.gpt([[1, 2], [9, 4]]))
def test_asnumpyarray(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns.asnumpyarray()
assert isinstance(res, np.ndarray)
assert_tensor_equal(res, np.array([[1, 2], [3, 4]]))
@nottest # TODO: fix this for self.gpt
def test_transpose(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns.transpose()
assert_tensor_equal(res, self.gpt([[1, 3], [2, 4]]))
def test_fill(self):
tns = self.gpt([[1, 2], [3, 4]])
tns.fill(-9.5)
assert_tensor_equal(tns, self.gpt([[-9.5, -9.5], [-9.5, -9.5]]))
| expected_shape += (1, ) | conditional_block |
test_cc2_tensor.py | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
from nose.plugins.attrib import attr
from nose.tools import nottest
import numpy as np
from neon.util.testing import assert_tensor_equal
@attr('cuda')
class TestGPUTensor(object):
def setup(self):
from neon.backends.cc2 import GPUTensor
self.gpt = GPUTensor
def test_empty_creation(self):
tns = self.gpt([])
expected_shape = (0, )
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_1d_creation(self):
tns = self.gpt([1, 2, 3, 4])
expected_shape = (4, )
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_2d_creation(self):
tns = self.gpt([[1, 2], [3, 4]])
expected_shape = (2, 2)
while len(expected_shape) < tns._min_dims:
expected_shape += (1, )
assert tns.shape == expected_shape
def test_2d_ndarray_creation(self):
tns = self.gpt(np.array([[1.5, 2.5], [3.3, 9.2],
[0.111111, 5]]))
assert tns.shape == (3, 2)
@nottest # TODO: add >2 dimension support to cudanet
def test_higher_dim_creation(self):
shapes = ((1, 1, 1), (1, 2, 3, 4), (1, 2, 3, 4, 5, 6, 7))
for shape in shapes:
tns = self.gpt(np.empty(shape))
assert tns.shape == shape
def test_str(self):
tns = self.gpt([[1, 2], [3, 4]])
assert str(tns) == "[[ 1. 2.]\n [ 3. 4.]]"
def test_scalar_slicing(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns[1, 0]
assert res.shape == (1, 1)
assert_tensor_equal(res, self.gpt([[3]]))
def test_range_slicing(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns[0:2, 0]
assert res.shape == (2, 1)
assert_tensor_equal(res, self.gpt([1, 3]))
@nottest # TODO: add scalar assignment to self.gpt class
def test_scalar_slice_assignment(self):
tns = self.gpt([[1, 2], [3, 4]]) | def test_asnumpyarray(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns.asnumpyarray()
assert isinstance(res, np.ndarray)
assert_tensor_equal(res, np.array([[1, 2], [3, 4]]))
@nottest # TODO: fix this for self.gpt
def test_transpose(self):
tns = self.gpt([[1, 2], [3, 4]])
res = tns.transpose()
assert_tensor_equal(res, self.gpt([[1, 3], [2, 4]]))
def test_fill(self):
tns = self.gpt([[1, 2], [3, 4]])
tns.fill(-9.5)
assert_tensor_equal(tns, self.gpt([[-9.5, -9.5], [-9.5, -9.5]])) | tns[1, 0] = 9
assert_tensor_equal(tns, self.gpt([[1, 2], [9, 4]]))
| random_line_split |
index.js | import React from 'react';
import { render } from 'react-dom';
import App from './components/app.js';
import Login from './components/Login.js';
import About from './components/About.js';
import MyApp from './components/MyApp.js';
import MyServer from './components/MyServer.js';
import AllApps from './components/AllApplications.js';
import AllServers from './components/AllServers.js';
import { Provider } from 'react-redux';
import configureStore from './ipsumStore.js';
import { browserHistory, Router, Route, IndexRoute } from 'react-router';
import { syncHistoryWithStore } from 'react-router-redux';
import MainPage from './components/MainPage.js';
import actions from './actions/ipsumActions.js';
import auth from './util/authHelpers.js';
import tokens from './components/tokens.js';
import addLoadBalancer from './components/AddLoadBalancer.js';
import resthandler from './util/restHelpers.js';
const store = configureStore();
const history = syncHistoryWithStore(browserHistory, store);
const logout = () => {
console.log('LO hit');
auth.logout();
store.dispatch(actions.USER_RESET());
window.location.href = '/login';
};
const restoreSession = () => {
return new Promise((resolve, reject) => {
resthandler.get('/user/sessionreload', (err, res) => {
if (err) {
console.log(err);
} else |
});
});
};
store.subscribe(() => {
const state = store.getState();
localStorage.setItem('state', JSON.stringify(state));
});
restoreSession()
.then(() => {
render(
<Provider store={store}>
<Router history={history}>
<Route path="/login" component={Login} />
<Route path="/about" component={About} />
<Route path="/logout" onEnter={logout} />
<Route path="/auth/github/callback" />
<Route path="/" component={App} onEnter={auth.requireAuth} >
<IndexRoute component={MainPage} onEnter={auth.requireAuth} />
<Route path="/allApps" component={AllApps} onEnter={auth.requireAuth} />
<Route path="/allServers" component={AllServers} onEnter={auth.requireAuth} />
<Route path="/myServer" component={MyServer} onEnter={auth.requireAuth} />
<Route path="/myApp" component={MyApp} onEnter={auth.requireAuth} />
<Route path="/tokens" component={tokens} onEnter={auth.requireAuth} />
<Route path="/loadBalancer" component={addLoadBalancer} onEnter={auth.requireAuth} />
</Route>
</Router>
</Provider>,
document.getElementById('app')
);
});
| {
resolve(res.text);
} | conditional_block |
index.js | import React from 'react';
import { render } from 'react-dom';
import App from './components/app.js';
import Login from './components/Login.js';
import About from './components/About.js';
import MyApp from './components/MyApp.js';
import MyServer from './components/MyServer.js';
import AllApps from './components/AllApplications.js';
import AllServers from './components/AllServers.js';
import { Provider } from 'react-redux';
import configureStore from './ipsumStore.js';
import { browserHistory, Router, Route, IndexRoute } from 'react-router';
import { syncHistoryWithStore } from 'react-router-redux';
import MainPage from './components/MainPage.js';
import actions from './actions/ipsumActions.js';
import auth from './util/authHelpers.js';
import tokens from './components/tokens.js';
import addLoadBalancer from './components/AddLoadBalancer.js';
import resthandler from './util/restHelpers.js';
const store = configureStore();
const history = syncHistoryWithStore(browserHistory, store); | console.log('LO hit');
auth.logout();
store.dispatch(actions.USER_RESET());
window.location.href = '/login';
};
const restoreSession = () => {
return new Promise((resolve, reject) => {
resthandler.get('/user/sessionreload', (err, res) => {
if (err) {
console.log(err);
} else {
resolve(res.text);
}
});
});
};
store.subscribe(() => {
const state = store.getState();
localStorage.setItem('state', JSON.stringify(state));
});
restoreSession()
.then(() => {
render(
<Provider store={store}>
<Router history={history}>
<Route path="/login" component={Login} />
<Route path="/about" component={About} />
<Route path="/logout" onEnter={logout} />
<Route path="/auth/github/callback" />
<Route path="/" component={App} onEnter={auth.requireAuth} >
<IndexRoute component={MainPage} onEnter={auth.requireAuth} />
<Route path="/allApps" component={AllApps} onEnter={auth.requireAuth} />
<Route path="/allServers" component={AllServers} onEnter={auth.requireAuth} />
<Route path="/myServer" component={MyServer} onEnter={auth.requireAuth} />
<Route path="/myApp" component={MyApp} onEnter={auth.requireAuth} />
<Route path="/tokens" component={tokens} onEnter={auth.requireAuth} />
<Route path="/loadBalancer" component={addLoadBalancer} onEnter={auth.requireAuth} />
</Route>
</Router>
</Provider>,
document.getElementById('app')
);
}); |
const logout = () => { | random_line_split |
front.py | ##########
import web
import hmac
from time import strftime
from datetime import datetime
from hashlib import sha256
from lib.utils import db
from lib.utils import render
from lib.utils import etherpad
from lib.validate import valid_user, valid_pw, make_salt
##########
class FrontPage:
def GET(self):
return render('front.html')
def POST(self):
uid = web.input().signup_uid
pw = web.input().signup_pw
if valid_user(uid) and valid_pw(pw):
# Makes random 16-character alphabet
# Stored in the db
salt = make_salt()
# Specifies that hmac uses sha256 instead of md5
# hmac complicates the hash
hashed_pw = hmac.new(salt, pw, sha256).hexdigest()
db.insert('users', username = uid,
pw = hashed_pw, salt = salt,
joined = datetime.now())
raise web.seeother('/home')
else:
| raise web.seeother('/') | conditional_block | |
front.py | ##########
import web
import hmac
from time import strftime
from datetime import datetime
from hashlib import sha256
from lib.utils import db
from lib.utils import render
from lib.utils import etherpad
from lib.validate import valid_user, valid_pw, make_salt
##########
class FrontPage:
def GET(self):
|
def POST(self):
uid = web.input().signup_uid
pw = web.input().signup_pw
if valid_user(uid) and valid_pw(pw):
# Makes random 16-character alphabet
# Stored in the db
salt = make_salt()
# Specifies that hmac uses sha256 instead of md5
# hmac complicates the hash
hashed_pw = hmac.new(salt, pw, sha256).hexdigest()
db.insert('users', username = uid,
pw = hashed_pw, salt = salt,
joined = datetime.now())
raise web.seeother('/home')
else:
raise web.seeother('/')
| return render('front.html') | identifier_body |
front.py | ##########
import web
import hmac
from time import strftime
from datetime import datetime
from hashlib import sha256
from lib.utils import db
from lib.utils import render
from lib.utils import etherpad
from lib.validate import valid_user, valid_pw, make_salt
##########
class FrontPage:
def | (self):
return render('front.html')
def POST(self):
uid = web.input().signup_uid
pw = web.input().signup_pw
if valid_user(uid) and valid_pw(pw):
# Makes random 16-character alphabet
# Stored in the db
salt = make_salt()
# Specifies that hmac uses sha256 instead of md5
# hmac complicates the hash
hashed_pw = hmac.new(salt, pw, sha256).hexdigest()
db.insert('users', username = uid,
pw = hashed_pw, salt = salt,
joined = datetime.now())
raise web.seeother('/home')
else:
raise web.seeother('/')
| GET | identifier_name |
front.py | ##########
import web
import hmac
from time import strftime
from datetime import datetime
from hashlib import sha256
from lib.utils import db
from lib.utils import render
from lib.utils import etherpad
from lib.validate import valid_user, valid_pw, make_salt
##########
class FrontPage:
def GET(self):
return render('front.html')
def POST(self):
uid = web.input().signup_uid
pw = web.input().signup_pw
if valid_user(uid) and valid_pw(pw):
# Makes random 16-character alphabet
# Stored in the db
salt = make_salt() | # Specifies that hmac uses sha256 instead of md5
# hmac complicates the hash
hashed_pw = hmac.new(salt, pw, sha256).hexdigest()
db.insert('users', username = uid,
pw = hashed_pw, salt = salt,
joined = datetime.now())
raise web.seeother('/home')
else:
raise web.seeother('/') | random_line_split | |
user-security-question.component.ts | import Swal from 'sweetalert2';
import {Component} from '@angular/core';
import {UserService} from "./user.service";
import {UserSecurityModel, UserModel} from "./user.model";
import {Validators, FormBuilder, FormGroup} from "@angular/forms";
import {QUESTION_LIST} from '../../../shared/configs/security-question.config';
import { Router } from '@angular/router';
import { Config } from '../../../shared/configs/general.config';
@Component({
selector: 'user-security',
templateUrl: './user-security-update.html'
})
export class UserSecurityUpdateComponent {
userId:string;
showCancel: boolean;
objUserSecurity:UserSecurityModel = new UserSecurityModel();
userSecurityForm:FormGroup;
isSubmitted:boolean = false;
questionlist:string[] = QUESTION_LIST;
constructor(private router: Router, private _objUserService:UserService, private _formBuilder:FormBuilder) {
if(router.routerState.snapshot.url.split('/').length>3){
this.userId = router.routerState.snapshot.url.split('/')[3];
this.showCancel = true;
}else{
let userInfo: UserModel = JSON.parse(Config.getUserInfoToken());
this.userId = userInfo._id;
this.showCancel = false;
}
this.userSecurityForm = this._formBuilder.group({
"securityQuestion": ['', Validators.required],
"securityAnswer": ['', Validators.required]
});
}
getSecurityQuestionDetail() {
this._objUserService.getUserDetail(this.userId)
.subscribe(res => console.log(res));
}
updateSecurity() {
this.isSubmitted = true;
this.objUserSecurity._id = this.userId;
if (this.userSecurityForm.valid) |
}
successStatusMessage(res:any) {
Swal("Success !", res.message, "success");
this.triggerCancelForm();
}
errorMessage(objResponse:any) {
Swal("Alert !", objResponse, "info");
}
triggerCancelForm() {
if(this.showCancel)
this.router.navigate(['/user-management']);
else
this.router.navigate(['/profile/security']);
}
}
| {
this._objUserService.updateSecurityQuestion(this.objUserSecurity)
.subscribe(res => this.successStatusMessage(res),
error => this.errorMessage);
} | conditional_block |
user-security-question.component.ts | import Swal from 'sweetalert2'; | import {Validators, FormBuilder, FormGroup} from "@angular/forms";
import {QUESTION_LIST} from '../../../shared/configs/security-question.config';
import { Router } from '@angular/router';
import { Config } from '../../../shared/configs/general.config';
@Component({
selector: 'user-security',
templateUrl: './user-security-update.html'
})
export class UserSecurityUpdateComponent {
userId:string;
showCancel: boolean;
objUserSecurity:UserSecurityModel = new UserSecurityModel();
userSecurityForm:FormGroup;
isSubmitted:boolean = false;
questionlist:string[] = QUESTION_LIST;
constructor(private router: Router, private _objUserService:UserService, private _formBuilder:FormBuilder) {
if(router.routerState.snapshot.url.split('/').length>3){
this.userId = router.routerState.snapshot.url.split('/')[3];
this.showCancel = true;
}else{
let userInfo: UserModel = JSON.parse(Config.getUserInfoToken());
this.userId = userInfo._id;
this.showCancel = false;
}
this.userSecurityForm = this._formBuilder.group({
"securityQuestion": ['', Validators.required],
"securityAnswer": ['', Validators.required]
});
}
getSecurityQuestionDetail() {
this._objUserService.getUserDetail(this.userId)
.subscribe(res => console.log(res));
}
updateSecurity() {
this.isSubmitted = true;
this.objUserSecurity._id = this.userId;
if (this.userSecurityForm.valid) {
this._objUserService.updateSecurityQuestion(this.objUserSecurity)
.subscribe(res => this.successStatusMessage(res),
error => this.errorMessage);
}
}
successStatusMessage(res:any) {
Swal("Success !", res.message, "success");
this.triggerCancelForm();
}
errorMessage(objResponse:any) {
Swal("Alert !", objResponse, "info");
}
triggerCancelForm() {
if(this.showCancel)
this.router.navigate(['/user-management']);
else
this.router.navigate(['/profile/security']);
}
} | import {Component} from '@angular/core';
import {UserService} from "./user.service";
import {UserSecurityModel, UserModel} from "./user.model"; | random_line_split |
user-security-question.component.ts | import Swal from 'sweetalert2';
import {Component} from '@angular/core';
import {UserService} from "./user.service";
import {UserSecurityModel, UserModel} from "./user.model";
import {Validators, FormBuilder, FormGroup} from "@angular/forms";
import {QUESTION_LIST} from '../../../shared/configs/security-question.config';
import { Router } from '@angular/router';
import { Config } from '../../../shared/configs/general.config';
@Component({
selector: 'user-security',
templateUrl: './user-security-update.html'
})
export class UserSecurityUpdateComponent {
userId:string;
showCancel: boolean;
objUserSecurity:UserSecurityModel = new UserSecurityModel();
userSecurityForm:FormGroup;
isSubmitted:boolean = false;
questionlist:string[] = QUESTION_LIST;
constructor(private router: Router, private _objUserService:UserService, private _formBuilder:FormBuilder) {
if(router.routerState.snapshot.url.split('/').length>3){
this.userId = router.routerState.snapshot.url.split('/')[3];
this.showCancel = true;
}else{
let userInfo: UserModel = JSON.parse(Config.getUserInfoToken());
this.userId = userInfo._id;
this.showCancel = false;
}
this.userSecurityForm = this._formBuilder.group({
"securityQuestion": ['', Validators.required],
"securityAnswer": ['', Validators.required]
});
}
getSecurityQuestionDetail() {
this._objUserService.getUserDetail(this.userId)
.subscribe(res => console.log(res));
}
updateSecurity() {
this.isSubmitted = true;
this.objUserSecurity._id = this.userId;
if (this.userSecurityForm.valid) {
this._objUserService.updateSecurityQuestion(this.objUserSecurity)
.subscribe(res => this.successStatusMessage(res),
error => this.errorMessage);
}
}
successStatusMessage(res:any) {
Swal("Success !", res.message, "success");
this.triggerCancelForm();
}
errorMessage(objResponse:any) {
Swal("Alert !", objResponse, "info");
}
triggerCancelForm() |
}
| {
if(this.showCancel)
this.router.navigate(['/user-management']);
else
this.router.navigate(['/profile/security']);
} | identifier_body |
user-security-question.component.ts | import Swal from 'sweetalert2';
import {Component} from '@angular/core';
import {UserService} from "./user.service";
import {UserSecurityModel, UserModel} from "./user.model";
import {Validators, FormBuilder, FormGroup} from "@angular/forms";
import {QUESTION_LIST} from '../../../shared/configs/security-question.config';
import { Router } from '@angular/router';
import { Config } from '../../../shared/configs/general.config';
@Component({
selector: 'user-security',
templateUrl: './user-security-update.html'
})
export class UserSecurityUpdateComponent {
userId:string;
showCancel: boolean;
objUserSecurity:UserSecurityModel = new UserSecurityModel();
userSecurityForm:FormGroup;
isSubmitted:boolean = false;
questionlist:string[] = QUESTION_LIST;
constructor(private router: Router, private _objUserService:UserService, private _formBuilder:FormBuilder) {
if(router.routerState.snapshot.url.split('/').length>3){
this.userId = router.routerState.snapshot.url.split('/')[3];
this.showCancel = true;
}else{
let userInfo: UserModel = JSON.parse(Config.getUserInfoToken());
this.userId = userInfo._id;
this.showCancel = false;
}
this.userSecurityForm = this._formBuilder.group({
"securityQuestion": ['', Validators.required],
"securityAnswer": ['', Validators.required]
});
}
getSecurityQuestionDetail() {
this._objUserService.getUserDetail(this.userId)
.subscribe(res => console.log(res));
}
updateSecurity() {
this.isSubmitted = true;
this.objUserSecurity._id = this.userId;
if (this.userSecurityForm.valid) {
this._objUserService.updateSecurityQuestion(this.objUserSecurity)
.subscribe(res => this.successStatusMessage(res),
error => this.errorMessage);
}
}
successStatusMessage(res:any) {
Swal("Success !", res.message, "success");
this.triggerCancelForm();
}
errorMessage(objResponse:any) {
Swal("Alert !", objResponse, "info");
}
| () {
if(this.showCancel)
this.router.navigate(['/user-management']);
else
this.router.navigate(['/profile/security']);
}
}
| triggerCancelForm | identifier_name |
stage.ts | 'use strict';
import { Class, Instance, isInstanceOf } from 'immutable-class';
export interface MarinParameters {
left?: number;
right?: number;
top?: number;
bottom?: number;
}
export interface StageValue {
x: number;
y: number;
width: number;
height: number;
}
// ToDo: make this a higher object
export class Stage { | x: rect.left,
y: rect.top,
width: rect.width,
height: rect.height
});
}
static fromSize(width: number, height: number): Stage {
return new Stage({
x: 0,
y: 0,
width,
height
});
}
public x: number;
public y: number;
public width: number;
public height: number;
constructor(parameters: StageValue) {
this.x = parameters.x;
this.y = parameters.y;
this.width = parameters.width;
this.height = parameters.height;
}
public valueOf(): StageValue {
return {
x: this.x,
y: this.y,
width: this.width,
height: this.height
};
}
private sizeOnlyValue(): StageValue {
return {
x: 0,
y: 0,
width: this.width,
height: this.height
};
}
public toString(): string {
return `[stage: ${this.width}x${this.height}}]`;
}
public getTransform(): string {
return `translate(${this.x},${this.y})`;
}
public within(param: MarinParameters): Stage {
var value = this.sizeOnlyValue();
var { left, right, top, bottom } = param;
if (left) {
value.x = left;
value.width -= left;
}
if (right) {
value.width -= right;
}
if (top) {
value.y = top;
value.height -= top;
}
if (bottom) {
value.height -= bottom;
}
return new Stage(value);
}
} | static fromClientRect(rect: ClientRect): Stage {
return new Stage({ | random_line_split |
stage.ts | 'use strict';
import { Class, Instance, isInstanceOf } from 'immutable-class';
export interface MarinParameters {
left?: number;
right?: number;
top?: number;
bottom?: number;
}
export interface StageValue {
x: number;
y: number;
width: number;
height: number;
}
// ToDo: make this a higher object
export class Stage {
static fromClientRect(rect: ClientRect): Stage {
return new Stage({
x: rect.left,
y: rect.top,
width: rect.width,
height: rect.height
});
}
static fromSize(width: number, height: number): Stage {
return new Stage({
x: 0,
y: 0,
width,
height
});
}
public x: number;
public y: number;
public width: number;
public height: number;
constructor(parameters: StageValue) {
this.x = parameters.x;
this.y = parameters.y;
this.width = parameters.width;
this.height = parameters.height;
}
public valueOf(): StageValue {
return {
x: this.x,
y: this.y,
width: this.width,
height: this.height
};
}
private sizeOnlyValue(): StageValue {
return {
x: 0,
y: 0,
width: this.width,
height: this.height
};
}
public toString(): string {
return `[stage: ${this.width}x${this.height}}]`;
}
public getTransform(): string {
return `translate(${this.x},${this.y})`;
}
public within(param: MarinParameters): Stage {
var value = this.sizeOnlyValue();
var { left, right, top, bottom } = param;
if (left) {
value.x = left;
value.width -= left;
}
if (right) {
value.width -= right;
}
if (top) |
if (bottom) {
value.height -= bottom;
}
return new Stage(value);
}
}
| {
value.y = top;
value.height -= top;
} | conditional_block |
stage.ts | 'use strict';
import { Class, Instance, isInstanceOf } from 'immutable-class';
export interface MarinParameters {
left?: number;
right?: number;
top?: number;
bottom?: number;
}
export interface StageValue {
x: number;
y: number;
width: number;
height: number;
}
// ToDo: make this a higher object
export class Stage {
static fromClientRect(rect: ClientRect): Stage {
return new Stage({
x: rect.left,
y: rect.top,
width: rect.width,
height: rect.height
});
}
static fromSize(width: number, height: number): Stage {
return new Stage({
x: 0,
y: 0,
width,
height
});
}
public x: number;
public y: number;
public width: number;
public height: number;
constructor(parameters: StageValue) {
this.x = parameters.x;
this.y = parameters.y;
this.width = parameters.width;
this.height = parameters.height;
}
public valueOf(): StageValue {
return {
x: this.x,
y: this.y,
width: this.width,
height: this.height
};
}
private sizeOnlyValue(): StageValue {
return {
x: 0,
y: 0,
width: this.width,
height: this.height
};
}
public toString(): string |
public getTransform(): string {
return `translate(${this.x},${this.y})`;
}
public within(param: MarinParameters): Stage {
var value = this.sizeOnlyValue();
var { left, right, top, bottom } = param;
if (left) {
value.x = left;
value.width -= left;
}
if (right) {
value.width -= right;
}
if (top) {
value.y = top;
value.height -= top;
}
if (bottom) {
value.height -= bottom;
}
return new Stage(value);
}
}
| {
return `[stage: ${this.width}x${this.height}}]`;
} | identifier_body |
stage.ts | 'use strict';
import { Class, Instance, isInstanceOf } from 'immutable-class';
export interface MarinParameters {
left?: number;
right?: number;
top?: number;
bottom?: number;
}
export interface StageValue {
x: number;
y: number;
width: number;
height: number;
}
// ToDo: make this a higher object
export class Stage {
static fromClientRect(rect: ClientRect): Stage {
return new Stage({
x: rect.left,
y: rect.top,
width: rect.width,
height: rect.height
});
}
static fromSize(width: number, height: number): Stage {
return new Stage({
x: 0,
y: 0,
width,
height
});
}
public x: number;
public y: number;
public width: number;
public height: number;
constructor(parameters: StageValue) {
this.x = parameters.x;
this.y = parameters.y;
this.width = parameters.width;
this.height = parameters.height;
}
public valueOf(): StageValue {
return {
x: this.x,
y: this.y,
width: this.width,
height: this.height
};
}
private sizeOnlyValue(): StageValue {
return {
x: 0,
y: 0,
width: this.width,
height: this.height
};
}
public toString(): string {
return `[stage: ${this.width}x${this.height}}]`;
}
public | (): string {
return `translate(${this.x},${this.y})`;
}
public within(param: MarinParameters): Stage {
var value = this.sizeOnlyValue();
var { left, right, top, bottom } = param;
if (left) {
value.x = left;
value.width -= left;
}
if (right) {
value.width -= right;
}
if (top) {
value.y = top;
value.height -= top;
}
if (bottom) {
value.height -= bottom;
}
return new Stage(value);
}
}
| getTransform | identifier_name |
ViewGenerator.ts | /*
* Copyright (C) 2020 Graylog, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the Server Side Public License, version 1,
* as published by MongoDB, Inc.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Server Side Public License for more details.
* | * <http://www.mongodb.com/licensing/server-side-public-license>.
*/
import View from './View';
import ViewStateGenerator from './ViewStateGenerator';
import type { ViewType } from './View';
import Search from '../search/Search';
import QueryGenerator from '../queries/QueryGenerator';
export default async (type: ViewType, streamId: string | undefined | null) => {
const query = QueryGenerator(streamId);
const search = Search.create().toBuilder().queries([query]).build();
const viewState = await ViewStateGenerator(type, streamId);
return View.create()
.toBuilder()
.type(type)
.state({ [query.id]: viewState })
.search(search)
.build();
}; | * You should have received a copy of the Server Side Public License
* along with this program. If not, see | random_line_split |
rightLogShift.js | 'use strict';
module.exports = function (math, config) {
var util = require('../../util/index'),
Matrix = math.type.Matrix,
Unit = require('../../type/Unit'),
collection = math.collection,
isBoolean = util['boolean'].isBoolean,
isInteger = util.number.isInteger,
isNumber = util.number.isNumber,
isCollection = collection.isCollection;
/**
* Bitwise right logical shift of value x by y number of bits, `x >>> y`.
* For matrices, the function is evaluated element wise.
* For units, the function is evaluated on the best prefix base.
*
* Syntax:
*
* math.rightLogShift(x, y)
*
* Examples:
*
* math.rightLogShift(4, 2); // returns Number 1
*
* math.rightLogShift([16, -32, 64], 4); // returns Array [1, 2, 3]
*
* See also:
*
* bitAnd, bitNot, bitOr, bitXor, leftShift, rightArithShift
*
* @param {Number | Boolean | Array | Matrix | null} x Value to be shifted
* @param {Number | Boolean | null} y Amount of shifts
* @return {Number | Array | Matrix} `x` zero-filled shifted right `y` times
*/
math.rightLogShift = function rightLogShift(x, y) {
if (arguments.length != 2) {
throw new math.error.ArgumentsError('rightLogShift', arguments.length, 2);
}
if (isNumber(x) && isNumber(y)) |
if (isCollection(x) && isNumber(y)) {
return collection.deepMap2(x, y, rightLogShift);
}
if (isBoolean(x) || x === null) {
return rightLogShift(+x, y);
}
if (isBoolean(y) || y === null) {
return rightLogShift(x, +y);
}
throw new math.error.UnsupportedTypeError('rightLogShift', math['typeof'](x), math['typeof'](y));
};
};
| {
if (!isInteger(x) || !isInteger(y)) {
throw new Error('Parameters in function rightLogShift must be integer numbers');
}
return x >>> y;
} | conditional_block |
rightLogShift.js | 'use strict';
module.exports = function (math, config) {
var util = require('../../util/index'),
Matrix = math.type.Matrix,
Unit = require('../../type/Unit'),
collection = math.collection,
isBoolean = util['boolean'].isBoolean,
isInteger = util.number.isInteger, | isCollection = collection.isCollection;
/**
* Bitwise right logical shift of value x by y number of bits, `x >>> y`.
* For matrices, the function is evaluated element wise.
* For units, the function is evaluated on the best prefix base.
*
* Syntax:
*
* math.rightLogShift(x, y)
*
* Examples:
*
* math.rightLogShift(4, 2); // returns Number 1
*
* math.rightLogShift([16, -32, 64], 4); // returns Array [1, 2, 3]
*
* See also:
*
* bitAnd, bitNot, bitOr, bitXor, leftShift, rightArithShift
*
* @param {Number | Boolean | Array | Matrix | null} x Value to be shifted
* @param {Number | Boolean | null} y Amount of shifts
* @return {Number | Array | Matrix} `x` zero-filled shifted right `y` times
*/
math.rightLogShift = function rightLogShift(x, y) {
if (arguments.length != 2) {
throw new math.error.ArgumentsError('rightLogShift', arguments.length, 2);
}
if (isNumber(x) && isNumber(y)) {
if (!isInteger(x) || !isInteger(y)) {
throw new Error('Parameters in function rightLogShift must be integer numbers');
}
return x >>> y;
}
if (isCollection(x) && isNumber(y)) {
return collection.deepMap2(x, y, rightLogShift);
}
if (isBoolean(x) || x === null) {
return rightLogShift(+x, y);
}
if (isBoolean(y) || y === null) {
return rightLogShift(x, +y);
}
throw new math.error.UnsupportedTypeError('rightLogShift', math['typeof'](x), math['typeof'](y));
};
}; | isNumber = util.number.isNumber, | random_line_split |
public_map.js | /**
* Necessary tasks for public map(vis) view
*
*/
$(function() {
$.extend( $.easing, {
easeInQuad: function (x, t, b, c, d) { | }
});
cdb.init(function() {
cdb.config.set(config);
if (cdb.config.isOrganizationUrl()) cdb.config.set('url_prefix', cdb.config.organizationUrl());
cdb.templates.namespace = 'cartodb/';
// No attributions and no links in this map (at least from cartodb)
cartodb.config.set({
cartodb_attributions: "",
cartodb_logo_link: ""
});
// Check if device is a mobile
var mobileDevice = /Android|webOS|iPad|iPhone|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent);
// Window view
var public_window = new cdb.open.PublicMapWindow({
el: window,
user_name: user_name,
owner_username: owner_username,
vis_id: vis_id,
vis_name: vis_name,
vizdata: vizdata,
config: config,
map_options: map_options,
isMobileDevice: mobileDevice,
belong_organization: belong_organization
});
});
}); | return c*(t/=d)*t + b; | random_line_split |
Gruntfile.js | /*
* grunt-assetic-dump
* https://github.com/adam187/grunt-assetic-dump
*
* Copyright (c) 2013 Adam Misiorny |
'use strict';
module.exports = function(grunt) {
// Project configuration.
grunt.initConfig({
jshint: {
all: [
'Gruntfile.js',
'tasks/*.js',
'<%= nodeunit.tests %>',
],
options: {
jshintrc: '.jshintrc',
},
},
// Before generating any new files, remove any previously-created files.
clean: {
tests: ['tmp'],
},
// Configuration to be run (and then tested).
assetic_dump: {
options: {
configFile: 'test/app/config/config.yml',
assetsBaseDir: 'test/fixtures/',
webDir: 'tmp/'
},
default_options: {
},
custom_options: {
webDir: 'tmp/custom_options/',
options: {
separator: '/* sep */\n',
banner: '/* banner */\n',
footer: '/* footer */\n'
}
}
},
// Unit tests.
nodeunit: {
tests: ['test/*_test.js'],
},
});
// Actually load this plugin's task(s).
grunt.loadTasks('tasks');
// These plugins provide necessary tasks.
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-nodeunit');
// Whenever the "test" task is run, first clean the "tmp" dir, then run this
// plugin's task(s), then test the result.
grunt.registerTask('test', ['clean', 'assetic_dump', 'nodeunit']);
// By default, lint and run all tests.
grunt.registerTask('default', ['jshint', 'test']);
}; | * Licensed under the MIT license.
*/ | random_line_split |
background.js | function contextMenusOnClick(info,tab,opt) {
var balloon;
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.sendRequest(tab.id,{'method':'prepareBalloon'},function(){
var F = info.selectionText;
$.ajax({
url : 'http://api.microsofttranslator.com/V2/Ajax.svc/Translate',
data : {
'appId' : '76518BFCEBBF18E107C7073FBD4A735001B56BB1',
'text' : F,
'from' : opt.split("|")[0],
'to' : opt.split("|")[1],
'contentType' : 'text/plain'
},
'success' : function(T) {
T = T.replace(/^"|"$/gi,'');
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {injCallBack(T)});
});
},
'error' : function(jqXHR, textStatus, errorThrown) {
var T = 'ERROR! ' + textStatus;
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {injCallBack(T)});
});
}
});
})
});
});
});
}
var injCallBack = function(S){
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.sendRequest(tab.id,{'method':'getContextMenus','string':S}, getRequestResponseCallback)
});
}
var getRequestResponseCallback = function getRequestResponseCallback(response) {
/*
* TODO
*/
};
function | (opt){
var optString = '';
var L = JSONSwitch(LANGUAGES);
optString += opt.split('|')[0] ? L[opt.split('|')[0]] : t('detectLanguage');
optString += ' » ';
optString += opt.split('|')[1] ? L[opt.split('|')[1]] : t('detectLanguage');
chrome.contextMenus.create({
"title": optString,
"contexts":['selection'],
"onclick": function(opt){
return function(info,tab) {
contextMenusOnClick(info,tab,opt)
}
}(opt)
});
}
function start() {
if(localStorage.getItem('version') === null) {
localStorage.setItem('version','0');
}
if(localStorage.getItem('version') !== null && localStorage.getItem('version') !== '1.1.8.3'){
window.open('info.html');
localStorage.setItem('version','1.1.8.3');
}
if (localStorage.getItem('from') === null) {
localStorage.setItem('from', '');
}
if (localStorage.getItem('to') === null) {
localStorage.setItem('to', '');
}
if (localStorage.getItem('preferred') === null) {
localStorage.setItem('preferred', JSON.stringify(["|"+window.navigator.language]));
window.open('options.html');
}
var preferred = JSON.parse(localStorage.getItem('preferred'));
chrome.contextMenus.removeAll();
for (var i = 0, max = preferred.length; i < max; i++) {
createcontextMenusOption(preferred[i]);
}
}
$(document).ready(function(){
LANGUAGES = {};
LOCALE = "";
chrome.i18n.getAcceptLanguages( function(L) {
LOCALE = L[0];
currentLanguages = Microsoft.Translator.GetLanguages();
languageNames = Microsoft.Translator.getLanguageNames(LOCALE);
for(var i = 0; i < currentLanguages.length; i++) {
LANGUAGES[languageNames[i]] = currentLanguages[i];
}
start();
} );
});
| createcontextMenusOption | identifier_name |
background.js | function contextMenusOnClick(info,tab,opt) {
var balloon;
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.sendRequest(tab.id,{'method':'prepareBalloon'},function(){
var F = info.selectionText;
$.ajax({
url : 'http://api.microsofttranslator.com/V2/Ajax.svc/Translate',
data : {
'appId' : '76518BFCEBBF18E107C7073FBD4A735001B56BB1',
'text' : F,
'from' : opt.split("|")[0],
'to' : opt.split("|")[1],
'contentType' : 'text/plain'
},
'success' : function(T) {
T = T.replace(/^"|"$/gi,'');
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {injCallBack(T)});
});
},
'error' : function(jqXHR, textStatus, errorThrown) {
var T = 'ERROR! ' + textStatus;
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {injCallBack(T)});
});
}
});
})
});
});
});
}
var injCallBack = function(S){
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.sendRequest(tab.id,{'method':'getContextMenus','string':S}, getRequestResponseCallback)
});
}
var getRequestResponseCallback = function getRequestResponseCallback(response) {
/*
* TODO
*/
};
function createcontextMenusOption(opt){
var optString = '';
var L = JSONSwitch(LANGUAGES);
optString += opt.split('|')[0] ? L[opt.split('|')[0]] : t('detectLanguage');
optString += ' » ';
optString += opt.split('|')[1] ? L[opt.split('|')[1]] : t('detectLanguage');
chrome.contextMenus.create({
"title": optString,
"contexts":['selection'],
"onclick": function(opt){
return function(info,tab) {
contextMenusOnClick(info,tab,opt)
}
}(opt)
});
}
function start() {
if(localStorage.getItem('version') === null) {
localStorage.setItem('version','0');
}
if(localStorage.getItem('version') !== null && localStorage.getItem('version') !== '1.1.8.3'){
window.open('info.html');
localStorage.setItem('version','1.1.8.3');
}
if (localStorage.getItem('from') === null) {
localStorage.setItem('from', '');
}
if (localStorage.getItem('to') === null) {
localStorage.setItem('to', '');
}
if (localStorage.getItem('preferred') === null) { | var preferred = JSON.parse(localStorage.getItem('preferred'));
chrome.contextMenus.removeAll();
for (var i = 0, max = preferred.length; i < max; i++) {
createcontextMenusOption(preferred[i]);
}
}
$(document).ready(function(){
LANGUAGES = {};
LOCALE = "";
chrome.i18n.getAcceptLanguages( function(L) {
LOCALE = L[0];
currentLanguages = Microsoft.Translator.GetLanguages();
languageNames = Microsoft.Translator.getLanguageNames(LOCALE);
for(var i = 0; i < currentLanguages.length; i++) {
LANGUAGES[languageNames[i]] = currentLanguages[i];
}
start();
} );
});
|
localStorage.setItem('preferred', JSON.stringify(["|"+window.navigator.language]));
window.open('options.html');
}
| conditional_block |
background.js | function contextMenusOnClick(info,tab,opt) |
var injCallBack = function(S){
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.sendRequest(tab.id,{'method':'getContextMenus','string':S}, getRequestResponseCallback)
});
}
var getRequestResponseCallback = function getRequestResponseCallback(response) {
/*
* TODO
*/
};
function createcontextMenusOption(opt){
var optString = '';
var L = JSONSwitch(LANGUAGES);
optString += opt.split('|')[0] ? L[opt.split('|')[0]] : t('detectLanguage');
optString += ' » ';
optString += opt.split('|')[1] ? L[opt.split('|')[1]] : t('detectLanguage');
chrome.contextMenus.create({
"title": optString,
"contexts":['selection'],
"onclick": function(opt){
return function(info,tab) {
contextMenusOnClick(info,tab,opt)
}
}(opt)
});
}
function start() {
if(localStorage.getItem('version') === null) {
localStorage.setItem('version','0');
}
if(localStorage.getItem('version') !== null && localStorage.getItem('version') !== '1.1.8.3'){
window.open('info.html');
localStorage.setItem('version','1.1.8.3');
}
if (localStorage.getItem('from') === null) {
localStorage.setItem('from', '');
}
if (localStorage.getItem('to') === null) {
localStorage.setItem('to', '');
}
if (localStorage.getItem('preferred') === null) {
localStorage.setItem('preferred', JSON.stringify(["|"+window.navigator.language]));
window.open('options.html');
}
var preferred = JSON.parse(localStorage.getItem('preferred'));
chrome.contextMenus.removeAll();
for (var i = 0, max = preferred.length; i < max; i++) {
createcontextMenusOption(preferred[i]);
}
}
$(document).ready(function(){
LANGUAGES = {};
LOCALE = "";
chrome.i18n.getAcceptLanguages( function(L) {
LOCALE = L[0];
currentLanguages = Microsoft.Translator.GetLanguages();
languageNames = Microsoft.Translator.getLanguageNames(LOCALE);
for(var i = 0; i < currentLanguages.length; i++) {
LANGUAGES[languageNames[i]] = currentLanguages[i];
}
start();
} );
});
| {
var balloon;
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.sendRequest(tab.id,{'method':'prepareBalloon'},function(){
var F = info.selectionText;
$.ajax({
url : 'http://api.microsofttranslator.com/V2/Ajax.svc/Translate',
data : {
'appId' : '76518BFCEBBF18E107C7073FBD4A735001B56BB1',
'text' : F,
'from' : opt.split("|")[0],
'to' : opt.split("|")[1],
'contentType' : 'text/plain'
},
'success' : function(T) {
T = T.replace(/^"|"$/gi,'');
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {injCallBack(T)});
});
},
'error' : function(jqXHR, textStatus, errorThrown) {
var T = 'ERROR! ' + textStatus;
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {injCallBack(T)});
});
}
});
})
});
});
});
} | identifier_body |
background.js | function contextMenusOnClick(info,tab,opt) {
var balloon;
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() { | var F = info.selectionText;
$.ajax({
url : 'http://api.microsofttranslator.com/V2/Ajax.svc/Translate',
data : {
'appId' : '76518BFCEBBF18E107C7073FBD4A735001B56BB1',
'text' : F,
'from' : opt.split("|")[0],
'to' : opt.split("|")[1],
'contentType' : 'text/plain'
},
'success' : function(T) {
T = T.replace(/^"|"$/gi,'');
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {injCallBack(T)});
});
},
'error' : function(jqXHR, textStatus, errorThrown) {
var T = 'ERROR! ' + textStatus;
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.executeScript(tab.id,{file:'js/content.js',allFrames:true},function() {injCallBack(T)});
});
}
});
})
});
});
});
}
var injCallBack = function(S){
chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.sendRequest(tab.id,{'method':'getContextMenus','string':S}, getRequestResponseCallback)
});
}
var getRequestResponseCallback = function getRequestResponseCallback(response) {
/*
* TODO
*/
};
function createcontextMenusOption(opt){
var optString = '';
var L = JSONSwitch(LANGUAGES);
optString += opt.split('|')[0] ? L[opt.split('|')[0]] : t('detectLanguage');
optString += ' » ';
optString += opt.split('|')[1] ? L[opt.split('|')[1]] : t('detectLanguage');
chrome.contextMenus.create({
"title": optString,
"contexts":['selection'],
"onclick": function(opt){
return function(info,tab) {
contextMenusOnClick(info,tab,opt)
}
}(opt)
});
}
function start() {
if(localStorage.getItem('version') === null) {
localStorage.setItem('version','0');
}
if(localStorage.getItem('version') !== null && localStorage.getItem('version') !== '1.1.8.3'){
window.open('info.html');
localStorage.setItem('version','1.1.8.3');
}
if (localStorage.getItem('from') === null) {
localStorage.setItem('from', '');
}
if (localStorage.getItem('to') === null) {
localStorage.setItem('to', '');
}
if (localStorage.getItem('preferred') === null) {
localStorage.setItem('preferred', JSON.stringify(["|"+window.navigator.language]));
window.open('options.html');
}
var preferred = JSON.parse(localStorage.getItem('preferred'));
chrome.contextMenus.removeAll();
for (var i = 0, max = preferred.length; i < max; i++) {
createcontextMenusOption(preferred[i]);
}
}
$(document).ready(function(){
LANGUAGES = {};
LOCALE = "";
chrome.i18n.getAcceptLanguages( function(L) {
LOCALE = L[0];
currentLanguages = Microsoft.Translator.GetLanguages();
languageNames = Microsoft.Translator.getLanguageNames(LOCALE);
for(var i = 0; i < currentLanguages.length; i++) {
LANGUAGES[languageNames[i]] = currentLanguages[i];
}
start();
} );
}); | chrome.tabs.getSelected(null, function(tab) { // get selected string in current tab
chrome.tabs.sendRequest(tab.id,{'method':'prepareBalloon'},function(){ | random_line_split |
editor.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import URI from 'vs/base/common/uri';
import { TPromise } from 'vs/base/common/winjs.base';
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
import Event from 'vs/base/common/event';
import { IDisposable } from 'vs/base/common/lifecycle';
export const IEditorService = createDecorator<IEditorService>('editorService');
export interface IEditorService {
_serviceBrand: any;
/**
* Specific overload to open an instance of IResourceInput.
*/
openEditor(input: IResourceInput, sideBySide?: boolean): TPromise<IEditor>;
}
export interface IEditorModel {
onDispose: Event<void>;
/**
* Loads the model.
*/
load(): TPromise<IEditorModel>;
/**
* Dispose associated resources
*/
dispose(): void;
}
export interface IBaseResourceInput {
/**
* Optional options to use when opening the text input.
*/
options?: ITextEditorOptions;
/**
* Label to show for the diff editor
*/
label?: string;
/**
* Description to show for the diff editor
*/
description?: string;
}
export interface IResourceInput extends IBaseResourceInput {
/**
* The resource URL of the resource to open.
*/
resource: URI;
/**
* The encoding of the text input if known.
*/
encoding?: string;
}
export interface IUntitledResourceInput extends IBaseResourceInput {
/**
* Optional resource. If the resource is not provided a new untitled file is created.
*/
resource?: URI;
/**
* Optional file path. Using the file resource will associate the file to the untitled resource.
*/
filePath?: string;
/**
* Optional language of the untitled resource.
*/
language?: string;
/**
* Optional contents of the untitled resource.
*/
contents?: string;
/**
* Optional encoding of the untitled resource.
*/
encoding?: string;
}
export interface IResourceDiffInput extends IBaseResourceInput {
/**
* The left hand side URI to open inside a diff editor. |
/**
* The right hand side URI to open inside a diff editor.
*/
rightResource: URI;
}
export interface IResourceSideBySideInput extends IBaseResourceInput {
/**
* The right hand side URI to open inside a side by side editor.
*/
masterResource: URI;
/**
* The left hand side URI to open inside a side by side editor.
*/
detailResource: URI;
}
export interface IEditorControl {
}
export interface IEditor {
/**
* The assigned input of this editor.
*/
input: IEditorInput;
/**
* The assigned options of this editor.
*/
options: IEditorOptions;
/**
* The assigned position of this editor.
*/
position: Position;
/**
* Returns the unique identifier of this editor.
*/
getId(): string;
/**
* Returns the underlying control of this editor.
*/
getControl(): IEditorControl;
/**
* Asks the underlying control to focus.
*/
focus(): void;
/**
* Finds out if this editor is visible or not.
*/
isVisible(): boolean;
}
/**
* Possible locations for opening an editor.
*/
export enum Position {
/** Opens the editor in the first position replacing the input currently showing */
ONE = 0,
/** Opens the editor in the second position replacing the input currently showing */
TWO = 1,
/** Opens the editor in the third most position replacing the input currently showing */
THREE = 2
}
export const POSITIONS = [Position.ONE, Position.TWO, Position.THREE];
export enum Direction {
LEFT,
RIGHT
}
export enum Verbosity {
SHORT,
MEDIUM,
LONG
}
export interface IRevertOptions {
/**
* Forces to load the contents of the editor again even if the editor is not dirty.
*/
force?: boolean;
/**
* A soft revert will clear dirty state of an editor but will not attempt to load it.
*/
soft?: boolean;
}
export interface IEditorInput extends IDisposable {
/**
* Triggered when this input is disposed.
*/
onDispose: Event<void>;
/**
* Returns the associated resource of this input.
*/
getResource(): URI;
/**
* Returns the display name of this input.
*/
getName(): string;
/**
* Returns the display description of this input.
*/
getDescription(verbosity?: Verbosity): string;
/**
* Returns the display title of this input.
*/
getTitle(verbosity?: Verbosity): string;
/**
* Resolves the input.
*/
resolve(): TPromise<IEditorModel>;
/**
* Returns if this input is dirty or not.
*/
isDirty(): boolean;
/**
* Reverts this input.
*/
revert(options?: IRevertOptions): TPromise<boolean>;
/**
* Returns if the other object matches this input.
*/
matches(other: any): boolean;
}
export interface IEditorOptions {
/**
* Tells the editor to not receive keyboard focus when the editor is being opened. By default,
* the editor will receive keyboard focus on open.
*/
preserveFocus?: boolean;
/**
* Tells the editor to replace the editor input in the editor even if it is identical to the one
* already showing. By default, the editor will not replace the input if it is identical to the
* one showing.
*/
forceOpen?: boolean;
/**
* Will reveal the editor if it is already opened and visible in any of the opened editor groups.
*/
revealIfVisible?: boolean;
/**
* Will reveal the editor if it is already opened (even when not visible) in any of the opened editor groups.
*/
revealIfOpened?: boolean;
/**
* An editor that is pinned remains in the editor stack even when another editor is being opened.
* An editor that is not pinned will always get replaced by another editor that is not pinned.
*/
pinned?: boolean;
/**
* The index in the document stack where to insert the editor into when opening.
*/
index?: number;
/**
* An active editor that is opened will show its contents directly. Set to true to open an editor
* in the background.
*/
inactive?: boolean;
}
export interface ITextEditorSelection {
startLineNumber: number;
startColumn: number;
endLineNumber?: number;
endColumn?: number;
}
export interface ITextEditorOptions extends IEditorOptions {
/**
* Text editor selection.
*/
selection?: ITextEditorSelection;
/**
* Text editor view state.
*/
viewState?: object;
/**
* Option to scroll vertically or horizontally as necessary and reveal a range centered vertically only if it lies outside the viewport.
*/
revealInCenterIfOutsideViewport?: boolean;
} | */
leftResource: URI; | random_line_split |
ScrollBar.py | #
# This file is part of GNU Enterprise.
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# Copyright 2002-2003 Free Software Foundation
#
# FILE:
# ScrollBar.py
#
# DESCRIPTION:
#
# NOTES:
#
#from gnue.common.apps import GDebug
import math
import string
import curses
from constants import *
from Control import Control
from Button import Button
class ScrollBar(Control):
"""
horizontal only :-(
"""
def __init__(self, Parent, SBName, Y, X, W, **properties):
apply(Control.__init__, (self,Parent,SBName),properties)
self.CANGETFOCUS = 1
self.H = 1
self.W = W
self.Y = Y
self.X = X
self.PARENT = Parent
self.SetMethod("SYSPAINT", self.Paint)
self.SetMethod("SYSRUN",self.Run)
self.SetMethod("GOTFOCUS", self.__GotFocus)
self.SetMethod("LOSTFOCUS", self.__LostFocus)
self.SetMethod("CLICK", self._ChangePos)
self._max = 1
self._val = 0
self.stepsize = 1
self.__initButtons()
def __initButtons(self):
if string.find(str(self.__class__), '.ScrollBar') != -1:
Y = self.Y
X = self.X
W = self.W
Parent = self.PARENT
self.rightarrow = Button(Parent,'rightarrow',Y,X+W - 3,3,'>')
self.rightarrow.SetMethod("CLICK",self._Inc)
Parent.AddControl(self.rightarrow)
self.left2arrow = Button(Parent,'left2arrow',Y,X+W - 6,3,'<')
self.left2arrow.SetMethod("CLICK",self._Dec)
Parent.AddControl(self.left2arrow)
self.leftarrow = Button(Parent,'leftarrow',Y,X,3,'<')
self.leftarrow.SetMethod("CLICK",self._Dec)
Parent.AddControl(self.leftarrow)
def __GotFocus(self,v1,v2,v3):
self.FOCUS = 1
self.Paint(None,None,None)
return 1
def __LostFocus(self,v1,v2,v3):
self.FOCUS = 0
self.Paint(None,None,None)
return 1
def Run(self, v1,v2,v3):
if v1 :
self.ExecMethod("CLICK", self, v2, v3)
Container = self.PARENT.Screen()
global BACKWARDS
while 1:
ch = Container.GetChar()
if self.PARENT.BreakOrder(ch) :
return
if ch in (Container.TokNextField, Container.TokDownArrow, Container.TokUpArrow):
BACKWARDS = 0
if ch == Container.TokUpArrow:
|
return
elif ch == Container.TokLeftArrow:
self._Dec(None,None,None)
elif ch == Container.TokRightArrow:
self._Inc(None, None, None)
def _ChangePos(self,arg1,arg2,newX):
X = newX - self.start
if X >= (self.WorkingArea-1):
val = self._max
else:
val = float(X) / self.stepsize
val = int(math.ceil(val))
self.Set(val)
self._Action()
def Init(self, Max):
self._max = Max
self._val = 0
self.WorkingArea = float(self.W-9)
self.start = 3
self.UsedSpace = int(math.floor(self.WorkingArea / float(self._max)))
self.stepsize = self.WorkingArea / self._max
if self.UsedSpace < 1:
self.UsedSpace = 1
self.Paint(None,None,None)
def Paint(self,v1,v2,v3):
## TODO: This is all wrong... it only partially supports _ABSX
Pos = int(math.ceil(float(self._val) * (self.stepsize))) + self.start + self._ABSX
Screen = self.PARENT.Screen()
Screen.AutoRefresh = 0
# clear the bar region in reverse standard-color
self.SetColor(1)
self.LoLight()
for i in range(0, int(self.WorkingArea)):
Screen.PutAt(self._ABSY, self.start + i + self._ABSX, ' ', curses.A_REVERSE)
# correct position
if Pos >= (self.WorkingArea + self.start):
Pos = (self.start + self.WorkingArea)
elif Pos < (self.start + self.UsedSpace):
Pos = self.start + self.UsedSpace
# draw the handle hilight
if self.FOCUS:
self.SetColor(3)
else:
self.SetColor(2)
self.LoLight()
for i in range(0, self.UsedSpace):
Screen.PutAt(self._ABSY, (Pos - self.UsedSpace) + i, ' ', curses.A_REVERSE)
Screen.AutoRefresh = 1
Screen.Refresh()
def Dec(self,arg1,arg2,arg3):
if self._val > 0:
self._val -= 1
self.Paint(None,None,None)
def _Dec(self, arg1,arg2,arg3):
self.Dec(None,None,None)
self._Action()
def Inc(self,arg1,arg2,arg3):
if self._val < self._max:
self._val += 1
self.Paint(None,None,None)
def _Inc(self, arg1,arg2,arg3):
self.Inc(None,None,None)
self._Action()
def Set(self,newVal):
if newVal < 0:
newVal = 0
elif newVal > self._max:
newVal =self._max
self._val = newVal
self.Paint(None,None,None)
def __del__(self):
Parent = self.PARENT
Parent.DelControl(self.rightarrow)
Parent.DelControl(self.leftarrow)
Parent.DelControl(self.left2arrow)
def _Action(self):
action = self.GetMethod("CHANGED")
if action != None:
apply(action,(self._val,self._max,None))
| BACKWARDS = 1 | conditional_block |
ScrollBar.py | #
# This file is part of GNU Enterprise.
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# Copyright 2002-2003 Free Software Foundation
#
# FILE:
# ScrollBar.py
#
# DESCRIPTION:
#
# NOTES:
#
#from gnue.common.apps import GDebug
import math
import string
import curses
from constants import *
from Control import Control
from Button import Button
class ScrollBar(Control):
"""
horizontal only :-(
"""
def __init__(self, Parent, SBName, Y, X, W, **properties):
apply(Control.__init__, (self,Parent,SBName),properties)
self.CANGETFOCUS = 1
self.H = 1
self.W = W
self.Y = Y
self.X = X
self.PARENT = Parent
self.SetMethod("SYSPAINT", self.Paint)
self.SetMethod("SYSRUN",self.Run)
self.SetMethod("GOTFOCUS", self.__GotFocus)
self.SetMethod("LOSTFOCUS", self.__LostFocus)
self.SetMethod("CLICK", self._ChangePos)
self._max = 1
self._val = 0
self.stepsize = 1
self.__initButtons()
def __initButtons(self):
if string.find(str(self.__class__), '.ScrollBar') != -1:
Y = self.Y
X = self.X
W = self.W
Parent = self.PARENT
self.rightarrow = Button(Parent,'rightarrow',Y,X+W - 3,3,'>')
self.rightarrow.SetMethod("CLICK",self._Inc)
Parent.AddControl(self.rightarrow)
self.left2arrow = Button(Parent,'left2arrow',Y,X+W - 6,3,'<')
self.left2arrow.SetMethod("CLICK",self._Dec)
Parent.AddControl(self.left2arrow)
self.leftarrow = Button(Parent,'leftarrow',Y,X,3,'<')
self.leftarrow.SetMethod("CLICK",self._Dec)
Parent.AddControl(self.leftarrow)
def __GotFocus(self,v1,v2,v3):
self.FOCUS = 1
self.Paint(None,None,None)
return 1
def __LostFocus(self,v1,v2,v3):
self.FOCUS = 0
self.Paint(None,None,None)
return 1
def Run(self, v1,v2,v3):
if v1 :
self.ExecMethod("CLICK", self, v2, v3)
Container = self.PARENT.Screen()
global BACKWARDS
while 1:
ch = Container.GetChar()
if self.PARENT.BreakOrder(ch) :
return
if ch in (Container.TokNextField, Container.TokDownArrow, Container.TokUpArrow):
BACKWARDS = 0
if ch == Container.TokUpArrow:
BACKWARDS = 1
return
elif ch == Container.TokLeftArrow:
self._Dec(None,None,None)
elif ch == Container.TokRightArrow:
self._Inc(None, None, None)
def _ChangePos(self,arg1,arg2,newX):
X = newX - self.start
if X >= (self.WorkingArea-1):
val = self._max
else:
val = float(X) / self.stepsize
val = int(math.ceil(val))
self.Set(val)
self._Action()
def Init(self, Max):
self._max = Max
self._val = 0
self.WorkingArea = float(self.W-9)
self.start = 3
self.UsedSpace = int(math.floor(self.WorkingArea / float(self._max)))
self.stepsize = self.WorkingArea / self._max
if self.UsedSpace < 1:
self.UsedSpace = 1
self.Paint(None,None,None)
def Paint(self,v1,v2,v3):
## TODO: This is all wrong... it only partially supports _ABSX
Pos = int(math.ceil(float(self._val) * (self.stepsize))) + self.start + self._ABSX
Screen = self.PARENT.Screen()
Screen.AutoRefresh = 0
# clear the bar region in reverse standard-color
self.SetColor(1)
self.LoLight()
for i in range(0, int(self.WorkingArea)):
Screen.PutAt(self._ABSY, self.start + i + self._ABSX, ' ', curses.A_REVERSE)
# correct position
if Pos >= (self.WorkingArea + self.start):
Pos = (self.start + self.WorkingArea)
elif Pos < (self.start + self.UsedSpace):
Pos = self.start + self.UsedSpace
# draw the handle hilight
if self.FOCUS:
self.SetColor(3)
else:
self.SetColor(2)
self.LoLight()
for i in range(0, self.UsedSpace):
Screen.PutAt(self._ABSY, (Pos - self.UsedSpace) + i, ' ', curses.A_REVERSE)
Screen.AutoRefresh = 1
Screen.Refresh()
def Dec(self,arg1,arg2,arg3):
if self._val > 0:
self._val -= 1
self.Paint(None,None,None)
def _Dec(self, arg1,arg2,arg3):
self.Dec(None,None,None)
self._Action()
def Inc(self,arg1,arg2,arg3):
|
def _Inc(self, arg1,arg2,arg3):
self.Inc(None,None,None)
self._Action()
def Set(self,newVal):
if newVal < 0:
newVal = 0
elif newVal > self._max:
newVal =self._max
self._val = newVal
self.Paint(None,None,None)
def __del__(self):
Parent = self.PARENT
Parent.DelControl(self.rightarrow)
Parent.DelControl(self.leftarrow)
Parent.DelControl(self.left2arrow)
def _Action(self):
action = self.GetMethod("CHANGED")
if action != None:
apply(action,(self._val,self._max,None))
| if self._val < self._max:
self._val += 1
self.Paint(None,None,None) | identifier_body |
ScrollBar.py | #
# This file is part of GNU Enterprise.
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# Copyright 2002-2003 Free Software Foundation
#
# FILE:
# ScrollBar.py
#
# DESCRIPTION:
#
# NOTES:
#
#from gnue.common.apps import GDebug
import math
import string
import curses
from constants import *
from Control import Control
from Button import Button
class ScrollBar(Control):
"""
horizontal only :-(
"""
def __init__(self, Parent, SBName, Y, X, W, **properties):
apply(Control.__init__, (self,Parent,SBName),properties)
self.CANGETFOCUS = 1
self.H = 1
self.W = W
self.Y = Y
self.X = X
self.PARENT = Parent
self.SetMethod("SYSPAINT", self.Paint)
self.SetMethod("SYSRUN",self.Run)
self.SetMethod("GOTFOCUS", self.__GotFocus)
self.SetMethod("LOSTFOCUS", self.__LostFocus)
self.SetMethod("CLICK", self._ChangePos)
self._max = 1
self._val = 0
self.stepsize = 1
self.__initButtons()
def __initButtons(self):
if string.find(str(self.__class__), '.ScrollBar') != -1:
Y = self.Y
X = self.X
W = self.W
Parent = self.PARENT
self.rightarrow = Button(Parent,'rightarrow',Y,X+W - 3,3,'>')
self.rightarrow.SetMethod("CLICK",self._Inc)
Parent.AddControl(self.rightarrow)
self.left2arrow = Button(Parent,'left2arrow',Y,X+W - 6,3,'<')
self.left2arrow.SetMethod("CLICK",self._Dec)
Parent.AddControl(self.left2arrow)
self.leftarrow = Button(Parent,'leftarrow',Y,X,3,'<')
self.leftarrow.SetMethod("CLICK",self._Dec)
Parent.AddControl(self.leftarrow)
def __GotFocus(self,v1,v2,v3):
self.FOCUS = 1
self.Paint(None,None,None)
return 1
def __LostFocus(self,v1,v2,v3):
self.FOCUS = 0
self.Paint(None,None,None)
return 1
def Run(self, v1,v2,v3):
if v1 :
self.ExecMethod("CLICK", self, v2, v3)
Container = self.PARENT.Screen()
global BACKWARDS
while 1:
ch = Container.GetChar()
if self.PARENT.BreakOrder(ch) :
return
if ch in (Container.TokNextField, Container.TokDownArrow, Container.TokUpArrow):
BACKWARDS = 0
if ch == Container.TokUpArrow:
BACKWARDS = 1
return
elif ch == Container.TokLeftArrow:
self._Dec(None,None,None)
elif ch == Container.TokRightArrow:
self._Inc(None, None, None)
def _ChangePos(self,arg1,arg2,newX):
X = newX - self.start
if X >= (self.WorkingArea-1):
val = self._max
else:
val = float(X) / self.stepsize
val = int(math.ceil(val))
self.Set(val)
self._Action()
def Init(self, Max):
self._max = Max
self._val = 0
self.WorkingArea = float(self.W-9)
self.start = 3
self.UsedSpace = int(math.floor(self.WorkingArea / float(self._max)))
self.stepsize = self.WorkingArea / self._max
if self.UsedSpace < 1:
self.UsedSpace = 1
self.Paint(None,None,None)
def Paint(self,v1,v2,v3):
## TODO: This is all wrong... it only partially supports _ABSX
Pos = int(math.ceil(float(self._val) * (self.stepsize))) + self.start + self._ABSX
Screen = self.PARENT.Screen()
Screen.AutoRefresh = 0
# clear the bar region in reverse standard-color
self.SetColor(1)
self.LoLight()
for i in range(0, int(self.WorkingArea)):
Screen.PutAt(self._ABSY, self.start + i + self._ABSX, ' ', curses.A_REVERSE)
# correct position
if Pos >= (self.WorkingArea + self.start):
Pos = (self.start + self.WorkingArea)
elif Pos < (self.start + self.UsedSpace):
Pos = self.start + self.UsedSpace
# draw the handle hilight
if self.FOCUS:
self.SetColor(3)
else:
self.SetColor(2)
self.LoLight()
for i in range(0, self.UsedSpace):
Screen.PutAt(self._ABSY, (Pos - self.UsedSpace) + i, ' ', curses.A_REVERSE)
Screen.AutoRefresh = 1
Screen.Refresh()
def Dec(self,arg1,arg2,arg3):
if self._val > 0:
self._val -= 1
self.Paint(None,None,None)
def _Dec(self, arg1,arg2,arg3):
self.Dec(None,None,None)
self._Action()
def | (self,arg1,arg2,arg3):
if self._val < self._max:
self._val += 1
self.Paint(None,None,None)
def _Inc(self, arg1,arg2,arg3):
self.Inc(None,None,None)
self._Action()
def Set(self,newVal):
if newVal < 0:
newVal = 0
elif newVal > self._max:
newVal =self._max
self._val = newVal
self.Paint(None,None,None)
def __del__(self):
Parent = self.PARENT
Parent.DelControl(self.rightarrow)
Parent.DelControl(self.leftarrow)
Parent.DelControl(self.left2arrow)
def _Action(self):
action = self.GetMethod("CHANGED")
if action != None:
apply(action,(self._val,self._max,None))
| Inc | identifier_name |
ScrollBar.py | #
# This file is part of GNU Enterprise.
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# Copyright 2002-2003 Free Software Foundation
#
# FILE:
# ScrollBar.py
#
# DESCRIPTION:
#
# NOTES:
#
#from gnue.common.apps import GDebug
import math
import string
import curses
from constants import *
from Control import Control
from Button import Button
class ScrollBar(Control):
"""
horizontal only :-(
"""
def __init__(self, Parent, SBName, Y, X, W, **properties):
apply(Control.__init__, (self,Parent,SBName),properties)
self.CANGETFOCUS = 1
self.H = 1
self.W = W
self.Y = Y
self.X = X
self.PARENT = Parent
self.SetMethod("SYSPAINT", self.Paint)
self.SetMethod("SYSRUN",self.Run)
self.SetMethod("GOTFOCUS", self.__GotFocus)
self.SetMethod("LOSTFOCUS", self.__LostFocus)
self.SetMethod("CLICK", self._ChangePos)
self._max = 1
self._val = 0
self.stepsize = 1
self.__initButtons()
def __initButtons(self):
if string.find(str(self.__class__), '.ScrollBar') != -1:
Y = self.Y
X = self.X
W = self.W
Parent = self.PARENT
self.rightarrow = Button(Parent,'rightarrow',Y,X+W - 3,3,'>')
self.rightarrow.SetMethod("CLICK",self._Inc)
Parent.AddControl(self.rightarrow)
self.left2arrow = Button(Parent,'left2arrow',Y,X+W - 6,3,'<')
self.left2arrow.SetMethod("CLICK",self._Dec)
Parent.AddControl(self.left2arrow)
self.leftarrow = Button(Parent,'leftarrow',Y,X,3,'<')
self.leftarrow.SetMethod("CLICK",self._Dec)
Parent.AddControl(self.leftarrow)
def __GotFocus(self,v1,v2,v3):
self.FOCUS = 1
self.Paint(None,None,None)
return 1
def __LostFocus(self,v1,v2,v3):
self.FOCUS = 0
self.Paint(None,None,None)
return 1
def Run(self, v1,v2,v3):
if v1 :
self.ExecMethod("CLICK", self, v2, v3)
Container = self.PARENT.Screen()
global BACKWARDS
while 1:
ch = Container.GetChar()
if self.PARENT.BreakOrder(ch) :
return
if ch in (Container.TokNextField, Container.TokDownArrow, Container.TokUpArrow):
BACKWARDS = 0
| BACKWARDS = 1
return
elif ch == Container.TokLeftArrow:
self._Dec(None,None,None)
elif ch == Container.TokRightArrow:
self._Inc(None, None, None)
def _ChangePos(self,arg1,arg2,newX):
X = newX - self.start
if X >= (self.WorkingArea-1):
val = self._max
else:
val = float(X) / self.stepsize
val = int(math.ceil(val))
self.Set(val)
self._Action()
def Init(self, Max):
self._max = Max
self._val = 0
self.WorkingArea = float(self.W-9)
self.start = 3
self.UsedSpace = int(math.floor(self.WorkingArea / float(self._max)))
self.stepsize = self.WorkingArea / self._max
if self.UsedSpace < 1:
self.UsedSpace = 1
self.Paint(None,None,None)
def Paint(self,v1,v2,v3):
## TODO: This is all wrong... it only partially supports _ABSX
Pos = int(math.ceil(float(self._val) * (self.stepsize))) + self.start + self._ABSX
Screen = self.PARENT.Screen()
Screen.AutoRefresh = 0
# clear the bar region in reverse standard-color
self.SetColor(1)
self.LoLight()
for i in range(0, int(self.WorkingArea)):
Screen.PutAt(self._ABSY, self.start + i + self._ABSX, ' ', curses.A_REVERSE)
# correct position
if Pos >= (self.WorkingArea + self.start):
Pos = (self.start + self.WorkingArea)
elif Pos < (self.start + self.UsedSpace):
Pos = self.start + self.UsedSpace
# draw the handle hilight
if self.FOCUS:
self.SetColor(3)
else:
self.SetColor(2)
self.LoLight()
for i in range(0, self.UsedSpace):
Screen.PutAt(self._ABSY, (Pos - self.UsedSpace) + i, ' ', curses.A_REVERSE)
Screen.AutoRefresh = 1
Screen.Refresh()
def Dec(self,arg1,arg2,arg3):
if self._val > 0:
self._val -= 1
self.Paint(None,None,None)
def _Dec(self, arg1,arg2,arg3):
self.Dec(None,None,None)
self._Action()
def Inc(self,arg1,arg2,arg3):
if self._val < self._max:
self._val += 1
self.Paint(None,None,None)
def _Inc(self, arg1,arg2,arg3):
self.Inc(None,None,None)
self._Action()
def Set(self,newVal):
if newVal < 0:
newVal = 0
elif newVal > self._max:
newVal =self._max
self._val = newVal
self.Paint(None,None,None)
def __del__(self):
Parent = self.PARENT
Parent.DelControl(self.rightarrow)
Parent.DelControl(self.leftarrow)
Parent.DelControl(self.left2arrow)
def _Action(self):
action = self.GetMethod("CHANGED")
if action != None:
apply(action,(self._val,self._max,None)) | if ch == Container.TokUpArrow:
| random_line_split |
main.rs | use std::collections::HashSet;
fn main() {
struct PentNums {
n: usize,
curr: usize
}
impl PentNums {
fn new(index: usize) -> PentNums {
if index == 0 |
PentNums{n: index, curr: PentNums::get(index)}
}
fn get(index: usize) -> usize {
(index * ((index * 3) - 1)) / 2
}
}
impl Iterator for PentNums {
type Item = usize;
fn next(&mut self) -> Option<usize> {
self.n += 1;
self.curr = (self.n * ((self.n * 3) - 1)) / 2;
Some(self.curr)
}
}
fn is_pent(num: usize, mut h: &mut HashSet<usize>) -> bool {
if h.contains(&num) {
return true
}
let mut p = PentNums::new(h.len());
for elem in p {
if num == elem {
h.insert(num);
return true;
} else if elem > num {
return false;
}
}
false
}
// assumes a and b are pentagonal
fn sum_diff_pent(a: usize, b: usize, mut h: &mut HashSet<usize>) -> bool {
if a >= b {
return false;
}
let e = a + b;
// println!("{:?}", e);
if !is_pent(e, &mut h) {
return false;
}
let d = b - a;
// println!("{:?}", d);
if !is_pent(d, &mut h) {
return false;
}
true
}
let mut pA = PentNums::new(8)
.take(2500)
.collect::<Vec<_>>();
let mut h: HashSet<usize> = HashSet::new();
for num in pA.clone() {
h.insert(num);
}
'outer: for curr in pA {
// println!("{:?}", curr);
let mut pB = PentNums::new(4)
.take(2500)
.collect::<Vec<_>>();
for elem in pB {
// println!("{:?}", elem);
if elem >= curr {
continue 'outer;
}
if sum_diff_pent(elem, curr, &mut h) {
println!("{:?}", curr - elem);
break 'outer;
}
}
}
}
| {
return PentNums{n: index, curr: 0};
} | conditional_block |
main.rs | use std::collections::HashSet;
fn main() {
struct PentNums {
n: usize,
curr: usize
}
impl PentNums {
fn new(index: usize) -> PentNums {
if index == 0 {
return PentNums{n: index, curr: 0};
}
PentNums{n: index, curr: PentNums::get(index)}
}
fn get(index: usize) -> usize {
(index * ((index * 3) - 1)) / 2
}
}
impl Iterator for PentNums {
type Item = usize;
fn next(&mut self) -> Option<usize> {
self.n += 1;
self.curr = (self.n * ((self.n * 3) - 1)) / 2;
Some(self.curr)
}
}
fn is_pent(num: usize, mut h: &mut HashSet<usize>) -> bool {
if h.contains(&num) {
return true
}
let mut p = PentNums::new(h.len());
for elem in p {
if num == elem {
h.insert(num);
return true;
} else if elem > num {
return false;
}
}
false
}
// assumes a and b are pentagonal
fn | (a: usize, b: usize, mut h: &mut HashSet<usize>) -> bool {
if a >= b {
return false;
}
let e = a + b;
// println!("{:?}", e);
if !is_pent(e, &mut h) {
return false;
}
let d = b - a;
// println!("{:?}", d);
if !is_pent(d, &mut h) {
return false;
}
true
}
let mut pA = PentNums::new(8)
.take(2500)
.collect::<Vec<_>>();
let mut h: HashSet<usize> = HashSet::new();
for num in pA.clone() {
h.insert(num);
}
'outer: for curr in pA {
// println!("{:?}", curr);
let mut pB = PentNums::new(4)
.take(2500)
.collect::<Vec<_>>();
for elem in pB {
// println!("{:?}", elem);
if elem >= curr {
continue 'outer;
}
if sum_diff_pent(elem, curr, &mut h) {
println!("{:?}", curr - elem);
break 'outer;
}
}
}
}
| sum_diff_pent | identifier_name |
main.rs | use std::collections::HashSet;
fn main() {
struct PentNums {
n: usize,
curr: usize
}
impl PentNums {
fn new(index: usize) -> PentNums {
if index == 0 {
return PentNums{n: index, curr: 0};
}
PentNums{n: index, curr: PentNums::get(index)}
}
fn get(index: usize) -> usize {
(index * ((index * 3) - 1)) / 2
}
}
impl Iterator for PentNums {
type Item = usize;
fn next(&mut self) -> Option<usize> {
self.n += 1;
self.curr = (self.n * ((self.n * 3) - 1)) / 2;
Some(self.curr)
}
}
fn is_pent(num: usize, mut h: &mut HashSet<usize>) -> bool |
// assumes a and b are pentagonal
fn sum_diff_pent(a: usize, b: usize, mut h: &mut HashSet<usize>) -> bool {
if a >= b {
return false;
}
let e = a + b;
// println!("{:?}", e);
if !is_pent(e, &mut h) {
return false;
}
let d = b - a;
// println!("{:?}", d);
if !is_pent(d, &mut h) {
return false;
}
true
}
let mut pA = PentNums::new(8)
.take(2500)
.collect::<Vec<_>>();
let mut h: HashSet<usize> = HashSet::new();
for num in pA.clone() {
h.insert(num);
}
'outer: for curr in pA {
// println!("{:?}", curr);
let mut pB = PentNums::new(4)
.take(2500)
.collect::<Vec<_>>();
for elem in pB {
// println!("{:?}", elem);
if elem >= curr {
continue 'outer;
}
if sum_diff_pent(elem, curr, &mut h) {
println!("{:?}", curr - elem);
break 'outer;
}
}
}
}
| {
if h.contains(&num) {
return true
}
let mut p = PentNums::new(h.len());
for elem in p {
if num == elem {
h.insert(num);
return true;
} else if elem > num {
return false;
}
}
false
} | identifier_body |
main.rs | use std::collections::HashSet;
fn main() {
struct PentNums {
n: usize,
curr: usize
}
impl PentNums {
fn new(index: usize) -> PentNums {
if index == 0 {
return PentNums{n: index, curr: 0};
}
PentNums{n: index, curr: PentNums::get(index)}
}
fn get(index: usize) -> usize {
(index * ((index * 3) - 1)) / 2
}
}
impl Iterator for PentNums {
type Item = usize;
fn next(&mut self) -> Option<usize> {
self.n += 1;
self.curr = (self.n * ((self.n * 3) - 1)) / 2;
Some(self.curr)
}
}
fn is_pent(num: usize, mut h: &mut HashSet<usize>) -> bool {
if h.contains(&num) {
return true
}
let mut p = PentNums::new(h.len());
for elem in p {
if num == elem {
h.insert(num);
return true;
} else if elem > num {
return false;
}
}
false
}
// assumes a and b are pentagonal
fn sum_diff_pent(a: usize, b: usize, mut h: &mut HashSet<usize>) -> bool {
if a >= b {
return false;
}
let e = a + b;
// println!("{:?}", e);
if !is_pent(e, &mut h) {
return false;
}
let d = b - a;
// println!("{:?}", d);
if !is_pent(d, &mut h) {
return false;
}
true
}
let mut pA = PentNums::new(8)
.take(2500)
.collect::<Vec<_>>();
let mut h: HashSet<usize> = HashSet::new();
for num in pA.clone() { | }
'outer: for curr in pA {
// println!("{:?}", curr);
let mut pB = PentNums::new(4)
.take(2500)
.collect::<Vec<_>>();
for elem in pB {
// println!("{:?}", elem);
if elem >= curr {
continue 'outer;
}
if sum_diff_pent(elem, curr, &mut h) {
println!("{:?}", curr - elem);
break 'outer;
}
}
}
} | h.insert(num); | random_line_split |
makemessages.py | from __future__ import unicode_literals
import fnmatch
import glob
import io
import os
import re
import sys
from itertools import dropwhile
import django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import (
find_command, handle_extensions, popen_wrapper,
)
from django.utils import six
from django.utils._os import upath
from django.utils.encoding import DEFAULT_LOCALE_ENCODING, force_str
from django.utils.functional import cached_property, total_ordering
from django.utils.jslex import prepare_js_for_gettext
from django.utils.text import get_text_list
plural_forms_re = re.compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
STATUS_OK = 0
def check_programs(*programs):
for program in programs:
if find_command(program) is None:
raise CommandError("Can't find %s. Make sure you have GNU "
"gettext tools 0.15 or newer installed." % program)
def gettext_popen_wrapper(args, os_err_exc_type=CommandError, stdout_encoding="utf-8"):
"""
Makes sure text obtained from stdout of gettext utilities is Unicode.
"""
stdout, stderr, status_code = popen_wrapper(args, os_err_exc_type=os_err_exc_type)
if os.name == 'nt' and six.PY3 and stdout_encoding != DEFAULT_LOCALE_ENCODING:
# This looks weird because it's undoing what
# subprocess.Popen(universal_newlines=True).communicate()
# does when capturing PO files contents from stdout of gettext command
# line programs. No need to do anything on Python 2 because it's
# already a byte-string there (#23271).
stdout = stdout.encode(DEFAULT_LOCALE_ENCODING).decode(stdout_encoding)
if six.PY2:
stdout = stdout.decode(stdout_encoding)
return stdout, stderr, status_code
@total_ordering
class TranslatableFile(object):
def __init__(self, dirpath, file_name, locale_dir):
self.file = file_name
self.dirpath = dirpath
self.locale_dir = locale_dir
def __repr__(self):
return "<TranslatableFile: %s>" % os.sep.join([self.dirpath, self.file])
def __eq__(self, other):
return self.path == other.path
def __lt__(self, other):
return self.path < other.path
@property
def path(self):
return os.path.join(self.dirpath, self.file)
def process(self, command, domain):
"""
Extract translatable literals from self.file for :param domain:,
creating or updating the POT file.
Uses the xgettext GNU gettext utility.
"""
from django.utils.translation import templatize
if command.verbosity > 1:
command.stdout.write('processing file %s in %s\n' % (self.file, self.dirpath))
file_ext = os.path.splitext(self.file)[1]
if domain == 'djangojs':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = command.gettext_version < (0, 18, 3)
if is_templatized:
with io.open(orig_file, 'r', encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
src_data = prepare_js_for_gettext(src_data)
work_file = os.path.join(self.dirpath, '%s.c' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(src_data)
args = [
'xgettext',
'-d', domain,
'--language=%s' % ('C' if is_templatized else 'JavaScript',),
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
elif domain == 'django':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = file_ext != '.py'
if is_templatized:
with io.open(orig_file, encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
content = templatize(src_data, orig_file[2:])
work_file = os.path.join(self.dirpath, '%s.py' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(content)
args = [
'xgettext',
'-d', domain,
'--language=Python',
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=ugettext_noop',
'--keyword=ugettext_lazy',
'--keyword=ungettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--keyword=pgettext_lazy:1c,2',
'--keyword=npgettext_lazy:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
else:
return
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
if is_templatized:
os.unlink(work_file)
raise CommandError(
"errors happened while running xgettext on %s\n%s" %
(self.file, errors))
elif command.verbosity > 0:
# Print warnings
command.stdout.write(errors)
if msgs:
# Write/append messages to pot file
potfile = os.path.join(self.locale_dir, '%s.pot' % str(domain))
if is_templatized:
# Remove '.py' suffix
if os.name == 'nt':
# Preserve '.\' prefix on Windows to respect gettext behavior
old = '#: ' + work_file
new = '#: ' + orig_file
else:
old = '#: ' + work_file[2:]
new = '#: ' + orig_file[2:]
msgs = msgs.replace(old, new)
write_pot_file(potfile, msgs)
if is_templatized:
os.unlink(work_file)
def write_pot_file(potfile, msgs):
"""
Write the :param potfile: POT file with the :param msgs: contents,
previously making sure its format is valid.
"""
if os.path.exists(potfile):
# Strip the header
msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
else:
msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
with io.open(potfile, 'a', encoding='utf-8') as fp:
fp.write(msgs)
class Command(BaseCommand):
help = ("Runs over the entire source tree of the current directory and "
"pulls out all strings marked for translation. It creates (or updates) a message "
"file in the conf/locale (in the django tree) or locale (for projects and "
"applications) directory.\n\nYou must run this command with one of either the "
"--locale, --exclude or --all options.")
requires_system_checks = False
leave_locale_alone = True
msgmerge_options = ['-q', '--previous']
msguniq_options = ['--to-code=utf-8']
msgattrib_options = ['--no-obsolete']
xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators']
def add_arguments(self, parser):
parser.add_argument('--locale', '-l', default=[], dest='locale', action='append',
help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). '
'Can be used multiple times.')
parser.add_argument('--exclude', '-x', default=[], dest='exclude', action='append',
help='Locales to exclude. Default is none. Can be used multiple times.')
parser.add_argument('--domain', '-d', default='django', dest='domain',
help='The domain of the message files (default: "django").')
parser.add_argument('--all', '-a', action='store_true', dest='all',
default=False, help='Updates the message files for all existing locales.')
parser.add_argument('--extension', '-e', dest='extensions',
help='The file extension(s) to examine (default: "html,txt", or "js" '
'if the domain is "djangojs"). Separate multiple extensions with '
'commas, or use -e multiple times.',
action='append')
parser.add_argument('--symlinks', '-s', action='store_true', dest='symlinks',
default=False, help='Follows symlinks to directories when examining '
'source code and templates for translation strings.')
parser.add_argument('--ignore', '-i', action='append', dest='ignore_patterns',
default=[], metavar='PATTERN',
help='Ignore files or directories matching this glob-style pattern. '
'Use multiple times to ignore more.')
parser.add_argument('--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
default=True, help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.")
parser.add_argument('--no-wrap', action='store_true', dest='no_wrap',
default=False, help="Don't break long message lines into several lines.")
parser.add_argument('--no-location', action='store_true', dest='no_location',
default=False, help="Don't write '#: filename:line' lines.")
parser.add_argument('--no-obsolete', action='store_true', dest='no_obsolete',
default=False, help="Remove obsolete message strings.")
parser.add_argument('--keep-pot', action='store_true', dest='keep_pot',
default=False, help="Keep .pot file after making messages. Useful when debugging.")
def handle(self, *args, **options):
locale = options.get('locale')
exclude = options.get('exclude')
self.domain = options.get('domain')
self.verbosity = options.get('verbosity')
process_all = options.get('all')
extensions = options.get('extensions')
self.symlinks = options.get('symlinks')
# Need to ensure that the i18n framework is enabled
if settings.configured:
settings.USE_I18N = True
else:
settings.configure(USE_I18N=True)
ignore_patterns = options.get('ignore_patterns')
if options.get('use_default_ignore_patterns'):
ignore_patterns += ['CVS', '.*', '*~', '*.pyc']
self.ignore_patterns = list(set(ignore_patterns))
# Avoid messing with mutable class variables
if options.get('no_wrap'):
self.msgmerge_options = self.msgmerge_options[:] + ['--no-wrap']
self.msguniq_options = self.msguniq_options[:] + ['--no-wrap']
self.msgattrib_options = self.msgattrib_options[:] + ['--no-wrap']
self.xgettext_options = self.xgettext_options[:] + ['--no-wrap']
if options.get('no_location'):
self.msgmerge_options = self.msgmerge_options[:] + ['--no-location']
self.msguniq_options = self.msguniq_options[:] + ['--no-location']
self.msgattrib_options = self.msgattrib_options[:] + ['--no-location']
self.xgettext_options = self.xgettext_options[:] + ['--no-location']
self.no_obsolete = options.get('no_obsolete')
self.keep_pot = options.get('keep_pot')
if self.domain not in ('django', 'djangojs'):
raise CommandError("currently makemessages only supports domains "
"'django' and 'djangojs'")
if self.domain == 'djangojs':
exts = extensions if extensions else ['js']
else:
exts = extensions if extensions else ['html', 'txt', 'py']
self.extensions = handle_extensions(exts)
if (locale is None and not exclude and not process_all) or self.domain is None:
raise CommandError("Type '%s help %s' for usage information." % (
os.path.basename(sys.argv[0]), sys.argv[1]))
if self.verbosity > 1:
self.stdout.write('examining files with the extensions: %s\n'
% get_text_list(list(self.extensions), 'and'))
self.invoked_for_django = False
self.locale_paths = []
self.default_locale_path = None
if os.path.isdir(os.path.join('conf', 'locale')):
self.locale_paths = [os.path.abspath(os.path.join('conf', 'locale'))]
self.default_locale_path = self.locale_paths[0]
self.invoked_for_django = True
else:
self.locale_paths.extend(list(settings.LOCALE_PATHS))
# Allow to run makemessages inside an app dir
if os.path.isdir('locale'):
self.locale_paths.append(os.path.abspath('locale'))
if self.locale_paths:
self.default_locale_path = self.locale_paths[0]
if not os.path.exists(self.default_locale_path):
os.makedirs(self.default_locale_path)
# Build locale list
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % self.default_locale_path))
all_locales = map(os.path.basename, locale_dirs)
# Account for excluded locales
if process_all:
locales = all_locales
else:
locales = locale or all_locales
locales = set(locales) - set(exclude)
if locales:
check_programs('msguniq', 'msgmerge', 'msgattrib')
check_programs('xgettext')
try:
potfiles = self.build_potfiles()
# Build po files for each selected locale
for locale in locales:
if self.verbosity > 0:
self.stdout.write("processing locale %s\n" % locale)
for potfile in potfiles:
self.write_po_file(potfile, locale)
finally:
if not self.keep_pot:
self.remove_potfiles()
@cached_property
def gettext_version(self):
# Gettext tools will output system-encoded bytestrings instead of UTF-8,
# when looking up the version. It's especially a problem on Windows.
out, err, status = gettext_popen_wrapper(
['xgettext', '--version'],
stdout_encoding=DEFAULT_LOCALE_ENCODING,
)
m = re.search(r'(\d+)\.(\d+)\.?(\d+)?', out)
if m:
return tuple(int(d) for d in m.groups() if d is not None)
else:
raise CommandError("Unable to get gettext version. Is it installed?")
def build_potfiles(self):
"""
Build pot files and apply msguniq to them.
"""
file_list = self.find_files(".")
self.remove_potfiles()
for f in file_list:
try:
f.process(self, self.domain)
except UnicodeDecodeError as e:
self.stdout.write(
"UnicodeDecodeError: skipped file %s in %s (reason: %s)" % (
f.file,
f.dirpath,
e,
)
)
potfiles = []
for path in self.locale_paths:
potfile = os.path.join(path, '%s.pot' % str(self.domain))
if not os.path.exists(potfile):
|
args = ['msguniq'] + self.msguniq_options + [potfile]
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msguniq\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
with io.open(potfile, 'w', encoding='utf-8') as fp:
fp.write(msgs)
potfiles.append(potfile)
return potfiles
def remove_potfiles(self):
for path in self.locale_paths:
pot_path = os.path.join(path, '%s.pot' % str(self.domain))
if os.path.exists(pot_path):
os.unlink(pot_path)
def find_files(self, root):
"""
Helper method to get all files in the given root. Also check that there
is a matching locale dir for each file.
"""
def is_ignored(path, ignore_patterns):
"""
Check if the given path should be ignored or not.
"""
filename = os.path.basename(path)
ignore = lambda pattern: (fnmatch.fnmatchcase(filename, pattern) or
fnmatch.fnmatchcase(path, pattern))
return any(ignore(pattern) for pattern in ignore_patterns)
ignore_patterns = [os.path.normcase(p) for p in self.ignore_patterns]
dir_suffixes = {'%s*' % path_sep for path_sep in {'/', os.sep}}
norm_patterns = []
for p in ignore_patterns:
for dir_suffix in dir_suffixes:
if p.endswith(dir_suffix):
norm_patterns.append(p[:-len(dir_suffix)])
break
else:
norm_patterns.append(p)
all_files = []
ignored_roots = [os.path.normpath(p) for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT) if p]
for dirpath, dirnames, filenames in os.walk(root, topdown=True, followlinks=self.symlinks):
for dirname in dirnames[:]:
if (is_ignored(os.path.normpath(os.path.join(dirpath, dirname)), norm_patterns) or
os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots):
dirnames.remove(dirname)
if self.verbosity > 1:
self.stdout.write('ignoring directory %s\n' % dirname)
elif dirname == 'locale':
dirnames.remove(dirname)
self.locale_paths.insert(0, os.path.join(os.path.abspath(dirpath), dirname))
for filename in filenames:
file_path = os.path.normpath(os.path.join(dirpath, filename))
file_ext = os.path.splitext(filename)[1]
if file_ext not in self.extensions or is_ignored(file_path, self.ignore_patterns):
if self.verbosity > 1:
self.stdout.write('ignoring file %s in %s\n' % (filename, dirpath))
else:
locale_dir = None
for path in self.locale_paths:
if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
locale_dir = path
break
if not locale_dir:
locale_dir = self.default_locale_path
if not locale_dir:
raise CommandError(
"Unable to find a locale path to store translations for file %s" % file_path)
all_files.append(TranslatableFile(dirpath, filename, locale_dir))
return sorted(all_files)
def write_po_file(self, potfile, locale):
"""
Creates or updates the PO file for self.domain and :param locale:.
Uses contents of the existing :param potfile:.
Uses msgmerge, and msgattrib GNU gettext utilities.
"""
basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES')
if not os.path.isdir(basedir):
os.makedirs(basedir)
pofile = os.path.join(basedir, '%s.po' % str(self.domain))
if os.path.exists(pofile):
args = ['msgmerge'] + self.msgmerge_options + [pofile, potfile]
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msgmerge\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
else:
with io.open(potfile, 'r', encoding='utf-8') as fp:
msgs = fp.read()
if not self.invoked_for_django:
msgs = self.copy_plural_forms(msgs, locale)
msgs = msgs.replace(
"#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "")
with io.open(pofile, 'w', encoding='utf-8') as fp:
fp.write(msgs)
if self.no_obsolete:
args = ['msgattrib'] + self.msgattrib_options + ['-o', pofile, pofile]
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msgattrib\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
def copy_plural_forms(self, msgs, locale):
"""
Copies plural forms header contents from a Django catalog of locale to
the msgs string, inserting it at the right place. msgs should be the
contents of a newly created .po file.
"""
django_dir = os.path.normpath(os.path.join(os.path.dirname(upath(django.__file__))))
if self.domain == 'djangojs':
domains = ('djangojs', 'django')
else:
domains = ('django',)
for domain in domains:
django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
if os.path.exists(django_po):
with io.open(django_po, 'r', encoding='utf-8') as fp:
m = plural_forms_re.search(fp.read())
if m:
plural_form_line = force_str(m.group('value'))
if self.verbosity > 1:
self.stdout.write("copying plural forms: %s\n" % plural_form_line)
lines = []
found = False
for line in msgs.split('\n'):
if not found and (not line or plural_forms_re.search(line)):
line = '%s\n' % plural_form_line
found = True
lines.append(line)
msgs = '\n'.join(lines)
break
return msgs
| continue | conditional_block |
makemessages.py | from __future__ import unicode_literals
import fnmatch
import glob
import io
import os
import re
import sys
from itertools import dropwhile
import django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import (
find_command, handle_extensions, popen_wrapper,
)
from django.utils import six
from django.utils._os import upath
from django.utils.encoding import DEFAULT_LOCALE_ENCODING, force_str
from django.utils.functional import cached_property, total_ordering
from django.utils.jslex import prepare_js_for_gettext
from django.utils.text import get_text_list
plural_forms_re = re.compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
STATUS_OK = 0
def check_programs(*programs):
for program in programs:
if find_command(program) is None:
raise CommandError("Can't find %s. Make sure you have GNU "
"gettext tools 0.15 or newer installed." % program)
def gettext_popen_wrapper(args, os_err_exc_type=CommandError, stdout_encoding="utf-8"):
"""
Makes sure text obtained from stdout of gettext utilities is Unicode.
"""
stdout, stderr, status_code = popen_wrapper(args, os_err_exc_type=os_err_exc_type)
if os.name == 'nt' and six.PY3 and stdout_encoding != DEFAULT_LOCALE_ENCODING:
# This looks weird because it's undoing what
# subprocess.Popen(universal_newlines=True).communicate()
# does when capturing PO files contents from stdout of gettext command
# line programs. No need to do anything on Python 2 because it's
# already a byte-string there (#23271).
stdout = stdout.encode(DEFAULT_LOCALE_ENCODING).decode(stdout_encoding)
if six.PY2:
stdout = stdout.decode(stdout_encoding)
return stdout, stderr, status_code
@total_ordering
class TranslatableFile(object):
def __init__(self, dirpath, file_name, locale_dir):
self.file = file_name
self.dirpath = dirpath
self.locale_dir = locale_dir
def __repr__(self):
return "<TranslatableFile: %s>" % os.sep.join([self.dirpath, self.file])
def __eq__(self, other):
return self.path == other.path
def __lt__(self, other):
return self.path < other.path
@property
def path(self):
return os.path.join(self.dirpath, self.file)
def process(self, command, domain):
"""
Extract translatable literals from self.file for :param domain:,
creating or updating the POT file.
Uses the xgettext GNU gettext utility.
"""
from django.utils.translation import templatize
if command.verbosity > 1:
command.stdout.write('processing file %s in %s\n' % (self.file, self.dirpath))
file_ext = os.path.splitext(self.file)[1]
if domain == 'djangojs':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = command.gettext_version < (0, 18, 3)
if is_templatized:
with io.open(orig_file, 'r', encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
src_data = prepare_js_for_gettext(src_data)
work_file = os.path.join(self.dirpath, '%s.c' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(src_data)
args = [
'xgettext',
'-d', domain,
'--language=%s' % ('C' if is_templatized else 'JavaScript',),
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
elif domain == 'django':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = file_ext != '.py'
if is_templatized:
with io.open(orig_file, encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
content = templatize(src_data, orig_file[2:])
work_file = os.path.join(self.dirpath, '%s.py' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(content)
args = [
'xgettext',
'-d', domain,
'--language=Python',
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=ugettext_noop',
'--keyword=ugettext_lazy',
'--keyword=ungettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--keyword=pgettext_lazy:1c,2',
'--keyword=npgettext_lazy:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
else:
return
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
if is_templatized:
os.unlink(work_file)
raise CommandError(
"errors happened while running xgettext on %s\n%s" %
(self.file, errors))
elif command.verbosity > 0:
# Print warnings
command.stdout.write(errors)
if msgs:
# Write/append messages to pot file
potfile = os.path.join(self.locale_dir, '%s.pot' % str(domain))
if is_templatized:
# Remove '.py' suffix
if os.name == 'nt':
# Preserve '.\' prefix on Windows to respect gettext behavior
old = '#: ' + work_file
new = '#: ' + orig_file
else:
old = '#: ' + work_file[2:]
new = '#: ' + orig_file[2:]
msgs = msgs.replace(old, new)
write_pot_file(potfile, msgs)
if is_templatized:
os.unlink(work_file)
def write_pot_file(potfile, msgs):
"""
Write the :param potfile: POT file with the :param msgs: contents,
previously making sure its format is valid.
"""
if os.path.exists(potfile):
# Strip the header
msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
else:
msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
with io.open(potfile, 'a', encoding='utf-8') as fp:
fp.write(msgs)
class Command(BaseCommand):
help = ("Runs over the entire source tree of the current directory and "
"pulls out all strings marked for translation. It creates (or updates) a message "
"file in the conf/locale (in the django tree) or locale (for projects and "
"applications) directory.\n\nYou must run this command with one of either the "
"--locale, --exclude or --all options.")
requires_system_checks = False
leave_locale_alone = True
msgmerge_options = ['-q', '--previous']
msguniq_options = ['--to-code=utf-8']
msgattrib_options = ['--no-obsolete']
xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators']
def add_arguments(self, parser):
parser.add_argument('--locale', '-l', default=[], dest='locale', action='append',
help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). '
'Can be used multiple times.')
parser.add_argument('--exclude', '-x', default=[], dest='exclude', action='append',
help='Locales to exclude. Default is none. Can be used multiple times.')
parser.add_argument('--domain', '-d', default='django', dest='domain',
help='The domain of the message files (default: "django").')
parser.add_argument('--all', '-a', action='store_true', dest='all',
default=False, help='Updates the message files for all existing locales.')
parser.add_argument('--extension', '-e', dest='extensions',
help='The file extension(s) to examine (default: "html,txt", or "js" '
'if the domain is "djangojs"). Separate multiple extensions with '
'commas, or use -e multiple times.',
action='append')
parser.add_argument('--symlinks', '-s', action='store_true', dest='symlinks',
default=False, help='Follows symlinks to directories when examining '
'source code and templates for translation strings.')
parser.add_argument('--ignore', '-i', action='append', dest='ignore_patterns',
default=[], metavar='PATTERN',
help='Ignore files or directories matching this glob-style pattern. '
'Use multiple times to ignore more.')
parser.add_argument('--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
default=True, help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.")
parser.add_argument('--no-wrap', action='store_true', dest='no_wrap',
default=False, help="Don't break long message lines into several lines.")
parser.add_argument('--no-location', action='store_true', dest='no_location',
default=False, help="Don't write '#: filename:line' lines.")
parser.add_argument('--no-obsolete', action='store_true', dest='no_obsolete',
default=False, help="Remove obsolete message strings.")
parser.add_argument('--keep-pot', action='store_true', dest='keep_pot',
default=False, help="Keep .pot file after making messages. Useful when debugging.")
def handle(self, *args, **options):
locale = options.get('locale')
exclude = options.get('exclude')
self.domain = options.get('domain')
self.verbosity = options.get('verbosity')
process_all = options.get('all')
extensions = options.get('extensions')
self.symlinks = options.get('symlinks')
# Need to ensure that the i18n framework is enabled
if settings.configured:
settings.USE_I18N = True
else:
settings.configure(USE_I18N=True)
ignore_patterns = options.get('ignore_patterns')
if options.get('use_default_ignore_patterns'):
ignore_patterns += ['CVS', '.*', '*~', '*.pyc']
self.ignore_patterns = list(set(ignore_patterns))
# Avoid messing with mutable class variables
if options.get('no_wrap'):
self.msgmerge_options = self.msgmerge_options[:] + ['--no-wrap']
self.msguniq_options = self.msguniq_options[:] + ['--no-wrap']
self.msgattrib_options = self.msgattrib_options[:] + ['--no-wrap']
self.xgettext_options = self.xgettext_options[:] + ['--no-wrap']
if options.get('no_location'):
self.msgmerge_options = self.msgmerge_options[:] + ['--no-location']
self.msguniq_options = self.msguniq_options[:] + ['--no-location']
self.msgattrib_options = self.msgattrib_options[:] + ['--no-location']
self.xgettext_options = self.xgettext_options[:] + ['--no-location']
self.no_obsolete = options.get('no_obsolete')
self.keep_pot = options.get('keep_pot')
if self.domain not in ('django', 'djangojs'):
raise CommandError("currently makemessages only supports domains "
"'django' and 'djangojs'")
if self.domain == 'djangojs':
exts = extensions if extensions else ['js']
else:
exts = extensions if extensions else ['html', 'txt', 'py']
self.extensions = handle_extensions(exts)
if (locale is None and not exclude and not process_all) or self.domain is None:
raise CommandError("Type '%s help %s' for usage information." % (
os.path.basename(sys.argv[0]), sys.argv[1]))
if self.verbosity > 1:
self.stdout.write('examining files with the extensions: %s\n'
% get_text_list(list(self.extensions), 'and'))
self.invoked_for_django = False
self.locale_paths = []
self.default_locale_path = None
if os.path.isdir(os.path.join('conf', 'locale')):
self.locale_paths = [os.path.abspath(os.path.join('conf', 'locale'))]
self.default_locale_path = self.locale_paths[0]
self.invoked_for_django = True
else:
self.locale_paths.extend(list(settings.LOCALE_PATHS))
# Allow to run makemessages inside an app dir
if os.path.isdir('locale'):
self.locale_paths.append(os.path.abspath('locale'))
if self.locale_paths:
self.default_locale_path = self.locale_paths[0]
if not os.path.exists(self.default_locale_path):
os.makedirs(self.default_locale_path)
# Build locale list
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % self.default_locale_path))
all_locales = map(os.path.basename, locale_dirs)
# Account for excluded locales
if process_all:
locales = all_locales
else:
locales = locale or all_locales
locales = set(locales) - set(exclude)
if locales:
check_programs('msguniq', 'msgmerge', 'msgattrib')
check_programs('xgettext')
try:
potfiles = self.build_potfiles()
# Build po files for each selected locale
for locale in locales:
if self.verbosity > 0:
self.stdout.write("processing locale %s\n" % locale)
for potfile in potfiles:
self.write_po_file(potfile, locale)
finally:
if not self.keep_pot:
self.remove_potfiles()
@cached_property
def gettext_version(self):
# Gettext tools will output system-encoded bytestrings instead of UTF-8,
# when looking up the version. It's especially a problem on Windows.
out, err, status = gettext_popen_wrapper(
['xgettext', '--version'],
stdout_encoding=DEFAULT_LOCALE_ENCODING,
)
m = re.search(r'(\d+)\.(\d+)\.?(\d+)?', out)
if m:
return tuple(int(d) for d in m.groups() if d is not None)
else:
raise CommandError("Unable to get gettext version. Is it installed?")
def build_potfiles(self):
"""
Build pot files and apply msguniq to them.
"""
file_list = self.find_files(".")
self.remove_potfiles()
for f in file_list:
try:
f.process(self, self.domain)
except UnicodeDecodeError as e:
self.stdout.write(
"UnicodeDecodeError: skipped file %s in %s (reason: %s)" % (
f.file,
f.dirpath,
e,
)
)
potfiles = []
for path in self.locale_paths:
potfile = os.path.join(path, '%s.pot' % str(self.domain))
if not os.path.exists(potfile):
continue
args = ['msguniq'] + self.msguniq_options + [potfile]
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msguniq\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
with io.open(potfile, 'w', encoding='utf-8') as fp:
fp.write(msgs)
potfiles.append(potfile)
return potfiles
def remove_potfiles(self):
for path in self.locale_paths:
pot_path = os.path.join(path, '%s.pot' % str(self.domain))
if os.path.exists(pot_path):
os.unlink(pot_path)
def find_files(self, root):
"""
Helper method to get all files in the given root. Also check that there
is a matching locale dir for each file.
"""
def is_ignored(path, ignore_patterns):
"""
Check if the given path should be ignored or not.
"""
filename = os.path.basename(path)
ignore = lambda pattern: (fnmatch.fnmatchcase(filename, pattern) or
fnmatch.fnmatchcase(path, pattern))
return any(ignore(pattern) for pattern in ignore_patterns)
ignore_patterns = [os.path.normcase(p) for p in self.ignore_patterns]
dir_suffixes = {'%s*' % path_sep for path_sep in {'/', os.sep}}
norm_patterns = []
for p in ignore_patterns:
for dir_suffix in dir_suffixes:
if p.endswith(dir_suffix):
norm_patterns.append(p[:-len(dir_suffix)])
break
else:
norm_patterns.append(p)
all_files = []
ignored_roots = [os.path.normpath(p) for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT) if p]
for dirpath, dirnames, filenames in os.walk(root, topdown=True, followlinks=self.symlinks):
for dirname in dirnames[:]:
if (is_ignored(os.path.normpath(os.path.join(dirpath, dirname)), norm_patterns) or
os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots):
dirnames.remove(dirname)
if self.verbosity > 1:
self.stdout.write('ignoring directory %s\n' % dirname)
elif dirname == 'locale':
dirnames.remove(dirname)
self.locale_paths.insert(0, os.path.join(os.path.abspath(dirpath), dirname))
for filename in filenames:
file_path = os.path.normpath(os.path.join(dirpath, filename))
file_ext = os.path.splitext(filename)[1]
if file_ext not in self.extensions or is_ignored(file_path, self.ignore_patterns):
if self.verbosity > 1:
self.stdout.write('ignoring file %s in %s\n' % (filename, dirpath))
else:
locale_dir = None
for path in self.locale_paths:
if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
locale_dir = path
break
if not locale_dir:
locale_dir = self.default_locale_path
if not locale_dir:
raise CommandError(
"Unable to find a locale path to store translations for file %s" % file_path)
all_files.append(TranslatableFile(dirpath, filename, locale_dir))
return sorted(all_files)
def | (self, potfile, locale):
"""
Creates or updates the PO file for self.domain and :param locale:.
Uses contents of the existing :param potfile:.
Uses msgmerge, and msgattrib GNU gettext utilities.
"""
basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES')
if not os.path.isdir(basedir):
os.makedirs(basedir)
pofile = os.path.join(basedir, '%s.po' % str(self.domain))
if os.path.exists(pofile):
args = ['msgmerge'] + self.msgmerge_options + [pofile, potfile]
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msgmerge\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
else:
with io.open(potfile, 'r', encoding='utf-8') as fp:
msgs = fp.read()
if not self.invoked_for_django:
msgs = self.copy_plural_forms(msgs, locale)
msgs = msgs.replace(
"#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "")
with io.open(pofile, 'w', encoding='utf-8') as fp:
fp.write(msgs)
if self.no_obsolete:
args = ['msgattrib'] + self.msgattrib_options + ['-o', pofile, pofile]
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msgattrib\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
def copy_plural_forms(self, msgs, locale):
"""
Copies plural forms header contents from a Django catalog of locale to
the msgs string, inserting it at the right place. msgs should be the
contents of a newly created .po file.
"""
django_dir = os.path.normpath(os.path.join(os.path.dirname(upath(django.__file__))))
if self.domain == 'djangojs':
domains = ('djangojs', 'django')
else:
domains = ('django',)
for domain in domains:
django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
if os.path.exists(django_po):
with io.open(django_po, 'r', encoding='utf-8') as fp:
m = plural_forms_re.search(fp.read())
if m:
plural_form_line = force_str(m.group('value'))
if self.verbosity > 1:
self.stdout.write("copying plural forms: %s\n" % plural_form_line)
lines = []
found = False
for line in msgs.split('\n'):
if not found and (not line or plural_forms_re.search(line)):
line = '%s\n' % plural_form_line
found = True
lines.append(line)
msgs = '\n'.join(lines)
break
return msgs
| write_po_file | identifier_name |
makemessages.py | from __future__ import unicode_literals
import fnmatch
import glob
import io
import os
import re
import sys
from itertools import dropwhile
import django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import (
find_command, handle_extensions, popen_wrapper,
)
from django.utils import six
from django.utils._os import upath
from django.utils.encoding import DEFAULT_LOCALE_ENCODING, force_str
from django.utils.functional import cached_property, total_ordering
from django.utils.jslex import prepare_js_for_gettext
from django.utils.text import get_text_list
# Matches the Plural-Forms header line of a PO file so it can be copied
# between catalogs (DOTALL/MULTILINE because the value may span wrapped lines).
plural_forms_re = re.compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
# Exit status returned by well-behaved gettext utilities on success.
STATUS_OK = 0
def check_programs(*programs):
    """
    Verify that every named gettext executable is available on PATH,
    raising CommandError for the first one that is missing.
    """
    missing = (p for p in programs if find_command(p) is None)
    for program in missing:
        raise CommandError("Can't find %s. Make sure you have GNU "
                           "gettext tools 0.15 or newer installed." % program)
def gettext_popen_wrapper(args, os_err_exc_type=CommandError, stdout_encoding="utf-8"):
    """
    Run a gettext command line via popen_wrapper and normalize its stdout to
    Unicode text, decoding with ``stdout_encoding``.
    """
    out, err, status = popen_wrapper(args, os_err_exc_type=os_err_exc_type)
    needs_reencode = (
        os.name == 'nt' and six.PY3 and stdout_encoding != DEFAULT_LOCALE_ENCODING
    )
    if needs_reencode:
        # On Windows/Python 3, Popen(universal_newlines=True).communicate()
        # has already decoded the bytes with the system locale; undo that and
        # decode with the encoding the gettext program actually emitted (#23271).
        out = out.encode(DEFAULT_LOCALE_ENCODING).decode(stdout_encoding)
    if six.PY2:
        # Python 2 hands back a byte string; decode it explicitly.
        out = out.decode(stdout_encoding)
    return out, err, status
@total_ordering
class TranslatableFile(object):
def __init__(self, dirpath, file_name, locale_dir):
self.file = file_name
self.dirpath = dirpath
self.locale_dir = locale_dir
def __repr__(self):
return "<TranslatableFile: %s>" % os.sep.join([self.dirpath, self.file])
def __eq__(self, other):
return self.path == other.path
def __lt__(self, other):
|
@property
def path(self):
return os.path.join(self.dirpath, self.file)
def process(self, command, domain):
"""
Extract translatable literals from self.file for :param domain:,
creating or updating the POT file.
Uses the xgettext GNU gettext utility.
"""
from django.utils.translation import templatize
if command.verbosity > 1:
command.stdout.write('processing file %s in %s\n' % (self.file, self.dirpath))
file_ext = os.path.splitext(self.file)[1]
if domain == 'djangojs':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = command.gettext_version < (0, 18, 3)
if is_templatized:
with io.open(orig_file, 'r', encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
src_data = prepare_js_for_gettext(src_data)
work_file = os.path.join(self.dirpath, '%s.c' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(src_data)
args = [
'xgettext',
'-d', domain,
'--language=%s' % ('C' if is_templatized else 'JavaScript',),
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
elif domain == 'django':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = file_ext != '.py'
if is_templatized:
with io.open(orig_file, encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
content = templatize(src_data, orig_file[2:])
work_file = os.path.join(self.dirpath, '%s.py' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(content)
args = [
'xgettext',
'-d', domain,
'--language=Python',
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=ugettext_noop',
'--keyword=ugettext_lazy',
'--keyword=ungettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--keyword=pgettext_lazy:1c,2',
'--keyword=npgettext_lazy:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
else:
return
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
if is_templatized:
os.unlink(work_file)
raise CommandError(
"errors happened while running xgettext on %s\n%s" %
(self.file, errors))
elif command.verbosity > 0:
# Print warnings
command.stdout.write(errors)
if msgs:
# Write/append messages to pot file
potfile = os.path.join(self.locale_dir, '%s.pot' % str(domain))
if is_templatized:
# Remove '.py' suffix
if os.name == 'nt':
# Preserve '.\' prefix on Windows to respect gettext behavior
old = '#: ' + work_file
new = '#: ' + orig_file
else:
old = '#: ' + work_file[2:]
new = '#: ' + orig_file[2:]
msgs = msgs.replace(old, new)
write_pot_file(potfile, msgs)
if is_templatized:
os.unlink(work_file)
def write_pot_file(potfile, msgs):
    """
    Write the :param potfile: POT file with the :param msgs: contents,
    previously making sure its format is valid.
    """
    if os.path.exists(potfile):
        # Strip the header
        # (an earlier chunk already wrote one; drop every leading non-empty
        # line of the new chunk so headers are not duplicated on append)
        msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
    else:
        # First chunk for this POT file: fix xgettext's placeholder charset.
        msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
    with io.open(potfile, 'a', encoding='utf-8') as fp:
        fp.write(msgs)
class Command(BaseCommand):
    # User-facing description shown by `manage.py help makemessages`.
    help = ("Runs over the entire source tree of the current directory and "
            "pulls out all strings marked for translation. It creates (or updates) a message "
            "file in the conf/locale (in the django tree) or locale (for projects and "
            "applications) directory.\n\nYou must run this command with one of either the "
            "--locale, --exclude or --all options.")

    requires_system_checks = False
    # String extraction must not switch the active locale while running.
    leave_locale_alone = True

    # Base flags for the gettext utilities. handle() copies these before
    # appending per-invocation flags, so the class attributes stay pristine.
    msgmerge_options = ['-q', '--previous']
    msguniq_options = ['--to-code=utf-8']
    msgattrib_options = ['--no-obsolete']
    xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators']

    def add_arguments(self, parser):
        # Register every command line option this command understands.
        parser.add_argument('--locale', '-l', default=[], dest='locale', action='append',
            help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). '
                 'Can be used multiple times.')
        parser.add_argument('--exclude', '-x', default=[], dest='exclude', action='append',
            help='Locales to exclude. Default is none. Can be used multiple times.')
        parser.add_argument('--domain', '-d', default='django', dest='domain',
            help='The domain of the message files (default: "django").')
        parser.add_argument('--all', '-a', action='store_true', dest='all',
            default=False, help='Updates the message files for all existing locales.')
        parser.add_argument('--extension', '-e', dest='extensions',
            help='The file extension(s) to examine (default: "html,txt", or "js" '
                 'if the domain is "djangojs"). Separate multiple extensions with '
                 'commas, or use -e multiple times.',
            action='append')
        parser.add_argument('--symlinks', '-s', action='store_true', dest='symlinks',
            default=False, help='Follows symlinks to directories when examining '
                                'source code and templates for translation strings.')
        parser.add_argument('--ignore', '-i', action='append', dest='ignore_patterns',
            default=[], metavar='PATTERN',
            help='Ignore files or directories matching this glob-style pattern. '
                 'Use multiple times to ignore more.')
        parser.add_argument('--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
            default=True, help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.")
        parser.add_argument('--no-wrap', action='store_true', dest='no_wrap',
            default=False, help="Don't break long message lines into several lines.")
        parser.add_argument('--no-location', action='store_true', dest='no_location',
            default=False, help="Don't write '#: filename:line' lines.")
        parser.add_argument('--no-obsolete', action='store_true', dest='no_obsolete',
            default=False, help="Remove obsolete message strings.")
        parser.add_argument('--keep-pot', action='store_true', dest='keep_pot',
            default=False, help="Keep .pot file after making messages. Useful when debugging.")

    def handle(self, *args, **options):
        # Entry point: validate options, locate locale dirs, build the POT
        # files, then write one PO file per selected locale.
        locale = options.get('locale')
        exclude = options.get('exclude')
        self.domain = options.get('domain')
        self.verbosity = options.get('verbosity')
        process_all = options.get('all')
        extensions = options.get('extensions')
        self.symlinks = options.get('symlinks')
        # Need to ensure that the i18n framework is enabled
        if settings.configured:
            settings.USE_I18N = True
        else:
            settings.configure(USE_I18N=True)
        ignore_patterns = options.get('ignore_patterns')
        if options.get('use_default_ignore_patterns'):
            ignore_patterns += ['CVS', '.*', '*~', '*.pyc']
        # De-duplicate patterns supplied more than once.
        self.ignore_patterns = list(set(ignore_patterns))
        # Avoid messing with mutable class variables
        if options.get('no_wrap'):
            self.msgmerge_options = self.msgmerge_options[:] + ['--no-wrap']
            self.msguniq_options = self.msguniq_options[:] + ['--no-wrap']
            self.msgattrib_options = self.msgattrib_options[:] + ['--no-wrap']
            self.xgettext_options = self.xgettext_options[:] + ['--no-wrap']
        if options.get('no_location'):
            self.msgmerge_options = self.msgmerge_options[:] + ['--no-location']
            self.msguniq_options = self.msguniq_options[:] + ['--no-location']
            self.msgattrib_options = self.msgattrib_options[:] + ['--no-location']
            self.xgettext_options = self.xgettext_options[:] + ['--no-location']
        self.no_obsolete = options.get('no_obsolete')
        self.keep_pot = options.get('keep_pot')
        if self.domain not in ('django', 'djangojs'):
            raise CommandError("currently makemessages only supports domains "
                               "'django' and 'djangojs'")
        if self.domain == 'djangojs':
            exts = extensions if extensions else ['js']
        else:
            exts = extensions if extensions else ['html', 'txt', 'py']
        self.extensions = handle_extensions(exts)
        if (locale is None and not exclude and not process_all) or self.domain is None:
            raise CommandError("Type '%s help %s' for usage information." % (
                os.path.basename(sys.argv[0]), sys.argv[1]))
        if self.verbosity > 1:
            self.stdout.write('examining files with the extensions: %s\n'
                              % get_text_list(list(self.extensions), 'and'))
        self.invoked_for_django = False
        self.locale_paths = []
        self.default_locale_path = None
        if os.path.isdir(os.path.join('conf', 'locale')):
            # Running inside the Django source tree itself.
            self.locale_paths = [os.path.abspath(os.path.join('conf', 'locale'))]
            self.default_locale_path = self.locale_paths[0]
            self.invoked_for_django = True
        else:
            self.locale_paths.extend(list(settings.LOCALE_PATHS))
            # Allow to run makemessages inside an app dir
            if os.path.isdir('locale'):
                self.locale_paths.append(os.path.abspath('locale'))
            if self.locale_paths:
                self.default_locale_path = self.locale_paths[0]
                if not os.path.exists(self.default_locale_path):
                    os.makedirs(self.default_locale_path)
        # Build locale list
        locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % self.default_locale_path))
        all_locales = map(os.path.basename, locale_dirs)
        # Account for excluded locales
        if process_all:
            locales = all_locales
        else:
            locales = locale or all_locales
        locales = set(locales) - set(exclude)
        if locales:
            # These tools are only needed when PO files will be written.
            check_programs('msguniq', 'msgmerge', 'msgattrib')
        check_programs('xgettext')
        try:
            potfiles = self.build_potfiles()
            # Build po files for each selected locale
            for locale in locales:
                if self.verbosity > 0:
                    self.stdout.write("processing locale %s\n" % locale)
                for potfile in potfiles:
                    self.write_po_file(potfile, locale)
        finally:
            # POT files are intermediate artifacts unless --keep-pot was given.
            if not self.keep_pot:
                self.remove_potfiles()

    @cached_property
    def gettext_version(self):
        # Gettext tools will output system-encoded bytestrings instead of UTF-8,
        # when looking up the version. It's especially a problem on Windows.
        out, err, status = gettext_popen_wrapper(
            ['xgettext', '--version'],
            stdout_encoding=DEFAULT_LOCALE_ENCODING,
        )
        m = re.search(r'(\d+)\.(\d+)\.?(\d+)?', out)
        if m:
            # e.g. "0.19.3" -> (0, 19, 3); the patch component is optional.
            return tuple(int(d) for d in m.groups() if d is not None)
        else:
            raise CommandError("Unable to get gettext version. Is it installed?")

    def build_potfiles(self):
        """
        Build pot files and apply msguniq to them.
        """
        file_list = self.find_files(".")
        self.remove_potfiles()
        for f in file_list:
            try:
                f.process(self, self.domain)
            except UnicodeDecodeError as e:
                # An undecodable source file is skipped, not fatal.
                self.stdout.write(
                    "UnicodeDecodeError: skipped file %s in %s (reason: %s)" % (
                        f.file,
                        f.dirpath,
                        e,
                    )
                )
        potfiles = []
        for path in self.locale_paths:
            potfile = os.path.join(path, '%s.pot' % str(self.domain))
            if not os.path.exists(potfile):
                continue
            args = ['msguniq'] + self.msguniq_options + [potfile]
            msgs, errors, status = gettext_popen_wrapper(args)
            if errors:
                if status != STATUS_OK:
                    raise CommandError(
                        "errors happened while running msguniq\n%s" % errors)
                elif self.verbosity > 0:
                    # Non-fatal gettext warnings are surfaced to the user.
                    self.stdout.write(errors)
            with io.open(potfile, 'w', encoding='utf-8') as fp:
                fp.write(msgs)
            potfiles.append(potfile)
        return potfiles

    def remove_potfiles(self):
        # Delete the per-domain .pot file from every known locale path.
        for path in self.locale_paths:
            pot_path = os.path.join(path, '%s.pot' % str(self.domain))
            if os.path.exists(pot_path):
                os.unlink(pot_path)

    def find_files(self, root):
        """
        Helper method to get all files in the given root. Also check that there
        is a matching locale dir for each file.
        """
        def is_ignored(path, ignore_patterns):
            """
            Check if the given path should be ignored or not.
            """
            filename = os.path.basename(path)
            ignore = lambda pattern: (fnmatch.fnmatchcase(filename, pattern) or
                                      fnmatch.fnmatchcase(path, pattern))
            return any(ignore(pattern) for pattern in ignore_patterns)

        ignore_patterns = [os.path.normcase(p) for p in self.ignore_patterns]
        # Patterns ending in a path separator + '*' match whole directories;
        # strip that suffix so the directory itself can be pruned below.
        dir_suffixes = {'%s*' % path_sep for path_sep in {'/', os.sep}}
        norm_patterns = []
        for p in ignore_patterns:
            for dir_suffix in dir_suffixes:
                if p.endswith(dir_suffix):
                    norm_patterns.append(p[:-len(dir_suffix)])
                    break
            else:
                norm_patterns.append(p)

        all_files = []
        ignored_roots = [os.path.normpath(p) for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT) if p]
        for dirpath, dirnames, filenames in os.walk(root, topdown=True, followlinks=self.symlinks):
            # Iterate over a copy since dirnames is mutated in place to prune
            # the walk (topdown=True makes os.walk honor the removal).
            for dirname in dirnames[:]:
                if (is_ignored(os.path.normpath(os.path.join(dirpath, dirname)), norm_patterns) or
                        os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots):
                    dirnames.remove(dirname)
                    if self.verbosity > 1:
                        self.stdout.write('ignoring directory %s\n' % dirname)
                elif dirname == 'locale':
                    # Register discovered locale dirs (highest priority first).
                    dirnames.remove(dirname)
                    self.locale_paths.insert(0, os.path.join(os.path.abspath(dirpath), dirname))
            for filename in filenames:
                file_path = os.path.normpath(os.path.join(dirpath, filename))
                file_ext = os.path.splitext(filename)[1]
                if file_ext not in self.extensions or is_ignored(file_path, self.ignore_patterns):
                    if self.verbosity > 1:
                        self.stdout.write('ignoring file %s in %s\n' % (filename, dirpath))
                else:
                    # Pair each translatable file with the locale dir that
                    # should receive its extracted strings.
                    locale_dir = None
                    for path in self.locale_paths:
                        if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
                            locale_dir = path
                            break
                    if not locale_dir:
                        locale_dir = self.default_locale_path
                    if not locale_dir:
                        raise CommandError(
                            "Unable to find a locale path to store translations for file %s" % file_path)
                    all_files.append(TranslatableFile(dirpath, filename, locale_dir))
        return sorted(all_files)

    def write_po_file(self, potfile, locale):
        """
        Creates or updates the PO file for self.domain and :param locale:.
        Uses contents of the existing :param potfile:.
        Uses msgmerge, and msgattrib GNU gettext utilities.
        """
        basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES')
        if not os.path.isdir(basedir):
            os.makedirs(basedir)
        pofile = os.path.join(basedir, '%s.po' % str(self.domain))
        if os.path.exists(pofile):
            # Existing catalog: merge new strings while keeping translations.
            args = ['msgmerge'] + self.msgmerge_options + [pofile, potfile]
            msgs, errors, status = gettext_popen_wrapper(args)
            if errors:
                if status != STATUS_OK:
                    raise CommandError(
                        "errors happened while running msgmerge\n%s" % errors)
                elif self.verbosity > 0:
                    self.stdout.write(errors)
        else:
            # New catalog: start from the POT contents.
            with io.open(potfile, 'r', encoding='utf-8') as fp:
                msgs = fp.read()
            if not self.invoked_for_django:
                msgs = self.copy_plural_forms(msgs, locale)
        # Drop the msgcat-style chunk separator that msgmerge may leave behind.
        msgs = msgs.replace(
            "#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "")
        with io.open(pofile, 'w', encoding='utf-8') as fp:
            fp.write(msgs)
        if self.no_obsolete:
            # Rewrite the PO file in place without obsolete (#~) entries.
            args = ['msgattrib'] + self.msgattrib_options + ['-o', pofile, pofile]
            msgs, errors, status = gettext_popen_wrapper(args)
            if errors:
                if status != STATUS_OK:
                    raise CommandError(
                        "errors happened while running msgattrib\n%s" % errors)
                elif self.verbosity > 0:
                    self.stdout.write(errors)

    def copy_plural_forms(self, msgs, locale):
        """
        Copies plural forms header contents from a Django catalog of locale to
        the msgs string, inserting it at the right place. msgs should be the
        contents of a newly created .po file.
        """
        django_dir = os.path.normpath(os.path.join(os.path.dirname(upath(django.__file__))))
        if self.domain == 'djangojs':
            # Fall back to the 'django' catalog if no 'djangojs' one exists.
            domains = ('djangojs', 'django')
        else:
            domains = ('django',)
        for domain in domains:
            django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
            if os.path.exists(django_po):
                with io.open(django_po, 'r', encoding='utf-8') as fp:
                    m = plural_forms_re.search(fp.read())
                if m:
                    plural_form_line = force_str(m.group('value'))
                    if self.verbosity > 1:
                        self.stdout.write("copying plural forms: %s\n" % plural_form_line)
                    lines = []
                    found = False
                    # Replace the first blank or Plural-Forms line with the
                    # copied header, leaving the rest of the file untouched.
                    for line in msgs.split('\n'):
                        if not found and (not line or plural_forms_re.search(line)):
                            line = '%s\n' % plural_form_line
                            found = True
                        lines.append(line)
                    msgs = '\n'.join(lines)
                    break
        return msgs
| return self.path < other.path | identifier_body |
makemessages.py | from __future__ import unicode_literals
import fnmatch
import glob
import io
import os
import re
import sys
from itertools import dropwhile
import django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import (
find_command, handle_extensions, popen_wrapper,
)
from django.utils import six
from django.utils._os import upath
from django.utils.encoding import DEFAULT_LOCALE_ENCODING, force_str
from django.utils.functional import cached_property, total_ordering
from django.utils.jslex import prepare_js_for_gettext
from django.utils.text import get_text_list
# Matches the Plural-Forms header line of a PO file so it can be copied
# between catalogs (DOTALL/MULTILINE because the value may span wrapped lines).
plural_forms_re = re.compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
# Exit status returned by well-behaved gettext utilities on success.
STATUS_OK = 0
def check_programs(*programs):
    # Fail fast if any required GNU gettext executable is missing from PATH.
    for program in programs:
        if find_command(program) is None:
            raise CommandError("Can't find %s. Make sure you have GNU "
                               "gettext tools 0.15 or newer installed." % program)
def gettext_popen_wrapper(args, os_err_exc_type=CommandError, stdout_encoding="utf-8"):
    """
    Makes sure text obtained from stdout of gettext utilities is Unicode.
    """
    stdout, stderr, status_code = popen_wrapper(args, os_err_exc_type=os_err_exc_type)
    if os.name == 'nt' and six.PY3 and stdout_encoding != DEFAULT_LOCALE_ENCODING:
        # This looks weird because it's undoing what
        # subprocess.Popen(universal_newlines=True).communicate()
        # does when capturing PO files contents from stdout of gettext command
        # line programs. No need to do anything on Python 2 because it's
        # already a byte-string there (#23271).
        stdout = stdout.encode(DEFAULT_LOCALE_ENCODING).decode(stdout_encoding)
    if six.PY2:
        # Python 2 hands back bytes from popen; decode them explicitly.
        stdout = stdout.decode(stdout_encoding)
    return stdout, stderr, status_code
@total_ordering
class TranslatableFile(object):
def __init__(self, dirpath, file_name, locale_dir):
self.file = file_name
self.dirpath = dirpath
self.locale_dir = locale_dir
def __repr__(self):
return "<TranslatableFile: %s>" % os.sep.join([self.dirpath, self.file])
def __eq__(self, other):
return self.path == other.path
def __lt__(self, other):
return self.path < other.path
@property
def path(self):
return os.path.join(self.dirpath, self.file)
def process(self, command, domain):
"""
Extract translatable literals from self.file for :param domain:,
creating or updating the POT file.
Uses the xgettext GNU gettext utility.
"""
from django.utils.translation import templatize
if command.verbosity > 1:
command.stdout.write('processing file %s in %s\n' % (self.file, self.dirpath))
file_ext = os.path.splitext(self.file)[1]
if domain == 'djangojs':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = command.gettext_version < (0, 18, 3)
if is_templatized:
with io.open(orig_file, 'r', encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
src_data = prepare_js_for_gettext(src_data)
work_file = os.path.join(self.dirpath, '%s.c' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(src_data)
args = [
'xgettext',
'-d', domain,
'--language=%s' % ('C' if is_templatized else 'JavaScript',),
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
elif domain == 'django':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = file_ext != '.py'
if is_templatized:
with io.open(orig_file, encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
content = templatize(src_data, orig_file[2:])
work_file = os.path.join(self.dirpath, '%s.py' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(content)
args = [
'xgettext',
'-d', domain,
'--language=Python',
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=ugettext_noop',
'--keyword=ugettext_lazy',
'--keyword=ungettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--keyword=pgettext_lazy:1c,2',
'--keyword=npgettext_lazy:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
else:
return
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
if is_templatized:
os.unlink(work_file)
raise CommandError(
"errors happened while running xgettext on %s\n%s" %
(self.file, errors))
elif command.verbosity > 0:
# Print warnings
command.stdout.write(errors)
if msgs:
# Write/append messages to pot file
potfile = os.path.join(self.locale_dir, '%s.pot' % str(domain))
if is_templatized:
# Remove '.py' suffix
if os.name == 'nt':
# Preserve '.\' prefix on Windows to respect gettext behavior
old = '#: ' + work_file
new = '#: ' + orig_file
else:
old = '#: ' + work_file[2:]
new = '#: ' + orig_file[2:]
msgs = msgs.replace(old, new)
write_pot_file(potfile, msgs)
if is_templatized:
os.unlink(work_file)
def write_pot_file(potfile, msgs):
"""
Write the :param potfile: POT file with the :param msgs: contents,
previously making sure its format is valid.
"""
if os.path.exists(potfile):
# Strip the header
msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
else:
msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
with io.open(potfile, 'a', encoding='utf-8') as fp:
fp.write(msgs)
class Command(BaseCommand):
help = ("Runs over the entire source tree of the current directory and "
"pulls out all strings marked for translation. It creates (or updates) a message "
"file in the conf/locale (in the django tree) or locale (for projects and "
"applications) directory.\n\nYou must run this command with one of either the "
"--locale, --exclude or --all options.")
requires_system_checks = False
leave_locale_alone = True
msgmerge_options = ['-q', '--previous']
msguniq_options = ['--to-code=utf-8']
msgattrib_options = ['--no-obsolete']
xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators']
def add_arguments(self, parser):
parser.add_argument('--locale', '-l', default=[], dest='locale', action='append',
help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). '
'Can be used multiple times.')
parser.add_argument('--exclude', '-x', default=[], dest='exclude', action='append',
help='Locales to exclude. Default is none. Can be used multiple times.')
parser.add_argument('--domain', '-d', default='django', dest='domain',
help='The domain of the message files (default: "django").')
parser.add_argument('--all', '-a', action='store_true', dest='all',
default=False, help='Updates the message files for all existing locales.')
parser.add_argument('--extension', '-e', dest='extensions',
| action='append')
parser.add_argument('--symlinks', '-s', action='store_true', dest='symlinks',
default=False, help='Follows symlinks to directories when examining '
'source code and templates for translation strings.')
parser.add_argument('--ignore', '-i', action='append', dest='ignore_patterns',
default=[], metavar='PATTERN',
help='Ignore files or directories matching this glob-style pattern. '
'Use multiple times to ignore more.')
parser.add_argument('--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
default=True, help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.")
parser.add_argument('--no-wrap', action='store_true', dest='no_wrap',
default=False, help="Don't break long message lines into several lines.")
parser.add_argument('--no-location', action='store_true', dest='no_location',
default=False, help="Don't write '#: filename:line' lines.")
parser.add_argument('--no-obsolete', action='store_true', dest='no_obsolete',
default=False, help="Remove obsolete message strings.")
parser.add_argument('--keep-pot', action='store_true', dest='keep_pot',
default=False, help="Keep .pot file after making messages. Useful when debugging.")
def handle(self, *args, **options):
locale = options.get('locale')
exclude = options.get('exclude')
self.domain = options.get('domain')
self.verbosity = options.get('verbosity')
process_all = options.get('all')
extensions = options.get('extensions')
self.symlinks = options.get('symlinks')
# Need to ensure that the i18n framework is enabled
if settings.configured:
settings.USE_I18N = True
else:
settings.configure(USE_I18N=True)
ignore_patterns = options.get('ignore_patterns')
if options.get('use_default_ignore_patterns'):
ignore_patterns += ['CVS', '.*', '*~', '*.pyc']
self.ignore_patterns = list(set(ignore_patterns))
# Avoid messing with mutable class variables
if options.get('no_wrap'):
self.msgmerge_options = self.msgmerge_options[:] + ['--no-wrap']
self.msguniq_options = self.msguniq_options[:] + ['--no-wrap']
self.msgattrib_options = self.msgattrib_options[:] + ['--no-wrap']
self.xgettext_options = self.xgettext_options[:] + ['--no-wrap']
if options.get('no_location'):
self.msgmerge_options = self.msgmerge_options[:] + ['--no-location']
self.msguniq_options = self.msguniq_options[:] + ['--no-location']
self.msgattrib_options = self.msgattrib_options[:] + ['--no-location']
self.xgettext_options = self.xgettext_options[:] + ['--no-location']
self.no_obsolete = options.get('no_obsolete')
self.keep_pot = options.get('keep_pot')
if self.domain not in ('django', 'djangojs'):
raise CommandError("currently makemessages only supports domains "
"'django' and 'djangojs'")
if self.domain == 'djangojs':
exts = extensions if extensions else ['js']
else:
exts = extensions if extensions else ['html', 'txt', 'py']
self.extensions = handle_extensions(exts)
if (locale is None and not exclude and not process_all) or self.domain is None:
raise CommandError("Type '%s help %s' for usage information." % (
os.path.basename(sys.argv[0]), sys.argv[1]))
if self.verbosity > 1:
self.stdout.write('examining files with the extensions: %s\n'
% get_text_list(list(self.extensions), 'and'))
self.invoked_for_django = False
self.locale_paths = []
self.default_locale_path = None
if os.path.isdir(os.path.join('conf', 'locale')):
self.locale_paths = [os.path.abspath(os.path.join('conf', 'locale'))]
self.default_locale_path = self.locale_paths[0]
self.invoked_for_django = True
else:
self.locale_paths.extend(list(settings.LOCALE_PATHS))
# Allow to run makemessages inside an app dir
if os.path.isdir('locale'):
self.locale_paths.append(os.path.abspath('locale'))
if self.locale_paths:
self.default_locale_path = self.locale_paths[0]
if not os.path.exists(self.default_locale_path):
os.makedirs(self.default_locale_path)
# Build locale list
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % self.default_locale_path))
all_locales = map(os.path.basename, locale_dirs)
# Account for excluded locales
if process_all:
locales = all_locales
else:
locales = locale or all_locales
locales = set(locales) - set(exclude)
if locales:
check_programs('msguniq', 'msgmerge', 'msgattrib')
check_programs('xgettext')
try:
potfiles = self.build_potfiles()
# Build po files for each selected locale
for locale in locales:
if self.verbosity > 0:
self.stdout.write("processing locale %s\n" % locale)
for potfile in potfiles:
self.write_po_file(potfile, locale)
finally:
if not self.keep_pot:
self.remove_potfiles()
@cached_property
def gettext_version(self):
# Gettext tools will output system-encoded bytestrings instead of UTF-8,
# when looking up the version. It's especially a problem on Windows.
out, err, status = gettext_popen_wrapper(
['xgettext', '--version'],
stdout_encoding=DEFAULT_LOCALE_ENCODING,
)
m = re.search(r'(\d+)\.(\d+)\.?(\d+)?', out)
if m:
return tuple(int(d) for d in m.groups() if d is not None)
else:
raise CommandError("Unable to get gettext version. Is it installed?")
def build_potfiles(self):
"""
Build pot files and apply msguniq to them.
"""
file_list = self.find_files(".")
self.remove_potfiles()
for f in file_list:
try:
f.process(self, self.domain)
except UnicodeDecodeError as e:
self.stdout.write(
"UnicodeDecodeError: skipped file %s in %s (reason: %s)" % (
f.file,
f.dirpath,
e,
)
)
potfiles = []
for path in self.locale_paths:
potfile = os.path.join(path, '%s.pot' % str(self.domain))
if not os.path.exists(potfile):
continue
args = ['msguniq'] + self.msguniq_options + [potfile]
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msguniq\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
with io.open(potfile, 'w', encoding='utf-8') as fp:
fp.write(msgs)
potfiles.append(potfile)
return potfiles
def remove_potfiles(self):
for path in self.locale_paths:
pot_path = os.path.join(path, '%s.pot' % str(self.domain))
if os.path.exists(pot_path):
os.unlink(pot_path)
def find_files(self, root):
"""
Helper method to get all files in the given root. Also check that there
is a matching locale dir for each file.
"""
def is_ignored(path, ignore_patterns):
"""
Check if the given path should be ignored or not.
"""
filename = os.path.basename(path)
ignore = lambda pattern: (fnmatch.fnmatchcase(filename, pattern) or
fnmatch.fnmatchcase(path, pattern))
return any(ignore(pattern) for pattern in ignore_patterns)
ignore_patterns = [os.path.normcase(p) for p in self.ignore_patterns]
dir_suffixes = {'%s*' % path_sep for path_sep in {'/', os.sep}}
norm_patterns = []
for p in ignore_patterns:
for dir_suffix in dir_suffixes:
if p.endswith(dir_suffix):
norm_patterns.append(p[:-len(dir_suffix)])
break
else:
norm_patterns.append(p)
all_files = []
ignored_roots = [os.path.normpath(p) for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT) if p]
for dirpath, dirnames, filenames in os.walk(root, topdown=True, followlinks=self.symlinks):
for dirname in dirnames[:]:
if (is_ignored(os.path.normpath(os.path.join(dirpath, dirname)), norm_patterns) or
os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots):
dirnames.remove(dirname)
if self.verbosity > 1:
self.stdout.write('ignoring directory %s\n' % dirname)
elif dirname == 'locale':
dirnames.remove(dirname)
self.locale_paths.insert(0, os.path.join(os.path.abspath(dirpath), dirname))
for filename in filenames:
file_path = os.path.normpath(os.path.join(dirpath, filename))
file_ext = os.path.splitext(filename)[1]
if file_ext not in self.extensions or is_ignored(file_path, self.ignore_patterns):
if self.verbosity > 1:
self.stdout.write('ignoring file %s in %s\n' % (filename, dirpath))
else:
locale_dir = None
for path in self.locale_paths:
if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
locale_dir = path
break
if not locale_dir:
locale_dir = self.default_locale_path
if not locale_dir:
raise CommandError(
"Unable to find a locale path to store translations for file %s" % file_path)
all_files.append(TranslatableFile(dirpath, filename, locale_dir))
return sorted(all_files)
def write_po_file(self, potfile, locale):
"""
Creates or updates the PO file for self.domain and :param locale:.
Uses contents of the existing :param potfile:.
Uses msgmerge, and msgattrib GNU gettext utilities.
"""
basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES')
if not os.path.isdir(basedir):
os.makedirs(basedir)
pofile = os.path.join(basedir, '%s.po' % str(self.domain))
if os.path.exists(pofile):
args = ['msgmerge'] + self.msgmerge_options + [pofile, potfile]
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msgmerge\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
else:
with io.open(potfile, 'r', encoding='utf-8') as fp:
msgs = fp.read()
if not self.invoked_for_django:
msgs = self.copy_plural_forms(msgs, locale)
msgs = msgs.replace(
"#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "")
with io.open(pofile, 'w', encoding='utf-8') as fp:
fp.write(msgs)
if self.no_obsolete:
args = ['msgattrib'] + self.msgattrib_options + ['-o', pofile, pofile]
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msgattrib\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
def copy_plural_forms(self, msgs, locale):
"""
Copies plural forms header contents from a Django catalog of locale to
the msgs string, inserting it at the right place. msgs should be the
contents of a newly created .po file.
"""
django_dir = os.path.normpath(os.path.join(os.path.dirname(upath(django.__file__))))
if self.domain == 'djangojs':
domains = ('djangojs', 'django')
else:
domains = ('django',)
for domain in domains:
django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
if os.path.exists(django_po):
with io.open(django_po, 'r', encoding='utf-8') as fp:
m = plural_forms_re.search(fp.read())
if m:
plural_form_line = force_str(m.group('value'))
if self.verbosity > 1:
self.stdout.write("copying plural forms: %s\n" % plural_form_line)
lines = []
found = False
for line in msgs.split('\n'):
if not found and (not line or plural_forms_re.search(line)):
line = '%s\n' % plural_form_line
found = True
lines.append(line)
msgs = '\n'.join(lines)
break
return msgs | help='The file extension(s) to examine (default: "html,txt", or "js" '
'if the domain is "djangojs"). Separate multiple extensions with '
'commas, or use -e multiple times.',
| random_line_split |
urls.py | from django.conf.urls import url
from . import views
urlpatterns = [ # pylint:disable=invalid-name
url(r'^status$', views.Control.as_view()),
url(r'^status/(?P<group>([0-9]))$', views.Control.as_view()),
url(r'^control/(?P<command>([a-z_-]+))/(?P<group>([0-9]))$',
views.Control.as_view()),
url(r'^control/(?P<command>([a-z_-]+))/(?P<group>([0-9]))/(?P<parameter>([0-9]+))$',
views.Control.as_view()), | views.ControlPerSource.as_view()),
url(r'^timed/(?P<command>([a-z-_]+))/(?P<action>([a-z-_]+))$',
views.TimedProgram.as_view()),
] | url(r'^control/source/(?P<source>([a-z_-]+))/(?P<command>([a-z_-]+))$', | random_line_split |
paragraph.ts | import { OpenXmlElement } from "./dom";
import { CommonProperties, Length, ns, parseCommonProperty } from "./common";
import { Borders } from "./border";
import { parseSectionProperties, SectionProperties } from "./section";
import { LineSpacing, parseLineSpacing } from "./line-spacing";
import { XmlParser } from "../parser/xml-parser";
import { parseRunProperties, RunProperties } from "./run";
export interface WmlParagraph extends OpenXmlElement, ParagraphProperties {
}
export interface ParagraphProperties extends CommonProperties {
sectionProps: SectionProperties;
tabs: ParagraphTab[];
numbering: ParagraphNumbering;
border: Borders;
textAlignment: "auto" | "baseline" | "bottom" | "center" | "top" | string;
lineSpacing: LineSpacing;
keepLines: boolean;
keepNext: boolean;
pageBreakBefore: boolean;
outlineLevel: number;
styleName: string;
runProps: RunProperties;
}
export interface ParagraphTab {
style: "bar" | "center" | "clear" | "decimal" | "end" | "num" | "start" | "left" | "right";
leader: "none" | "dot" | "heavy" | "hyphen" | "middleDot" | "underscore";
position: Length;
}
export interface ParagraphNumbering {
id: string;
level: number;
}
export function parseParagraphProperties(elem: Element, xml: XmlParser): ParagraphProperties |
export function parseParagraphProperty(elem: Element, props: ParagraphProperties, xml: XmlParser) {
if (elem.namespaceURI != ns.wordml)
return false;
if(parseCommonProperty(elem, props, xml))
return true;
switch (elem.localName) {
case "tabs":
props.tabs = parseTabs(elem, xml);
break;
case "sectPr":
props.sectionProps = parseSectionProperties(elem, xml);
break;
case "numPr":
props.numbering = parseNumbering(elem, xml);
break;
case "spacing":
props.lineSpacing = parseLineSpacing(elem, xml);
return false; // TODO
break;
case "textAlignment":
props.textAlignment = xml.attr(elem, "val");
return false; //TODO
break;
case "keepNext":
props.keepLines = xml.boolAttr(elem, "val", true);
break;
case "keepNext":
props.keepNext = xml.boolAttr(elem, "val", true);
break;
case "pageBreakBefore":
props.pageBreakBefore = xml.boolAttr(elem, "val", true);
break;
case "outlineLvl":
props.outlineLevel = xml.intAttr(elem, "val");
break;
case "pStyle":
props.styleName = xml.attr(elem, "val");
break;
case "rPr":
props.runProps = parseRunProperties(elem, xml);
break;
default:
return false;
}
return true;
}
export function parseTabs(elem: Element, xml: XmlParser): ParagraphTab[] {
return xml.elements(elem, "tab")
.map(e => <ParagraphTab>{
position: xml.lengthAttr(e, "pos"),
leader: xml.attr(e, "leader"),
style: xml.attr(e, "val")
});
}
export function parseNumbering(elem: Element, xml: XmlParser): ParagraphNumbering {
var result = <ParagraphNumbering>{};
for (let e of xml.elements(elem)) {
switch (e.localName) {
case "numId":
result.id = xml.attr(e, "val");
break;
case "ilvl":
result.level = xml.intAttr(e, "val");
break;
}
}
return result;
} | {
let result = <ParagraphProperties>{};
for(let el of xml.elements(elem)) {
parseParagraphProperty(el, result, xml);
}
return result;
} | identifier_body |
paragraph.ts | import { OpenXmlElement } from "./dom";
import { CommonProperties, Length, ns, parseCommonProperty } from "./common";
import { Borders } from "./border";
import { parseSectionProperties, SectionProperties } from "./section";
import { LineSpacing, parseLineSpacing } from "./line-spacing";
import { XmlParser } from "../parser/xml-parser";
import { parseRunProperties, RunProperties } from "./run";
export interface WmlParagraph extends OpenXmlElement, ParagraphProperties {
}
export interface ParagraphProperties extends CommonProperties {
sectionProps: SectionProperties;
tabs: ParagraphTab[];
numbering: ParagraphNumbering;
border: Borders;
textAlignment: "auto" | "baseline" | "bottom" | "center" | "top" | string;
lineSpacing: LineSpacing;
keepLines: boolean;
keepNext: boolean;
pageBreakBefore: boolean;
outlineLevel: number;
styleName: string;
runProps: RunProperties;
}
export interface ParagraphTab {
style: "bar" | "center" | "clear" | "decimal" | "end" | "num" | "start" | "left" | "right";
leader: "none" | "dot" | "heavy" | "hyphen" | "middleDot" | "underscore";
position: Length;
}
export interface ParagraphNumbering {
id: string;
level: number;
}
export function parseParagraphProperties(elem: Element, xml: XmlParser): ParagraphProperties {
let result = <ParagraphProperties>{};
for(let el of xml.elements(elem)) {
parseParagraphProperty(el, result, xml);
}
return result;
}
export function parseParagraphProperty(elem: Element, props: ParagraphProperties, xml: XmlParser) {
if (elem.namespaceURI != ns.wordml)
return false;
if(parseCommonProperty(elem, props, xml))
return true;
switch (elem.localName) {
case "tabs":
props.tabs = parseTabs(elem, xml);
break;
case "sectPr":
props.sectionProps = parseSectionProperties(elem, xml);
break;
case "numPr":
props.numbering = parseNumbering(elem, xml);
break;
case "spacing":
props.lineSpacing = parseLineSpacing(elem, xml);
return false; // TODO
break;
case "textAlignment":
props.textAlignment = xml.attr(elem, "val");
return false; //TODO
break;
case "keepNext":
props.keepLines = xml.boolAttr(elem, "val", true);
break;
case "keepNext":
props.keepNext = xml.boolAttr(elem, "val", true);
break;
case "pageBreakBefore":
props.pageBreakBefore = xml.boolAttr(elem, "val", true);
break;
case "outlineLvl":
props.outlineLevel = xml.intAttr(elem, "val");
break;
case "pStyle":
props.styleName = xml.attr(elem, "val");
break;
case "rPr":
props.runProps = parseRunProperties(elem, xml);
break;
default:
return false;
}
return true;
}
export function parseTabs(elem: Element, xml: XmlParser): ParagraphTab[] {
return xml.elements(elem, "tab")
.map(e => <ParagraphTab>{
position: xml.lengthAttr(e, "pos"),
leader: xml.attr(e, "leader"),
style: xml.attr(e, "val")
});
}
export function | (elem: Element, xml: XmlParser): ParagraphNumbering {
var result = <ParagraphNumbering>{};
for (let e of xml.elements(elem)) {
switch (e.localName) {
case "numId":
result.id = xml.attr(e, "val");
break;
case "ilvl":
result.level = xml.intAttr(e, "val");
break;
}
}
return result;
} | parseNumbering | identifier_name |
paragraph.ts | import { OpenXmlElement } from "./dom";
import { CommonProperties, Length, ns, parseCommonProperty } from "./common";
import { Borders } from "./border";
import { parseSectionProperties, SectionProperties } from "./section";
import { LineSpacing, parseLineSpacing } from "./line-spacing";
import { XmlParser } from "../parser/xml-parser";
import { parseRunProperties, RunProperties } from "./run";
export interface WmlParagraph extends OpenXmlElement, ParagraphProperties {
}
export interface ParagraphProperties extends CommonProperties {
sectionProps: SectionProperties;
tabs: ParagraphTab[];
numbering: ParagraphNumbering;
border: Borders;
textAlignment: "auto" | "baseline" | "bottom" | "center" | "top" | string;
lineSpacing: LineSpacing;
keepLines: boolean;
keepNext: boolean;
pageBreakBefore: boolean;
outlineLevel: number;
styleName: string;
runProps: RunProperties;
}
export interface ParagraphTab {
style: "bar" | "center" | "clear" | "decimal" | "end" | "num" | "start" | "left" | "right";
leader: "none" | "dot" | "heavy" | "hyphen" | "middleDot" | "underscore";
position: Length;
}
export interface ParagraphNumbering {
id: string;
level: number;
}
export function parseParagraphProperties(elem: Element, xml: XmlParser): ParagraphProperties {
let result = <ParagraphProperties>{};
for(let el of xml.elements(elem)) {
parseParagraphProperty(el, result, xml);
}
return result;
}
export function parseParagraphProperty(elem: Element, props: ParagraphProperties, xml: XmlParser) {
if (elem.namespaceURI != ns.wordml)
return false;
if(parseCommonProperty(elem, props, xml))
return true;
switch (elem.localName) {
case "tabs":
props.tabs = parseTabs(elem, xml);
break;
case "sectPr":
props.sectionProps = parseSectionProperties(elem, xml);
break;
case "numPr":
props.numbering = parseNumbering(elem, xml);
break;
case "spacing":
props.lineSpacing = parseLineSpacing(elem, xml);
return false; // TODO
break; |
case "keepNext":
props.keepLines = xml.boolAttr(elem, "val", true);
break;
case "keepNext":
props.keepNext = xml.boolAttr(elem, "val", true);
break;
case "pageBreakBefore":
props.pageBreakBefore = xml.boolAttr(elem, "val", true);
break;
case "outlineLvl":
props.outlineLevel = xml.intAttr(elem, "val");
break;
case "pStyle":
props.styleName = xml.attr(elem, "val");
break;
case "rPr":
props.runProps = parseRunProperties(elem, xml);
break;
default:
return false;
}
return true;
}
export function parseTabs(elem: Element, xml: XmlParser): ParagraphTab[] {
return xml.elements(elem, "tab")
.map(e => <ParagraphTab>{
position: xml.lengthAttr(e, "pos"),
leader: xml.attr(e, "leader"),
style: xml.attr(e, "val")
});
}
export function parseNumbering(elem: Element, xml: XmlParser): ParagraphNumbering {
var result = <ParagraphNumbering>{};
for (let e of xml.elements(elem)) {
switch (e.localName) {
case "numId":
result.id = xml.attr(e, "val");
break;
case "ilvl":
result.level = xml.intAttr(e, "val");
break;
}
}
return result;
} |
case "textAlignment":
props.textAlignment = xml.attr(elem, "val");
return false; //TODO
break; | random_line_split |
globalize.culture.en-ZA.js | /*
* Globalize Culture en-ZA
*
* http://github.com/jquery/globalize
*
* Copyright Software Freedom Conservancy, Inc.
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
* This file was generated by the Globalize Culture Generator
* Translation: bugs found in this file need to be fixed in the generator
*/
(function( window, undefined ) {
var Globalize;
if ( typeof require !== "undefined" &&
typeof exports !== "undefined" &&
typeof module !== "undefined" ) {
// Assume CommonJS
Globalize = require( "globalize" );
} else |
Globalize.addCultureInfo( "en-ZA", "default", {
name: "en-ZA",
englishName: "English (South Africa)",
nativeName: "English (South Africa)",
numberFormat: {
",": " ",
percent: {
pattern: ["-n%","n%"],
",": " "
},
currency: {
pattern: ["$-n","$ n"],
",": " ",
".": ",",
symbol: "R"
}
},
calendars: {
standard: {
patterns: {
d: "yyyy/MM/dd",
D: "dd MMMM yyyy",
t: "hh:mm tt",
T: "hh:mm:ss tt",
f: "dd MMMM yyyy hh:mm tt",
F: "dd MMMM yyyy hh:mm:ss tt",
M: "dd MMMM",
Y: "MMMM yyyy"
}
}
}
});
}( this ));
| {
// Global variable
Globalize = window.Globalize;
} | conditional_block |
globalize.culture.en-ZA.js | /*
* Globalize Culture en-ZA
*
* http://github.com/jquery/globalize
*
* Copyright Software Freedom Conservancy, Inc.
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
* This file was generated by the Globalize Culture Generator
* Translation: bugs found in this file need to be fixed in the generator
*/
(function( window, undefined ) {
var Globalize;
if ( typeof require !== "undefined" &&
typeof exports !== "undefined" &&
typeof module !== "undefined" ) {
// Assume CommonJS
Globalize = require( "globalize" );
} else {
// Global variable
Globalize = window.Globalize;
}
Globalize.addCultureInfo( "en-ZA", "default", {
name: "en-ZA",
englishName: "English (South Africa)",
nativeName: "English (South Africa)",
numberFormat: {
",": " ", | ",": " "
},
currency: {
pattern: ["$-n","$ n"],
",": " ",
".": ",",
symbol: "R"
}
},
calendars: {
standard: {
patterns: {
d: "yyyy/MM/dd",
D: "dd MMMM yyyy",
t: "hh:mm tt",
T: "hh:mm:ss tt",
f: "dd MMMM yyyy hh:mm tt",
F: "dd MMMM yyyy hh:mm:ss tt",
M: "dd MMMM",
Y: "MMMM yyyy"
}
}
}
});
}( this )); | percent: {
pattern: ["-n%","n%"], | random_line_split |
test_score.py | """
test pretrained models
"""
from __future__ import print_function
import mxnet as mx
from common import find_mxnet, modelzoo
from score import score
VAL_DATA='data/val-5k-256.rec'
def download_data():
return mx.test_utils.download(
'http://data.mxnet.io/data/val-5k-256.rec', VAL_DATA)
def test_imagenet1k_resnet(**kwargs):
models = ['imagenet1k-resnet-50', 'imagenet1k-resnet-152']
accs = [.77, .78]
for (m, g) in zip(models, accs):
acc = mx.metric.create('acc')
(speed,) = score(model=m, data_val=VAL_DATA,
rgb_mean='0,0,0', metrics=acc, **kwargs)
r = acc.get()[1] | def test_imagenet1k_inception_bn(**kwargs):
acc = mx.metric.create('acc')
m = 'imagenet1k-inception-bn'
g = 0.75
(speed,) = score(model=m,
data_val=VAL_DATA,
rgb_mean='123.68,116.779,103.939', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
if __name__ == '__main__':
gpus = mx.test_utils.list_gpus()
assert len(gpus) > 0
batch_size = 16 * len(gpus)
gpus = ','.join([str(i) for i in gpus])
kwargs = {'gpus':gpus, 'batch_size':batch_size, 'max_num_examples':500}
download_data()
test_imagenet1k_resnet(**kwargs)
test_imagenet1k_inception_bn(**kwargs) | print('Tested %s, acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
| random_line_split |
test_score.py | """
test pretrained models
"""
from __future__ import print_function
import mxnet as mx
from common import find_mxnet, modelzoo
from score import score
VAL_DATA='data/val-5k-256.rec'
def download_data():
|
def test_imagenet1k_resnet(**kwargs):
models = ['imagenet1k-resnet-50', 'imagenet1k-resnet-152']
accs = [.77, .78]
for (m, g) in zip(models, accs):
acc = mx.metric.create('acc')
(speed,) = score(model=m, data_val=VAL_DATA,
rgb_mean='0,0,0', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s, acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
def test_imagenet1k_inception_bn(**kwargs):
acc = mx.metric.create('acc')
m = 'imagenet1k-inception-bn'
g = 0.75
(speed,) = score(model=m,
data_val=VAL_DATA,
rgb_mean='123.68,116.779,103.939', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
if __name__ == '__main__':
gpus = mx.test_utils.list_gpus()
assert len(gpus) > 0
batch_size = 16 * len(gpus)
gpus = ','.join([str(i) for i in gpus])
kwargs = {'gpus':gpus, 'batch_size':batch_size, 'max_num_examples':500}
download_data()
test_imagenet1k_resnet(**kwargs)
test_imagenet1k_inception_bn(**kwargs)
| return mx.test_utils.download(
'http://data.mxnet.io/data/val-5k-256.rec', VAL_DATA) | identifier_body |
test_score.py | """
test pretrained models
"""
from __future__ import print_function
import mxnet as mx
from common import find_mxnet, modelzoo
from score import score
VAL_DATA='data/val-5k-256.rec'
def download_data():
return mx.test_utils.download(
'http://data.mxnet.io/data/val-5k-256.rec', VAL_DATA)
def test_imagenet1k_resnet(**kwargs):
models = ['imagenet1k-resnet-50', 'imagenet1k-resnet-152']
accs = [.77, .78]
for (m, g) in zip(models, accs):
acc = mx.metric.create('acc')
(speed,) = score(model=m, data_val=VAL_DATA,
rgb_mean='0,0,0', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s, acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
def test_imagenet1k_inception_bn(**kwargs):
acc = mx.metric.create('acc')
m = 'imagenet1k-inception-bn'
g = 0.75
(speed,) = score(model=m,
data_val=VAL_DATA,
rgb_mean='123.68,116.779,103.939', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
if __name__ == '__main__':
| gpus = mx.test_utils.list_gpus()
assert len(gpus) > 0
batch_size = 16 * len(gpus)
gpus = ','.join([str(i) for i in gpus])
kwargs = {'gpus':gpus, 'batch_size':batch_size, 'max_num_examples':500}
download_data()
test_imagenet1k_resnet(**kwargs)
test_imagenet1k_inception_bn(**kwargs) | conditional_block | |
test_score.py | """
test pretrained models
"""
from __future__ import print_function
import mxnet as mx
from common import find_mxnet, modelzoo
from score import score
VAL_DATA='data/val-5k-256.rec'
def download_data():
return mx.test_utils.download(
'http://data.mxnet.io/data/val-5k-256.rec', VAL_DATA)
def test_imagenet1k_resnet(**kwargs):
models = ['imagenet1k-resnet-50', 'imagenet1k-resnet-152']
accs = [.77, .78]
for (m, g) in zip(models, accs):
acc = mx.metric.create('acc')
(speed,) = score(model=m, data_val=VAL_DATA,
rgb_mean='0,0,0', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s, acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
def | (**kwargs):
acc = mx.metric.create('acc')
m = 'imagenet1k-inception-bn'
g = 0.75
(speed,) = score(model=m,
data_val=VAL_DATA,
rgb_mean='123.68,116.779,103.939', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
if __name__ == '__main__':
gpus = mx.test_utils.list_gpus()
assert len(gpus) > 0
batch_size = 16 * len(gpus)
gpus = ','.join([str(i) for i in gpus])
kwargs = {'gpus':gpus, 'batch_size':batch_size, 'max_num_examples':500}
download_data()
test_imagenet1k_resnet(**kwargs)
test_imagenet1k_inception_bn(**kwargs)
| test_imagenet1k_inception_bn | identifier_name |
detector.py | from __future__ import print_function
import mxnet as mx
import numpy as np
from timeit import default_timer as timer
from dataset.testdb import TestDB
from dataset.iterator import DetIter
class Detector(object):
"""
SSD detector which hold a detection network and wraps detection API
Parameters:
----------
symbol : mx.Symbol
detection network Symbol
model_prefix : str
name prefix of trained model
epoch : int
load epoch of trained model
data_shape : int
input data resize shape
mean_pixels : tuple of float
(mean_r, mean_g, mean_b)
batch_size : int
run detection with batch size
ctx : mx.ctx
device to use, if None, use mx.cpu() as default context
"""
def __init__(self, symbol, model_prefix, epoch, data_shape, mean_pixels, \
batch_size=1, ctx=None):
self.ctx = ctx
if self.ctx is None:
self.ctx = mx.cpu()
load_symbol, args, auxs = mx.model.load_checkpoint(model_prefix, epoch)
if symbol is None:
symbol = load_symbol
self.mod = mx.mod.Module(symbol, label_names=None, context=ctx)
self.data_shape = data_shape
self.mod.bind(data_shapes=[('data', (batch_size, 3, data_shape, data_shape))])
self.mod.set_params(args, auxs)
self.data_shape = data_shape
self.mean_pixels = mean_pixels
def detect(self, det_iter, show_timer=False):
"""
detect all images in iterator
Parameters:
----------
det_iter : DetIter
iterator for all testing images
show_timer : Boolean
whether to print out detection exec time
Returns:
----------
list of detection results
"""
num_images = det_iter._size
if not isinstance(det_iter, mx.io.PrefetchingIter):
det_iter = mx.io.PrefetchingIter(det_iter)
start = timer()
detections = self.mod.predict(det_iter).asnumpy()
time_elapsed = timer() - start
if show_timer:
print("Detection time for {} images: {:.4f} sec".format(
num_images, time_elapsed))
result = []
for i in range(detections.shape[0]):
det = detections[i, :, :]
res = det[np.where(det[:, 0] >= 0)[0]]
result.append(res)
return result
def im_detect(self, im_list, root_dir=None, extension=None, show_timer=False):
|
def visualize_detection(self, img, dets, classes=[], thresh=0.6):
"""
visualize detections in one image
Parameters:
----------
img : numpy.array
image, in bgr format
dets : numpy.array
ssd detections, numpy.array([[id, score, x1, y1, x2, y2]...])
each row is one object
classes : tuple or list of str
class names
thresh : float
score threshold
"""
import matplotlib.pyplot as plt
import random
plt.imshow(img)
height = img.shape[0]
width = img.shape[1]
colors = dict()
for i in range(dets.shape[0]):
cls_id = int(dets[i, 0])
if cls_id >= 0:
score = dets[i, 1]
if score > thresh:
if cls_id not in colors:
colors[cls_id] = (random.random(), random.random(), random.random())
xmin = int(dets[i, 2] * width)
ymin = int(dets[i, 3] * height)
xmax = int(dets[i, 4] * width)
ymax = int(dets[i, 5] * height)
rect = plt.Rectangle((xmin, ymin), xmax - xmin,
ymax - ymin, fill=False,
edgecolor=colors[cls_id],
linewidth=3.5)
plt.gca().add_patch(rect)
class_name = str(cls_id)
if classes and len(classes) > cls_id:
class_name = classes[cls_id]
plt.gca().text(xmin, ymin - 2,
'{:s} {:.3f}'.format(class_name, score),
bbox=dict(facecolor=colors[cls_id], alpha=0.5),
fontsize=12, color='white')
plt.show()
def detect_and_visualize(self, im_list, root_dir=None, extension=None,
classes=[], thresh=0.6, show_timer=False):
"""
wrapper for im_detect and visualize_detection
Parameters:
----------
im_list : list of str or str
image path or list of image paths
root_dir : str or None
directory of input images, optional if image path already
has full directory information
extension : str or None
image extension, eg. ".jpg", optional
Returns:
----------
"""
import cv2
dets = self.im_detect(im_list, root_dir, extension, show_timer=show_timer)
if not isinstance(im_list, list):
im_list = [im_list]
assert len(dets) == len(im_list)
for k, det in enumerate(dets):
img = cv2.imread(im_list[k])
img[:, :, (0, 1, 2)] = img[:, :, (2, 1, 0)]
self.visualize_detection(img, det, classes, thresh)
| """
wrapper for detecting multiple images
Parameters:
----------
im_list : list of str
image path or list of image paths
root_dir : str
directory of input images, optional if image path already
has full directory information
extension : str
image extension, eg. ".jpg", optional
Returns:
----------
list of detection results in format [det0, det1...], det is in
format np.array([id, score, xmin, ymin, xmax, ymax]...)
"""
test_db = TestDB(im_list, root_dir=root_dir, extension=extension)
test_iter = DetIter(test_db, 1, self.data_shape, self.mean_pixels,
is_train=False)
return self.detect(test_iter, show_timer) | identifier_body |
detector.py | from __future__ import print_function
import mxnet as mx
import numpy as np
from timeit import default_timer as timer
from dataset.testdb import TestDB
from dataset.iterator import DetIter
class Detector(object):
"""
SSD detector which hold a detection network and wraps detection API
Parameters:
----------
symbol : mx.Symbol
detection network Symbol
model_prefix : str
name prefix of trained model
epoch : int
load epoch of trained model
data_shape : int
input data resize shape
mean_pixels : tuple of float
(mean_r, mean_g, mean_b)
batch_size : int
run detection with batch size
ctx : mx.ctx
device to use, if None, use mx.cpu() as default context
"""
def __init__(self, symbol, model_prefix, epoch, data_shape, mean_pixels, \
batch_size=1, ctx=None):
self.ctx = ctx
if self.ctx is None:
self.ctx = mx.cpu()
load_symbol, args, auxs = mx.model.load_checkpoint(model_prefix, epoch)
if symbol is None:
symbol = load_symbol
self.mod = mx.mod.Module(symbol, label_names=None, context=ctx)
self.data_shape = data_shape
self.mod.bind(data_shapes=[('data', (batch_size, 3, data_shape, data_shape))])
self.mod.set_params(args, auxs)
self.data_shape = data_shape
self.mean_pixels = mean_pixels
def detect(self, det_iter, show_timer=False):
"""
detect all images in iterator
Parameters:
----------
det_iter : DetIter
iterator for all testing images
show_timer : Boolean
whether to print out detection exec time
Returns:
----------
list of detection results
"""
num_images = det_iter._size
if not isinstance(det_iter, mx.io.PrefetchingIter):
det_iter = mx.io.PrefetchingIter(det_iter)
start = timer()
detections = self.mod.predict(det_iter).asnumpy()
time_elapsed = timer() - start
if show_timer:
print("Detection time for {} images: {:.4f} sec".format(
num_images, time_elapsed))
result = []
for i in range(detections.shape[0]):
det = detections[i, :, :]
res = det[np.where(det[:, 0] >= 0)[0]]
result.append(res)
return result
def im_detect(self, im_list, root_dir=None, extension=None, show_timer=False):
"""
wrapper for detecting multiple images
Parameters:
----------
im_list : list of str
image path or list of image paths
root_dir : str
directory of input images, optional if image path already
has full directory information
extension : str
image extension, eg. ".jpg", optional
Returns:
----------
list of detection results in format [det0, det1...], det is in
format np.array([id, score, xmin, ymin, xmax, ymax]...)
"""
test_db = TestDB(im_list, root_dir=root_dir, extension=extension)
test_iter = DetIter(test_db, 1, self.data_shape, self.mean_pixels,
is_train=False)
return self.detect(test_iter, show_timer)
def visualize_detection(self, img, dets, classes=[], thresh=0.6):
"""
visualize detections in one image
Parameters:
----------
img : numpy.array
image, in bgr format
dets : numpy.array
ssd detections, numpy.array([[id, score, x1, y1, x2, y2]...])
each row is one object
classes : tuple or list of str
class names
thresh : float
score threshold
"""
import matplotlib.pyplot as plt
import random
plt.imshow(img)
height = img.shape[0]
width = img.shape[1]
colors = dict()
for i in range(dets.shape[0]):
cls_id = int(dets[i, 0])
if cls_id >= 0:
score = dets[i, 1]
if score > thresh:
if cls_id not in colors:
colors[cls_id] = (random.random(), random.random(), random.random())
xmin = int(dets[i, 2] * width)
ymin = int(dets[i, 3] * height)
xmax = int(dets[i, 4] * width)
ymax = int(dets[i, 5] * height)
rect = plt.Rectangle((xmin, ymin), xmax - xmin,
ymax - ymin, fill=False,
edgecolor=colors[cls_id],
linewidth=3.5)
plt.gca().add_patch(rect)
class_name = str(cls_id)
if classes and len(classes) > cls_id:
class_name = classes[cls_id]
plt.gca().text(xmin, ymin - 2,
'{:s} {:.3f}'.format(class_name, score),
bbox=dict(facecolor=colors[cls_id], alpha=0.5),
fontsize=12, color='white')
plt.show()
def | (self, im_list, root_dir=None, extension=None,
classes=[], thresh=0.6, show_timer=False):
"""
wrapper for im_detect and visualize_detection
Parameters:
----------
im_list : list of str or str
image path or list of image paths
root_dir : str or None
directory of input images, optional if image path already
has full directory information
extension : str or None
image extension, eg. ".jpg", optional
Returns:
----------
"""
import cv2
dets = self.im_detect(im_list, root_dir, extension, show_timer=show_timer)
if not isinstance(im_list, list):
im_list = [im_list]
assert len(dets) == len(im_list)
for k, det in enumerate(dets):
img = cv2.imread(im_list[k])
img[:, :, (0, 1, 2)] = img[:, :, (2, 1, 0)]
self.visualize_detection(img, det, classes, thresh)
| detect_and_visualize | identifier_name |
detector.py | from __future__ import print_function
import mxnet as mx
import numpy as np
from timeit import default_timer as timer
from dataset.testdb import TestDB
from dataset.iterator import DetIter
class Detector(object):
"""
SSD detector which hold a detection network and wraps detection API
Parameters:
----------
symbol : mx.Symbol
detection network Symbol
model_prefix : str
name prefix of trained model
epoch : int
load epoch of trained model
data_shape : int
input data resize shape
mean_pixels : tuple of float
(mean_r, mean_g, mean_b)
batch_size : int
run detection with batch size
ctx : mx.ctx
device to use, if None, use mx.cpu() as default context
"""
def __init__(self, symbol, model_prefix, epoch, data_shape, mean_pixels, \
batch_size=1, ctx=None):
self.ctx = ctx
if self.ctx is None:
self.ctx = mx.cpu()
load_symbol, args, auxs = mx.model.load_checkpoint(model_prefix, epoch)
if symbol is None:
symbol = load_symbol
self.mod = mx.mod.Module(symbol, label_names=None, context=ctx)
self.data_shape = data_shape
self.mod.bind(data_shapes=[('data', (batch_size, 3, data_shape, data_shape))])
self.mod.set_params(args, auxs)
self.data_shape = data_shape
self.mean_pixels = mean_pixels
def detect(self, det_iter, show_timer=False):
"""
detect all images in iterator
Parameters:
----------
det_iter : DetIter
iterator for all testing images
show_timer : Boolean
whether to print out detection exec time
Returns:
----------
list of detection results
"""
num_images = det_iter._size
if not isinstance(det_iter, mx.io.PrefetchingIter):
det_iter = mx.io.PrefetchingIter(det_iter)
start = timer()
detections = self.mod.predict(det_iter).asnumpy()
time_elapsed = timer() - start
if show_timer:
print("Detection time for {} images: {:.4f} sec".format(
num_images, time_elapsed))
result = []
for i in range(detections.shape[0]):
det = detections[i, :, :]
res = det[np.where(det[:, 0] >= 0)[0]]
result.append(res)
return result
def im_detect(self, im_list, root_dir=None, extension=None, show_timer=False):
"""
wrapper for detecting multiple images
Parameters:
----------
im_list : list of str
image path or list of image paths
root_dir : str
directory of input images, optional if image path already
has full directory information
extension : str
image extension, eg. ".jpg", optional
Returns:
----------
list of detection results in format [det0, det1...], det is in
format np.array([id, score, xmin, ymin, xmax, ymax]...)
"""
test_db = TestDB(im_list, root_dir=root_dir, extension=extension)
test_iter = DetIter(test_db, 1, self.data_shape, self.mean_pixels,
is_train=False)
return self.detect(test_iter, show_timer)
def visualize_detection(self, img, dets, classes=[], thresh=0.6):
"""
visualize detections in one image
Parameters:
----------
img : numpy.array
image, in bgr format
dets : numpy.array
ssd detections, numpy.array([[id, score, x1, y1, x2, y2]...])
each row is one object
classes : tuple or list of str
class names | score threshold
"""
import matplotlib.pyplot as plt
import random
plt.imshow(img)
height = img.shape[0]
width = img.shape[1]
colors = dict()
for i in range(dets.shape[0]):
cls_id = int(dets[i, 0])
if cls_id >= 0:
score = dets[i, 1]
if score > thresh:
if cls_id not in colors:
colors[cls_id] = (random.random(), random.random(), random.random())
xmin = int(dets[i, 2] * width)
ymin = int(dets[i, 3] * height)
xmax = int(dets[i, 4] * width)
ymax = int(dets[i, 5] * height)
rect = plt.Rectangle((xmin, ymin), xmax - xmin,
ymax - ymin, fill=False,
edgecolor=colors[cls_id],
linewidth=3.5)
plt.gca().add_patch(rect)
class_name = str(cls_id)
if classes and len(classes) > cls_id:
class_name = classes[cls_id]
plt.gca().text(xmin, ymin - 2,
'{:s} {:.3f}'.format(class_name, score),
bbox=dict(facecolor=colors[cls_id], alpha=0.5),
fontsize=12, color='white')
plt.show()
def detect_and_visualize(self, im_list, root_dir=None, extension=None,
classes=[], thresh=0.6, show_timer=False):
"""
wrapper for im_detect and visualize_detection
Parameters:
----------
im_list : list of str or str
image path or list of image paths
root_dir : str or None
directory of input images, optional if image path already
has full directory information
extension : str or None
image extension, eg. ".jpg", optional
Returns:
----------
"""
import cv2
dets = self.im_detect(im_list, root_dir, extension, show_timer=show_timer)
if not isinstance(im_list, list):
im_list = [im_list]
assert len(dets) == len(im_list)
for k, det in enumerate(dets):
img = cv2.imread(im_list[k])
img[:, :, (0, 1, 2)] = img[:, :, (2, 1, 0)]
self.visualize_detection(img, det, classes, thresh) | thresh : float | random_line_split |
detector.py | from __future__ import print_function
import mxnet as mx
import numpy as np
from timeit import default_timer as timer
from dataset.testdb import TestDB
from dataset.iterator import DetIter
class Detector(object):
"""
SSD detector which hold a detection network and wraps detection API
Parameters:
----------
symbol : mx.Symbol
detection network Symbol
model_prefix : str
name prefix of trained model
epoch : int
load epoch of trained model
data_shape : int
input data resize shape
mean_pixels : tuple of float
(mean_r, mean_g, mean_b)
batch_size : int
run detection with batch size
ctx : mx.ctx
device to use, if None, use mx.cpu() as default context
"""
def __init__(self, symbol, model_prefix, epoch, data_shape, mean_pixels, \
batch_size=1, ctx=None):
self.ctx = ctx
if self.ctx is None:
self.ctx = mx.cpu()
load_symbol, args, auxs = mx.model.load_checkpoint(model_prefix, epoch)
if symbol is None:
symbol = load_symbol
self.mod = mx.mod.Module(symbol, label_names=None, context=ctx)
self.data_shape = data_shape
self.mod.bind(data_shapes=[('data', (batch_size, 3, data_shape, data_shape))])
self.mod.set_params(args, auxs)
self.data_shape = data_shape
self.mean_pixels = mean_pixels
def detect(self, det_iter, show_timer=False):
"""
detect all images in iterator
Parameters:
----------
det_iter : DetIter
iterator for all testing images
show_timer : Boolean
whether to print out detection exec time
Returns:
----------
list of detection results
"""
num_images = det_iter._size
if not isinstance(det_iter, mx.io.PrefetchingIter):
|
start = timer()
detections = self.mod.predict(det_iter).asnumpy()
time_elapsed = timer() - start
if show_timer:
print("Detection time for {} images: {:.4f} sec".format(
num_images, time_elapsed))
result = []
for i in range(detections.shape[0]):
det = detections[i, :, :]
res = det[np.where(det[:, 0] >= 0)[0]]
result.append(res)
return result
def im_detect(self, im_list, root_dir=None, extension=None, show_timer=False):
"""
wrapper for detecting multiple images
Parameters:
----------
im_list : list of str
image path or list of image paths
root_dir : str
directory of input images, optional if image path already
has full directory information
extension : str
image extension, eg. ".jpg", optional
Returns:
----------
list of detection results in format [det0, det1...], det is in
format np.array([id, score, xmin, ymin, xmax, ymax]...)
"""
test_db = TestDB(im_list, root_dir=root_dir, extension=extension)
test_iter = DetIter(test_db, 1, self.data_shape, self.mean_pixels,
is_train=False)
return self.detect(test_iter, show_timer)
def visualize_detection(self, img, dets, classes=[], thresh=0.6):
"""
visualize detections in one image
Parameters:
----------
img : numpy.array
image, in bgr format
dets : numpy.array
ssd detections, numpy.array([[id, score, x1, y1, x2, y2]...])
each row is one object
classes : tuple or list of str
class names
thresh : float
score threshold
"""
import matplotlib.pyplot as plt
import random
plt.imshow(img)
height = img.shape[0]
width = img.shape[1]
colors = dict()
for i in range(dets.shape[0]):
cls_id = int(dets[i, 0])
if cls_id >= 0:
score = dets[i, 1]
if score > thresh:
if cls_id not in colors:
colors[cls_id] = (random.random(), random.random(), random.random())
xmin = int(dets[i, 2] * width)
ymin = int(dets[i, 3] * height)
xmax = int(dets[i, 4] * width)
ymax = int(dets[i, 5] * height)
rect = plt.Rectangle((xmin, ymin), xmax - xmin,
ymax - ymin, fill=False,
edgecolor=colors[cls_id],
linewidth=3.5)
plt.gca().add_patch(rect)
class_name = str(cls_id)
if classes and len(classes) > cls_id:
class_name = classes[cls_id]
plt.gca().text(xmin, ymin - 2,
'{:s} {:.3f}'.format(class_name, score),
bbox=dict(facecolor=colors[cls_id], alpha=0.5),
fontsize=12, color='white')
plt.show()
def detect_and_visualize(self, im_list, root_dir=None, extension=None,
classes=[], thresh=0.6, show_timer=False):
"""
wrapper for im_detect and visualize_detection
Parameters:
----------
im_list : list of str or str
image path or list of image paths
root_dir : str or None
directory of input images, optional if image path already
has full directory information
extension : str or None
image extension, eg. ".jpg", optional
Returns:
----------
"""
import cv2
dets = self.im_detect(im_list, root_dir, extension, show_timer=show_timer)
if not isinstance(im_list, list):
im_list = [im_list]
assert len(dets) == len(im_list)
for k, det in enumerate(dets):
img = cv2.imread(im_list[k])
img[:, :, (0, 1, 2)] = img[:, :, (2, 1, 0)]
self.visualize_detection(img, det, classes, thresh)
| det_iter = mx.io.PrefetchingIter(det_iter) | conditional_block |
ISearchTvResult.ts | import { BaseRequestOptions, INewSeasonRequests } from "./IRequestModel";
export interface ISearchTvResult {
id: number;
title: string; // used in the request
aliases: string[];
banner: string;
seriesId: number;
status: string;
firstAired: string;
network: string;
networkId: string;
runtime: string;
genre: string[];
overview: string;
lastUpdated: number;
airsDayOfWeek: string;
airsTime: string;
rating: string;
imdbId: string;
siteRating: number;
trailer: string;
homepage: string;
seasonRequests: INewSeasonRequests[];
requestAll: boolean;
approved: boolean;
requested: boolean;
available: boolean;
plexUrl: string;
embyUrl: string;
jellyfinUrl: string;
quality: string;
firstSeason: boolean;
latestSeason: boolean;
theTvDbId: string;
subscribed: boolean;
showSubscribe: boolean;
fullyAvailable: boolean;
backdropPath: string;
partlyAvailable: boolean;
background: any;
open: boolean; // THIS IS FOR THE UI
}
export interface ITvRequestViewModelV2 extends ITvRequestViewModelBase {
theMovieDbId: number;
} | firstSeason: boolean;
latestSeason: boolean;
languageProfile: number | undefined;
seasons: ISeasonsViewModel[];
}
export interface ISeasonsViewModel {
seasonNumber: number;
episodes: IEpisodesViewModel[];
}
export interface IEpisodesViewModel {
episodeNumber: number;
} |
export interface ITvRequestViewModelBase extends BaseRequestOptions {
requestAll: boolean; | random_line_split |
Iterable.js |
/*
* Iterable is used internally to provide functional style methods to indexed collections.
* The contract a collection must follow to inherit from Iterable is:
* - Exposing a property named items, the Array representation of the collection.
* - Either specify a fromArray method or override _createNew so that new collections
* can be built from an existing instance.
*
* None of the Iterable methods mutates the collection.
*
* For any method accepting a callback or predicate as a parameter, you need to ensure
* the value of 'this' inside the method is either bound or not used.
*/
var Iterable = function() {};
/*
* The current Array representation of the collection.
* It should be considered read-only and never modified directly.
*/
Iterable.prototype.items = null;
/*
* Returns the number of items in this collection.
*/
Iterable.prototype.size = function() {
return this.items.length;
};
/*
* Indicates whether this collection is empty.
*/
Iterable.prototype.isEmpty = function() {
return this.size() == 0;
};
/*
* Returns the item located at the specified index.
*/
Iterable.prototype.itemAt = function(index) {
return this.items[index];
};
/*
* Returns the first item of this collection.
*/
Iterable.prototype.first = function() {
return this.items[0];
};
/*
* Returns the last item of this collection.
*/
Iterable.prototype.last = function() {
return this.items[this.items.length - 1];
};
/*
* Applies a function to all items of this collection.
*/
Iterable.prototype.each = function(callback) {
for (var i = 0, length = this.items.length; i < length; i++) {
this._invoke(callback, i, i);
}
};
/*
* Builds a new collection by applying a function to all items of this collection.
*
* ArrayMap will require that you return [key, value] tuples to create a new ArrayMap.
*
* Note: If you intended to invoke filter and map in succession
* you can merge these operations into just one map() call
* by returning Collection.NOT_MAPPED for the items that shouldn't be in the final collection.
*/
Iterable.prototype.map = function(callback) {
var result = [];
for (var i = 0, length = this.items.length; i < length; i++) {
var mapped = this._invoke(callback, i);
if (mapped != Collection.NOT_MAPPED) result.push(mapped);
}
return this._createNew(result);
};
Collection.NOT_MAPPED = {};
/*
* Builds a List of the extracted properties of this collection of objects.
* This is a special case of map(). The property can be arbitrarily nested.
*/
Iterable.prototype.pluck = function(property) {
var doPluck = getPluckFunction(property);
var result = [];
for (var i = 0, length = this.items.length; i < length; i++) {
result.push(doPluck(this.items[i]));
}
return List.fromArray(result);
}
/*
* Selects all items of this collection which satisfy a predicate.
*/
Iterable.prototype.filter = function(predicate) {
var result = [];
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) result.push(this.items[i]);
}
return this._createNew(result);
};
/*
* Counts the number of items in this collection which satisfy a predicate.
*/
Iterable.prototype.count = function(predicate) {
var count = 0;
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) count++;
}
return count;
};
/*
* Finds the first item of the collection satisfying a predicate, if any.
*/
Iterable.prototype.find = function(predicate) {
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) return this.items[i];
}
return undefined;
};
/*
* Finds the first item of this collection of objects that owns a property set to a given value.
* This is a special case of find(). The property can be arbitrarily nested.
*/
Iterable.prototype.findBy = function(property, value) {
var doPluck = getPluckFunction(property);
for (var i = 0, length = this.items.length; i < length; i++) {
if (doPluck(this.items[i]) === value) return this.items[i];
}
return undefined;
};
/*
* Tests whether a predicate holds for some of the items of this collection.
*/
Iterable.prototype.some = function(predicate) {
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) return true;
}
return false;
};
/*
* Tests whether a predicate holds for all items of this collection.
*/
Iterable.prototype.every = function(predicate) {
for (var i = 0, length = this.items.length; i < length; i++) {
if (!this._invoke(predicate, i)) return false;
}
return true;
};
/*
* Partitions items in fixed size collections.
*/
Iterable.prototype.grouped = function(size) {
var groups = [];
var current = [];
for (var i = 0, length = this.items.length; i < length; i++) {
current.push(this.items[i]);
if ((current.length === size) || (i === length - 1)) {
groups[groups.length] = this._createNew(current);
current = [];
}
}
return List.fromArray(groups);
};
/*
* Partitions this collection into a map of Lists according to a discriminator function.
*/
Iterable.prototype.groupBy = function(discriminator) {
var groups = Map();
for (var i = 0, length = this.items.length; i < length; i++) {
var item = this.items[i];
var itemGroup = this._invoke(discriminator, i);
var group = groups.get(itemGroup);
if (!group) groups.put(itemGroup, List());
groups.get(itemGroup).add(item);
}
return groups;
};
/*
* Folds the items of this collection using the specified operator.
*/
Iterable.prototype.fold = function(initialValue, operator) {
var result = initialValue;
for (var i = 0, length = this.items.length; i < length; i++) {
result = this._invoke(operator, i, result);
}
return result;
};
/*
* Partitions this collection in two collections according to a predicate.
* The first element of the returned Array contains the items that satisfied the predicate.
*/
Iterable.prototype.partition = function(predicate) {
var yes = [], no = [];
for (var i = 0, length = this.items.length; i < length; i++) {
(this._invoke(predicate, i) ? yes : no).push(this.items[i]);
}
return [this._createNew(yes), this._createNew(no)];
};
/*
* Selects all items except the first n ones.
*/
Iterable.prototype.drop = function(n) {
n = Math.min(n, this.items.length);
return this._createNew(this.items.slice(n));
};
/*
* Selects all items except the last n ones.
*/
Iterable.prototype.dropRight = function(n) {
n = Math.min(n, this.items.length);
return this._createNew(this.items.slice(0, this.items.length - n));
};
/*
* Drops items till the predicate no longer hold.
*/
Iterable.prototype.dropWhile = function(predicate) {
var result = this.items.slice();
var index = 0;
while (result.length && this._invoke(predicate, index)) {
result.shift();
index++;
}
return this._createNew(result);
};
/*
* Selects the first n items.
*/
Iterable.prototype.take = function(n) {
n = Math.min(n, this.items.length);
return this._createNew(this.items.slice(0, n));
};
/*
* Selects the last n items.
*/
Iterable.prototype.takeRight = function(n) {
n = Math.min(n, this.items.length);
return this._createNew(this.items.slice(-n));
};
/*
* Selects items till the predicate no longer hold.
*/
Iterable.prototype.takeWhile = function(predicate) {
var result = [];
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) result.push(this.items[i]);
else break;
}
return this._createNew(result);
};
/*
* Returns a new collection with the items in reversed order.
*/
Iterable.prototype.reverse = function() {
return this._createNew(this.items.slice().reverse());
};
/*
* Selects an interval of items.
*/
Iterable.prototype.slice = function(start, end) {
return this._createNew(this.items.slice(start, end));
};
/*
* Returns a new sorted collection.
* The sort is stable.
*
* An option Object can be passed to modify the sort behavior.
* All options are compatible with each other.
* The supported options are:
*
* ignoreCase: Assuming strings are going to be sorted, ignore their cases. Defaults to false.
*
* localCompare: Assuming strings are going to be sorted,
* handle locale-specific characters correctly at the cost of reduced sort speed. Defaults to false.
*
* by: Assuming objects are being sorted, a String (See pluck) or Function either pointing to or computing the value
* that should be used for the sort. Defaults to null.
*
* reverse: Reverse the sort. Defaults to false.
*/
Iterable.prototype.sorted = function(options) {
var o = options || {},
by = o.by !== undefined ? o.by : null,
localeCompare = o.localeCompare !== undefined ? o.localeCompare : false,
ignoreCase = o.ignoreCase !== undefined ? o.ignoreCase : false,
reverse = o.reverse !== undefined ? o.reverse : false,
result = [],
mapped = [],
missingData = [],
sortFunction,
item;
if (isString(by)) by = getPluckFunction(by);
for (var i = 0, length = this.items.length; i < length; i++) {
item = this.items[i];
if (by && item)
item = by(item);
if (item === null || item === undefined || item === '') {
missingData.push(item);
continue;
}
if (ignoreCase)
item = item.toUpperCase();
mapped.push({
index: i,
value: item
});
}
if (localeCompare) {
sortFunction = function(a, b) {
if (a.value !== b.value) {
return a.value.localeCompare(b.value);
}
return a.index < b.index ? -1 : 1;
};
}
else {
sortFunction = function(a, b) {
if (a.value !== b.value) {
return (a.value < b.value) ? -1 : 1;
}
return a.index < b.index ? -1 : 1;
};
}
mapped.sort(sortFunction);
for (var i = 0, length = mapped.length; i < length; i++) {
result.push(this.items[mapped[i].index]);
}
if (missingData.length)
result = result.concat(missingData);
if (reverse)
result.reverse();
return this._createNew(result);
};
/*
* Displays all items of this collection as a string.
*/
Iterable.prototype.mkString = function(start, sep, end) {
return start + this.items.join(sep) + end;
};
/*
* Converts this collection to a List.
*/
Iterable.prototype.toList = function() {
return List.fromArray(this.items);
};
/*
* Converts this collection to an Array.
* If you do not require a new Array instance, consider using the items property instead.
*/
Iterable.prototype.toArray = function() {
return cloneArray(this.items);
};
/*
* Creates a (shallow) copy of this collection.
*/
Iterable.prototype.clone = function() {
return this._createNew(this.items.slice());
};
Iterable.prototype.toString = function() {
return this.constructor.typeName + '(' + this.items.join(', ') + ')';
};
/**
* Creates a new Iterable of the same kind but with a specific set of items.
* The default implementation simply delegates to the type constructor's fromArray factory method.
* Some iterables may override this method to better prepare the newly created instance.
*/
Iterable.prototype._createNew = function(array) {
return this.constructor.fromArray(array);
};
/**
* Invokes a function for a particular item index.
* This indirection is required as different clients of Iterable may require
* the callbacks and predicates to be called with a specific signature. For instance,
* an associative collection would invoke the function with a key and a value as parameters.
* This default implementation simply call the function with the current item.
*/
Iterable.prototype._invoke = function(func, forIndex, extraParam) {
return func(this.items[forIndex], extraParam);
};
var getPluckFunction = function(property) {
var propertyChain = property.split('.');
if (propertyChain.length == 1)
return function(item) {
return item[propertyChain[0]];
};
else
return function(item) {
var i = 0, currentContext = item, length = propertyChain.length;
while (i < length) |
return currentContext;
};
};
Collection.Iterable = Iterable; | {
if (currentContext == null && i != length) return undefined;
currentContext = currentContext[propertyChain[i]];
i++;
} | conditional_block |
Iterable.js | /*
* Iterable is used internally to provide functional style methods to indexed collections.
* The contract a collection must follow to inherit from Iterable is:
* - Exposing a property named items, the Array representation of the collection.
* - Either specify a fromArray method or override _createNew so that new collections
* can be built from an existing instance.
*
* None of the Iterable methods mutates the collection.
*
* For any method accepting a callback or predicate as a parameter, you need to ensure
* the value of 'this' inside the method is either bound or not used.
*/
var Iterable = function() {};
/* | * The current Array representation of the collection.
* It should be considered read-only and never modified directly.
*/
Iterable.prototype.items = null;
/*
* Returns the number of items in this collection.
*/
Iterable.prototype.size = function() {
return this.items.length;
};
/*
* Indicates whether this collection is empty.
*/
Iterable.prototype.isEmpty = function() {
return this.size() == 0;
};
/*
* Returns the item located at the specified index.
*/
Iterable.prototype.itemAt = function(index) {
return this.items[index];
};
/*
* Returns the first item of this collection.
*/
Iterable.prototype.first = function() {
return this.items[0];
};
/*
* Returns the last item of this collection.
*/
Iterable.prototype.last = function() {
return this.items[this.items.length - 1];
};
/*
* Applies a function to all items of this collection.
*/
Iterable.prototype.each = function(callback) {
for (var i = 0, length = this.items.length; i < length; i++) {
this._invoke(callback, i, i);
}
};
/*
* Builds a new collection by applying a function to all items of this collection.
*
* ArrayMap will require that you return [key, value] tuples to create a new ArrayMap.
*
* Note: If you intended to invoke filter and map in succession
* you can merge these operations into just one map() call
* by returning Collection.NOT_MAPPED for the items that shouldn't be in the final collection.
*/
Iterable.prototype.map = function(callback) {
var result = [];
for (var i = 0, length = this.items.length; i < length; i++) {
var mapped = this._invoke(callback, i);
if (mapped != Collection.NOT_MAPPED) result.push(mapped);
}
return this._createNew(result);
};
Collection.NOT_MAPPED = {};
/*
* Builds a List of the extracted properties of this collection of objects.
* This is a special case of map(). The property can be arbitrarily nested.
*/
Iterable.prototype.pluck = function(property) {
var doPluck = getPluckFunction(property);
var result = [];
for (var i = 0, length = this.items.length; i < length; i++) {
result.push(doPluck(this.items[i]));
}
return List.fromArray(result);
}
/*
* Selects all items of this collection which satisfy a predicate.
*/
Iterable.prototype.filter = function(predicate) {
var result = [];
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) result.push(this.items[i]);
}
return this._createNew(result);
};
/*
* Counts the number of items in this collection which satisfy a predicate.
*/
Iterable.prototype.count = function(predicate) {
var count = 0;
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) count++;
}
return count;
};
/*
* Finds the first item of the collection satisfying a predicate, if any.
*/
Iterable.prototype.find = function(predicate) {
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) return this.items[i];
}
return undefined;
};
/*
* Finds the first item of this collection of objects that owns a property set to a given value.
* This is a special case of find(). The property can be arbitrarily nested.
*/
Iterable.prototype.findBy = function(property, value) {
var doPluck = getPluckFunction(property);
for (var i = 0, length = this.items.length; i < length; i++) {
if (doPluck(this.items[i]) === value) return this.items[i];
}
return undefined;
};
/*
* Tests whether a predicate holds for some of the items of this collection.
*/
Iterable.prototype.some = function(predicate) {
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) return true;
}
return false;
};
/*
* Tests whether a predicate holds for all items of this collection.
*/
Iterable.prototype.every = function(predicate) {
for (var i = 0, length = this.items.length; i < length; i++) {
if (!this._invoke(predicate, i)) return false;
}
return true;
};
/*
* Partitions items in fixed size collections.
*/
Iterable.prototype.grouped = function(size) {
var groups = [];
var current = [];
for (var i = 0, length = this.items.length; i < length; i++) {
current.push(this.items[i]);
if ((current.length === size) || (i === length - 1)) {
groups[groups.length] = this._createNew(current);
current = [];
}
}
return List.fromArray(groups);
};
/*
* Partitions this collection into a map of Lists according to a discriminator function.
*/
Iterable.prototype.groupBy = function(discriminator) {
var groups = Map();
for (var i = 0, length = this.items.length; i < length; i++) {
var item = this.items[i];
var itemGroup = this._invoke(discriminator, i);
var group = groups.get(itemGroup);
if (!group) groups.put(itemGroup, List());
groups.get(itemGroup).add(item);
}
return groups;
};
/*
* Folds the items of this collection using the specified operator.
*/
Iterable.prototype.fold = function(initialValue, operator) {
var result = initialValue;
for (var i = 0, length = this.items.length; i < length; i++) {
result = this._invoke(operator, i, result);
}
return result;
};
/*
* Partitions this collection in two collections according to a predicate.
* The first element of the returned Array contains the items that satisfied the predicate.
*/
Iterable.prototype.partition = function(predicate) {
var yes = [], no = [];
for (var i = 0, length = this.items.length; i < length; i++) {
(this._invoke(predicate, i) ? yes : no).push(this.items[i]);
}
return [this._createNew(yes), this._createNew(no)];
};
/*
* Selects all items except the first n ones.
*/
Iterable.prototype.drop = function(n) {
n = Math.min(n, this.items.length);
return this._createNew(this.items.slice(n));
};
/*
* Selects all items except the last n ones.
*/
Iterable.prototype.dropRight = function(n) {
n = Math.min(n, this.items.length);
return this._createNew(this.items.slice(0, this.items.length - n));
};
/*
* Drops items till the predicate no longer hold.
*/
Iterable.prototype.dropWhile = function(predicate) {
var result = this.items.slice();
var index = 0;
while (result.length && this._invoke(predicate, index)) {
result.shift();
index++;
}
return this._createNew(result);
};
/*
* Selects the first n items.
*/
Iterable.prototype.take = function(n) {
n = Math.min(n, this.items.length);
return this._createNew(this.items.slice(0, n));
};
/*
* Selects the last n items.
*/
Iterable.prototype.takeRight = function(n) {
n = Math.min(n, this.items.length);
return this._createNew(this.items.slice(-n));
};
/*
* Selects items till the predicate no longer hold.
*/
Iterable.prototype.takeWhile = function(predicate) {
var result = [];
for (var i = 0, length = this.items.length; i < length; i++) {
if (this._invoke(predicate, i)) result.push(this.items[i]);
else break;
}
return this._createNew(result);
};
/*
* Returns a new collection with the items in reversed order.
*/
Iterable.prototype.reverse = function() {
return this._createNew(this.items.slice().reverse());
};
/*
* Selects an interval of items.
*/
Iterable.prototype.slice = function(start, end) {
return this._createNew(this.items.slice(start, end));
};
/*
* Returns a new sorted collection.
* The sort is stable.
*
* An option Object can be passed to modify the sort behavior.
* All options are compatible with each other.
* The supported options are:
*
* ignoreCase: Assuming strings are going to be sorted, ignore their cases. Defaults to false.
*
* localCompare: Assuming strings are going to be sorted,
* handle locale-specific characters correctly at the cost of reduced sort speed. Defaults to false.
*
* by: Assuming objects are being sorted, a String (See pluck) or Function either pointing to or computing the value
* that should be used for the sort. Defaults to null.
*
* reverse: Reverse the sort. Defaults to false.
*/
Iterable.prototype.sorted = function(options) {
var o = options || {},
by = o.by !== undefined ? o.by : null,
localeCompare = o.localeCompare !== undefined ? o.localeCompare : false,
ignoreCase = o.ignoreCase !== undefined ? o.ignoreCase : false,
reverse = o.reverse !== undefined ? o.reverse : false,
result = [],
mapped = [],
missingData = [],
sortFunction,
item;
if (isString(by)) by = getPluckFunction(by);
for (var i = 0, length = this.items.length; i < length; i++) {
item = this.items[i];
if (by && item)
item = by(item);
if (item === null || item === undefined || item === '') {
missingData.push(item);
continue;
}
if (ignoreCase)
item = item.toUpperCase();
mapped.push({
index: i,
value: item
});
}
if (localeCompare) {
sortFunction = function(a, b) {
if (a.value !== b.value) {
return a.value.localeCompare(b.value);
}
return a.index < b.index ? -1 : 1;
};
}
else {
sortFunction = function(a, b) {
if (a.value !== b.value) {
return (a.value < b.value) ? -1 : 1;
}
return a.index < b.index ? -1 : 1;
};
}
mapped.sort(sortFunction);
for (var i = 0, length = mapped.length; i < length; i++) {
result.push(this.items[mapped[i].index]);
}
if (missingData.length)
result = result.concat(missingData);
if (reverse)
result.reverse();
return this._createNew(result);
};
/*
* Displays all items of this collection as a string.
*/
Iterable.prototype.mkString = function(start, sep, end) {
return start + this.items.join(sep) + end;
};
/*
* Converts this collection to a List.
*/
Iterable.prototype.toList = function() {
return List.fromArray(this.items);
};
/*
* Converts this collection to an Array.
* If you do not require a new Array instance, consider using the items property instead.
*/
Iterable.prototype.toArray = function() {
return cloneArray(this.items);
};
/*
* Creates a (shallow) copy of this collection.
*/
Iterable.prototype.clone = function() {
return this._createNew(this.items.slice());
};
Iterable.prototype.toString = function() {
return this.constructor.typeName + '(' + this.items.join(', ') + ')';
};
/**
* Creates a new Iterable of the same kind but with a specific set of items.
* The default implementation simply delegates to the type constructor's fromArray factory method.
* Some iterables may override this method to better prepare the newly created instance.
*/
Iterable.prototype._createNew = function(array) {
return this.constructor.fromArray(array);
};
/**
* Invokes a function for a particular item index.
* This indirection is required as different clients of Iterable may require
* the callbacks and predicates to be called with a specific signature. For instance,
* an associative collection would invoke the function with a key and a value as parameters.
* This default implementation simply call the function with the current item.
*/
Iterable.prototype._invoke = function(func, forIndex, extraParam) {
return func(this.items[forIndex], extraParam);
};
var getPluckFunction = function(property) {
var propertyChain = property.split('.');
if (propertyChain.length == 1)
return function(item) {
return item[propertyChain[0]];
};
else
return function(item) {
var i = 0, currentContext = item, length = propertyChain.length;
while (i < length) {
if (currentContext == null && i != length) return undefined;
currentContext = currentContext[propertyChain[i]];
i++;
}
return currentContext;
};
};
Collection.Iterable = Iterable; | random_line_split | |
calibrator.py | import todsynth
import os
import numpy
import json
import pandas
class | ( object ):
'''
A todsynth.calibrator object is a container that stores coefficients
that transform RAW dac units to physical units for a given TOD.
'''
# Calibrator description.
#000000000000000000000000000000000000000000000000000000000000000000000000
name = ""
description = ""
calType = ""
# Information stored in the form of a dictionary. Careful not to abuse
# of this in the sense of using it to process data!
info = {}
#000000000000000000000000000000000000000000000000000000000000000000000000
# Calibration coefficients
coeffs = numpy.empty(0)
# Detector index to Unique Identifier array
__uid = numpy.empty(0)
def __init__( self ):
'''
self.name = name
self.description = descrp
self.calType = calType
'''
def setCoeffs( self, c , uid=None ):
'''
Set calibrator coefficients to c.
'''
# Perform numpy.copy() to avoid cross referencing stuff
self.__coeffs = numpy.copy( c )
if uid is not None:
self.__uid = numpy.copy(uid)
self.coeffs = self.coeffs[ self.__uid ]
else:
self.__uid = numpy.arange( len( self.coeffs ) )
def getCoeffs( self ):
'''
Get a *copy* of the coefficients array.
'''
return numpy.copy( self.coeffs )
def updateInfo( self, prop, value ):
'''
Update calibrator info with a pair of prop : value
'''
self.info.update( { 'prop' : value } )
def storeInPath( self , outPath ):
'''
Stores the calibrator in JSON format at the specified path.
'''
# Serialize this object
data = {
'coefficients' : self.__coeffs,
'uid' : self.__uid }
# Create PANDAS DataFrame out data
df = pandas.DataFrame( data )
# Save DataFrame to HDF5 format
df.to_csv( os.path.join(
outPath, "%s.%s.cal" % (self.name,self.calType) ),
index=False,
sep=' ',
header=True )
@classmethod
def readFromPath( cls, systemPath ):
'''
'''
self = cls()
name,caltype,_ = os.path.basename( systemPath ).split('.')
self.name = name
self.calType = caltype
self.description = ''
# Load file
calDF = pandas.read_csv(
systemPath,
header=0,
names=['coefficients', 'uid'],
delimiter=' ' )
self.setCoeffs( calDF['coefficients'], uid = calDF['uid'] )
return self
| Calibrator | identifier_name |
calibrator.py | import todsynth
import os
import numpy
import json
import pandas
class Calibrator( object ):
| '''
A todsynth.calibrator object is a container that stores coefficients
that transform RAW dac units to physical units for a given TOD.
'''
# Calibrator description.
#000000000000000000000000000000000000000000000000000000000000000000000000
name = ""
description = ""
calType = ""
# Information stored in the form of a dictionary. Careful not to abuse
# of this in the sense of using it to process data!
info = {}
#000000000000000000000000000000000000000000000000000000000000000000000000
# Calibration coefficients
coeffs = numpy.empty(0)
# Detector index to Unique Identifier array
__uid = numpy.empty(0)
def __init__( self ):
'''
self.name = name
self.description = descrp
self.calType = calType
'''
def setCoeffs( self, c , uid=None ):
'''
Set calibrator coefficients to c.
'''
# Perform numpy.copy() to avoid cross referencing stuff
self.__coeffs = numpy.copy( c )
if uid is not None:
self.__uid = numpy.copy(uid)
self.coeffs = self.coeffs[ self.__uid ]
else:
self.__uid = numpy.arange( len( self.coeffs ) )
def getCoeffs( self ):
'''
Get a *copy* of the coefficients array.
'''
return numpy.copy( self.coeffs )
def updateInfo( self, prop, value ):
'''
Update calibrator info with a pair of prop : value
'''
self.info.update( { 'prop' : value } )
def storeInPath( self , outPath ):
'''
Stores the calibrator in JSON format at the specified path.
'''
# Serialize this object
data = {
'coefficients' : self.__coeffs,
'uid' : self.__uid }
# Create PANDAS DataFrame out data
df = pandas.DataFrame( data )
# Save DataFrame to HDF5 format
df.to_csv( os.path.join(
outPath, "%s.%s.cal" % (self.name,self.calType) ),
index=False,
sep=' ',
header=True )
@classmethod
def readFromPath( cls, systemPath ):
'''
'''
self = cls()
name,caltype,_ = os.path.basename( systemPath ).split('.')
self.name = name
self.calType = caltype
self.description = ''
# Load file
calDF = pandas.read_csv(
systemPath,
header=0,
names=['coefficients', 'uid'],
delimiter=' ' )
self.setCoeffs( calDF['coefficients'], uid = calDF['uid'] )
return self | identifier_body | |
calibrator.py | import todsynth
import os
import numpy
import json
import pandas
class Calibrator( object ):
'''
A todsynth.calibrator object is a container that stores coefficients
that transform RAW dac units to physical units for a given TOD.
'''
# Calibrator description.
#000000000000000000000000000000000000000000000000000000000000000000000000
name = ""
description = ""
calType = ""
# Information stored in the form of a dictionary. Careful not to abuse
# of this in the sense of using it to process data!
info = {}
#000000000000000000000000000000000000000000000000000000000000000000000000
# Calibration coefficients
coeffs = numpy.empty(0)
# Detector index to Unique Identifier array
__uid = numpy.empty(0)
def __init__( self ):
'''
self.name = name
self.description = descrp
self.calType = calType
'''
def setCoeffs( self, c , uid=None ):
'''
Set calibrator coefficients to c.
'''
# Perform numpy.copy() to avoid cross referencing stuff
self.__coeffs = numpy.copy( c )
if uid is not None:
self.__uid = numpy.copy(uid)
self.coeffs = self.coeffs[ self.__uid ]
else:
|
def getCoeffs( self ):
'''
Get a *copy* of the coefficients array.
'''
return numpy.copy( self.coeffs )
def updateInfo( self, prop, value ):
'''
Update calibrator info with a pair of prop : value
'''
self.info.update( { 'prop' : value } )
def storeInPath( self , outPath ):
'''
Stores the calibrator in JSON format at the specified path.
'''
# Serialize this object
data = {
'coefficients' : self.__coeffs,
'uid' : self.__uid }
# Create PANDAS DataFrame out data
df = pandas.DataFrame( data )
# Save DataFrame to HDF5 format
df.to_csv( os.path.join(
outPath, "%s.%s.cal" % (self.name,self.calType) ),
index=False,
sep=' ',
header=True )
@classmethod
def readFromPath( cls, systemPath ):
'''
'''
self = cls()
name,caltype,_ = os.path.basename( systemPath ).split('.')
self.name = name
self.calType = caltype
self.description = ''
# Load file
calDF = pandas.read_csv(
systemPath,
header=0,
names=['coefficients', 'uid'],
delimiter=' ' )
self.setCoeffs( calDF['coefficients'], uid = calDF['uid'] )
return self
| self.__uid = numpy.arange( len( self.coeffs ) ) | conditional_block |
calibrator.py | import todsynth
import os
import numpy
import json
import pandas
class Calibrator( object ):
'''
A todsynth.calibrator object is a container that stores coefficients
that transform RAW dac units to physical units for a given TOD.
'''
# Calibrator description.
#000000000000000000000000000000000000000000000000000000000000000000000000
name = ""
description = ""
calType = ""
# Information stored in the form of a dictionary. Careful not to abuse
# of this in the sense of using it to process data!
info = {}
#000000000000000000000000000000000000000000000000000000000000000000000000
# Calibration coefficients
coeffs = numpy.empty(0)
# Detector index to Unique Identifier array
__uid = numpy.empty(0)
def __init__( self ):
'''
self.name = name
self.description = descrp
self.calType = calType
'''
def setCoeffs( self, c , uid=None ):
'''
Set calibrator coefficients to c.
'''
# Perform numpy.copy() to avoid cross referencing stuff
self.__coeffs = numpy.copy( c )
if uid is not None:
self.__uid = numpy.copy(uid)
self.coeffs = self.coeffs[ self.__uid ]
else:
self.__uid = numpy.arange( len( self.coeffs ) )
def getCoeffs( self ):
'''
Get a *copy* of the coefficients array.
'''
return numpy.copy( self.coeffs )
def updateInfo( self, prop, value ):
'''
Update calibrator info with a pair of prop : value
'''
self.info.update( { 'prop' : value } )
def storeInPath( self , outPath ):
'''
Stores the calibrator in JSON format at the specified path.
'''
# Serialize this object
data = {
'coefficients' : self.__coeffs,
'uid' : self.__uid }
# Create PANDAS DataFrame out data
df = pandas.DataFrame( data )
# Save DataFrame to HDF5 format
df.to_csv( os.path.join(
outPath, "%s.%s.cal" % (self.name,self.calType) ),
index=False, | @classmethod
def readFromPath( cls, systemPath ):
'''
'''
self = cls()
name,caltype,_ = os.path.basename( systemPath ).split('.')
self.name = name
self.calType = caltype
self.description = ''
# Load file
calDF = pandas.read_csv(
systemPath,
header=0,
names=['coefficients', 'uid'],
delimiter=' ' )
self.setCoeffs( calDF['coefficients'], uid = calDF['uid'] )
return self | sep=' ',
header=True )
| random_line_split |
issue_tracker_service.py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides a layer of abstraction for the issue tracker API."""
import logging
from apiclient import discovery
from apiclient import errors
import httplib2
_DISCOVERY_URI = ('https://monorail-prod.appspot.com'
'/_ah/api/discovery/v1/apis/{api}/{apiVersion}/rest')
class IssueTrackerService(object):
"""Class for updating bug issues."""
def __init__(self, http=None, additional_credentials=None):
"""Initializes an object for adding and updating bugs on the issue tracker.
This object can be re-used to make multiple requests without calling
apliclient.discovery.build multiple times.
This class makes requests to the Monorail API.
API explorer: https://goo.gl/xWd0dX
Args:
http: A Http object to pass to request.execute; this should be an
Http object that's already authenticated via OAuth2.
additional_credentials: A credentials object, e.g. an instance of
oauth2client.client.SignedJwtAssertionCredentials. This includes
the email and secret key of a service account.
"""
self._http = http or httplib2.Http()
if additional_credentials:
additional_credentials.authorize(self._http)
self._service = discovery.build(
'monorail', 'v1', discoveryServiceUrl=_DISCOVERY_URI,
http=self._http)
def AddBugComment(self, bug_id, comment, status=None, cc_list=None,
merge_issue=None, labels=None, owner=None):
"""Adds a comment with the bisect results to the given bug.
Args:
bug_id: Bug ID of the issue to update.
comment: Bisect results information.
status: A string status for bug, e.g. Assigned, Duplicate, WontFix, etc.
cc_list: List of email addresses of users to add to the CC list.
merge_issue: ID of the issue to be merged into; specifying this option
implies that the status should be "Duplicate".
labels: List of labels for bug.
owner: Owner of the bug.
Returns:
True if successful, False otherwise.
"""
if not bug_id or bug_id < 0:
return False
body = {'content': comment}
updates = {}
# Mark issue as duplicate when relevant bug ID is found in the datastore.
# Avoid marking an issue as duplicate of itself.
if merge_issue and int(merge_issue) != bug_id:
status = 'Duplicate'
updates['mergedInto'] = merge_issue
logging.info('Bug %s marked as duplicate of %s', bug_id, merge_issue)
if status:
updates['status'] = status
if cc_list:
updates['cc'] = cc_list
if labels:
updates['labels'] = labels
if owner:
updates['owner'] = owner
body['updates'] = updates
return self._MakeCommentRequest(bug_id, body)
def List(self, **kwargs):
"""Make a request to the issue tracker to list bugs."""
request = self._service.issues().list(projectId='chromium', **kwargs)
return self._ExecuteRequest(request)
def _MakeCommentRequest(self, bug_id, body):
"""Make a request to the issue tracker to update a bug."""
request = self._service.issues().comments().insert(
projectId='chromium',
issueId=bug_id,
body=body)
response = self._ExecuteRequest(request)
if not response:
logging.error('Error updating bug %s with body %s', bug_id, body)
return False
return True
def NewBug(self, title, description, labels=None, components=None,
owner=None):
"""Creates a new bug.
Args:
title: The short title text of the bug.
description: The body text for the bug.
labels: Starting labels for the bug.
components: Starting components for the bug.
owner: Starting owner account name.
Returns:
The new bug ID if successfully created, or None.
"""
body = {
'title': title,
'summary': title,
'description': description,
'labels': labels or [],
'components': components or [],
'status': 'Assigned',
}
if owner:
body['owner'] = {'name': owner}
return self._MakeCreateRequest(body)
def _MakeCreateRequest(self, body):
"""Makes a request to create a new bug.
Args:
body: The request body parameter dictionary.
Returns:
A bug ID if successful, or None otherwise.
"""
request = self._service.issues().insert(projectId='chromium', body=body)
response = self._ExecuteRequest(request)
if response and 'id' in response:
return response['id']
return None
def GetLastBugCommentsAndTimestamp(self, bug_id):
"""Gets last updated comments and timestamp in the given bug.
Args:
bug_id: Bug ID of the issue to update.
Returns:
A dictionary with last comment and timestamp, or None on failure.
"""
if not bug_id or bug_id < 0:
|
response = self._MakeGetCommentsRequest(bug_id)
if response and all(v in response.keys()
for v in ['totalResults', 'items']):
bug_comments = response.get('items')[response.get('totalResults') - 1]
if bug_comments.get('content') and bug_comments.get('published'):
return {
'comment': bug_comments.get('content'),
'timestamp': bug_comments.get('published')
}
return None
def _MakeGetCommentsRequest(self, bug_id):
"""Make a request to the issue tracker to get comments in the bug."""
# TODO (prasadv): By default the max number of comments retrieved in
# one request is 100. Since bisect-fyi jobs may have more then 100
# comments for now we set this maxResults count as 10000.
# Remove this max count once we find a way to clear old comments
# on FYI issues.
request = self._service.issues().comments().list(
projectId='chromium',
issueId=bug_id,
maxResults=10000)
return self._ExecuteRequest(request)
def _ExecuteRequest(self, request):
"""Make a request to the issue tracker.
Args:
request: The request object, which has a execute method.
Returns:
The response if there was one, or else None.
"""
try:
response = request.execute(http=self._http)
return response
except errors.HttpError as e:
logging.error(e)
return None
| return None | conditional_block |
issue_tracker_service.py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides a layer of abstraction for the issue tracker API."""
import logging
from apiclient import discovery
from apiclient import errors
import httplib2
_DISCOVERY_URI = ('https://monorail-prod.appspot.com'
'/_ah/api/discovery/v1/apis/{api}/{apiVersion}/rest')
class IssueTrackerService(object):
"""Class for updating bug issues."""
def __init__(self, http=None, additional_credentials=None):
"""Initializes an object for adding and updating bugs on the issue tracker.
This object can be re-used to make multiple requests without calling
apliclient.discovery.build multiple times.
This class makes requests to the Monorail API.
API explorer: https://goo.gl/xWd0dX
Args:
http: A Http object to pass to request.execute; this should be an
Http object that's already authenticated via OAuth2.
additional_credentials: A credentials object, e.g. an instance of
oauth2client.client.SignedJwtAssertionCredentials. This includes
the email and secret key of a service account.
"""
self._http = http or httplib2.Http()
if additional_credentials:
additional_credentials.authorize(self._http)
self._service = discovery.build(
'monorail', 'v1', discoveryServiceUrl=_DISCOVERY_URI,
http=self._http)
def AddBugComment(self, bug_id, comment, status=None, cc_list=None,
merge_issue=None, labels=None, owner=None):
"""Adds a comment with the bisect results to the given bug.
Args:
bug_id: Bug ID of the issue to update.
comment: Bisect results information.
status: A string status for bug, e.g. Assigned, Duplicate, WontFix, etc.
cc_list: List of email addresses of users to add to the CC list.
merge_issue: ID of the issue to be merged into; specifying this option
implies that the status should be "Duplicate".
labels: List of labels for bug.
owner: Owner of the bug.
Returns:
True if successful, False otherwise.
"""
if not bug_id or bug_id < 0:
return False
body = {'content': comment}
updates = {}
# Mark issue as duplicate when relevant bug ID is found in the datastore.
# Avoid marking an issue as duplicate of itself.
if merge_issue and int(merge_issue) != bug_id:
status = 'Duplicate'
updates['mergedInto'] = merge_issue
logging.info('Bug %s marked as duplicate of %s', bug_id, merge_issue)
if status:
updates['status'] = status
if cc_list:
updates['cc'] = cc_list
if labels:
updates['labels'] = labels
if owner:
updates['owner'] = owner
body['updates'] = updates
return self._MakeCommentRequest(bug_id, body)
def List(self, **kwargs):
"""Make a request to the issue tracker to list bugs."""
request = self._service.issues().list(projectId='chromium', **kwargs)
return self._ExecuteRequest(request)
def _MakeCommentRequest(self, bug_id, body):
"""Make a request to the issue tracker to update a bug."""
request = self._service.issues().comments().insert(
projectId='chromium',
issueId=bug_id,
body=body)
response = self._ExecuteRequest(request)
if not response:
logging.error('Error updating bug %s with body %s', bug_id, body)
return False
return True
def NewBug(self, title, description, labels=None, components=None,
owner=None):
"""Creates a new bug.
Args:
title: The short title text of the bug.
description: The body text for the bug.
labels: Starting labels for the bug.
components: Starting components for the bug.
owner: Starting owner account name.
Returns:
The new bug ID if successfully created, or None.
"""
body = {
'title': title,
'summary': title,
'description': description,
'labels': labels or [],
'components': components or [],
'status': 'Assigned',
}
if owner:
body['owner'] = {'name': owner}
return self._MakeCreateRequest(body)
def _MakeCreateRequest(self, body):
"""Makes a request to create a new bug.
Args:
body: The request body parameter dictionary.
Returns:
A bug ID if successful, or None otherwise.
"""
request = self._service.issues().insert(projectId='chromium', body=body)
response = self._ExecuteRequest(request)
if response and 'id' in response:
return response['id']
return None
def GetLastBugCommentsAndTimestamp(self, bug_id):
"""Gets last updated comments and timestamp in the given bug.
Args:
bug_id: Bug ID of the issue to update.
Returns:
A dictionary with last comment and timestamp, or None on failure.
"""
if not bug_id or bug_id < 0:
return None
response = self._MakeGetCommentsRequest(bug_id)
if response and all(v in response.keys()
for v in ['totalResults', 'items']):
bug_comments = response.get('items')[response.get('totalResults') - 1]
if bug_comments.get('content') and bug_comments.get('published'):
return {
'comment': bug_comments.get('content'),
'timestamp': bug_comments.get('published')
}
return None
def _MakeGetCommentsRequest(self, bug_id):
"""Make a request to the issue tracker to get comments in the bug."""
# TODO (prasadv): By default the max number of comments retrieved in
# one request is 100. Since bisect-fyi jobs may have more then 100
# comments for now we set this maxResults count as 10000.
# Remove this max count once we find a way to clear old comments
# on FYI issues.
request = self._service.issues().comments().list(
projectId='chromium',
issueId=bug_id,
maxResults=10000)
return self._ExecuteRequest(request)
def | (self, request):
"""Make a request to the issue tracker.
Args:
request: The request object, which has a execute method.
Returns:
The response if there was one, or else None.
"""
try:
response = request.execute(http=self._http)
return response
except errors.HttpError as e:
logging.error(e)
return None
| _ExecuteRequest | identifier_name |
issue_tracker_service.py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides a layer of abstraction for the issue tracker API."""
import logging
from apiclient import discovery
from apiclient import errors
import httplib2
_DISCOVERY_URI = ('https://monorail-prod.appspot.com'
'/_ah/api/discovery/v1/apis/{api}/{apiVersion}/rest')
class IssueTrackerService(object):
"""Class for updating bug issues."""
def __init__(self, http=None, additional_credentials=None):
"""Initializes an object for adding and updating bugs on the issue tracker.
This object can be re-used to make multiple requests without calling
apliclient.discovery.build multiple times.
This class makes requests to the Monorail API.
API explorer: https://goo.gl/xWd0dX
Args:
http: A Http object to pass to request.execute; this should be an
Http object that's already authenticated via OAuth2.
additional_credentials: A credentials object, e.g. an instance of
oauth2client.client.SignedJwtAssertionCredentials. This includes
the email and secret key of a service account.
"""
self._http = http or httplib2.Http()
if additional_credentials:
additional_credentials.authorize(self._http)
self._service = discovery.build(
'monorail', 'v1', discoveryServiceUrl=_DISCOVERY_URI,
http=self._http)
def AddBugComment(self, bug_id, comment, status=None, cc_list=None,
merge_issue=None, labels=None, owner=None):
"""Adds a comment with the bisect results to the given bug.
Args:
bug_id: Bug ID of the issue to update.
comment: Bisect results information.
status: A string status for bug, e.g. Assigned, Duplicate, WontFix, etc.
cc_list: List of email addresses of users to add to the CC list.
merge_issue: ID of the issue to be merged into; specifying this option
implies that the status should be "Duplicate".
labels: List of labels for bug.
owner: Owner of the bug.
Returns:
True if successful, False otherwise.
"""
if not bug_id or bug_id < 0:
return False
body = {'content': comment}
updates = {}
# Mark issue as duplicate when relevant bug ID is found in the datastore.
# Avoid marking an issue as duplicate of itself.
if merge_issue and int(merge_issue) != bug_id:
status = 'Duplicate'
updates['mergedInto'] = merge_issue
logging.info('Bug %s marked as duplicate of %s', bug_id, merge_issue)
if status:
updates['status'] = status
if cc_list:
updates['cc'] = cc_list
if labels:
updates['labels'] = labels
if owner:
updates['owner'] = owner
body['updates'] = updates
return self._MakeCommentRequest(bug_id, body)
def List(self, **kwargs):
"""Make a request to the issue tracker to list bugs."""
request = self._service.issues().list(projectId='chromium', **kwargs)
return self._ExecuteRequest(request)
def _MakeCommentRequest(self, bug_id, body):
"""Make a request to the issue tracker to update a bug."""
request = self._service.issues().comments().insert(
projectId='chromium',
issueId=bug_id,
body=body)
response = self._ExecuteRequest(request)
if not response:
logging.error('Error updating bug %s with body %s', bug_id, body)
return False
return True
def NewBug(self, title, description, labels=None, components=None,
owner=None):
"""Creates a new bug.
Args:
title: The short title text of the bug.
description: The body text for the bug.
labels: Starting labels for the bug.
components: Starting components for the bug.
owner: Starting owner account name.
Returns:
The new bug ID if successfully created, or None.
"""
body = {
'title': title,
'summary': title,
'description': description,
'labels': labels or [],
'components': components or [],
'status': 'Assigned',
}
if owner:
body['owner'] = {'name': owner}
return self._MakeCreateRequest(body)
def _MakeCreateRequest(self, body):
"""Makes a request to create a new bug.
Args:
body: The request body parameter dictionary.
Returns:
A bug ID if successful, or None otherwise.
"""
request = self._service.issues().insert(projectId='chromium', body=body)
response = self._ExecuteRequest(request)
if response and 'id' in response:
return response['id']
return None
def GetLastBugCommentsAndTimestamp(self, bug_id):
"""Gets last updated comments and timestamp in the given bug.
Args:
bug_id: Bug ID of the issue to update.
Returns:
A dictionary with last comment and timestamp, or None on failure.
"""
if not bug_id or bug_id < 0:
return None
response = self._MakeGetCommentsRequest(bug_id)
if response and all(v in response.keys()
for v in ['totalResults', 'items']):
bug_comments = response.get('items')[response.get('totalResults') - 1] | return {
'comment': bug_comments.get('content'),
'timestamp': bug_comments.get('published')
}
return None
def _MakeGetCommentsRequest(self, bug_id):
"""Make a request to the issue tracker to get comments in the bug."""
# TODO (prasadv): By default the max number of comments retrieved in
# one request is 100. Since bisect-fyi jobs may have more then 100
# comments for now we set this maxResults count as 10000.
# Remove this max count once we find a way to clear old comments
# on FYI issues.
request = self._service.issues().comments().list(
projectId='chromium',
issueId=bug_id,
maxResults=10000)
return self._ExecuteRequest(request)
def _ExecuteRequest(self, request):
"""Make a request to the issue tracker.
Args:
request: The request object, which has a execute method.
Returns:
The response if there was one, or else None.
"""
try:
response = request.execute(http=self._http)
return response
except errors.HttpError as e:
logging.error(e)
return None | if bug_comments.get('content') and bug_comments.get('published'): | random_line_split |
issue_tracker_service.py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides a layer of abstraction for the issue tracker API."""
import logging
from apiclient import discovery
from apiclient import errors
import httplib2
_DISCOVERY_URI = ('https://monorail-prod.appspot.com'
'/_ah/api/discovery/v1/apis/{api}/{apiVersion}/rest')
class IssueTrackerService(object):
"""Class for updating bug issues."""
def __init__(self, http=None, additional_credentials=None):
"""Initializes an object for adding and updating bugs on the issue tracker.
This object can be re-used to make multiple requests without calling
apliclient.discovery.build multiple times.
This class makes requests to the Monorail API.
API explorer: https://goo.gl/xWd0dX
Args:
http: A Http object to pass to request.execute; this should be an
Http object that's already authenticated via OAuth2.
additional_credentials: A credentials object, e.g. an instance of
oauth2client.client.SignedJwtAssertionCredentials. This includes
the email and secret key of a service account.
"""
self._http = http or httplib2.Http()
if additional_credentials:
additional_credentials.authorize(self._http)
self._service = discovery.build(
'monorail', 'v1', discoveryServiceUrl=_DISCOVERY_URI,
http=self._http)
def AddBugComment(self, bug_id, comment, status=None, cc_list=None,
merge_issue=None, labels=None, owner=None):
"""Adds a comment with the bisect results to the given bug.
Args:
bug_id: Bug ID of the issue to update.
comment: Bisect results information.
status: A string status for bug, e.g. Assigned, Duplicate, WontFix, etc.
cc_list: List of email addresses of users to add to the CC list.
merge_issue: ID of the issue to be merged into; specifying this option
implies that the status should be "Duplicate".
labels: List of labels for bug.
owner: Owner of the bug.
Returns:
True if successful, False otherwise.
"""
if not bug_id or bug_id < 0:
return False
body = {'content': comment}
updates = {}
# Mark issue as duplicate when relevant bug ID is found in the datastore.
# Avoid marking an issue as duplicate of itself.
if merge_issue and int(merge_issue) != bug_id:
status = 'Duplicate'
updates['mergedInto'] = merge_issue
logging.info('Bug %s marked as duplicate of %s', bug_id, merge_issue)
if status:
updates['status'] = status
if cc_list:
updates['cc'] = cc_list
if labels:
updates['labels'] = labels
if owner:
updates['owner'] = owner
body['updates'] = updates
return self._MakeCommentRequest(bug_id, body)
def List(self, **kwargs):
"""Make a request to the issue tracker to list bugs."""
request = self._service.issues().list(projectId='chromium', **kwargs)
return self._ExecuteRequest(request)
def _MakeCommentRequest(self, bug_id, body):
"""Make a request to the issue tracker to update a bug."""
request = self._service.issues().comments().insert(
projectId='chromium',
issueId=bug_id,
body=body)
response = self._ExecuteRequest(request)
if not response:
logging.error('Error updating bug %s with body %s', bug_id, body)
return False
return True
def NewBug(self, title, description, labels=None, components=None,
owner=None):
"""Creates a new bug.
Args:
title: The short title text of the bug.
description: The body text for the bug.
labels: Starting labels for the bug.
components: Starting components for the bug.
owner: Starting owner account name.
Returns:
The new bug ID if successfully created, or None.
"""
body = {
'title': title,
'summary': title,
'description': description,
'labels': labels or [],
'components': components or [],
'status': 'Assigned',
}
if owner:
body['owner'] = {'name': owner}
return self._MakeCreateRequest(body)
def _MakeCreateRequest(self, body):
"""Makes a request to create a new bug.
Args:
body: The request body parameter dictionary.
Returns:
A bug ID if successful, or None otherwise.
"""
request = self._service.issues().insert(projectId='chromium', body=body)
response = self._ExecuteRequest(request)
if response and 'id' in response:
return response['id']
return None
def GetLastBugCommentsAndTimestamp(self, bug_id):
"""Gets last updated comments and timestamp in the given bug.
Args:
bug_id: Bug ID of the issue to update.
Returns:
A dictionary with last comment and timestamp, or None on failure.
"""
if not bug_id or bug_id < 0:
return None
response = self._MakeGetCommentsRequest(bug_id)
if response and all(v in response.keys()
for v in ['totalResults', 'items']):
bug_comments = response.get('items')[response.get('totalResults') - 1]
if bug_comments.get('content') and bug_comments.get('published'):
return {
'comment': bug_comments.get('content'),
'timestamp': bug_comments.get('published')
}
return None
def _MakeGetCommentsRequest(self, bug_id):
"""Make a request to the issue tracker to get comments in the bug."""
# TODO (prasadv): By default the max number of comments retrieved in
# one request is 100. Since bisect-fyi jobs may have more then 100
# comments for now we set this maxResults count as 10000.
# Remove this max count once we find a way to clear old comments
# on FYI issues.
request = self._service.issues().comments().list(
projectId='chromium',
issueId=bug_id,
maxResults=10000)
return self._ExecuteRequest(request)
def _ExecuteRequest(self, request):
| """Make a request to the issue tracker.
Args:
request: The request object, which has a execute method.
Returns:
The response if there was one, or else None.
"""
try:
response = request.execute(http=self._http)
return response
except errors.HttpError as e:
logging.error(e)
return None | identifier_body | |
views.py | from __future__ import unicode_literals
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from blog.forms import PostForm, CommentForm
from django.views.generic import (TemplateView, ListView,
DetailView, CreateView, UpdateView,
DeleteView)
from blog.models import Post, Comment
# Create your views here.
class IndexView(TemplateView):
template_name = 'index.html'
model = Post
def get_queryset(self):
return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['post_list'] = self.get_queryset()
return context
class AboutView(TemplateView):
template_name = 'about.html'
class PostListView(ListView):
model = Post
def get_queryset(self):
return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
class PostDetailView(DetailView):
model = Post
class CreatePostView(LoginRequiredMixin,CreateView):
login_url = '/login/'
redirect_field_name = 'blog/post_list.html'
form_class = PostForm
model = Post
def get_context_data(self,**kwargs):
c = super(CreatePostView, self).get_context_data(**kwargs)
user = self.request.user
return c
def form_valid(self, form):
form.instance.author = self.request.user
form.instance.title = form.instance.title.title()
form.save()
return super(CreatePostView, self).form_valid(form)
class PostUpdateView(LoginRequiredMixin,UpdateView):
login_url = '/login/'
redirect_field_name = 'blog/post_detail.html'
form_class = PostForm
model = Post
class PostDeleteView(LoginRequiredMixin,DeleteView):
model=Post
success_url = reverse_lazy('post_list')
class DraftListView(LoginRequiredMixin,ListView):
login_url = '/login/'
redirect_field_name = 'blog/post_list.html'
model = Post
template_name = 'blog/post_draft_list.html'
def get_queryset(self):
return Post.objects.filter(published_date__isnull=True).order_by('-create_date')
###################################################
###################################################
### ###
### COMMENTS VIEW FUNCTIONS ###
### ###
###################################################
###################################################
@login_required
def post_publish(request,pk):
post = get_object_or_404(Post,pk=pk)
post.publish()
return redirect('post_detail',pk=pk)
def add_comment_to_post(request,pk):
post = get_object_or_404(Post,pk=pk)
if request.method == 'POST':
form = CommentForm(request.POST)
if form.is_valid():
comment = form.save(commit=False)
comment.post = post
comment.save()
return redirect('post_detail',pk=post.pk)
else:
form = CommentForm()
return render(request,'blog/comment_form.html',{'form':form})
@login_required
def comment_approve(request,pk):
comment = get_object_or_404(Comment,pk=pk)
comment.approve()
return redirect('post_detail',pk=comment.post.pk)
@login_required
def comment_remove(request,pk):
comment = get_object_or_404(Comment,pk=pk)
post_pk = comment.post.pk
comment.delete()
return redirect('post_detail',pk=post_pk) | # -*- coding: utf-8 -*- | random_line_split | |
views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from blog.forms import PostForm, CommentForm
from django.views.generic import (TemplateView, ListView,
DetailView, CreateView, UpdateView,
DeleteView)
from blog.models import Post, Comment
# Create your views here.
class IndexView(TemplateView):
template_name = 'index.html'
model = Post
def get_queryset(self):
return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['post_list'] = self.get_queryset()
return context
class AboutView(TemplateView):
template_name = 'about.html'
class PostListView(ListView):
model = Post
def get_queryset(self):
return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
class PostDetailView(DetailView):
model = Post
class CreatePostView(LoginRequiredMixin,CreateView):
login_url = '/login/'
redirect_field_name = 'blog/post_list.html'
form_class = PostForm
model = Post
def get_context_data(self,**kwargs):
c = super(CreatePostView, self).get_context_data(**kwargs)
user = self.request.user
return c
def form_valid(self, form):
form.instance.author = self.request.user
form.instance.title = form.instance.title.title()
form.save()
return super(CreatePostView, self).form_valid(form)
class PostUpdateView(LoginRequiredMixin,UpdateView):
login_url = '/login/'
redirect_field_name = 'blog/post_detail.html'
form_class = PostForm
model = Post
class PostDeleteView(LoginRequiredMixin,DeleteView):
model=Post
success_url = reverse_lazy('post_list')
class DraftListView(LoginRequiredMixin,ListView):
login_url = '/login/'
redirect_field_name = 'blog/post_list.html'
model = Post
template_name = 'blog/post_draft_list.html'
def get_queryset(self):
return Post.objects.filter(published_date__isnull=True).order_by('-create_date')
###################################################
###################################################
### ###
### COMMENTS VIEW FUNCTIONS ###
### ###
###################################################
###################################################
@login_required
def post_publish(request,pk):
post = get_object_or_404(Post,pk=pk)
post.publish()
return redirect('post_detail',pk=pk)
def add_comment_to_post(request,pk):
post = get_object_or_404(Post,pk=pk)
if request.method == 'POST':
form = CommentForm(request.POST)
if form.is_valid():
comment = form.save(commit=False)
comment.post = post
comment.save()
return redirect('post_detail',pk=post.pk)
else:
form = CommentForm()
return render(request,'blog/comment_form.html',{'form':form})
@login_required
def comment_approve(request,pk):
|
@login_required
def comment_remove(request,pk):
comment = get_object_or_404(Comment,pk=pk)
post_pk = comment.post.pk
comment.delete()
return redirect('post_detail',pk=post_pk) | comment = get_object_or_404(Comment,pk=pk)
comment.approve()
return redirect('post_detail',pk=comment.post.pk) | identifier_body |
views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from blog.forms import PostForm, CommentForm
from django.views.generic import (TemplateView, ListView,
DetailView, CreateView, UpdateView,
DeleteView)
from blog.models import Post, Comment
# Create your views here.
class IndexView(TemplateView):
template_name = 'index.html'
model = Post
def get_queryset(self):
return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['post_list'] = self.get_queryset()
return context
class AboutView(TemplateView):
template_name = 'about.html'
class PostListView(ListView):
model = Post
def get_queryset(self):
return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
class PostDetailView(DetailView):
model = Post
class CreatePostView(LoginRequiredMixin,CreateView):
login_url = '/login/'
redirect_field_name = 'blog/post_list.html'
form_class = PostForm
model = Post
def get_context_data(self,**kwargs):
c = super(CreatePostView, self).get_context_data(**kwargs)
user = self.request.user
return c
def | (self, form):
form.instance.author = self.request.user
form.instance.title = form.instance.title.title()
form.save()
return super(CreatePostView, self).form_valid(form)
class PostUpdateView(LoginRequiredMixin,UpdateView):
login_url = '/login/'
redirect_field_name = 'blog/post_detail.html'
form_class = PostForm
model = Post
class PostDeleteView(LoginRequiredMixin,DeleteView):
model=Post
success_url = reverse_lazy('post_list')
class DraftListView(LoginRequiredMixin,ListView):
login_url = '/login/'
redirect_field_name = 'blog/post_list.html'
model = Post
template_name = 'blog/post_draft_list.html'
def get_queryset(self):
return Post.objects.filter(published_date__isnull=True).order_by('-create_date')
###################################################
###################################################
### ###
### COMMENTS VIEW FUNCTIONS ###
### ###
###################################################
###################################################
@login_required
def post_publish(request,pk):
post = get_object_or_404(Post,pk=pk)
post.publish()
return redirect('post_detail',pk=pk)
def add_comment_to_post(request,pk):
post = get_object_or_404(Post,pk=pk)
if request.method == 'POST':
form = CommentForm(request.POST)
if form.is_valid():
comment = form.save(commit=False)
comment.post = post
comment.save()
return redirect('post_detail',pk=post.pk)
else:
form = CommentForm()
return render(request,'blog/comment_form.html',{'form':form})
@login_required
def comment_approve(request,pk):
comment = get_object_or_404(Comment,pk=pk)
comment.approve()
return redirect('post_detail',pk=comment.post.pk)
@login_required
def comment_remove(request,pk):
comment = get_object_or_404(Comment,pk=pk)
post_pk = comment.post.pk
comment.delete()
return redirect('post_detail',pk=post_pk) | form_valid | identifier_name |
views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from blog.forms import PostForm, CommentForm
from django.views.generic import (TemplateView, ListView,
DetailView, CreateView, UpdateView,
DeleteView)
from blog.models import Post, Comment
# Create your views here.
class IndexView(TemplateView):
template_name = 'index.html'
model = Post
def get_queryset(self):
return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['post_list'] = self.get_queryset()
return context
class AboutView(TemplateView):
template_name = 'about.html'
class PostListView(ListView):
model = Post
def get_queryset(self):
return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
class PostDetailView(DetailView):
model = Post
class CreatePostView(LoginRequiredMixin,CreateView):
login_url = '/login/'
redirect_field_name = 'blog/post_list.html'
form_class = PostForm
model = Post
def get_context_data(self,**kwargs):
c = super(CreatePostView, self).get_context_data(**kwargs)
user = self.request.user
return c
def form_valid(self, form):
form.instance.author = self.request.user
form.instance.title = form.instance.title.title()
form.save()
return super(CreatePostView, self).form_valid(form)
class PostUpdateView(LoginRequiredMixin,UpdateView):
login_url = '/login/'
redirect_field_name = 'blog/post_detail.html'
form_class = PostForm
model = Post
class PostDeleteView(LoginRequiredMixin,DeleteView):
model=Post
success_url = reverse_lazy('post_list')
class DraftListView(LoginRequiredMixin,ListView):
login_url = '/login/'
redirect_field_name = 'blog/post_list.html'
model = Post
template_name = 'blog/post_draft_list.html'
def get_queryset(self):
return Post.objects.filter(published_date__isnull=True).order_by('-create_date')
###################################################
###################################################
### ###
### COMMENTS VIEW FUNCTIONS ###
### ###
###################################################
###################################################
@login_required
def post_publish(request,pk):
post = get_object_or_404(Post,pk=pk)
post.publish()
return redirect('post_detail',pk=pk)
def add_comment_to_post(request,pk):
post = get_object_or_404(Post,pk=pk)
if request.method == 'POST':
form = CommentForm(request.POST)
if form.is_valid():
|
else:
form = CommentForm()
return render(request,'blog/comment_form.html',{'form':form})
@login_required
def comment_approve(request,pk):
comment = get_object_or_404(Comment,pk=pk)
comment.approve()
return redirect('post_detail',pk=comment.post.pk)
@login_required
def comment_remove(request,pk):
comment = get_object_or_404(Comment,pk=pk)
post_pk = comment.post.pk
comment.delete()
return redirect('post_detail',pk=post_pk) | comment = form.save(commit=False)
comment.post = post
comment.save()
return redirect('post_detail',pk=post.pk) | conditional_block |
builder.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Helper module to build up a selector safely and efficiently.
//!
//! Our selector representation is designed to optimize matching, and has
//! several requirements:
//! * All simple selectors and combinators are stored inline in the same buffer
//! as Component instances.
//! * We store the top-level compound selectors from right to left, i.e. in
//! matching order.
//! * We store the simple selectors for each combinator from left to right, so
//! that we match the cheaper simple selectors first.
//!
//! Meeting all these constraints without extra memmove traffic during parsing
//! is non-trivial. This module encapsulates those details and presents an
//! easy-to-use API for the parser.
use parser::{Combinator, Component, SelectorImpl};
use servo_arc::{Arc, HeaderWithLength, ThinArc};
use sink::Push;
use smallvec::{self, SmallVec};
use std::cmp;
use std::iter;
use std::ops::{AddAssign, Add};
use std::ptr;
use std::slice;
/// Top-level SelectorBuilder struct. This should be stack-allocated by the
/// consumer and never moved (because it contains a lot of inline data that
/// would be slow to memmov).
///
/// After instantation, callers may call the push_simple_selector() and
/// push_combinator() methods to append selector data as it is encountered
/// (from left to right). Once the process is complete, callers should invoke
/// build(), which transforms the contents of the SelectorBuilder into a heap-
/// allocated Selector and leaves the builder in a drained state.
#[derive(Debug)]
pub struct SelectorBuilder<Impl: SelectorImpl> {
/// The entire sequence of simple selectors, from left to right, without combinators.
///
/// We make this large because the result of parsing a selector is fed into a new
/// Arc-ed allocation, so any spilled vec would be a wasted allocation. Also,
/// Components are large enough that we don't have much cache locality benefit
/// from reserving stack space for fewer of them.
simple_selectors: SmallVec<[Component<Impl>; 32]>,
/// The combinators, and the length of the compound selector to their left.
combinators: SmallVec<[(Combinator, usize); 16]>,
/// The length of the current compount selector.
current_len: usize,
}
impl<Impl: SelectorImpl> Default for SelectorBuilder<Impl> {
#[inline(always)]
fn default() -> Self {
SelectorBuilder {
simple_selectors: SmallVec::new(),
combinators: SmallVec::new(),
current_len: 0,
}
}
}
impl<Impl: SelectorImpl> Push<Component<Impl>> for SelectorBuilder<Impl> {
fn push(&mut self, value: Component<Impl>) {
self.push_simple_selector(value);
}
}
impl<Impl: SelectorImpl> SelectorBuilder<Impl> {
/// Pushes a simple selector onto the current compound selector.
#[inline(always)]
pub fn push_simple_selector(&mut self, ss: Component<Impl>) {
debug_assert!(!ss.is_combinator());
self.simple_selectors.push(ss);
self.current_len += 1;
}
/// Completes the current compound selector and starts a new one, delimited
/// by the given combinator.
#[inline(always)]
pub fn push_combinator(&mut self, c: Combinator) {
self.combinators.push((c, self.current_len));
self.current_len = 0;
}
/// Returns true if no simple selectors have ever been pushed to this builder.
#[inline(always)]
pub fn is_empty(&self) -> bool {
self.simple_selectors.is_empty()
}
/// Returns true if combinators have ever been pushed to this builder.
#[inline(always)]
pub fn has_combinators(&self) -> bool {
!self.combinators.is_empty()
}
/// Consumes the builder, producing a Selector.
#[inline(always)]
pub fn build(
&mut self,
parsed_pseudo: bool,
parsed_slotted: bool,
) -> ThinArc<SpecificityAndFlags, Component<Impl>> {
// Compute the specificity and flags.
let mut spec = SpecificityAndFlags(specificity(&*self, self.simple_selectors.iter()));
if parsed_pseudo {
spec.0 |= HAS_PSEUDO_BIT;
}
if parsed_slotted {
spec.0 |= HAS_SLOTTED_BIT;
}
self.build_with_specificity_and_flags(spec)
}
/// Builds with an explicit SpecificityAndFlags. This is separated from build() so
/// that unit tests can pass an explicit specificity.
#[inline(always)]
pub fn build_with_specificity_and_flags(
&mut self,
spec: SpecificityAndFlags,
) -> ThinArc<SpecificityAndFlags, Component<Impl>> {
// First, compute the total number of Components we'll need to allocate
// space for.
let full_len = self.simple_selectors.len() + self.combinators.len();
// Create the header.
let header = HeaderWithLength::new(spec, full_len);
// Create the Arc using an iterator that drains our buffers.
// Use a raw pointer to be able to call set_len despite "borrowing" the slice.
// This is similar to SmallVec::drain, but we use a slice here because
// we’re gonna traverse it non-linearly.
let raw_simple_selectors: *const [Component<Impl>] = &*self.simple_selectors;
unsafe {
// Panic-safety: if SelectorBuilderIter is not iterated to the end,
// some simple selectors will safely leak.
self.simple_selectors.set_len(0)
}
let (rest, current) = split_from_end(unsafe { &*raw_simple_selectors }, self.current_len);
let iter = SelectorBuilderIter {
current_simple_selectors: current.iter(),
rest_of_simple_selectors: rest,
combinators: self.combinators.drain().rev(),
};
Arc::into_thin(Arc::from_header_and_iter(header, iter))
}
}
struct SelectorBuilderIter<'a, Impl: SelectorImpl> {
current_simple_selectors: slice::Iter<'a, Component<Impl>>,
rest_of_simple_selectors: &'a [Component<Impl>],
combinators: iter::Rev<smallvec::Drain<'a, (Combinator, usize)>>,
}
impl<'a, Impl: SelectorImpl> ExactSizeIterator for SelectorBuilderIter<'a, Impl> {
fn len(&self) -> usize {
self.current_simple_selectors.len() + self.rest_of_simple_selectors.len() +
self.combinators.len()
}
}
impl<'a, Impl: SelectorImpl> Iterator for SelectorBuilderIter<'a, Impl> {
type Item = Component<Impl>;
#[inline(always)]
fn next(&mut self) -> Option<Self::Item> {
if let Some(simple_selector_ref) = self.current_simple_selectors.next() {
// Move a simple selector out of this slice iterator.
// This is safe because we’ve called SmallVec::set_len(0) above,
// so SmallVec::drop won’t drop this simple selector.
unsafe { Some(ptr::read(simple_selector_ref)) }
} else {
self.combinators.next().map(|(combinator, len)| {
let (rest, current) = split_from_end(self.rest_of_simple_selectors, len);
self.rest_of_simple_selectors = rest;
self.current_simple_selectors = current.iter();
Component::Combinator(combinator)
})
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len(), Some(self.len()))
}
}
fn split_from_end<T>(s: &[T], at: usize) -> (&[T], &[T]) {
s.split_at(s.len() - at)
}
pub const HAS_PSEUDO_BIT: u32 = 1 << 30;
pub const HAS_SLOTTED_BIT: u32 = 1 << 31;
/// We use ten bits for each specificity kind (id, class, element), and the two
/// high bits for the pseudo and slotted flags.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct SpecificityAndFlags(pub u32);
impl SpecificityAndFlags {
#[inline]
pub fn specificity(&self) -> u32 {
self.0 & !(HAS_PSEUDO_BIT | HAS_SLOTTED_BIT)
}
#[inline]
pub fn has_pseudo_element(&self) -> bool {
(self.0 & HAS_PSEUDO_BIT) != 0
}
#[inline]
pub fn is_slotted(&self) -> bool {
(self.0 & HAS_SLOTTED_BIT) != 0
}
}
const MAX_10BIT: u32 = (1u32 << 10) - 1;
#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)]
struct Specificity {
id_selectors: u32,
class_like_selectors: u32,
element_selectors: u32,
}
impl AddAssign for Specificity {
#[inline]
fn add_assign(&mut self, rhs: Self) {
self.id_selectors += rhs.id_selectors;
self.class_like_selectors += rhs.class_like_selectors;
self.element_selectors += rhs.element_selectors;
}
}
impl Add for Specificity {
type Output = Specificity;
fn add(self, rhs: Specificity) -> Specificity {
Specificity {
id_selectors: self.id_selectors + rhs.id_selectors,
class_like_selectors: self.class_like_selectors + rhs.class_like_selectors,
element_selectors: self.element_selectors + rhs.element_selectors,
}
}
}
impl Default for Specificity {
fn default() -> Specificity {
Specificity {
id_selectors: 0,
class_like_selectors: 0,
element_selectors: 0,
}
}
}
impl From<u32> for Specificity {
#[inline]
fn from(value: u32) -> Specificity {
assert!(value <= MAX_10BIT << 20 | MAX_10BIT << 10 | MAX_10BIT);
Specificity {
id_selectors: value >> 20,
class_like_selectors: (value >> 10) & MAX_10BIT,
element_selectors: value & MAX_10BIT,
}
}
}
impl From<Specificity> for u32 {
#[inline]
fn from(specificity: Specificity) -> u32 {
cmp::min(specificity.id_selectors, MAX_10BIT) << 20 |
cmp::min(specificity.class_like_selectors, MAX_10BIT) << 10 |
cmp::min(specificity.element_selectors, MAX_10BIT)
}
}
fn specificity<Impl>(builder: &SelectorBuilder<Impl>, iter: slice::Iter<Component<Impl>>) -> u32
where
Impl: SelectorImpl,
{
complex_selector_specificity(builder, iter).into()
}
fn complex_selector_specificity<Impl>(
builder: &SelectorBuilder<Impl>,
mut iter: slice::Iter<Component<Impl>>,
) -> Specificity
where
Impl: SelectorImpl,
{
fn simple_selector_specificity<Impl>(
builder: &SelectorBuilder<Impl>,
simple_selector: &Component<Impl>,
specificity: &mut Specificity,
) where
Impl: SelectorImpl,
{
match *simple_selector {
Component::Combinator(ref combinator) => { | combinator,
builder,
);
}
Component::PseudoElement(..) | Component::LocalName(..) => {
specificity.element_selectors += 1
},
Component::Slotted(ref selector) => {
specificity.element_selectors += 1;
// Note that due to the way ::slotted works we only compete with
// other ::slotted rules, so the above rule doesn't really
// matter, but we do it still for consistency with other
// pseudo-elements.
//
// See: https://github.com/w3c/csswg-drafts/issues/1915
*specificity += Specificity::from(selector.specificity());
},
Component::Host(ref selector) => {
specificity.class_like_selectors += 1;
if let Some(ref selector) = *selector {
// See: https://github.com/w3c/csswg-drafts/issues/1915
*specificity += Specificity::from(selector.specificity());
}
}
Component::ID(..) => {
specificity.id_selectors += 1;
},
Component::Class(..) |
Component::AttributeInNoNamespace { .. } |
Component::AttributeInNoNamespaceExists { .. } |
Component::AttributeOther(..) |
Component::FirstChild |
Component::LastChild |
Component::OnlyChild |
Component::Root |
Component::Empty |
Component::Scope |
Component::NthChild(..) |
Component::NthLastChild(..) |
Component::NthOfType(..) |
Component::NthLastOfType(..) |
Component::FirstOfType |
Component::LastOfType |
Component::OnlyOfType |
Component::NonTSPseudoClass(..) => {
specificity.class_like_selectors += 1;
},
Component::ExplicitUniversalType |
Component::ExplicitAnyNamespace |
Component::ExplicitNoNamespace |
Component::DefaultNamespace(..) |
Component::Namespace(..) => {
// Does not affect specificity
},
Component::Negation(ref negated) => {
for ss in negated.iter() {
simple_selector_specificity(builder, &ss, specificity);
}
},
}
}
let mut specificity = Default::default();
for simple_selector in &mut iter {
simple_selector_specificity(builder, &simple_selector, &mut specificity);
}
specificity
} | unreachable!(
"Found combinator {:?} in simple selectors vector? {:?}", | random_line_split |
builder.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Helper module to build up a selector safely and efficiently.
//!
//! Our selector representation is designed to optimize matching, and has
//! several requirements:
//! * All simple selectors and combinators are stored inline in the same buffer
//! as Component instances.
//! * We store the top-level compound selectors from right to left, i.e. in
//! matching order.
//! * We store the simple selectors for each combinator from left to right, so
//! that we match the cheaper simple selectors first.
//!
//! Meeting all these constraints without extra memmove traffic during parsing
//! is non-trivial. This module encapsulates those details and presents an
//! easy-to-use API for the parser.
use parser::{Combinator, Component, SelectorImpl};
use servo_arc::{Arc, HeaderWithLength, ThinArc};
use sink::Push;
use smallvec::{self, SmallVec};
use std::cmp;
use std::iter;
use std::ops::{AddAssign, Add};
use std::ptr;
use std::slice;
/// Top-level SelectorBuilder struct. This should be stack-allocated by the
/// consumer and never moved (because it contains a lot of inline data that
/// would be slow to memmov).
///
/// After instantation, callers may call the push_simple_selector() and
/// push_combinator() methods to append selector data as it is encountered
/// (from left to right). Once the process is complete, callers should invoke
/// build(), which transforms the contents of the SelectorBuilder into a heap-
/// allocated Selector and leaves the builder in a drained state.
#[derive(Debug)]
pub struct SelectorBuilder<Impl: SelectorImpl> {
/// The entire sequence of simple selectors, from left to right, without combinators.
///
/// We make this large because the result of parsing a selector is fed into a new
/// Arc-ed allocation, so any spilled vec would be a wasted allocation. Also,
/// Components are large enough that we don't have much cache locality benefit
/// from reserving stack space for fewer of them.
simple_selectors: SmallVec<[Component<Impl>; 32]>,
/// The combinators, and the length of the compound selector to their left.
combinators: SmallVec<[(Combinator, usize); 16]>,
/// The length of the current compount selector.
current_len: usize,
}
impl<Impl: SelectorImpl> Default for SelectorBuilder<Impl> {
#[inline(always)]
fn default() -> Self {
SelectorBuilder {
simple_selectors: SmallVec::new(),
combinators: SmallVec::new(),
current_len: 0,
}
}
}
impl<Impl: SelectorImpl> Push<Component<Impl>> for SelectorBuilder<Impl> {
fn push(&mut self, value: Component<Impl>) {
self.push_simple_selector(value);
}
}
impl<Impl: SelectorImpl> SelectorBuilder<Impl> {
/// Pushes a simple selector onto the current compound selector.
#[inline(always)]
pub fn push_simple_selector(&mut self, ss: Component<Impl>) {
debug_assert!(!ss.is_combinator());
self.simple_selectors.push(ss);
self.current_len += 1;
}
/// Completes the current compound selector and starts a new one, delimited
/// by the given combinator.
#[inline(always)]
pub fn push_combinator(&mut self, c: Combinator) {
self.combinators.push((c, self.current_len));
self.current_len = 0;
}
/// Returns true if no simple selectors have ever been pushed to this builder.
#[inline(always)]
pub fn is_empty(&self) -> bool {
self.simple_selectors.is_empty()
}
/// Returns true if combinators have ever been pushed to this builder.
#[inline(always)]
pub fn has_combinators(&self) -> bool {
!self.combinators.is_empty()
}
/// Consumes the builder, producing a Selector.
#[inline(always)]
pub fn build(
&mut self,
parsed_pseudo: bool,
parsed_slotted: bool,
) -> ThinArc<SpecificityAndFlags, Component<Impl>> {
// Compute the specificity and flags.
let mut spec = SpecificityAndFlags(specificity(&*self, self.simple_selectors.iter()));
if parsed_pseudo {
spec.0 |= HAS_PSEUDO_BIT;
}
if parsed_slotted {
spec.0 |= HAS_SLOTTED_BIT;
}
self.build_with_specificity_and_flags(spec)
}
/// Builds with an explicit SpecificityAndFlags. This is separated from build() so
/// that unit tests can pass an explicit specificity.
#[inline(always)]
pub fn build_with_specificity_and_flags(
&mut self,
spec: SpecificityAndFlags,
) -> ThinArc<SpecificityAndFlags, Component<Impl>> {
// First, compute the total number of Components we'll need to allocate
// space for.
let full_len = self.simple_selectors.len() + self.combinators.len();
// Create the header.
let header = HeaderWithLength::new(spec, full_len);
// Create the Arc using an iterator that drains our buffers.
// Use a raw pointer to be able to call set_len despite "borrowing" the slice.
// This is similar to SmallVec::drain, but we use a slice here because
// we’re gonna traverse it non-linearly.
let raw_simple_selectors: *const [Component<Impl>] = &*self.simple_selectors;
unsafe {
// Panic-safety: if SelectorBuilderIter is not iterated to the end,
// some simple selectors will safely leak.
self.simple_selectors.set_len(0)
}
let (rest, current) = split_from_end(unsafe { &*raw_simple_selectors }, self.current_len);
let iter = SelectorBuilderIter {
current_simple_selectors: current.iter(),
rest_of_simple_selectors: rest,
combinators: self.combinators.drain().rev(),
};
Arc::into_thin(Arc::from_header_and_iter(header, iter))
}
}
struct SelectorBuilderIter<'a, Impl: SelectorImpl> {
current_simple_selectors: slice::Iter<'a, Component<Impl>>,
rest_of_simple_selectors: &'a [Component<Impl>],
combinators: iter::Rev<smallvec::Drain<'a, (Combinator, usize)>>,
}
impl<'a, Impl: SelectorImpl> ExactSizeIterator for SelectorBuilderIter<'a, Impl> {
fn le | self) -> usize {
self.current_simple_selectors.len() + self.rest_of_simple_selectors.len() +
self.combinators.len()
}
}
impl<'a, Impl: SelectorImpl> Iterator for SelectorBuilderIter<'a, Impl> {
type Item = Component<Impl>;
#[inline(always)]
fn next(&mut self) -> Option<Self::Item> {
if let Some(simple_selector_ref) = self.current_simple_selectors.next() {
// Move a simple selector out of this slice iterator.
// This is safe because we’ve called SmallVec::set_len(0) above,
// so SmallVec::drop won’t drop this simple selector.
unsafe { Some(ptr::read(simple_selector_ref)) }
} else {
self.combinators.next().map(|(combinator, len)| {
let (rest, current) = split_from_end(self.rest_of_simple_selectors, len);
self.rest_of_simple_selectors = rest;
self.current_simple_selectors = current.iter();
Component::Combinator(combinator)
})
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len(), Some(self.len()))
}
}
fn split_from_end<T>(s: &[T], at: usize) -> (&[T], &[T]) {
s.split_at(s.len() - at)
}
pub const HAS_PSEUDO_BIT: u32 = 1 << 30;
pub const HAS_SLOTTED_BIT: u32 = 1 << 31;
/// We use ten bits for each specificity kind (id, class, element), and the two
/// high bits for the pseudo and slotted flags.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct SpecificityAndFlags(pub u32);
impl SpecificityAndFlags {
#[inline]
pub fn specificity(&self) -> u32 {
self.0 & !(HAS_PSEUDO_BIT | HAS_SLOTTED_BIT)
}
#[inline]
pub fn has_pseudo_element(&self) -> bool {
(self.0 & HAS_PSEUDO_BIT) != 0
}
#[inline]
pub fn is_slotted(&self) -> bool {
(self.0 & HAS_SLOTTED_BIT) != 0
}
}
const MAX_10BIT: u32 = (1u32 << 10) - 1;
#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)]
struct Specificity {
id_selectors: u32,
class_like_selectors: u32,
element_selectors: u32,
}
impl AddAssign for Specificity {
#[inline]
fn add_assign(&mut self, rhs: Self) {
self.id_selectors += rhs.id_selectors;
self.class_like_selectors += rhs.class_like_selectors;
self.element_selectors += rhs.element_selectors;
}
}
impl Add for Specificity {
type Output = Specificity;
fn add(self, rhs: Specificity) -> Specificity {
Specificity {
id_selectors: self.id_selectors + rhs.id_selectors,
class_like_selectors: self.class_like_selectors + rhs.class_like_selectors,
element_selectors: self.element_selectors + rhs.element_selectors,
}
}
}
impl Default for Specificity {
fn default() -> Specificity {
Specificity {
id_selectors: 0,
class_like_selectors: 0,
element_selectors: 0,
}
}
}
impl From<u32> for Specificity {
#[inline]
fn from(value: u32) -> Specificity {
assert!(value <= MAX_10BIT << 20 | MAX_10BIT << 10 | MAX_10BIT);
Specificity {
id_selectors: value >> 20,
class_like_selectors: (value >> 10) & MAX_10BIT,
element_selectors: value & MAX_10BIT,
}
}
}
impl From<Specificity> for u32 {
#[inline]
fn from(specificity: Specificity) -> u32 {
cmp::min(specificity.id_selectors, MAX_10BIT) << 20 |
cmp::min(specificity.class_like_selectors, MAX_10BIT) << 10 |
cmp::min(specificity.element_selectors, MAX_10BIT)
}
}
fn specificity<Impl>(builder: &SelectorBuilder<Impl>, iter: slice::Iter<Component<Impl>>) -> u32
where
Impl: SelectorImpl,
{
complex_selector_specificity(builder, iter).into()
}
fn complex_selector_specificity<Impl>(
builder: &SelectorBuilder<Impl>,
mut iter: slice::Iter<Component<Impl>>,
) -> Specificity
where
Impl: SelectorImpl,
{
fn simple_selector_specificity<Impl>(
builder: &SelectorBuilder<Impl>,
simple_selector: &Component<Impl>,
specificity: &mut Specificity,
) where
Impl: SelectorImpl,
{
match *simple_selector {
Component::Combinator(ref combinator) => {
unreachable!(
"Found combinator {:?} in simple selectors vector? {:?}",
combinator,
builder,
);
}
Component::PseudoElement(..) | Component::LocalName(..) => {
specificity.element_selectors += 1
},
Component::Slotted(ref selector) => {
specificity.element_selectors += 1;
// Note that due to the way ::slotted works we only compete with
// other ::slotted rules, so the above rule doesn't really
// matter, but we do it still for consistency with other
// pseudo-elements.
//
// See: https://github.com/w3c/csswg-drafts/issues/1915
*specificity += Specificity::from(selector.specificity());
},
Component::Host(ref selector) => {
specificity.class_like_selectors += 1;
if let Some(ref selector) = *selector {
// See: https://github.com/w3c/csswg-drafts/issues/1915
*specificity += Specificity::from(selector.specificity());
}
}
Component::ID(..) => {
specificity.id_selectors += 1;
},
Component::Class(..) |
Component::AttributeInNoNamespace { .. } |
Component::AttributeInNoNamespaceExists { .. } |
Component::AttributeOther(..) |
Component::FirstChild |
Component::LastChild |
Component::OnlyChild |
Component::Root |
Component::Empty |
Component::Scope |
Component::NthChild(..) |
Component::NthLastChild(..) |
Component::NthOfType(..) |
Component::NthLastOfType(..) |
Component::FirstOfType |
Component::LastOfType |
Component::OnlyOfType |
Component::NonTSPseudoClass(..) => {
specificity.class_like_selectors += 1;
},
Component::ExplicitUniversalType |
Component::ExplicitAnyNamespace |
Component::ExplicitNoNamespace |
Component::DefaultNamespace(..) |
Component::Namespace(..) => {
// Does not affect specificity
},
Component::Negation(ref negated) => {
for ss in negated.iter() {
simple_selector_specificity(builder, &ss, specificity);
}
},
}
}
let mut specificity = Default::default();
for simple_selector in &mut iter {
simple_selector_specificity(builder, &simple_selector, &mut specificity);
}
specificity
}
| n(& | identifier_name |
builder.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Helper module to build up a selector safely and efficiently.
//!
//! Our selector representation is designed to optimize matching, and has
//! several requirements:
//! * All simple selectors and combinators are stored inline in the same buffer
//! as Component instances.
//! * We store the top-level compound selectors from right to left, i.e. in
//! matching order.
//! * We store the simple selectors for each combinator from left to right, so
//! that we match the cheaper simple selectors first.
//!
//! Meeting all these constraints without extra memmove traffic during parsing
//! is non-trivial. This module encapsulates those details and presents an
//! easy-to-use API for the parser.
use parser::{Combinator, Component, SelectorImpl};
use servo_arc::{Arc, HeaderWithLength, ThinArc};
use sink::Push;
use smallvec::{self, SmallVec};
use std::cmp;
use std::iter;
use std::ops::{AddAssign, Add};
use std::ptr;
use std::slice;
/// Top-level SelectorBuilder struct. This should be stack-allocated by the
/// consumer and never moved (because it contains a lot of inline data that
/// would be slow to memmov).
///
/// After instantation, callers may call the push_simple_selector() and
/// push_combinator() methods to append selector data as it is encountered
/// (from left to right). Once the process is complete, callers should invoke
/// build(), which transforms the contents of the SelectorBuilder into a heap-
/// allocated Selector and leaves the builder in a drained state.
#[derive(Debug)]
pub struct SelectorBuilder<Impl: SelectorImpl> {
/// The entire sequence of simple selectors, from left to right, without combinators.
///
/// We make this large because the result of parsing a selector is fed into a new
/// Arc-ed allocation, so any spilled vec would be a wasted allocation. Also,
/// Components are large enough that we don't have much cache locality benefit
/// from reserving stack space for fewer of them.
simple_selectors: SmallVec<[Component<Impl>; 32]>,
/// The combinators, and the length of the compound selector to their left.
combinators: SmallVec<[(Combinator, usize); 16]>,
/// The length of the current compount selector.
current_len: usize,
}
impl<Impl: SelectorImpl> Default for SelectorBuilder<Impl> {
#[inline(always)]
fn default() -> Self {
SelectorBuilder {
simple_selectors: SmallVec::new(),
combinators: SmallVec::new(),
current_len: 0,
}
}
}
impl<Impl: SelectorImpl> Push<Component<Impl>> for SelectorBuilder<Impl> {
fn push(&mut self, value: Component<Impl>) {
self.push_simple_selector(value);
}
}
impl<Impl: SelectorImpl> SelectorBuilder<Impl> {
/// Pushes a simple selector onto the current compound selector.
#[inline(always)]
pub fn push_simple_selector(&mut self, ss: Component<Impl>) {
debug_assert!(!ss.is_combinator());
self.simple_selectors.push(ss);
self.current_len += 1;
}
/// Completes the current compound selector and starts a new one, delimited
/// by the given combinator.
#[inline(always)]
pub fn push_combinator(&mut self, c: Combinator) {
self.combinators.push((c, self.current_len));
self.current_len = 0;
}
/// Returns true if no simple selectors have ever been pushed to this builder.
#[inline(always)]
pub fn is_empty(&self) -> bool {
self.simple_selectors.is_empty()
}
/// Returns true if combinators have ever been pushed to this builder.
#[inline(always)]
pub fn has_combinators(&self) -> bool {
!self.combinators.is_empty()
}
/// Consumes the builder, producing a Selector.
#[inline(always)]
pub fn build(
&mut self,
parsed_pseudo: bool,
parsed_slotted: bool,
) -> ThinArc<SpecificityAndFlags, Component<Impl>> {
// Compute the specificity and flags.
let mut spec = SpecificityAndFlags(specificity(&*self, self.simple_selectors.iter()));
if parsed_pseudo {
spec.0 |= HAS_PSEUDO_BIT;
}
if parsed_slotted |
self.build_with_specificity_and_flags(spec)
}
/// Builds with an explicit SpecificityAndFlags. This is separated from build() so
/// that unit tests can pass an explicit specificity.
#[inline(always)]
pub fn build_with_specificity_and_flags(
&mut self,
spec: SpecificityAndFlags,
) -> ThinArc<SpecificityAndFlags, Component<Impl>> {
// First, compute the total number of Components we'll need to allocate
// space for.
let full_len = self.simple_selectors.len() + self.combinators.len();
// Create the header.
let header = HeaderWithLength::new(spec, full_len);
// Create the Arc using an iterator that drains our buffers.
// Use a raw pointer to be able to call set_len despite "borrowing" the slice.
// This is similar to SmallVec::drain, but we use a slice here because
// we’re gonna traverse it non-linearly.
let raw_simple_selectors: *const [Component<Impl>] = &*self.simple_selectors;
unsafe {
// Panic-safety: if SelectorBuilderIter is not iterated to the end,
// some simple selectors will safely leak.
self.simple_selectors.set_len(0)
}
let (rest, current) = split_from_end(unsafe { &*raw_simple_selectors }, self.current_len);
let iter = SelectorBuilderIter {
current_simple_selectors: current.iter(),
rest_of_simple_selectors: rest,
combinators: self.combinators.drain().rev(),
};
Arc::into_thin(Arc::from_header_and_iter(header, iter))
}
}
struct SelectorBuilderIter<'a, Impl: SelectorImpl> {
current_simple_selectors: slice::Iter<'a, Component<Impl>>,
rest_of_simple_selectors: &'a [Component<Impl>],
combinators: iter::Rev<smallvec::Drain<'a, (Combinator, usize)>>,
}
impl<'a, Impl: SelectorImpl> ExactSizeIterator for SelectorBuilderIter<'a, Impl> {
fn len(&self) -> usize {
self.current_simple_selectors.len() + self.rest_of_simple_selectors.len() +
self.combinators.len()
}
}
impl<'a, Impl: SelectorImpl> Iterator for SelectorBuilderIter<'a, Impl> {
type Item = Component<Impl>;
#[inline(always)]
fn next(&mut self) -> Option<Self::Item> {
if let Some(simple_selector_ref) = self.current_simple_selectors.next() {
// Move a simple selector out of this slice iterator.
// This is safe because we’ve called SmallVec::set_len(0) above,
// so SmallVec::drop won’t drop this simple selector.
unsafe { Some(ptr::read(simple_selector_ref)) }
} else {
self.combinators.next().map(|(combinator, len)| {
let (rest, current) = split_from_end(self.rest_of_simple_selectors, len);
self.rest_of_simple_selectors = rest;
self.current_simple_selectors = current.iter();
Component::Combinator(combinator)
})
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len(), Some(self.len()))
}
}
fn split_from_end<T>(s: &[T], at: usize) -> (&[T], &[T]) {
s.split_at(s.len() - at)
}
pub const HAS_PSEUDO_BIT: u32 = 1 << 30;
pub const HAS_SLOTTED_BIT: u32 = 1 << 31;
/// We use ten bits for each specificity kind (id, class, element), and the two
/// high bits for the pseudo and slotted flags.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct SpecificityAndFlags(pub u32);
impl SpecificityAndFlags {
#[inline]
pub fn specificity(&self) -> u32 {
self.0 & !(HAS_PSEUDO_BIT | HAS_SLOTTED_BIT)
}
#[inline]
pub fn has_pseudo_element(&self) -> bool {
(self.0 & HAS_PSEUDO_BIT) != 0
}
#[inline]
pub fn is_slotted(&self) -> bool {
(self.0 & HAS_SLOTTED_BIT) != 0
}
}
const MAX_10BIT: u32 = (1u32 << 10) - 1;
#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)]
struct Specificity {
id_selectors: u32,
class_like_selectors: u32,
element_selectors: u32,
}
impl AddAssign for Specificity {
#[inline]
fn add_assign(&mut self, rhs: Self) {
self.id_selectors += rhs.id_selectors;
self.class_like_selectors += rhs.class_like_selectors;
self.element_selectors += rhs.element_selectors;
}
}
impl Add for Specificity {
type Output = Specificity;
fn add(self, rhs: Specificity) -> Specificity {
Specificity {
id_selectors: self.id_selectors + rhs.id_selectors,
class_like_selectors: self.class_like_selectors + rhs.class_like_selectors,
element_selectors: self.element_selectors + rhs.element_selectors,
}
}
}
impl Default for Specificity {
fn default() -> Specificity {
Specificity {
id_selectors: 0,
class_like_selectors: 0,
element_selectors: 0,
}
}
}
impl From<u32> for Specificity {
#[inline]
fn from(value: u32) -> Specificity {
assert!(value <= MAX_10BIT << 20 | MAX_10BIT << 10 | MAX_10BIT);
Specificity {
id_selectors: value >> 20,
class_like_selectors: (value >> 10) & MAX_10BIT,
element_selectors: value & MAX_10BIT,
}
}
}
impl From<Specificity> for u32 {
#[inline]
fn from(specificity: Specificity) -> u32 {
cmp::min(specificity.id_selectors, MAX_10BIT) << 20 |
cmp::min(specificity.class_like_selectors, MAX_10BIT) << 10 |
cmp::min(specificity.element_selectors, MAX_10BIT)
}
}
fn specificity<Impl>(builder: &SelectorBuilder<Impl>, iter: slice::Iter<Component<Impl>>) -> u32
where
Impl: SelectorImpl,
{
complex_selector_specificity(builder, iter).into()
}
fn complex_selector_specificity<Impl>(
builder: &SelectorBuilder<Impl>,
mut iter: slice::Iter<Component<Impl>>,
) -> Specificity
where
Impl: SelectorImpl,
{
fn simple_selector_specificity<Impl>(
builder: &SelectorBuilder<Impl>,
simple_selector: &Component<Impl>,
specificity: &mut Specificity,
) where
Impl: SelectorImpl,
{
match *simple_selector {
Component::Combinator(ref combinator) => {
unreachable!(
"Found combinator {:?} in simple selectors vector? {:?}",
combinator,
builder,
);
}
Component::PseudoElement(..) | Component::LocalName(..) => {
specificity.element_selectors += 1
},
Component::Slotted(ref selector) => {
specificity.element_selectors += 1;
// Note that due to the way ::slotted works we only compete with
// other ::slotted rules, so the above rule doesn't really
// matter, but we do it still for consistency with other
// pseudo-elements.
//
// See: https://github.com/w3c/csswg-drafts/issues/1915
*specificity += Specificity::from(selector.specificity());
},
Component::Host(ref selector) => {
specificity.class_like_selectors += 1;
if let Some(ref selector) = *selector {
// See: https://github.com/w3c/csswg-drafts/issues/1915
*specificity += Specificity::from(selector.specificity());
}
}
Component::ID(..) => {
specificity.id_selectors += 1;
},
Component::Class(..) |
Component::AttributeInNoNamespace { .. } |
Component::AttributeInNoNamespaceExists { .. } |
Component::AttributeOther(..) |
Component::FirstChild |
Component::LastChild |
Component::OnlyChild |
Component::Root |
Component::Empty |
Component::Scope |
Component::NthChild(..) |
Component::NthLastChild(..) |
Component::NthOfType(..) |
Component::NthLastOfType(..) |
Component::FirstOfType |
Component::LastOfType |
Component::OnlyOfType |
Component::NonTSPseudoClass(..) => {
specificity.class_like_selectors += 1;
},
Component::ExplicitUniversalType |
Component::ExplicitAnyNamespace |
Component::ExplicitNoNamespace |
Component::DefaultNamespace(..) |
Component::Namespace(..) => {
// Does not affect specificity
},
Component::Negation(ref negated) => {
for ss in negated.iter() {
simple_selector_specificity(builder, &ss, specificity);
}
},
}
}
let mut specificity = Default::default();
for simple_selector in &mut iter {
simple_selector_specificity(builder, &simple_selector, &mut specificity);
}
specificity
}
| {
spec.0 |= HAS_SLOTTED_BIT;
} | conditional_block |
builder.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Helper module to build up a selector safely and efficiently.
//!
//! Our selector representation is designed to optimize matching, and has
//! several requirements:
//! * All simple selectors and combinators are stored inline in the same buffer
//! as Component instances.
//! * We store the top-level compound selectors from right to left, i.e. in
//! matching order.
//! * We store the simple selectors for each combinator from left to right, so
//! that we match the cheaper simple selectors first.
//!
//! Meeting all these constraints without extra memmove traffic during parsing
//! is non-trivial. This module encapsulates those details and presents an
//! easy-to-use API for the parser.
use parser::{Combinator, Component, SelectorImpl};
use servo_arc::{Arc, HeaderWithLength, ThinArc};
use sink::Push;
use smallvec::{self, SmallVec};
use std::cmp;
use std::iter;
use std::ops::{AddAssign, Add};
use std::ptr;
use std::slice;
/// Top-level SelectorBuilder struct. This should be stack-allocated by the
/// consumer and never moved (because it contains a lot of inline data that
/// would be slow to memmov).
///
/// After instantation, callers may call the push_simple_selector() and
/// push_combinator() methods to append selector data as it is encountered
/// (from left to right). Once the process is complete, callers should invoke
/// build(), which transforms the contents of the SelectorBuilder into a heap-
/// allocated Selector and leaves the builder in a drained state.
#[derive(Debug)]
pub struct SelectorBuilder<Impl: SelectorImpl> {
/// The entire sequence of simple selectors, from left to right, without combinators.
///
/// We make this large because the result of parsing a selector is fed into a new
/// Arc-ed allocation, so any spilled vec would be a wasted allocation. Also,
/// Components are large enough that we don't have much cache locality benefit
/// from reserving stack space for fewer of them.
simple_selectors: SmallVec<[Component<Impl>; 32]>,
/// The combinators, and the length of the compound selector to their left.
combinators: SmallVec<[(Combinator, usize); 16]>,
/// The length of the current compount selector.
current_len: usize,
}
impl<Impl: SelectorImpl> Default for SelectorBuilder<Impl> {
#[inline(always)]
fn default() -> Self {
SelectorBuilder {
simple_selectors: SmallVec::new(),
combinators: SmallVec::new(),
current_len: 0,
}
}
}
impl<Impl: SelectorImpl> Push<Component<Impl>> for SelectorBuilder<Impl> {
fn push(&mut self, value: Component<Impl>) {
self.push_simple_selector(value);
}
}
impl<Impl: SelectorImpl> SelectorBuilder<Impl> {
/// Pushes a simple selector onto the current compound selector.
#[inline(always)]
pub fn push_simple_selector(&mut self, ss: Component<Impl>) {
debug_assert!(!ss.is_combinator());
self.simple_selectors.push(ss);
self.current_len += 1;
}
/// Completes the current compound selector and starts a new one, delimited
/// by the given combinator.
#[inline(always)]
pub fn push_combinator(&mut self, c: Combinator) {
self.combinators.push((c, self.current_len));
self.current_len = 0;
}
/// Returns true if no simple selectors have ever been pushed to this builder.
#[inline(always)]
pub fn is_empty(&self) -> bool {
self.simple_selectors.is_empty()
}
/// Returns true if combinators have ever been pushed to this builder.
#[inline(always)]
pub fn has_combinators(&self) -> bool {
!self.combinators.is_empty()
}
/// Consumes the builder, producing a Selector.
#[inline(always)]
pub fn build(
&mut self,
parsed_pseudo: bool,
parsed_slotted: bool,
) -> ThinArc<SpecificityAndFlags, Component<Impl>> {
// Compute the specificity and flags.
let mut spec = SpecificityAndFlags(specificity(&*self, self.simple_selectors.iter()));
if parsed_pseudo {
spec.0 |= HAS_PSEUDO_BIT;
}
if parsed_slotted {
spec.0 |= HAS_SLOTTED_BIT;
}
self.build_with_specificity_and_flags(spec)
}
/// Builds with an explicit SpecificityAndFlags. This is separated from build() so
/// that unit tests can pass an explicit specificity.
#[inline(always)]
pub fn build_with_specificity_and_flags(
&mut self,
spec: SpecificityAndFlags,
) -> ThinArc<SpecificityAndFlags, Component<Impl>> {
// First, compute the total number of Components we'll need to allocate
// space for.
let full_len = self.simple_selectors.len() + self.combinators.len();
// Create the header.
let header = HeaderWithLength::new(spec, full_len);
// Create the Arc using an iterator that drains our buffers.
// Use a raw pointer to be able to call set_len despite "borrowing" the slice.
// This is similar to SmallVec::drain, but we use a slice here because
// we’re gonna traverse it non-linearly.
let raw_simple_selectors: *const [Component<Impl>] = &*self.simple_selectors;
unsafe {
// Panic-safety: if SelectorBuilderIter is not iterated to the end,
// some simple selectors will safely leak.
self.simple_selectors.set_len(0)
}
let (rest, current) = split_from_end(unsafe { &*raw_simple_selectors }, self.current_len);
let iter = SelectorBuilderIter {
current_simple_selectors: current.iter(),
rest_of_simple_selectors: rest,
combinators: self.combinators.drain().rev(),
};
Arc::into_thin(Arc::from_header_and_iter(header, iter))
}
}
struct SelectorBuilderIter<'a, Impl: SelectorImpl> {
current_simple_selectors: slice::Iter<'a, Component<Impl>>,
rest_of_simple_selectors: &'a [Component<Impl>],
combinators: iter::Rev<smallvec::Drain<'a, (Combinator, usize)>>,
}
impl<'a, Impl: SelectorImpl> ExactSizeIterator for SelectorBuilderIter<'a, Impl> {
fn len(&self) -> usize {
self.current_simple_selectors.len() + self.rest_of_simple_selectors.len() +
self.combinators.len()
}
}
impl<'a, Impl: SelectorImpl> Iterator for SelectorBuilderIter<'a, Impl> {
type Item = Component<Impl>;
#[inline(always)]
fn next(&mut self) -> Option<Self::Item> {
if let Some(simple_selector_ref) = self.current_simple_selectors.next() {
// Move a simple selector out of this slice iterator.
// This is safe because we’ve called SmallVec::set_len(0) above,
// so SmallVec::drop won’t drop this simple selector.
unsafe { Some(ptr::read(simple_selector_ref)) }
} else {
self.combinators.next().map(|(combinator, len)| {
let (rest, current) = split_from_end(self.rest_of_simple_selectors, len);
self.rest_of_simple_selectors = rest;
self.current_simple_selectors = current.iter();
Component::Combinator(combinator)
})
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len(), Some(self.len()))
}
}
fn split_from_end<T>(s: &[T], at: usize) -> (&[T], &[T]) {
s.split_at(s.len() - at)
}
pub const HAS_PSEUDO_BIT: u32 = 1 << 30;
pub const HAS_SLOTTED_BIT: u32 = 1 << 31;
/// We use ten bits for each specificity kind (id, class, element), and the two
/// high bits for the pseudo and slotted flags.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct SpecificityAndFlags(pub u32);
impl SpecificityAndFlags {
#[inline]
pub fn specificity(&self) -> u32 {
self.0 & !(HAS_PSEUDO_BIT | HAS_SLOTTED_BIT)
}
#[inline]
pub fn has_pseudo_element(&self) -> bool {
| #[inline]
pub fn is_slotted(&self) -> bool {
(self.0 & HAS_SLOTTED_BIT) != 0
}
}
const MAX_10BIT: u32 = (1u32 << 10) - 1;
#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)]
struct Specificity {
id_selectors: u32,
class_like_selectors: u32,
element_selectors: u32,
}
impl AddAssign for Specificity {
#[inline]
fn add_assign(&mut self, rhs: Self) {
self.id_selectors += rhs.id_selectors;
self.class_like_selectors += rhs.class_like_selectors;
self.element_selectors += rhs.element_selectors;
}
}
impl Add for Specificity {
type Output = Specificity;
fn add(self, rhs: Specificity) -> Specificity {
Specificity {
id_selectors: self.id_selectors + rhs.id_selectors,
class_like_selectors: self.class_like_selectors + rhs.class_like_selectors,
element_selectors: self.element_selectors + rhs.element_selectors,
}
}
}
impl Default for Specificity {
fn default() -> Specificity {
Specificity {
id_selectors: 0,
class_like_selectors: 0,
element_selectors: 0,
}
}
}
impl From<u32> for Specificity {
#[inline]
fn from(value: u32) -> Specificity {
assert!(value <= MAX_10BIT << 20 | MAX_10BIT << 10 | MAX_10BIT);
Specificity {
id_selectors: value >> 20,
class_like_selectors: (value >> 10) & MAX_10BIT,
element_selectors: value & MAX_10BIT,
}
}
}
impl From<Specificity> for u32 {
#[inline]
fn from(specificity: Specificity) -> u32 {
cmp::min(specificity.id_selectors, MAX_10BIT) << 20 |
cmp::min(specificity.class_like_selectors, MAX_10BIT) << 10 |
cmp::min(specificity.element_selectors, MAX_10BIT)
}
}
fn specificity<Impl>(builder: &SelectorBuilder<Impl>, iter: slice::Iter<Component<Impl>>) -> u32
where
Impl: SelectorImpl,
{
complex_selector_specificity(builder, iter).into()
}
fn complex_selector_specificity<Impl>(
builder: &SelectorBuilder<Impl>,
mut iter: slice::Iter<Component<Impl>>,
) -> Specificity
where
Impl: SelectorImpl,
{
fn simple_selector_specificity<Impl>(
builder: &SelectorBuilder<Impl>,
simple_selector: &Component<Impl>,
specificity: &mut Specificity,
) where
Impl: SelectorImpl,
{
match *simple_selector {
Component::Combinator(ref combinator) => {
unreachable!(
"Found combinator {:?} in simple selectors vector? {:?}",
combinator,
builder,
);
}
Component::PseudoElement(..) | Component::LocalName(..) => {
specificity.element_selectors += 1
},
Component::Slotted(ref selector) => {
specificity.element_selectors += 1;
// Note that due to the way ::slotted works we only compete with
// other ::slotted rules, so the above rule doesn't really
// matter, but we do it still for consistency with other
// pseudo-elements.
//
// See: https://github.com/w3c/csswg-drafts/issues/1915
*specificity += Specificity::from(selector.specificity());
},
Component::Host(ref selector) => {
specificity.class_like_selectors += 1;
if let Some(ref selector) = *selector {
// See: https://github.com/w3c/csswg-drafts/issues/1915
*specificity += Specificity::from(selector.specificity());
}
}
Component::ID(..) => {
specificity.id_selectors += 1;
},
Component::Class(..) |
Component::AttributeInNoNamespace { .. } |
Component::AttributeInNoNamespaceExists { .. } |
Component::AttributeOther(..) |
Component::FirstChild |
Component::LastChild |
Component::OnlyChild |
Component::Root |
Component::Empty |
Component::Scope |
Component::NthChild(..) |
Component::NthLastChild(..) |
Component::NthOfType(..) |
Component::NthLastOfType(..) |
Component::FirstOfType |
Component::LastOfType |
Component::OnlyOfType |
Component::NonTSPseudoClass(..) => {
specificity.class_like_selectors += 1;
},
Component::ExplicitUniversalType |
Component::ExplicitAnyNamespace |
Component::ExplicitNoNamespace |
Component::DefaultNamespace(..) |
Component::Namespace(..) => {
// Does not affect specificity
},
Component::Negation(ref negated) => {
for ss in negated.iter() {
simple_selector_specificity(builder, &ss, specificity);
}
},
}
}
let mut specificity = Default::default();
for simple_selector in &mut iter {
simple_selector_specificity(builder, &simple_selector, &mut specificity);
}
specificity
}
| (self.0 & HAS_PSEUDO_BIT) != 0
}
| identifier_body |
test-pcopy.py | #!/usr/bin/python
import libploop
import shutil
import io
import os
import socket
import time
import subprocess as sp
import unittest
import hashlib
sleep_sec = 3
def hashfile(afile, hasher, blocksize=65536):
buf = afile.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(blocksize)
print (hasher.hexdigest())
return hasher.hexdigest()
def start_image_filller():
pid = os.fork()
if pid == 0:
os.execl('/bin/dd', 'dd', 'if=/dev/urandom', "of=/dev/ploop0", 'bs=4096', 'count=131072', 'oflag=direct')
os._exit(1)
else:
print "Start filler pid=%d" % pid
time.sleep(sleep_sec)
return pid
def start_pcopy_receiver(fname, fd):
print "Start receiver"
t = libploop.ploopcopy_thr_receiver(fname, fd)
t.start()
return t
def get_storage():
return '/vz/test'
def get_image():
return os.path.join(get_storage(), "test.hds")
def get_ddxml():
return os.path.join(get_storage(), 'DiskDescriptor.xml')
def get_mnt_dir():
return '_'.join([get_storage(), "mnt"])
def ploop_create(img):
ret = sp.call(["ploop", "init", "-s10g", img])
if ret != 0:
raise Exception("failed to create image")
def ploop_mount(ddxml):
ret = sp.call(["ploop", "mount", "-d/dev/ploop0", ddxml])
if ret != 0:
raise Exception("failed to mount image")
def ploop_umount(ddxml):
return sp.call(["ploop", "umount", "-d/dev/ploop0"])
def do_ploop_copy(ddxml, fd):
print "do_ploop_copy"
ploop_mount(ddxml)
pc = libploop.ploopcopy(ddxml, fd);
pid = start_image_filller()
print "Start copy"
pc.copy_start()
for n in range(0, 10):
print "Iter:", n
transferred = pc.copy_next_iteration()
print "transferred:", transferred
time.sleep(sleep_sec)
print "Wait filler %d" % pid
os.kill(pid, 15)
os.waitpid(pid, 0)
print "Stop sopy"
pc.copy_stop()
ploop_umount(ddxml)
class | (unittest.TestCase):
def setUp(self):
if not os.path.exists('/dev/ploop0'):
sp.call(['mknod', '/dev/ploop0', 'b', '182', '0'])
if os.path.exists(get_ddxml()):
ploop_umount(get_ddxml())
shutil.rmtree(get_storage())
if not os.path.exists(get_storage()):
os.mkdir(get_storage())
if not os.path.exists(get_mnt_dir()):
os.mkdir(get_mnt_dir())
ploop_create(get_image())
self.out = os.path.join(get_storage(), "out.hds")
self.ddxml = get_ddxml()
def tearDown(self):
print "tearDown"
if os.path.exists(get_ddxml()):
ploop_umount(get_ddxml())
shutil.rmtree(get_storage())
def test_aremote(self):
print "Start remote"
parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
self.rcv_thr = start_pcopy_receiver(self.out, child.fileno())
do_ploop_copy(self.ddxml, parent.fileno())
src = hashfile(open(get_image(), 'rb'), hashlib.md5())
dst = hashfile(open(self.out, 'rb'), hashlib.md5())
self.assertEqual(src, dst)
def test_local(self):
print "Start local"
f = open(self.out, 'wb')
do_ploop_copy(self.ddxml, f.fileno())
src = hashfile(open(get_image(), 'rb'), hashlib.md5())
dst = hashfile(open(self.out, 'rb'), hashlib.md5())
self.assertEqual(src, dst)
if __name__ == '__main__':
unittest.main()
| testPcopy | identifier_name |
test-pcopy.py | #!/usr/bin/python
import libploop
import shutil
import io
import os
import socket
import time
import subprocess as sp
import unittest
import hashlib
sleep_sec = 3
def hashfile(afile, hasher, blocksize=65536):
buf = afile.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(blocksize)
print (hasher.hexdigest())
return hasher.hexdigest()
def start_image_filller():
pid = os.fork()
if pid == 0:
os.execl('/bin/dd', 'dd', 'if=/dev/urandom', "of=/dev/ploop0", 'bs=4096', 'count=131072', 'oflag=direct')
os._exit(1)
else:
print "Start filler pid=%d" % pid
time.sleep(sleep_sec)
return pid
def start_pcopy_receiver(fname, fd):
print "Start receiver"
t = libploop.ploopcopy_thr_receiver(fname, fd)
t.start()
return t
def get_storage():
return '/vz/test'
def get_image():
return os.path.join(get_storage(), "test.hds")
def get_ddxml():
return os.path.join(get_storage(), 'DiskDescriptor.xml')
def get_mnt_dir():
return '_'.join([get_storage(), "mnt"])
def ploop_create(img):
ret = sp.call(["ploop", "init", "-s10g", img])
if ret != 0:
raise Exception("failed to create image")
def ploop_mount(ddxml):
ret = sp.call(["ploop", "mount", "-d/dev/ploop0", ddxml])
if ret != 0:
raise Exception("failed to mount image")
def ploop_umount(ddxml):
return sp.call(["ploop", "umount", "-d/dev/ploop0"])
def do_ploop_copy(ddxml, fd):
print "do_ploop_copy"
ploop_mount(ddxml)
pc = libploop.ploopcopy(ddxml, fd);
pid = start_image_filller()
print "Start copy"
pc.copy_start()
for n in range(0, 10):
print "Iter:", n
transferred = pc.copy_next_iteration()
print "transferred:", transferred
time.sleep(sleep_sec)
print "Wait filler %d" % pid
os.kill(pid, 15)
os.waitpid(pid, 0)
print "Stop sopy"
pc.copy_stop()
ploop_umount(ddxml)
class testPcopy(unittest.TestCase):
def setUp(self):
|
def tearDown(self):
print "tearDown"
if os.path.exists(get_ddxml()):
ploop_umount(get_ddxml())
shutil.rmtree(get_storage())
def test_aremote(self):
print "Start remote"
parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
self.rcv_thr = start_pcopy_receiver(self.out, child.fileno())
do_ploop_copy(self.ddxml, parent.fileno())
src = hashfile(open(get_image(), 'rb'), hashlib.md5())
dst = hashfile(open(self.out, 'rb'), hashlib.md5())
self.assertEqual(src, dst)
def test_local(self):
print "Start local"
f = open(self.out, 'wb')
do_ploop_copy(self.ddxml, f.fileno())
src = hashfile(open(get_image(), 'rb'), hashlib.md5())
dst = hashfile(open(self.out, 'rb'), hashlib.md5())
self.assertEqual(src, dst)
if __name__ == '__main__':
unittest.main()
| if not os.path.exists('/dev/ploop0'):
sp.call(['mknod', '/dev/ploop0', 'b', '182', '0'])
if os.path.exists(get_ddxml()):
ploop_umount(get_ddxml())
shutil.rmtree(get_storage())
if not os.path.exists(get_storage()):
os.mkdir(get_storage())
if not os.path.exists(get_mnt_dir()):
os.mkdir(get_mnt_dir())
ploop_create(get_image())
self.out = os.path.join(get_storage(), "out.hds")
self.ddxml = get_ddxml() | identifier_body |
test-pcopy.py | #!/usr/bin/python
import libploop
import shutil
import io
import os
import socket
import time
import subprocess as sp
import unittest
import hashlib
sleep_sec = 3
def hashfile(afile, hasher, blocksize=65536):
buf = afile.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(blocksize)
print (hasher.hexdigest())
return hasher.hexdigest()
def start_image_filller():
pid = os.fork()
if pid == 0:
os.execl('/bin/dd', 'dd', 'if=/dev/urandom', "of=/dev/ploop0", 'bs=4096', 'count=131072', 'oflag=direct')
os._exit(1)
else:
print "Start filler pid=%d" % pid
time.sleep(sleep_sec)
return pid
def start_pcopy_receiver(fname, fd):
print "Start receiver"
t = libploop.ploopcopy_thr_receiver(fname, fd)
t.start()
return t
def get_storage():
return '/vz/test'
def get_image():
return os.path.join(get_storage(), "test.hds")
def get_ddxml():
return os.path.join(get_storage(), 'DiskDescriptor.xml')
def get_mnt_dir():
return '_'.join([get_storage(), "mnt"])
def ploop_create(img):
ret = sp.call(["ploop", "init", "-s10g", img])
if ret != 0:
raise Exception("failed to create image")
def ploop_mount(ddxml):
ret = sp.call(["ploop", "mount", "-d/dev/ploop0", ddxml])
if ret != 0:
raise Exception("failed to mount image")
def ploop_umount(ddxml):
return sp.call(["ploop", "umount", "-d/dev/ploop0"])
def do_ploop_copy(ddxml, fd):
print "do_ploop_copy"
ploop_mount(ddxml)
pc = libploop.ploopcopy(ddxml, fd);
pid = start_image_filller()
print "Start copy"
pc.copy_start()
for n in range(0, 10):
print "Iter:", n
transferred = pc.copy_next_iteration() |
print "Wait filler %d" % pid
os.kill(pid, 15)
os.waitpid(pid, 0)
print "Stop sopy"
pc.copy_stop()
ploop_umount(ddxml)
class testPcopy(unittest.TestCase):
def setUp(self):
if not os.path.exists('/dev/ploop0'):
sp.call(['mknod', '/dev/ploop0', 'b', '182', '0'])
if os.path.exists(get_ddxml()):
ploop_umount(get_ddxml())
shutil.rmtree(get_storage())
if not os.path.exists(get_storage()):
os.mkdir(get_storage())
if not os.path.exists(get_mnt_dir()):
os.mkdir(get_mnt_dir())
ploop_create(get_image())
self.out = os.path.join(get_storage(), "out.hds")
self.ddxml = get_ddxml()
def tearDown(self):
print "tearDown"
if os.path.exists(get_ddxml()):
ploop_umount(get_ddxml())
shutil.rmtree(get_storage())
def test_aremote(self):
print "Start remote"
parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
self.rcv_thr = start_pcopy_receiver(self.out, child.fileno())
do_ploop_copy(self.ddxml, parent.fileno())
src = hashfile(open(get_image(), 'rb'), hashlib.md5())
dst = hashfile(open(self.out, 'rb'), hashlib.md5())
self.assertEqual(src, dst)
def test_local(self):
print "Start local"
f = open(self.out, 'wb')
do_ploop_copy(self.ddxml, f.fileno())
src = hashfile(open(get_image(), 'rb'), hashlib.md5())
dst = hashfile(open(self.out, 'rb'), hashlib.md5())
self.assertEqual(src, dst)
if __name__ == '__main__':
unittest.main() | print "transferred:", transferred
time.sleep(sleep_sec) | random_line_split |
test-pcopy.py | #!/usr/bin/python
import libploop
import shutil
import io
import os
import socket
import time
import subprocess as sp
import unittest
import hashlib
sleep_sec = 3
def hashfile(afile, hasher, blocksize=65536):
buf = afile.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(blocksize)
print (hasher.hexdigest())
return hasher.hexdigest()
def start_image_filller():
pid = os.fork()
if pid == 0:
os.execl('/bin/dd', 'dd', 'if=/dev/urandom', "of=/dev/ploop0", 'bs=4096', 'count=131072', 'oflag=direct')
os._exit(1)
else:
print "Start filler pid=%d" % pid
time.sleep(sleep_sec)
return pid
def start_pcopy_receiver(fname, fd):
print "Start receiver"
t = libploop.ploopcopy_thr_receiver(fname, fd)
t.start()
return t
def get_storage():
return '/vz/test'
def get_image():
return os.path.join(get_storage(), "test.hds")
def get_ddxml():
return os.path.join(get_storage(), 'DiskDescriptor.xml')
def get_mnt_dir():
return '_'.join([get_storage(), "mnt"])
def ploop_create(img):
ret = sp.call(["ploop", "init", "-s10g", img])
if ret != 0:
raise Exception("failed to create image")
def ploop_mount(ddxml):
ret = sp.call(["ploop", "mount", "-d/dev/ploop0", ddxml])
if ret != 0:
raise Exception("failed to mount image")
def ploop_umount(ddxml):
return sp.call(["ploop", "umount", "-d/dev/ploop0"])
def do_ploop_copy(ddxml, fd):
print "do_ploop_copy"
ploop_mount(ddxml)
pc = libploop.ploopcopy(ddxml, fd);
pid = start_image_filller()
print "Start copy"
pc.copy_start()
for n in range(0, 10):
print "Iter:", n
transferred = pc.copy_next_iteration()
print "transferred:", transferred
time.sleep(sleep_sec)
print "Wait filler %d" % pid
os.kill(pid, 15)
os.waitpid(pid, 0)
print "Stop sopy"
pc.copy_stop()
ploop_umount(ddxml)
class testPcopy(unittest.TestCase):
def setUp(self):
if not os.path.exists('/dev/ploop0'):
sp.call(['mknod', '/dev/ploop0', 'b', '182', '0'])
if os.path.exists(get_ddxml()):
|
if not os.path.exists(get_storage()):
os.mkdir(get_storage())
if not os.path.exists(get_mnt_dir()):
os.mkdir(get_mnt_dir())
ploop_create(get_image())
self.out = os.path.join(get_storage(), "out.hds")
self.ddxml = get_ddxml()
def tearDown(self):
print "tearDown"
if os.path.exists(get_ddxml()):
ploop_umount(get_ddxml())
shutil.rmtree(get_storage())
def test_aremote(self):
print "Start remote"
parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
self.rcv_thr = start_pcopy_receiver(self.out, child.fileno())
do_ploop_copy(self.ddxml, parent.fileno())
src = hashfile(open(get_image(), 'rb'), hashlib.md5())
dst = hashfile(open(self.out, 'rb'), hashlib.md5())
self.assertEqual(src, dst)
def test_local(self):
print "Start local"
f = open(self.out, 'wb')
do_ploop_copy(self.ddxml, f.fileno())
src = hashfile(open(get_image(), 'rb'), hashlib.md5())
dst = hashfile(open(self.out, 'rb'), hashlib.md5())
self.assertEqual(src, dst)
if __name__ == '__main__':
unittest.main()
| ploop_umount(get_ddxml())
shutil.rmtree(get_storage()) | conditional_block |
nytimes-scrape.py | model_search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.response-format?" + \
"[q=search term&" + \
"fq=filter-field:(filter-term)&additional-params=values]" + \
"&api-key=9key"
"""http://api.nytimes.com/svc/search/v2/articlesearch.json?q=terrorism+OR+terrorist
&begin_date=19900102&end_date=19900103&sort=newest&api-key=
key"""
search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.json?" + \
"[q=terror]" + \
"&api-key=key"
precise_search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.json"
terms = "?q=terrorism+OR+terrorist"
api = "&api-key=key"
print(precise_search+terms+dates+api)
"""
aggressive for looping in order to overcome the ten article limit. instead search each key word PER JOUR, and then concat the jsons into a nice pandas dataframe, and then eventually a csv.
"""
months_list = ["%.2d" % i for i in range(1,2)]
days_list = ["%.2d" % i for i in range(1,32)]
json_files = []
print(months_list)
for x in months_list:
month_s = x
month_e = x
for y in days_list:
day_s = y
day_e = str(int(y)+1).zfill(2)
year_s = "1990"
year_e = "1990"
start = year_s + month_s + day_s
end = year_e + month_e + day_e
dates = "&begin_date="+start+"&end_date="+end+"&sort=newest"
#print(start + " "+end + "\n" +dates)
r = requests.get(precise_search+terms+dates+api)
original_json = json.loads(r.text)
response_json = original_json['response']
json_file = response_json['docs']
json_files.append(json_file)
frames = []
for x in json_files:
|
#print(frames)
result = pd.concat(frames)
result
| df = pd.DataFrame.from_dict(x)
frames.append(df) | conditional_block |
nytimes-scrape.py | model_search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.response-format?" + \
"[q=search term&" + \
"fq=filter-field:(filter-term)&additional-params=values]" + \
"&api-key=9key"
"""http://api.nytimes.com/svc/search/v2/articlesearch.json?q=terrorism+OR+terrorist
&begin_date=19900102&end_date=19900103&sort=newest&api-key=
key"""
search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.json?" + \
"[q=terror]" + \
"&api-key=key"
precise_search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.json"
terms = "?q=terrorism+OR+terrorist"
api = "&api-key=key"
print(precise_search+terms+dates+api)
""" | json_files = []
print(months_list)
for x in months_list:
month_s = x
month_e = x
for y in days_list:
day_s = y
day_e = str(int(y)+1).zfill(2)
year_s = "1990"
year_e = "1990"
start = year_s + month_s + day_s
end = year_e + month_e + day_e
dates = "&begin_date="+start+"&end_date="+end+"&sort=newest"
#print(start + " "+end + "\n" +dates)
r = requests.get(precise_search+terms+dates+api)
original_json = json.loads(r.text)
response_json = original_json['response']
json_file = response_json['docs']
json_files.append(json_file)
frames = []
for x in json_files:
df = pd.DataFrame.from_dict(x)
frames.append(df)
#print(frames)
result = pd.concat(frames)
result | aggressive for looping in order to overcome the ten article limit. instead search each key word PER JOUR, and then concat the jsons into a nice pandas dataframe, and then eventually a csv.
"""
months_list = ["%.2d" % i for i in range(1,2)]
days_list = ["%.2d" % i for i in range(1,32)] | random_line_split |
run-sequence.d.ts | // Compiled using typings@0.6.8
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/40c60850ad6c8175a62d5ab48c4e016ea5b3dffe/run-sequence/run-sequence.d.ts
// Type definitions for run-sequence
// Project: https://github.com/OverZealous/run-sequence
// Definitions by: Keita Kagurazaka <https://github.com/k-kagurazaka>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
declare module "run-sequence" { | (...streams: (string | string[] | gulp.TaskCallback)[]): NodeJS.ReadWriteStream;
use(gulp: gulp.Gulp): IRunSequence;
}
var _tmp: IRunSequence;
export = _tmp;
} | import gulp = require('gulp');
interface IRunSequence { | random_line_split |
zones.py | #!/usr/bin/env python
import sys, os, re, tarfile, json
FILES = {
'africa', 'antarctica', 'asia', 'australasia',
'europe', 'northamerica', 'southamerica',
}
WS_SPLIT = re.compile("[ \t]+")
def lines(fn):
with tarfile.open(fn, 'r:*') as tar:
for info in tar:
if not info.isfile() or info.name not in FILES:
continue
f = tar.extractfile(info)
for ln in f:
ln = ln.decode('iso-8859-1')
ln = ln.rstrip()
ln = ln.split('#', 1)[0]
ln = ln.rstrip(' \t')
if ln:
yield ln
f.close()
def offset(s):
if s in {'-', '0'}:
return 0
dir, s = (-1, s[1:]) if s[0] == '-' else (1, s)
words = [int(n) for n in s.split(':')]
assert 1 <= len(words) < 4, words
words = words + [0] * (3 - len(words))
assert 0 <= words[0] < 24, words
assert 0 <= words[1] < 60, words
assert 0 <= words[2] < 60, words
return dir * sum((i * num) for (i, num) in zip(words, (3600, 60, 1)))
def zoneline(ls):
ls[1] = None if ls[1] == '-' else ls[1]
tmp = offset(ls[0]), ls[1], ls[2], ls[3:]
return {k: v for (k, v) in zip('orfu', tmp)}
def parse(fn):
zones, rules, zone = {}, {}, None
for ln in lines(fn):
# see zic(8) for documentation
words = WS_SPLIT.split(ln)
if words[0] == 'Zone':
assert words[1] not in zones, words[1]
zone = []
zone.append(zoneline(words[2:]))
if '/' in words[1]:
zones[words[1]] = zone
elif words[0] == '':
assert zone is not None
zone.append(zoneline(words[1:]))
elif words[0] == 'Rule':
zone = None
words[8] = offset(words[8])
rule = rules.setdefault(words[1], [])
rule.append(words[2:])
elif words[0] == 'Link':
zone = None # ignore
else:
assert False, ln
return {'zones': zones, 'rules': rules}
if __name__ == '__main__': |
path = sys.argv[1]
version = re.match('tzdata(.*)\.tar\.gz$', os.path.basename(path))
if version is None:
raise StandardError('argument must be tzdata archive')
print(json.dumps(parse(path))) | random_line_split | |
zones.py | #!/usr/bin/env python
import sys, os, re, tarfile, json
FILES = {
'africa', 'antarctica', 'asia', 'australasia',
'europe', 'northamerica', 'southamerica',
}
WS_SPLIT = re.compile("[ \t]+")
def lines(fn):
with tarfile.open(fn, 'r:*') as tar:
for info in tar:
if not info.isfile() or info.name not in FILES:
continue
f = tar.extractfile(info)
for ln in f:
ln = ln.decode('iso-8859-1')
ln = ln.rstrip()
ln = ln.split('#', 1)[0]
ln = ln.rstrip(' \t')
if ln:
yield ln
f.close()
def offset(s):
if s in {'-', '0'}:
return 0
dir, s = (-1, s[1:]) if s[0] == '-' else (1, s)
words = [int(n) for n in s.split(':')]
assert 1 <= len(words) < 4, words
words = words + [0] * (3 - len(words))
assert 0 <= words[0] < 24, words
assert 0 <= words[1] < 60, words
assert 0 <= words[2] < 60, words
return dir * sum((i * num) for (i, num) in zip(words, (3600, 60, 1)))
def zoneline(ls):
ls[1] = None if ls[1] == '-' else ls[1]
tmp = offset(ls[0]), ls[1], ls[2], ls[3:]
return {k: v for (k, v) in zip('orfu', tmp)}
def parse(fn):
zones, rules, zone = {}, {}, None
for ln in lines(fn):
# see zic(8) for documentation
|
return {'zones': zones, 'rules': rules}
if __name__ == '__main__':
path = sys.argv[1]
version = re.match('tzdata(.*)\.tar\.gz$', os.path.basename(path))
if version is None:
raise StandardError('argument must be tzdata archive')
print(json.dumps(parse(path)))
| words = WS_SPLIT.split(ln)
if words[0] == 'Zone':
assert words[1] not in zones, words[1]
zone = []
zone.append(zoneline(words[2:]))
if '/' in words[1]:
zones[words[1]] = zone
elif words[0] == '':
assert zone is not None
zone.append(zoneline(words[1:]))
elif words[0] == 'Rule':
zone = None
words[8] = offset(words[8])
rule = rules.setdefault(words[1], [])
rule.append(words[2:])
elif words[0] == 'Link':
zone = None # ignore
else:
assert False, ln | conditional_block |
zones.py | #!/usr/bin/env python
import sys, os, re, tarfile, json
FILES = {
'africa', 'antarctica', 'asia', 'australasia',
'europe', 'northamerica', 'southamerica',
}
WS_SPLIT = re.compile("[ \t]+")
def lines(fn):
with tarfile.open(fn, 'r:*') as tar:
for info in tar:
if not info.isfile() or info.name not in FILES:
continue
f = tar.extractfile(info)
for ln in f:
ln = ln.decode('iso-8859-1')
ln = ln.rstrip()
ln = ln.split('#', 1)[0]
ln = ln.rstrip(' \t')
if ln:
yield ln
f.close()
def offset(s):
if s in {'-', '0'}:
return 0
dir, s = (-1, s[1:]) if s[0] == '-' else (1, s)
words = [int(n) for n in s.split(':')]
assert 1 <= len(words) < 4, words
words = words + [0] * (3 - len(words))
assert 0 <= words[0] < 24, words
assert 0 <= words[1] < 60, words
assert 0 <= words[2] < 60, words
return dir * sum((i * num) for (i, num) in zip(words, (3600, 60, 1)))
def zoneline(ls):
|
def parse(fn):
zones, rules, zone = {}, {}, None
for ln in lines(fn):
# see zic(8) for documentation
words = WS_SPLIT.split(ln)
if words[0] == 'Zone':
assert words[1] not in zones, words[1]
zone = []
zone.append(zoneline(words[2:]))
if '/' in words[1]:
zones[words[1]] = zone
elif words[0] == '':
assert zone is not None
zone.append(zoneline(words[1:]))
elif words[0] == 'Rule':
zone = None
words[8] = offset(words[8])
rule = rules.setdefault(words[1], [])
rule.append(words[2:])
elif words[0] == 'Link':
zone = None # ignore
else:
assert False, ln
return {'zones': zones, 'rules': rules}
if __name__ == '__main__':
path = sys.argv[1]
version = re.match('tzdata(.*)\.tar\.gz$', os.path.basename(path))
if version is None:
raise StandardError('argument must be tzdata archive')
print(json.dumps(parse(path)))
| ls[1] = None if ls[1] == '-' else ls[1]
tmp = offset(ls[0]), ls[1], ls[2], ls[3:]
return {k: v for (k, v) in zip('orfu', tmp)} | identifier_body |
zones.py | #!/usr/bin/env python
import sys, os, re, tarfile, json
FILES = {
'africa', 'antarctica', 'asia', 'australasia',
'europe', 'northamerica', 'southamerica',
}
WS_SPLIT = re.compile("[ \t]+")
def lines(fn):
with tarfile.open(fn, 'r:*') as tar:
for info in tar:
if not info.isfile() or info.name not in FILES:
continue
f = tar.extractfile(info)
for ln in f:
ln = ln.decode('iso-8859-1')
ln = ln.rstrip()
ln = ln.split('#', 1)[0]
ln = ln.rstrip(' \t')
if ln:
yield ln
f.close()
def | (s):
if s in {'-', '0'}:
return 0
dir, s = (-1, s[1:]) if s[0] == '-' else (1, s)
words = [int(n) for n in s.split(':')]
assert 1 <= len(words) < 4, words
words = words + [0] * (3 - len(words))
assert 0 <= words[0] < 24, words
assert 0 <= words[1] < 60, words
assert 0 <= words[2] < 60, words
return dir * sum((i * num) for (i, num) in zip(words, (3600, 60, 1)))
def zoneline(ls):
ls[1] = None if ls[1] == '-' else ls[1]
tmp = offset(ls[0]), ls[1], ls[2], ls[3:]
return {k: v for (k, v) in zip('orfu', tmp)}
def parse(fn):
zones, rules, zone = {}, {}, None
for ln in lines(fn):
# see zic(8) for documentation
words = WS_SPLIT.split(ln)
if words[0] == 'Zone':
assert words[1] not in zones, words[1]
zone = []
zone.append(zoneline(words[2:]))
if '/' in words[1]:
zones[words[1]] = zone
elif words[0] == '':
assert zone is not None
zone.append(zoneline(words[1:]))
elif words[0] == 'Rule':
zone = None
words[8] = offset(words[8])
rule = rules.setdefault(words[1], [])
rule.append(words[2:])
elif words[0] == 'Link':
zone = None # ignore
else:
assert False, ln
return {'zones': zones, 'rules': rules}
if __name__ == '__main__':
path = sys.argv[1]
version = re.match('tzdata(.*)\.tar\.gz$', os.path.basename(path))
if version is None:
raise StandardError('argument must be tzdata archive')
print(json.dumps(parse(path)))
| offset | identifier_name |
S15.3.5.3_A2_T2.js | // Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* @name: S15.3.5.3_A2_T2;
* @section: 15.3.5.3, 11.8.6; | * ii) Let O be Result(i).
* iii) O is not an object, throw a TypeError exception;
* @description: F.prototype is undefined, and V is empty object;
*/
FACTORY = new Function;
FACTORY.prototype = undefined;
obj={};
//CHECK#1
try {
obj instanceof FACTORY;
$FAIL('#1: O is not an object, throw a TypeError exception');
} catch (e) {
if (!(e instanceof TypeError)) {
$ERROR('#1.1: O is not an object, throw a TypeError exception');
}
} | * @assertion: Assume F is a Function object. When the [[HasInstance]] method of F is called with value V and V is an object, the following steps are taken:
* i) Call the [[Get]] method of F with property name "prototype". | random_line_split |
S15.3.5.3_A2_T2.js | // Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* @name: S15.3.5.3_A2_T2;
* @section: 15.3.5.3, 11.8.6;
* @assertion: Assume F is a Function object. When the [[HasInstance]] method of F is called with value V and V is an object, the following steps are taken:
* i) Call the [[Get]] method of F with property name "prototype".
* ii) Let O be Result(i).
* iii) O is not an object, throw a TypeError exception;
* @description: F.prototype is undefined, and V is empty object;
*/
FACTORY = new Function;
FACTORY.prototype = undefined;
obj={};
//CHECK#1
try {
obj instanceof FACTORY;
$FAIL('#1: O is not an object, throw a TypeError exception');
} catch (e) {
if (!(e instanceof TypeError)) |
}
| {
$ERROR('#1.1: O is not an object, throw a TypeError exception');
} | conditional_block |
vec-dst.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn sub_expr() {
// Test for a &[T] => &&[T] coercion in sub-expression position
// (surpisingly, this can cause errors which are not caused by either of:
// `let x = vec.slice_mut(0, 2);`
// `foo(vec.slice_mut(0, 2));` ).
let mut vec: Vec<int> = vec!(1, 2, 3, 4);
let b: &mut [int] = [1, 2];
assert!(vec.slice_mut(0, 2) == b);
}
fn index() {
// Tests for indexing into box/& [T, ..n]
let x: [int, ..3] = [1, 2, 3];
let mut x: Box<[int, ..3]> = box x;
assert!(x[0] == 1);
assert!(x[1] == 2);
assert!(x[2] == 3);
x[1] = 45;
assert!(x[0] == 1);
assert!(x[1] == 45);
assert!(x[2] == 3);
let mut x: [int, ..3] = [1, 2, 3];
let x: &mut [int, ..3] = &mut x;
assert!(x[0] == 1);
assert!(x[1] == 2);
assert!(x[2] == 3);
x[1] = 45;
assert!(x[0] == 1);
assert!(x[1] == 45);
assert!(x[2] == 3);
}
pub fn | () {
sub_expr();
index();
}
| main | identifier_name |
vec-dst.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn sub_expr() {
// Test for a &[T] => &&[T] coercion in sub-expression position
// (surpisingly, this can cause errors which are not caused by either of:
// `let x = vec.slice_mut(0, 2);`
// `foo(vec.slice_mut(0, 2));` ).
let mut vec: Vec<int> = vec!(1, 2, 3, 4);
let b: &mut [int] = [1, 2];
assert!(vec.slice_mut(0, 2) == b);
}
fn index() |
pub fn main() {
sub_expr();
index();
}
| {
// Tests for indexing into box/& [T, ..n]
let x: [int, ..3] = [1, 2, 3];
let mut x: Box<[int, ..3]> = box x;
assert!(x[0] == 1);
assert!(x[1] == 2);
assert!(x[2] == 3);
x[1] = 45;
assert!(x[0] == 1);
assert!(x[1] == 45);
assert!(x[2] == 3);
let mut x: [int, ..3] = [1, 2, 3];
let x: &mut [int, ..3] = &mut x;
assert!(x[0] == 1);
assert!(x[1] == 2);
assert!(x[2] == 3);
x[1] = 45;
assert!(x[0] == 1);
assert!(x[1] == 45);
assert!(x[2] == 3);
} | identifier_body |
vec-dst.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | fn sub_expr() {
// Test for a &[T] => &&[T] coercion in sub-expression position
// (surpisingly, this can cause errors which are not caused by either of:
// `let x = vec.slice_mut(0, 2);`
// `foo(vec.slice_mut(0, 2));` ).
let mut vec: Vec<int> = vec!(1, 2, 3, 4);
let b: &mut [int] = [1, 2];
assert!(vec.slice_mut(0, 2) == b);
}
fn index() {
// Tests for indexing into box/& [T, ..n]
let x: [int, ..3] = [1, 2, 3];
let mut x: Box<[int, ..3]> = box x;
assert!(x[0] == 1);
assert!(x[1] == 2);
assert!(x[2] == 3);
x[1] = 45;
assert!(x[0] == 1);
assert!(x[1] == 45);
assert!(x[2] == 3);
let mut x: [int, ..3] = [1, 2, 3];
let x: &mut [int, ..3] = &mut x;
assert!(x[0] == 1);
assert!(x[1] == 2);
assert!(x[2] == 3);
x[1] = 45;
assert!(x[0] == 1);
assert!(x[1] == 45);
assert!(x[2] == 3);
}
pub fn main() {
sub_expr();
index();
} | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
| random_line_split |
job.py | import base64
import pickle
import re
from os import path
from py12306.cluster.cluster import Cluster
from py12306.helpers.api import *
from py12306.app import *
from py12306.helpers.auth_code import AuthCode
from py12306.helpers.event import Event
from py12306.helpers.func import *
from py12306.helpers.request import Request
from py12306.helpers.type import UserType
from py12306.helpers.qrcode import print_qrcode
from py12306.log.order_log import OrderLog
from py12306.log.user_log import UserLog
from py12306.log.common_log import CommonLog
class UserJob:
# heartbeat = 60 * 2 # 心跳保持时长
is_alive = True
check_interval = 5
key = None
user_name = ''
password = ''
type = 'qr'
user = None
info = {} # 用户信息
last_heartbeat = None
is_ready = False
user_loaded = False # 用户是否已加载成功
passengers = []
retry_time = 3
retry_count = 0
login_num = 0 # 尝试登录次数
# Init page
global_repeat_submit_token = None
ticket_info_for_passenger_form = None
order_request_dto = None
cluster = None
lock_init_user_time = 3 * 60
cookie = False
def __init__(self, info):
self.cluster = Cluster()
self.init_data(info)
def init_data(self, info):
self.session = Request()
self.session.add_response_hook(self.response_login_check)
self.key = str(info.get('key'))
self.user_name = info.get('user_name')
self.password = info.get('password')
self.type = info.get('type')
def update_user(self):
from py12306.user.user import User
self.user = User()
self.load_user()
def run(self):
# load user
self.update_user()
self.start()
def start(self):
"""
检测心跳
:return:
"""
while True and self.is_alive:
app_available_check()
if Config().is_slave():
self.load_user_from_remote()
else:
if Config().is_master() and not self.cookie: self.load_user_from_remote() # 主节点加载一次 Cookie
self.check_heartbeat()
if Const.IS_TEST: return
stay_second(self.check_interval)
def check_heartbeat(self):
# 心跳检测
if self.get_last_heartbeat() and (time_int() - self.get_last_heartbeat()) < Config().USER_HEARTBEAT_INTERVAL:
return True
# 只有主节点才能走到这
if self.is_first_time() or not self.check_user_is_login():
if not self.handle_login(): return
self.user_did_load()
message = UserLog.MESSAGE_USER_HEARTBEAT_NORMAL.format(self.get_name(), Config().USER_HEARTBEAT_INTERVAL)
UserLog.add_quick_log(message).flush()
def get_last_heartbeat(self):
if Config().is_cluster_enabled():
return int(self.cluster.session.get(Cluster.KEY_USER_LAST_HEARTBEAT, 0))
return self.last_heartbeat
def set_last_heartbeat(self, time=None):
time = time if time != None else time_int()
if Config().is_cluster_enabled():
self.cluster.session.set(Cluster.KEY_USER_LAST_HEARTBEAT, time)
self.last_heartbeat = time
# def init_cookies
def is_first_time(self):
if Config().is_cluster_enabled():
return not self.cluster.get_user_cookie(self.key)
return not path.exists(self.get_cookie_path())
def handle_login(self, expire=False):
if expire: UserLog.print_user_expired()
self.is_ready = False
UserLog.print_start_login(user=self)
if self.type == 'qr':
return self.qr_login()
else:
return self.login()
def login(self):
"""
获取验证码结果
:return 权限校验码
"""
data = {
'username': self.user_name,
'password': self.password,
'appid': 'otn'
}
answer = AuthCode.get_auth_code(self.session)
data['answer'] = answer
self.request_device_id()
response = self.session.post(API_BASE_LOGIN.get('url'), data)
result = response.json()
if result.get('result_code') == 0: # 登录成功
"""
login 获得 cookie uamtk
auth/uamtk 不请求,会返回 uamtk票据内容为空
/otn/uamauthclient 能拿到用户名
"""
new_tk = self.auth_uamtk()
user_name = self.auth_uamauthclient(new_tk)
self.update_user_info({'user_name': user_name})
self.login_did_success()
return True
elif result.get('result_code') == 2: # 账号之内错误
# 登录失败,用户名或密码为空
# 密码输入错误
UserLog.add_quick_log(UserLog.MESSAGE_LOGIN_FAIL.format(result.get('result_message'))).flush()
else:
UserLog.add_quick_log(
UserLog.MESSAGE_LOGIN_FAIL.format(result.get('result_message', result.get('message',
CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR)))).flush()
return False
def qr_login(self):
self.request_device_id()
image_uuid, png_path = self.download_code()
while True:
data = {
'RAIL_DEVICEID': self.session.cookies.get('RAIL_DEVICEID'),
'RAIL_EXPIRATION': self.session.cookies.get('RAIL_EXPIRATION'),
'uuid': image_uuid,
'appid': 'otn'
}
response = self.session.post(API_AUTH_QRCODE_CHECK.get('url'), data)
result = response.json()
result_code = int(result.get('result_code'))
if result_code == 0:
time.sleep(2)
elif result_code == 1:
UserLog.add_quick_log('请确认登录').flush()
time.sleep(2)
elif result_code == 2:
break
elif result_code == 3:
try:
os.remove(png_path)
except Exception as e:
UserLog.add_quick_log('无法删除文件: {}'.format(e)).flush()
image_uuid = self.download_code()
try:
os.remove(png_path)
except Exception as e:
UserLog.add_quick_log('无法删除文件: {}'.format(e)).flush()
self.session.get(API_USER_LOGIN, allow_redirects=True)
new_tk = self.auth_uamtk()
user_name = self.auth_uamauthclient(new_tk)
self.update_user_info({'user_name': user_name})
self.session.get(API_USER_LOGIN, allow_redirects=True)
self.login_did_success()
return True
def download_code(self):
try:
UserLog.add_quick_log(UserLog.MESSAGE_QRCODE_DOWNLOADING).flush()
response = self.session.post(API_AUTH_QRCODE_BASE64_DOWNLOAD.get('url'), data={'appid': 'otn'})
result = response.json()
if result.get('result_code') == '0':
img_bytes = base64.b64decode(result.get('image'))
try:
os.mkdir(Config().USER_DATA_DIR + '/qrcode')
except FileExistsError:
pass
png_path = path.normpath(Config().USER_DATA_DIR + '/qrcode/%d.png' % time.time())
with open(png_path, 'wb') as file:
file.write(img_bytes)
file.close()
if os.name == 'nt':
os.startfile(png_path)
else:
print_qrcode(png_path)
UserLog.add_log(UserLog.MESSAGE_QRCODE_DOWNLOADED.format(png_path)).flush()
Notification.send_email_with_qrcode(Config().EMAIL_RECEIVER, '你有新的登录二维码啦!', png_path)
self.retry_count = 0
return result.get('uuid'), png_path
raise KeyError('获取二维码失败: {}'.format(result.get('result_message')))
except Exception as e:
UserLog.add_quick_log(
UserLog.MESSAGE_QRCODE_FAIL.format(e, self.retry_time)).flush()
self.retry_count = self.retry_count + 1
if self.retry_count == 20:
self.retry_count = 0
try:
os.remove(self.get_cookie_path())
except:
pass
time.sleep(self.retry_time)
return self.download_code()
def check_user_is_login(self):
response = self.session.get(API_USER_LOGIN_CHECK)
is_login = response.json().get('data.is_login', False) == 'Y'
if is_login:
self.save_user()
self.set_last_heartbeat()
return self.get_user_info() # 检测应该是不会维持状态,这里再请求下个人中心看有没有用,01-10 看来应该是没用 01-22 有时拿到的状态 是已失效的再加上试试
return is_login
def auth_uamtk(self):
response = self.session.post(API_AUTH_UAMTK.get('url'), {'appid': 'otn'}, headers={
'Referer': 'https://kyfw.12306.cn/otn/passport?redirect=/otn/login/userLogin',
'Origin': 'https://kyfw.12306.cn'
})
result = response.json()
if result.get('newapptk'):
return result.get('newapptk')
# TODO 处理获取失败情况
return False
def auth_uamauthclient(self, tk):
response = self.session.post(API_AUTH_UAMAUTHCLIENT.get('url'), {'tk': tk})
result = response.json()
if result.get('username'):
return result.get('username')
# TODO 处理获取失败情况
return False
def request_device_id(self):
"""
获取加密后的浏览器特征 ID
:return:
"""
response = self.session.get(API_GET_BROWSER_DEVICE_ID)
if response.status_code == 200:
try:
result = json.loads(response.text)
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36"
}
self.session.headers.update(headers)
response = self.session.get(base64.b64decode(result['id']).decode())
if response.text.find('callbackFunction') >= 0:
result = response.text[18:-2]
result = json.loads(result)
if not Config().is_cache_rail_id_enabled():
self.session.cookies.update({
'RAIL_EXPIRATION': result.get('exp'),
'RAIL_DEVICEID': result.get('dfp'),
})
else:
self.session.cookies.update({
'RAIL_EXPIRATION': Config().RAIL_EXPIRATION,
'RAIL_DEVICEID': Config().RAIL_DEVICEID,
})
except:
return False
def login_did_success(self):
"""
用户登录成功
:return:
"""
self.login_num += 1
self.welcome_user()
self.save_user()
self.get_user_info()
self.set_last_heartbeat()
self.is_ready = True
def welcome_user(self):
UserLog.print_welcome_user(self)
pass
def get_cookie_path(self):
return Config().USER_DATA_DIR + self.user_name + '.cookie'
def update_user_info(self, info):
self.info = {**self.info, **info}
def get_name(self):
return self.info.get('user_name', '')
def save_user(self):
if Config().is_master():
self.cluster.set_user_cookie(self.key, self.session.cookies)
self.cluster.set_user_info(self.key, self.info)
with open(self.get_cookie_path(), 'wb') as f:
pickle.dump(self.session.cookies, f)
def did_loaded_user(self):
"""
恢复用户成功
:return:
"""
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER.format(self.user_name)).flush()
if self.check_user_is_login() and self.get_user_info():
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER_SUCCESS.format(self.user_name)).flush()
UserLog.print_welcome_user(self)
self.user_did_load()
else:
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER_BUT_EXPIRED).flush()
self.set_last_heartbeat(0)
def user_did_load(self):
"""
用户已经加载成功
:return:
"""
self.is_ready = True
if self.user_loaded: return
self.user_loaded = True
Event().user_loaded({'key': self.key}) # 发布通知
def get_user_info(self):
response = self.session.get(API_USER_INFO.get('url'))
result = response.json()
user_data = result.get('data.userDTO.loginUserDTO')
# 子节点访问会导致主节点登录失效 TODO 可快考虑实时同步 cookie
if user_data:
self.update_user_info({**user_data, **{'user_name': user_data.get('name')}})
self.save_user()
return True
return False
def load_user(self):
if Config().is_cluster_enabled(): return
cookie_path = self.get_cookie_path()
if path.exists(cookie_path):
with open(self.get_cookie_path(), 'rb') as f:
cookie = pickle.load(f)
self.cookie = True
self.session.cookies.update(cookie)
self.did_loaded_user()
return True
return None
def load_user_from_remote(self):
cookie = self.cluster.get_user_cookie(self.key)
info = self.cluster.get_user_info(self.key)
if Config().is_slave() and (not cookie or not info):
while True: # 子节点只能取
UserLog.add_quick_log(UserLog.MESSAGE_USER_COOKIE_NOT_FOUND_FROM_REMOTE.format(self.user_name)).flush()
stay_second(self.retry_time)
return self.load_user_from_remote()
if info: self.info = info
if cookie:
self.session.cookies.update(cookie)
if not self.cookie: # 第一次加载
self.cookie = True
if not Config().is_slave():
self.did_loaded_user()
else:
self.is_ready = True # 设置子节点用户 已准备好
UserLog.print_welcome_user(self)
return True
return False
def check_is_ready(self):
return self.is_ready
def wait_for_ready(self):
if self.is_ready: return self
UserLog.add_quick_log(UserLog.MESSAGE_WAIT_USER_INIT_COMPLETE.format(self.retry_time)).flush()
stay_second(self.retry_time)
return self.wait_for_ready()
def destroy(self):
"""
退出用户
:return:
"""
UserLog.add_quick_log(UserLog.MESSAGE_USER_BEING_DESTROY.format(self.user_name)).flush()
self.is_alive = False
def response_login_check(self, response, **kwargs):
if Config().is_master() and response.json().get('data.noLogin') == 'true': # relogin
self.handle_login(expire=True)
def get_user_passengers(self):
if self.passengers: return self.passengers
response = self.session.post(API_USER_PASSENGERS)
result = response.json()
if result.get('data.normal_passengers'):
self.passengers = result.get('data.normal_passengers')
# 将乘客写入到文件
with open(Config().USER_PASSENGERS_FILE % self.user_name, 'w', encoding='utf-8') as f:
f.write(json.dumps(self.passengers, indent=4, ensure_ascii=False))
return self.passengers
else:
UserLog.add_quick_log(
UserLog.MESSAGE_GET_USER_PASSENGERS_FAIL.format(
result.get('messages', CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR), self.retry_time)).flush()
if Config().is_slave():
self.load_user_from_remote() # 加载最新 cookie
stay_second(self.retry_time)
return self.get_user_passengers()
def get_passengers_by_members(self, members):
"""
获取格式化后的乘客信息
:param members:
:return:
[{
name: '项羽',
type: 1,
id_card: 0000000000000000000,
type_text: '成人',
enc_str: 'aaaaaa'
}]
"""
self.get_user_passengers()
results = []
for member in members:
is_member_code = is_number(member)
if not is_member_code:
if member[0] == "*":
audlt = 1
member = member[1:]
else:
audlt = 0
child_check = array_dict_find_by_key_value(results, 'name', member)
if not is_member_code and child_check:
new_member = child_check.copy()
new_member['type'] = UserType.CHILD
new_member['type_text'] = dict_find_key_by_value(UserType.dicts, int(new_member['type']))
else:
if is_member_code:
passenger = array_dict_find_by_key_value(self.passengers, 'code', member)
els | ot passenger:
UserLog.add_quick_log(
UserLog.MESSAGE_USER_PASSENGERS_IS_INVALID.format(self.user_name, member)).flush()
return False
new_member = {
'name': passenger.get('passenger_name'),
'id_card': passenger.get('passenger_id_no'),
'id_card_type': passenger.get('passenger_id_type_code'),
'mobile': passenger.get('mobile_no'),
'type': passenger.get('passenger_type'),
'type_text': dict_find_key_by_value(UserType.dicts, int(passenger.get('passenger_type'))),
'enc_str': passenger.get('allEncStr')
}
results.append(new_member)
return results
def request_init_dc_page(self):
"""
请求下单页面 拿到 token
:return:
"""
data = {'_json_att': ''}
response = self.session.post(API_INITDC_URL, data)
html = response.text
token = re.search(r'var globalRepeatSubmitToken = \'(.+?)\'', html)
form = re.search(r'var ticketInfoForPassengerForm *= *(\{.+\})', html)
order = re.search(r'var orderRequestDTO *= *(\{.+\})', html)
# 系统忙,请稍后重试
if html.find('系统忙,请稍后重试') != -1:
OrderLog.add_quick_log(OrderLog.MESSAGE_REQUEST_INIT_DC_PAGE_FAIL).flush() # 重试无用,直接跳过
return False, False, html
try:
self.global_repeat_submit_token = token.groups()[0]
self.ticket_info_for_passenger_form = json.loads(form.groups()[0].replace("'", '"'))
self.order_request_dto = json.loads(order.groups()[0].replace("'", '"'))
except:
return False, False, html # TODO Error
slide_val = re.search(r"var if_check_slide_passcode.*='(\d?)'", html)
is_slide = False
if slide_val:
is_slide = int(slide_val[1]) == 1
return True, is_slide, html
| e:
passenger = array_dict_find_by_key_value(self.passengers, 'passenger_name', member)
if audlt:
passenger['passenger_type'] = UserType.ADULT
if n | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.