file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
socialcoffee.js | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
//var MODAL = $("#modalProducto");
var URL = Define.URL_BASE;
window.onload=function(){
//toastr.error("Ingresaaaaaaaaa");
var url_ajax = URL + Define.URL_OLVIDE; //le decimos a qué url tiene que mandar la información
var data = {
action: 'validar'
};
httpPetition.ajxPost(url_ajax, data, function (data) {
if(data.mensaje == "Ok"){
toastr.success("Bienvenido amante del cafe, esto es Coffee Market House");
redireccionar(Define.URL_BASE + "socialcoffee");
}
});
};
function redireccionar(url_direccionar){
setTimeout(function(){ location.href=url_direccionar; }, 5000); //tiempo expresado en milisegundos
}
$("#olvide").delegate('#semeolvido', 'click', function () {//buscar pedido en bd
var usuario_a_buscar = $("#username").val();
var url_ajax = URL + Define.URL_OLVIDE; //le decimos a qué url tiene que mandar la información
var data = {
action: 'semeolvido',
username: usuario_a_buscar
};
if(!SOLICITUSEMEOLVIDO){
httpPetition.ajxPost(url_ajax, data, function (data) {
if(data.itemsCount != 0){
SOLICITUSEMEOLVIDO = true;
url_direccionar = Define.URL_BASE + "cuenta/login"
toastr.warning("Hola " + data.data[0].usuarioNombres + ", se te enviará la nueva contraseña al correo: " + data.data[0].usuarioEmail);
redireccionar(url_direccionar);
}else{
toastr.error("No existe una cuenta asociada a ese nombre de usuario.");
}
});
}else{
toastr.error("La solicitud ya fue enviada, revisa tu correo.");
};
});
$("#login").delegate('#ingresoLogin', 'click', function () {//validar usuario
var url_ajax = URL + Define.URL_LOGIN; //le decimos a qué url tiene que mandar la información
var usuario_a_buscar = $("#name").val();
var passwd_user = $("#pswd").val();
var recordame_ve = false;
if($('#recuerdame').is(':checked')){
recordame_ve = true;
} | var data = {
action: 'ingresar',
username: usuario_a_buscar,
password_user: passwd_user,
recuerdame: recordame_ve
};
if(usuario_a_buscar == '' || passwd_user == ''){
toastr.error("Username y/o contraseña vacíos, por favor digite un valor");
}else{
httpPetition.ajxPost(url_ajax, data, function (data) {
if(data.mensaje == "Ok"){
toastr.success("Bienvenido amante del cafe, esto es Coffee Market House");
redireccionar(Define.URL_BASE + "socialcoffee");
}
});
};
}); |
alert(recordame_ve);
| random_line_split |
socialcoffee.js | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
//var MODAL = $("#modalProducto");
var URL = Define.URL_BASE;
window.onload=function(){
//toastr.error("Ingresaaaaaaaaa");
var url_ajax = URL + Define.URL_OLVIDE; //le decimos a qué url tiene que mandar la información
var data = {
action: 'validar'
};
httpPetition.ajxPost(url_ajax, data, function (data) {
if(data.mensaje == "Ok"){
toastr.success("Bienvenido amante del cafe, esto es Coffee Market House");
redireccionar(Define.URL_BASE + "socialcoffee");
}
});
};
function re | rl_direccionar){
setTimeout(function(){ location.href=url_direccionar; }, 5000); //tiempo expresado en milisegundos
}
$("#olvide").delegate('#semeolvido', 'click', function () {//buscar pedido en bd
var usuario_a_buscar = $("#username").val();
var url_ajax = URL + Define.URL_OLVIDE; //le decimos a qué url tiene que mandar la información
var data = {
action: 'semeolvido',
username: usuario_a_buscar
};
if(!SOLICITUSEMEOLVIDO){
httpPetition.ajxPost(url_ajax, data, function (data) {
if(data.itemsCount != 0){
SOLICITUSEMEOLVIDO = true;
url_direccionar = Define.URL_BASE + "cuenta/login"
toastr.warning("Hola " + data.data[0].usuarioNombres + ", se te enviará la nueva contraseña al correo: " + data.data[0].usuarioEmail);
redireccionar(url_direccionar);
}else{
toastr.error("No existe una cuenta asociada a ese nombre de usuario.");
}
});
}else{
toastr.error("La solicitud ya fue enviada, revisa tu correo.");
};
});
$("#login").delegate('#ingresoLogin', 'click', function () {//validar usuario
var url_ajax = URL + Define.URL_LOGIN; //le decimos a qué url tiene que mandar la información
var usuario_a_buscar = $("#name").val();
var passwd_user = $("#pswd").val();
var recordame_ve = false;
if($('#recuerdame').is(':checked')){
recordame_ve = true;
}
alert(recordame_ve);
var data = {
action: 'ingresar',
username: usuario_a_buscar,
password_user: passwd_user,
recuerdame: recordame_ve
};
if(usuario_a_buscar == '' || passwd_user == ''){
toastr.error("Username y/o contraseña vacíos, por favor digite un valor");
}else{
httpPetition.ajxPost(url_ajax, data, function (data) {
if(data.mensaje == "Ok"){
toastr.success("Bienvenido amante del cafe, esto es Coffee Market House");
redireccionar(Define.URL_BASE + "socialcoffee");
}
});
};
});
| direccionar(u | identifier_name |
Toolbar.tsx | import React, { Fragment, FunctionComponent } from 'react';
import { styled } from '@storybook/theming';
import { window } from 'global';
import { FlexBar } from '../bar/bar';
import { Icons } from '../icon/icon';
import { IconButton } from '../bar/button';
interface ZoomProps {
zoom: (val: number) => void;
resetZoom: () => void;
}
interface EjectProps {
storyId?: string;
baseUrl?: string;
}
interface BarProps {
border?: boolean;
}
export type ToolbarProps = BarProps & ZoomProps & EjectProps;
const Zoom: FunctionComponent<ZoomProps> = ({ zoom, resetZoom }) => (
<>
<IconButton
key="zoomin"
onClick={e => {
e.preventDefault();
zoom(0.8);
}}
title="Zoom in"
>
<Icons icon="zoom" />
</IconButton>
<IconButton
key="zoomout"
onClick={e => {
e.preventDefault();
zoom(1.25);
}}
title="Zoom out"
>
<Icons icon="zoomout" />
</IconButton>
<IconButton
key="zoomreset" | onClick={e => {
e.preventDefault();
resetZoom();
}}
title="Reset zoom"
>
<Icons icon="zoomreset" />
</IconButton>
</>
);
const Eject: FunctionComponent<EjectProps> = ({ baseUrl, storyId }) => (
<IconButton
key="opener"
onClick={() => window.open(`${baseUrl}?id=${storyId}`)}
title="Open canvas in new tab"
>
<Icons icon="share" />
</IconButton>
);
const Bar = styled(props => <FlexBar {...props} />)({
position: 'absolute',
left: 0,
right: 0,
top: 0,
transition: 'transform .2s linear',
});
export const Toolbar: FunctionComponent<ToolbarProps> = ({
storyId,
baseUrl,
zoom,
resetZoom,
...rest
}) => (
<Bar {...rest}>
<Fragment key="left">
<Zoom {...{ zoom, resetZoom }} />
</Fragment>
<Fragment key="right">{storyId && <Eject {...{ storyId, baseUrl }} />}</Fragment>
</Bar>
); | random_line_split | |
mixins.py | from collections import OrderedDict
from django.http import HttpResponse
import simplejson
class | (object):
"""This class was designed to be inherited and used to return JSON objects from an Ordered Dictionary
"""
## Ordered Dictionary used to create serialized JSON object
# return_rderedDict() #This will enforce the ordering that we recieve from the database
def __init__(self):
"""
Init function for the JSON Mixin class
"""
self.return_list=OrderedDict()
return
def render_to_response(self, context):
"""Extends default render to response to return serialized JSON.
"""
return self.get_json_response(self.convert_to_json())
def get_json_response(self, content, **httpresponse_kwargs):
"""Returns JSON to calling object in the form of an http response.
"""
return HttpResponse(content,content_type='application/json',**httpresponse_kwargs)
def convert_to_json(self):
"""Serialized the return_list into JSON
"""
return simplejson.dumps(self.return_list) | JSONMixin | identifier_name |
mixins.py | from collections import OrderedDict
from django.http import HttpResponse
import simplejson
class JSONMixin(object):
"""This class was designed to be inherited and used to return JSON objects from an Ordered Dictionary
"""
## Ordered Dictionary used to create serialized JSON object |
def __init__(self):
"""
Init function for the JSON Mixin class
"""
self.return_list=OrderedDict()
return
def render_to_response(self, context):
"""Extends default render to response to return serialized JSON.
"""
return self.get_json_response(self.convert_to_json())
def get_json_response(self, content, **httpresponse_kwargs):
"""Returns JSON to calling object in the form of an http response.
"""
return HttpResponse(content,content_type='application/json',**httpresponse_kwargs)
def convert_to_json(self):
"""Serialized the return_list into JSON
"""
return simplejson.dumps(self.return_list) | # return_rderedDict() #This will enforce the ordering that we recieve from the database | random_line_split |
mixins.py | from collections import OrderedDict
from django.http import HttpResponse
import simplejson
class JSONMixin(object):
"""This class was designed to be inherited and used to return JSON objects from an Ordered Dictionary
"""
## Ordered Dictionary used to create serialized JSON object
# return_rderedDict() #This will enforce the ordering that we recieve from the database
def __init__(self):
"""
Init function for the JSON Mixin class
"""
self.return_list=OrderedDict()
return
def render_to_response(self, context):
"""Extends default render to response to return serialized JSON.
"""
return self.get_json_response(self.convert_to_json())
def get_json_response(self, content, **httpresponse_kwargs):
|
def convert_to_json(self):
"""Serialized the return_list into JSON
"""
return simplejson.dumps(self.return_list) | """Returns JSON to calling object in the form of an http response.
"""
return HttpResponse(content,content_type='application/json',**httpresponse_kwargs) | identifier_body |
leaky_bucket.rs | extern crate ratelimit_meter;
#[macro_use]
extern crate nonzero_ext;
use ratelimit_meter::{
algorithms::Algorithm, test_utilities::current_moment, DirectRateLimiter, LeakyBucket,
NegativeMultiDecision, NonConformance,
};
use std::thread;
use std::time::Duration;
#[test]
fn accepts_first_cell() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
assert_eq!(Ok(()), lb.check_at(current_moment()));
}
#[test]
fn rejects_too_many() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(2u32));
let now = current_moment();
let ms = Duration::from_millis(1);
assert_eq!(Ok(()), lb.check_at(now));
assert_eq!(Ok(()), lb.check_at(now));
assert_ne!(Ok(()), lb.check_at(now + ms * 2));
// should be ok again in 1s:
let next = now + Duration::from_millis(1002);
assert_eq!(Ok(()), lb.check_at(next));
assert_eq!(Ok(()), lb.check_at(next + ms));
assert_ne!(Ok(()), lb.check_at(next + ms * 2), "{:?}", lb);
}
#[test]
fn never_allows_more_than_capacity() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
let now = current_moment();
let ms = Duration::from_millis(1);
// Should not allow the first 15 cells on a capacity 5 bucket:
assert_ne!(Ok(()), lb.check_n_at(15, now));
// After 3 and 20 seconds, it should not allow 15 on that bucket either:
assert_ne!(Ok(()), lb.check_n_at(15, now + (ms * 3 * 1000)));
let result = lb.check_n_at(15, now + (ms * 20 * 1000));
match result {
Err(NegativeMultiDecision::InsufficientCapacity(n)) => assert_eq!(n, 15),
_ => panic!("Did not expect {:?}", result),
}
}
#[test]
fn correct_wait_time() {
// Bucket adding a new element per 200ms:
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
let mut now = current_moment();
let ms = Duration::from_millis(1);
let mut conforming = 0;
for _i in 0..20 {
now += ms;
let res = lb.check_at(now);
match res {
Ok(()) => {
conforming += 1;
}
Err(wait) => {
now += wait.wait_time_from(now);
assert_eq!(Ok(()), lb.check_at(now));
conforming += 1;
}
}
}
assert_eq!(20, conforming);
}
#[test]
fn prevents_time_travel() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
let now = current_moment() + Duration::from_secs(1);
let ms = Duration::from_millis(1);
assert!(lb.check_at(now).is_ok());
assert!(lb.check_at(now - ms).is_ok());
assert!(lb.check_at(now - ms * 500).is_ok());
}
#[test]
fn | () {
let mut lim = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(20u32));
let now = current_moment();
let ms = Duration::from_millis(1);
let mut children = vec![];
lim.check_at(now).unwrap();
for _i in 0..20 {
let mut lim = lim.clone();
children.push(thread::spawn(move || lim.check_at(now).is_ok()));
}
for child in children {
child.join().unwrap();
}
assert!(!lim.check_at(now + ms * 2).is_ok());
assert_eq!(Ok(()), lim.check_at(now + ms * 1000));
}
#[test]
fn tooearly_wait_time_from() {
let lim =
LeakyBucket::construct(nonzero!(1u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();
let state = <LeakyBucket as Algorithm>::BucketState::default();
let now = current_moment();
let ms = Duration::from_millis(1);
lim.test_and_update(&state, now).unwrap();
if let Err(failure) = lim.test_and_update(&state, now) {
assert_eq!(ms * 1000, failure.wait_time_from(now));
assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 1000));
assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 2001));
} else {
assert!(false, "Second attempt should fail");
}
}
| actual_threadsafety | identifier_name |
leaky_bucket.rs | extern crate ratelimit_meter;
#[macro_use]
extern crate nonzero_ext;
use ratelimit_meter::{
algorithms::Algorithm, test_utilities::current_moment, DirectRateLimiter, LeakyBucket,
NegativeMultiDecision, NonConformance,
};
use std::thread;
use std::time::Duration;
#[test]
fn accepts_first_cell() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
assert_eq!(Ok(()), lb.check_at(current_moment()));
}
#[test]
fn rejects_too_many() |
#[test]
fn never_allows_more_than_capacity() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
let now = current_moment();
let ms = Duration::from_millis(1);
// Should not allow the first 15 cells on a capacity 5 bucket:
assert_ne!(Ok(()), lb.check_n_at(15, now));
// After 3 and 20 seconds, it should not allow 15 on that bucket either:
assert_ne!(Ok(()), lb.check_n_at(15, now + (ms * 3 * 1000)));
let result = lb.check_n_at(15, now + (ms * 20 * 1000));
match result {
Err(NegativeMultiDecision::InsufficientCapacity(n)) => assert_eq!(n, 15),
_ => panic!("Did not expect {:?}", result),
}
}
#[test]
fn correct_wait_time() {
// Bucket adding a new element per 200ms:
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
let mut now = current_moment();
let ms = Duration::from_millis(1);
let mut conforming = 0;
for _i in 0..20 {
now += ms;
let res = lb.check_at(now);
match res {
Ok(()) => {
conforming += 1;
}
Err(wait) => {
now += wait.wait_time_from(now);
assert_eq!(Ok(()), lb.check_at(now));
conforming += 1;
}
}
}
assert_eq!(20, conforming);
}
#[test]
fn prevents_time_travel() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
let now = current_moment() + Duration::from_secs(1);
let ms = Duration::from_millis(1);
assert!(lb.check_at(now).is_ok());
assert!(lb.check_at(now - ms).is_ok());
assert!(lb.check_at(now - ms * 500).is_ok());
}
#[test]
fn actual_threadsafety() {
let mut lim = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(20u32));
let now = current_moment();
let ms = Duration::from_millis(1);
let mut children = vec![];
lim.check_at(now).unwrap();
for _i in 0..20 {
let mut lim = lim.clone();
children.push(thread::spawn(move || lim.check_at(now).is_ok()));
}
for child in children {
child.join().unwrap();
}
assert!(!lim.check_at(now + ms * 2).is_ok());
assert_eq!(Ok(()), lim.check_at(now + ms * 1000));
}
#[test]
fn tooearly_wait_time_from() {
let lim =
LeakyBucket::construct(nonzero!(1u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();
let state = <LeakyBucket as Algorithm>::BucketState::default();
let now = current_moment();
let ms = Duration::from_millis(1);
lim.test_and_update(&state, now).unwrap();
if let Err(failure) = lim.test_and_update(&state, now) {
assert_eq!(ms * 1000, failure.wait_time_from(now));
assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 1000));
assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 2001));
} else {
assert!(false, "Second attempt should fail");
}
}
| {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(2u32));
let now = current_moment();
let ms = Duration::from_millis(1);
assert_eq!(Ok(()), lb.check_at(now));
assert_eq!(Ok(()), lb.check_at(now));
assert_ne!(Ok(()), lb.check_at(now + ms * 2));
// should be ok again in 1s:
let next = now + Duration::from_millis(1002);
assert_eq!(Ok(()), lb.check_at(next));
assert_eq!(Ok(()), lb.check_at(next + ms));
assert_ne!(Ok(()), lb.check_at(next + ms * 2), "{:?}", lb);
} | identifier_body |
leaky_bucket.rs | extern crate ratelimit_meter;
#[macro_use]
extern crate nonzero_ext;
use ratelimit_meter::{
algorithms::Algorithm, test_utilities::current_moment, DirectRateLimiter, LeakyBucket,
NegativeMultiDecision, NonConformance,
};
use std::thread;
use std::time::Duration;
#[test]
fn accepts_first_cell() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
assert_eq!(Ok(()), lb.check_at(current_moment()));
}
#[test]
fn rejects_too_many() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(2u32));
let now = current_moment();
let ms = Duration::from_millis(1);
assert_eq!(Ok(()), lb.check_at(now));
assert_eq!(Ok(()), lb.check_at(now));
assert_ne!(Ok(()), lb.check_at(now + ms * 2));
// should be ok again in 1s:
let next = now + Duration::from_millis(1002);
assert_eq!(Ok(()), lb.check_at(next));
assert_eq!(Ok(()), lb.check_at(next + ms));
assert_ne!(Ok(()), lb.check_at(next + ms * 2), "{:?}", lb); | let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
let now = current_moment();
let ms = Duration::from_millis(1);
// Should not allow the first 15 cells on a capacity 5 bucket:
assert_ne!(Ok(()), lb.check_n_at(15, now));
// After 3 and 20 seconds, it should not allow 15 on that bucket either:
assert_ne!(Ok(()), lb.check_n_at(15, now + (ms * 3 * 1000)));
let result = lb.check_n_at(15, now + (ms * 20 * 1000));
match result {
Err(NegativeMultiDecision::InsufficientCapacity(n)) => assert_eq!(n, 15),
_ => panic!("Did not expect {:?}", result),
}
}
#[test]
fn correct_wait_time() {
// Bucket adding a new element per 200ms:
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
let mut now = current_moment();
let ms = Duration::from_millis(1);
let mut conforming = 0;
for _i in 0..20 {
now += ms;
let res = lb.check_at(now);
match res {
Ok(()) => {
conforming += 1;
}
Err(wait) => {
now += wait.wait_time_from(now);
assert_eq!(Ok(()), lb.check_at(now));
conforming += 1;
}
}
}
assert_eq!(20, conforming);
}
#[test]
fn prevents_time_travel() {
let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));
let now = current_moment() + Duration::from_secs(1);
let ms = Duration::from_millis(1);
assert!(lb.check_at(now).is_ok());
assert!(lb.check_at(now - ms).is_ok());
assert!(lb.check_at(now - ms * 500).is_ok());
}
#[test]
fn actual_threadsafety() {
let mut lim = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(20u32));
let now = current_moment();
let ms = Duration::from_millis(1);
let mut children = vec![];
lim.check_at(now).unwrap();
for _i in 0..20 {
let mut lim = lim.clone();
children.push(thread::spawn(move || lim.check_at(now).is_ok()));
}
for child in children {
child.join().unwrap();
}
assert!(!lim.check_at(now + ms * 2).is_ok());
assert_eq!(Ok(()), lim.check_at(now + ms * 1000));
}
#[test]
fn tooearly_wait_time_from() {
let lim =
LeakyBucket::construct(nonzero!(1u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();
let state = <LeakyBucket as Algorithm>::BucketState::default();
let now = current_moment();
let ms = Duration::from_millis(1);
lim.test_and_update(&state, now).unwrap();
if let Err(failure) = lim.test_and_update(&state, now) {
assert_eq!(ms * 1000, failure.wait_time_from(now));
assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 1000));
assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 2001));
} else {
assert!(false, "Second attempt should fail");
}
} | }
#[test]
fn never_allows_more_than_capacity() { | random_line_split |
map-types.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections;
use std::collections::HashMap;
// Test that trait types printed in error msgs include the type arguments.
fn | () {
let x: Box<HashMap<int, int>> = box HashMap::new();
let x: Box<Map<int, int>> = x;
let y: Box<Map<uint, int>> = box x;
//~^ ERROR the trait `collections::Map<uint,int>` is not implemented
}
| main | identifier_name |
map-types.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections;
use std::collections::HashMap;
// Test that trait types printed in error msgs include the type arguments.
fn main() | {
let x: Box<HashMap<int, int>> = box HashMap::new();
let x: Box<Map<int, int>> = x;
let y: Box<Map<uint, int>> = box x;
//~^ ERROR the trait `collections::Map<uint,int>` is not implemented
} | identifier_body | |
map-types.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections; |
fn main() {
let x: Box<HashMap<int, int>> = box HashMap::new();
let x: Box<Map<int, int>> = x;
let y: Box<Map<uint, int>> = box x;
//~^ ERROR the trait `collections::Map<uint,int>` is not implemented
} |
use std::collections::HashMap;
// Test that trait types printed in error msgs include the type arguments. | random_line_split |
CloudMailRu.py | # -*- coding: utf-8 -*-
import base64
import json
import re
from ..base.downloader import BaseDownloader
class CloudMailRu(BaseDownloader):
__name__ = "CloudMailRu"
__type__ = "downloader"
__version__ = "0.04"
__status__ = "testing"
__pattern__ = r"https?://cloud\.mail\.ru/dl\?q=(?P<QS>.+)"
__config__ = [
("enabled", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
("fallback", "bool", "Fallback to free download if premium fails", True),
("chk_filesize", "bool", "Check file size", True),
("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10),
]
__description__ = """Cloud.mail.ru downloader plugin"""
__license__ = "GPLv3"
__authors__ = [("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]
| def get_info(cls, url="", html=""):
info = super().get_info(url, html)
qs = re.match(cls.__pattern__, url).group('QS')
file_info = json.loads(base64.b64decode(qs).decode("utf-8"))
info.update({
'name': file_info['n'],
'size': file_info['s'],
'u': file_info['u']
})
return info
def setup(self):
self.chunk_limit = -1
self.resume_download = True
self.multi_dl = True
def process(self, pyfile):
self.download(self.info["u"], disposition=False) | OFFLINE_PATTERN = r'"error":\s*"not_exists"'
@classmethod | random_line_split |
CloudMailRu.py | # -*- coding: utf-8 -*-
import base64
import json
import re
from ..base.downloader import BaseDownloader
class CloudMailRu(BaseDownloader):
__name__ = "CloudMailRu"
__type__ = "downloader"
__version__ = "0.04"
__status__ = "testing"
__pattern__ = r"https?://cloud\.mail\.ru/dl\?q=(?P<QS>.+)"
__config__ = [
("enabled", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
("fallback", "bool", "Fallback to free download if premium fails", True),
("chk_filesize", "bool", "Check file size", True),
("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10),
]
__description__ = """Cloud.mail.ru downloader plugin"""
__license__ = "GPLv3"
__authors__ = [("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]
OFFLINE_PATTERN = r'"error":\s*"not_exists"'
@classmethod
def get_info(cls, url="", html=""):
info = super().get_info(url, html)
qs = re.match(cls.__pattern__, url).group('QS')
file_info = json.loads(base64.b64decode(qs).decode("utf-8"))
info.update({
'name': file_info['n'],
'size': file_info['s'],
'u': file_info['u']
})
return info
def | (self):
self.chunk_limit = -1
self.resume_download = True
self.multi_dl = True
def process(self, pyfile):
self.download(self.info["u"], disposition=False)
| setup | identifier_name |
CloudMailRu.py | # -*- coding: utf-8 -*-
import base64
import json
import re
from ..base.downloader import BaseDownloader
class CloudMailRu(BaseDownloader):
__name__ = "CloudMailRu"
__type__ = "downloader"
__version__ = "0.04"
__status__ = "testing"
__pattern__ = r"https?://cloud\.mail\.ru/dl\?q=(?P<QS>.+)"
__config__ = [
("enabled", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
("fallback", "bool", "Fallback to free download if premium fails", True),
("chk_filesize", "bool", "Check file size", True),
("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10),
]
__description__ = """Cloud.mail.ru downloader plugin"""
__license__ = "GPLv3"
__authors__ = [("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]
OFFLINE_PATTERN = r'"error":\s*"not_exists"'
@classmethod
def get_info(cls, url="", html=""):
info = super().get_info(url, html)
qs = re.match(cls.__pattern__, url).group('QS')
file_info = json.loads(base64.b64decode(qs).decode("utf-8"))
info.update({
'name': file_info['n'],
'size': file_info['s'],
'u': file_info['u']
})
return info
def setup(self):
|
def process(self, pyfile):
self.download(self.info["u"], disposition=False)
| self.chunk_limit = -1
self.resume_download = True
self.multi_dl = True | identifier_body |
config.py | # -*- coding: utf-8 -*-
# ***********************************************************************
# Copyright (C) 2014 - 2017 Oscar Gerardo Lazo Arjona *
# <oscar.lazo@correo.nucleares.unam.mx> *
# *
# This file is part of FAST. *
# *
# FAST is free software: you can redistribute it and/or modify *
# it under the terms of the GNU General Public License as published by *
# the Free Software Foundation, either version 3 of the License, or *
# (at your option) any later version. *
# *
# FAST is distributed in the hope that it will be useful, *
# but WITHOUT ANY WARRANTY; without even the implied warranty of *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# GNU General Public License for more details. *
# *
# You should have received a copy of the GNU General Public License *
# along with FAST. If not, see <http://www.gnu.org/licenses/>. *
# *
# ***********************************************************************
"""The basic configuration of FAST."""
from fast import __file__
# Whether to use parallelization through OpenMP.
parallel = True
parallel = False
# Whether to use NETCDF binary files for data communication.
use_netcdf = True |
# The install directory for FAST:
fast_path = __file__[:-len("__init__.pyc")] | use_netcdf = False
# An integer between 0 and 2 to control which tests are ran.
run_long_tests = 0 | random_line_split |
google-charts-datatable.ts | declare var google: any;
export interface ArrowFormatInterface {
base: number;
}
export interface BarFormatInterface {
base?: number;
colorNegative?: string;
colorPositive?: string;
drawZeroLine?: boolean;
max?: number;
min?: number;
showValue?: boolean;
width?: number;
}
export interface RangeInterface {
from: number | Date | number[];
to: number | Date | number[];
color?: string;
bgcolor?: string;
fromBgColor?: string;
toBgColor?: string;
}
export interface ColorFormatInterface {
ranges?: RangeInterface[];
}
export interface DateFormat {
formatType?: string;
pattern?: string;
timeZone?: number;
}
export interface NumberFormatInterface {
decimalSymbol?: string;
fractionDigits?: number;
groupingSymbol?: string;
negativeColor?: string;
negativeParens?: boolean;
pattern?: string;
prefix?: string;
suffix?: string;
}
export interface PatternFormatInterface {
pattern: string;
dstColumnIndex?: number;
}
export interface FormatterInterface {
type: string;
options?: (
ArrowFormatInterface
| BarFormatInterface
| ColorFormatInterface
| DateFormat
| NumberFormatInterface
| PatternFormatInterface
);
columns: number[];
}
export interface GoogleChartsDataTableInterface {
dataTable?: any;
firstRowIsData?: boolean;
query?: string;
dataSourceUrl?: string;
/** Refresh interval, in seconds, when using remote data source. */
refreshInterval?: number;
/** Timeout in seconds, when using remote data source */
timeout?: number;
/** Called after query executed. DataTable is updated automatically.
* @param queryResponse google.visualization.QueryResponse
*/
queryCallback?: (queryResponse: any) => any;
formatters?: FormatterInterface[];
view?: string | object | object[];
}
import {
Directive,
EventEmitter,
Output,
} from '@angular/core';
@Directive()
export class GoogleChartsDataTable {
private dataTable: any;
public query: any;
public tid: any;
@Output() dataTableChanged: EventEmitter<any> = new EventEmitter();
| (private opt: GoogleChartsDataTableInterface) {
if (opt) {
this._setDataTable(opt.dataTable, opt.firstRowIsData);
}
}
private send() {
if (this.query === undefined) {
return;
}
this.query.send((queryResponse: any) => {
this.setDataTable(queryResponse.getDataTable());
if (this.opt.queryCallback) {
this.opt.queryCallback(queryResponse);
}
});
}
public init(opt?: GoogleChartsDataTableInterface) {
if (opt) {
this.opt = opt;
}
if (this.tid !== undefined) {
// doesn't work, see https://github.com/google/google-visualization-issues/issues/2381
// this.query.abort();
window.clearInterval(this.tid);
this.tid = undefined;
}
if (this.opt.dataSourceUrl) {
this.query = new google.visualization.Query(this.opt.dataSourceUrl);
if (this.opt.query) {
this.query.setQuery(this.opt.query);
}
if (this.opt.timeout !== undefined) {
this.query.setTimeout(this.opt.timeout);
}
if (this.opt.refreshInterval) {
// this.query.setRefreshInterval(this.opt.refreshInterval);
this.tid = window.setInterval(() => {
this.send();
}, this.opt.refreshInterval * 1000);
}
this.send();
} else {
this.setDataTable(this.opt.dataTable);
}
}
/**
* @returns Underlying google.visualization.DataTable
*/
public getDataTable() {
return this.dataTable;
}
public setDataTable(dt: any, firstRowIsData?: boolean) {
if (firstRowIsData === undefined) {
firstRowIsData = this.opt.firstRowIsData;
}
this._setDataTable(dt, firstRowIsData);
this.dataTableChanged.emit(this.dataTable);
}
private _setDataTable(dt: any, firstRowIsData?: boolean) {
if (Array.isArray(dt)) {
dt = google.visualization.arrayToDataTable(dt, firstRowIsData);
}
this.dataTable = dt;
this.reformat();
}
/**
* Applies formatters to data columns, if defined
*/
public reformat() {
const dt = this.dataTable;
if (dt === undefined) {
return;
}
if (this.opt.formatters === undefined) {
return;
}
for (const formatterConfig of this.opt.formatters) {
let formatter: any;
if (formatterConfig.type === 'PatternFormat') {
const fmtOptions = formatterConfig.options as PatternFormatInterface;
formatter = new google.visualization.PatternFormat(fmtOptions.pattern);
formatter.format(dt, formatterConfig.columns, fmtOptions.dstColumnIndex);
continue;
}
const formatterConstructor = google.visualization[formatterConfig.type];
const formatterOptions = formatterConfig.options;
formatter = new formatterConstructor(formatterOptions);
if (formatterConfig.type === 'ColorFormat' && formatterOptions) {
const fmtOptions = formatterOptions as ColorFormatInterface;
if (fmtOptions.ranges) {
for (const range of fmtOptions.ranges) {
if (typeof (range.fromBgColor) !== 'undefined'
&& typeof (range.toBgColor) !== 'undefined') {
formatter.addGradientRange(range.from, range.to,
range.color, range.fromBgColor, range.toBgColor);
} else {
formatter.addRange(range.from, range.to, range.color, range.bgcolor);
}
}
}
}
for (const col of formatterConfig.columns) {
formatter.format(dt, col);
}
}
}
}
| constructor | identifier_name |
google-charts-datatable.ts | declare var google: any;
export interface ArrowFormatInterface {
base: number;
}
export interface BarFormatInterface {
base?: number;
colorNegative?: string;
colorPositive?: string;
drawZeroLine?: boolean;
max?: number;
min?: number;
showValue?: boolean;
width?: number;
}
export interface RangeInterface {
from: number | Date | number[];
to: number | Date | number[];
color?: string;
bgcolor?: string;
fromBgColor?: string;
toBgColor?: string;
}
export interface ColorFormatInterface {
ranges?: RangeInterface[];
}
export interface DateFormat {
formatType?: string;
pattern?: string;
timeZone?: number;
}
export interface NumberFormatInterface {
decimalSymbol?: string;
fractionDigits?: number;
groupingSymbol?: string;
negativeColor?: string;
negativeParens?: boolean;
pattern?: string;
prefix?: string;
suffix?: string;
}
export interface PatternFormatInterface {
pattern: string;
dstColumnIndex?: number;
}
export interface FormatterInterface {
type: string;
options?: (
ArrowFormatInterface
| BarFormatInterface
| ColorFormatInterface
| DateFormat
| NumberFormatInterface
| PatternFormatInterface
);
columns: number[];
}
export interface GoogleChartsDataTableInterface {
  /** Initial data: a google.visualization.DataTable, or a 2-D array
   *  accepted by google.visualization.arrayToDataTable. */
  dataTable?: any;
  /** When dataTable is an array: treat the first row as data rather than
   *  column headers (forwarded to arrayToDataTable). */
  firstRowIsData?: boolean;
  /** Optional query string, forwarded to Query.setQuery(). */
  query?: string;
  /** Remote data source URL; when set, data is fetched via
   *  google.visualization.Query instead of taken from dataTable. */
  dataSourceUrl?: string;
  /** Refresh interval, in seconds, when using remote data source. */
  refreshInterval?: number;
  /** Timeout in seconds, when using remote data source */
  timeout?: number;
  /** Called after query executed. DataTable is updated automatically.
   * @param queryResponse google.visualization.QueryResponse
   */
  queryCallback?: (queryResponse: any) => any;
  /** Formatters applied to columns whenever the table is (re)set. */
  formatters?: FormatterInterface[];
  /** Chart view specification. NOTE(review): not read by the
   *  GoogleChartsDataTable class itself — confirm the consumer. */
  view?: string | object | object[];
}
import {
Directive,
EventEmitter,
Output,
} from '@angular/core';
@Directive()
export class GoogleChartsDataTable {
private dataTable: any;
public query: any;
public tid: any;
@Output() dataTableChanged: EventEmitter<any> = new EventEmitter();
/**
 * @param opt Table configuration; captured as a parameter property.
 *     When provided, an initial DataTable is built immediately via
 *     _setDataTable (which does NOT emit dataTableChanged — only the
 *     public setDataTable does).
 */
constructor(private opt: GoogleChartsDataTableInterface) {
  if (opt) {
    this._setDataTable(opt.dataTable, opt.firstRowIsData);
  }
}
/**
 * Executes the configured remote query, if any, and refreshes the table
 * from the response before invoking the user's queryCallback.
 */
private send() {
  if (this.query === undefined) {
    // No remote source configured: init() only creates a Query when
    // opt.dataSourceUrl is set.
    return;
  }
  this.query.send((queryResponse: any) => {
    // setDataTable also emits dataTableChanged, so listeners fire before
    // the user callback runs.
    // NOTE(review): the response error state (queryResponse.isError()) is
    // not checked here — confirm this is intended.
    this.setDataTable(queryResponse.getDataTable());
    if (this.opt.queryCallback) {
      this.opt.queryCallback(queryResponse);
    }
  });
}
/**
 * (Re)initialises the data source.
 *
 * Cancels any pending refresh timer, then either sets up a remote
 * google.visualization.Query (when opt.dataSourceUrl is given) and fires
 * an immediate send(), or falls back to the locally supplied
 * opt.dataTable.
 *
 * @param opt Optional replacement configuration; when omitted, the
 *     configuration captured at construction (or a previous init) is
 *     reused.
 */
public init(opt?: GoogleChartsDataTableInterface) {
  if (opt) {
    this.opt = opt;
  }
  if (this.tid !== undefined) {
    // doesn't work, see https://github.com/google/google-visualization-issues/issues/2381
    // this.query.abort();
    window.clearInterval(this.tid);
    this.tid = undefined;
  }
  if (this.opt.dataSourceUrl) {
    this.query = new google.visualization.Query(this.opt.dataSourceUrl);
    if (this.opt.query) {
      this.query.setQuery(this.opt.query);
    }
    if (this.opt.timeout !== undefined) {
      this.query.setTimeout(this.opt.timeout);
    }
    if (this.opt.refreshInterval) {
      // this.query.setRefreshInterval(this.opt.refreshInterval);
      // Polled manually instead of setRefreshInterval (see issue above).
      // refreshInterval is in seconds; setInterval expects milliseconds.
      this.tid = window.setInterval(() => {
        this.send();
      }, this.opt.refreshInterval * 1000);
    }
    this.send();
  } else {
    this.setDataTable(this.opt.dataTable);
  }
}
/**
* @returns Underlying google.visualization.DataTable
*/
public getDataTable() |
/**
 * Replaces the underlying DataTable and notifies listeners.
 *
 * @param dt New data: a DataTable instance or a 2-D array.
 * @param firstRowIsData Array-input header flag; when left undefined the
 *     configured default (opt.firstRowIsData) is used instead.
 */
public setDataTable(dt: any, firstRowIsData?: boolean) {
  // Fall back to the configured default only when the caller left the
  // flag undefined (null is passed through unchanged, as before).
  const effectiveFirstRowIsData =
    firstRowIsData === undefined ? this.opt.firstRowIsData : firstRowIsData;
  this._setDataTable(dt, effectiveFirstRowIsData);
  // Unlike _setDataTable, the public setter notifies listeners.
  this.dataTableChanged.emit(this.dataTable);
}
/**
 * Internal setter: converts a raw 2-D array to a DataTable if needed,
 * stores it, and re-applies any configured formatters. Does not emit
 * dataTableChanged.
 */
private _setDataTable(dt: any, firstRowIsData?: boolean) {
  // Accept either a raw 2-D array or an existing DataTable instance.
  this.dataTable = Array.isArray(dt)
    ? google.visualization.arrayToDataTable(dt, firstRowIsData)
    : dt;
  this.reformat();
}
/**
 * Applies formatters to data columns, if defined.
 *
 * PatternFormat is handled specially (its format() takes the source
 * columns plus an optional destination column in one call); every other
 * formatter type is instantiated from its options and applied column by
 * column. ColorFormat ranges are registered before formatting.
 *
 * @throws Error when a formatter type does not exist on
 *     google.visualization (previously this surfaced as an opaque
 *     "is not a constructor" TypeError).
 */
public reformat() {
  const dt = this.dataTable;
  if (dt === undefined) {
    return;
  }
  if (this.opt.formatters === undefined) {
    return;
  }
  for (const formatterConfig of this.opt.formatters) {
    let formatter: any;
    if (formatterConfig.type === 'PatternFormat') {
      const fmtOptions = formatterConfig.options as PatternFormatInterface;
      formatter = new google.visualization.PatternFormat(fmtOptions.pattern);
      formatter.format(dt, formatterConfig.columns, fmtOptions.dstColumnIndex);
      continue;
    }
    const formatterConstructor = google.visualization[formatterConfig.type];
    if (formatterConstructor === undefined) {
      // Fail loudly with the offending type name instead of letting
      // `new undefined(...)` throw a cryptic TypeError.
      throw new Error(`Unknown formatter type: ${formatterConfig.type}`);
    }
    const formatterOptions = formatterConfig.options;
    formatter = new formatterConstructor(formatterOptions);
    if (formatterConfig.type === 'ColorFormat' && formatterOptions) {
      const fmtOptions = formatterOptions as ColorFormatInterface;
      if (fmtOptions.ranges) {
        for (const range of fmtOptions.ranges) {
          // A gradient range needs both end colours; otherwise fall back
          // to a solid range.
          if (typeof (range.fromBgColor) !== 'undefined'
            && typeof (range.toBgColor) !== 'undefined') {
            formatter.addGradientRange(range.from, range.to,
              range.color, range.fromBgColor, range.toBgColor);
          } else {
            formatter.addRange(range.from, range.to, range.color, range.bgcolor);
          }
        }
      }
    }
    for (const col of formatterConfig.columns) {
      formatter.format(dt, col);
    }
  }
}
}
| {
return this.dataTable;
} | identifier_body |
google-charts-datatable.ts | declare var google: any;
export interface ArrowFormatInterface {
base: number;
}
export interface BarFormatInterface {
base?: number;
colorNegative?: string;
colorPositive?: string;
drawZeroLine?: boolean;
max?: number;
min?: number;
showValue?: boolean;
width?: number;
}
export interface RangeInterface {
from: number | Date | number[];
to: number | Date | number[];
color?: string;
bgcolor?: string;
fromBgColor?: string;
toBgColor?: string;
}
export interface ColorFormatInterface {
ranges?: RangeInterface[];
}
export interface DateFormat {
formatType?: string;
pattern?: string;
timeZone?: number;
}
export interface NumberFormatInterface {
decimalSymbol?: string;
fractionDigits?: number;
groupingSymbol?: string;
negativeColor?: string;
negativeParens?: boolean;
pattern?: string;
prefix?: string;
suffix?: string;
}
export interface PatternFormatInterface {
pattern: string;
dstColumnIndex?: number;
}
export interface FormatterInterface {
type: string;
options?: (
ArrowFormatInterface
| BarFormatInterface
| ColorFormatInterface
| DateFormat
| NumberFormatInterface
| PatternFormatInterface
);
columns: number[];
}
export interface GoogleChartsDataTableInterface {
dataTable?: any;
firstRowIsData?: boolean;
query?: string;
dataSourceUrl?: string;
/** Refresh interval, in seconds, when using remote data source. */
refreshInterval?: number;
/** Timeout in seconds, when using remote data source */
timeout?: number;
/** Called after query executed. DataTable is updated automatically.
* @param queryResponse google.visualization.QueryResponse
*/
queryCallback?: (queryResponse: any) => any;
formatters?: FormatterInterface[];
view?: string | object | object[];
}
import {
Directive,
EventEmitter,
Output,
} from '@angular/core';
@Directive()
export class GoogleChartsDataTable {
private dataTable: any;
public query: any;
public tid: any;
@Output() dataTableChanged: EventEmitter<any> = new EventEmitter();
constructor(private opt: GoogleChartsDataTableInterface) {
if (opt) {
this._setDataTable(opt.dataTable, opt.firstRowIsData);
}
}
private send() {
if (this.query === undefined) {
return;
}
this.query.send((queryResponse: any) => {
this.setDataTable(queryResponse.getDataTable());
if (this.opt.queryCallback) {
this.opt.queryCallback(queryResponse);
}
});
}
public init(opt?: GoogleChartsDataTableInterface) {
if (opt) {
this.opt = opt;
}
if (this.tid !== undefined) {
// doesn't work, see https://github.com/google/google-visualization-issues/issues/2381
// this.query.abort();
window.clearInterval(this.tid);
this.tid = undefined;
}
if (this.opt.dataSourceUrl) {
this.query = new google.visualization.Query(this.opt.dataSourceUrl);
if (this.opt.query) {
this.query.setQuery(this.opt.query);
}
if (this.opt.timeout !== undefined) {
this.query.setTimeout(this.opt.timeout);
}
if (this.opt.refreshInterval) {
// this.query.setRefreshInterval(this.opt.refreshInterval);
this.tid = window.setInterval(() => {
this.send();
}, this.opt.refreshInterval * 1000);
}
this.send();
} else {
this.setDataTable(this.opt.dataTable);
}
}
/**
* @returns Underlying google.visualization.DataTable
*/
public getDataTable() {
return this.dataTable;
}
public setDataTable(dt: any, firstRowIsData?: boolean) {
if (firstRowIsData === undefined) {
firstRowIsData = this.opt.firstRowIsData;
}
this._setDataTable(dt, firstRowIsData);
this.dataTableChanged.emit(this.dataTable);
}
private _setDataTable(dt: any, firstRowIsData?: boolean) {
if (Array.isArray(dt)) {
dt = google.visualization.arrayToDataTable(dt, firstRowIsData);
}
this.dataTable = dt;
this.reformat();
}
/**
* Applies formatters to data columns, if defined
*/
public reformat() {
const dt = this.dataTable;
if (dt === undefined) {
return;
}
if (this.opt.formatters === undefined) {
return;
}
for (const formatterConfig of this.opt.formatters) {
let formatter: any;
if (formatterConfig.type === 'PatternFormat') {
const fmtOptions = formatterConfig.options as PatternFormatInterface;
formatter = new google.visualization.PatternFormat(fmtOptions.pattern);
formatter.format(dt, formatterConfig.columns, fmtOptions.dstColumnIndex);
continue;
}
const formatterConstructor = google.visualization[formatterConfig.type];
const formatterOptions = formatterConfig.options;
formatter = new formatterConstructor(formatterOptions);
if (formatterConfig.type === 'ColorFormat' && formatterOptions) {
const fmtOptions = formatterOptions as ColorFormatInterface;
if (fmtOptions.ranges) |
}
for (const col of formatterConfig.columns) {
formatter.format(dt, col);
}
}
}
}
| {
for (const range of fmtOptions.ranges) {
if (typeof (range.fromBgColor) !== 'undefined'
&& typeof (range.toBgColor) !== 'undefined') {
formatter.addGradientRange(range.from, range.to,
range.color, range.fromBgColor, range.toBgColor);
} else {
formatter.addRange(range.from, range.to, range.color, range.bgcolor);
}
}
} | conditional_block |
google-charts-datatable.ts | declare var google: any;
export interface ArrowFormatInterface {
base: number;
}
export interface BarFormatInterface {
base?: number;
colorNegative?: string;
colorPositive?: string;
drawZeroLine?: boolean;
max?: number;
min?: number;
showValue?: boolean;
width?: number;
}
export interface RangeInterface {
from: number | Date | number[];
to: number | Date | number[];
color?: string;
bgcolor?: string;
fromBgColor?: string;
toBgColor?: string;
}
export interface ColorFormatInterface {
ranges?: RangeInterface[];
}
export interface DateFormat {
formatType?: string;
pattern?: string;
timeZone?: number;
}
export interface NumberFormatInterface {
decimalSymbol?: string;
fractionDigits?: number;
groupingSymbol?: string;
negativeColor?: string;
negativeParens?: boolean;
pattern?: string;
prefix?: string;
suffix?: string;
}
export interface PatternFormatInterface {
pattern: string;
dstColumnIndex?: number;
}
export interface FormatterInterface {
type: string;
options?: (
ArrowFormatInterface
| BarFormatInterface
| ColorFormatInterface
| DateFormat
| NumberFormatInterface
| PatternFormatInterface
);
columns: number[];
}
export interface GoogleChartsDataTableInterface {
dataTable?: any;
firstRowIsData?: boolean; | dataSourceUrl?: string;
/** Refresh interval, in seconds, when using remote data source. */
refreshInterval?: number;
/** Timeout in seconds, when using remote data source */
timeout?: number;
/** Called after query executed. DataTable is updated automatically.
* @param queryResponse google.visualization.QueryResponse
*/
queryCallback?: (queryResponse: any) => any;
formatters?: FormatterInterface[];
view?: string | object | object[];
}
import {
Directive,
EventEmitter,
Output,
} from '@angular/core';
@Directive()
export class GoogleChartsDataTable {
private dataTable: any;
public query: any;
public tid: any;
@Output() dataTableChanged: EventEmitter<any> = new EventEmitter();
constructor(private opt: GoogleChartsDataTableInterface) {
if (opt) {
this._setDataTable(opt.dataTable, opt.firstRowIsData);
}
}
private send() {
if (this.query === undefined) {
return;
}
this.query.send((queryResponse: any) => {
this.setDataTable(queryResponse.getDataTable());
if (this.opt.queryCallback) {
this.opt.queryCallback(queryResponse);
}
});
}
public init(opt?: GoogleChartsDataTableInterface) {
if (opt) {
this.opt = opt;
}
if (this.tid !== undefined) {
// doesn't work, see https://github.com/google/google-visualization-issues/issues/2381
// this.query.abort();
window.clearInterval(this.tid);
this.tid = undefined;
}
if (this.opt.dataSourceUrl) {
this.query = new google.visualization.Query(this.opt.dataSourceUrl);
if (this.opt.query) {
this.query.setQuery(this.opt.query);
}
if (this.opt.timeout !== undefined) {
this.query.setTimeout(this.opt.timeout);
}
if (this.opt.refreshInterval) {
// this.query.setRefreshInterval(this.opt.refreshInterval);
this.tid = window.setInterval(() => {
this.send();
}, this.opt.refreshInterval * 1000);
}
this.send();
} else {
this.setDataTable(this.opt.dataTable);
}
}
/**
* @returns Underlying google.visualization.DataTable
*/
public getDataTable() {
return this.dataTable;
}
public setDataTable(dt: any, firstRowIsData?: boolean) {
if (firstRowIsData === undefined) {
firstRowIsData = this.opt.firstRowIsData;
}
this._setDataTable(dt, firstRowIsData);
this.dataTableChanged.emit(this.dataTable);
}
private _setDataTable(dt: any, firstRowIsData?: boolean) {
if (Array.isArray(dt)) {
dt = google.visualization.arrayToDataTable(dt, firstRowIsData);
}
this.dataTable = dt;
this.reformat();
}
/**
* Applies formatters to data columns, if defined
*/
public reformat() {
const dt = this.dataTable;
if (dt === undefined) {
return;
}
if (this.opt.formatters === undefined) {
return;
}
for (const formatterConfig of this.opt.formatters) {
let formatter: any;
if (formatterConfig.type === 'PatternFormat') {
const fmtOptions = formatterConfig.options as PatternFormatInterface;
formatter = new google.visualization.PatternFormat(fmtOptions.pattern);
formatter.format(dt, formatterConfig.columns, fmtOptions.dstColumnIndex);
continue;
}
const formatterConstructor = google.visualization[formatterConfig.type];
const formatterOptions = formatterConfig.options;
formatter = new formatterConstructor(formatterOptions);
if (formatterConfig.type === 'ColorFormat' && formatterOptions) {
const fmtOptions = formatterOptions as ColorFormatInterface;
if (fmtOptions.ranges) {
for (const range of fmtOptions.ranges) {
if (typeof (range.fromBgColor) !== 'undefined'
&& typeof (range.toBgColor) !== 'undefined') {
formatter.addGradientRange(range.from, range.to,
range.color, range.fromBgColor, range.toBgColor);
} else {
formatter.addRange(range.from, range.to, range.color, range.bgcolor);
}
}
}
}
for (const col of formatterConfig.columns) {
formatter.format(dt, col);
}
}
}
} | query?: string; | random_line_split |
IntCartesianProduct.tsx | import * as _ from 'lodash';
import * as collections from 'typescript-collections';
export class IntCartesianProduct {
private _lengths:Array<number>=[];
private _indices:Array<number> = [];
private maxIndex:number;
private _hasNext:boolean = true;
/**
 * @param lengths Size of each dimension of the cartesian product.
 */
constructor(lengths:Array<number>) {
    // Keep a private copy so later mutation of the caller's array cannot
    // affect iteration.
    this._lengths = collections.arrays.copy(lengths);
    // Start the odometer at the all-zeros tuple.
    this._indices = new Array<number>(lengths.length).fill(0);
    this.maxIndex = this.findMaxIndex();
}
public findMaxIndex():number {
let max:number = -1;
let maxIndex:number = 1;
for (var i:number = 0; i < this._lengths.length; i++) |
return maxIndex;
}
/** @returns true while there are tuples not yet produced by next(). */
public hasNext():boolean {
    return this._hasNext;
}
/**
 * Returns the current index tuple, then advances the internal odometer
 * (rightmost position moves fastest). When the leftmost position wraps,
 * the iteration is marked exhausted.
 */
public next():Array<number> {
    // Snapshot the current tuple before advancing.
    const result: Array<number> = this._indices.slice();
    let pos: number = this._indices.length - 1;
    while (pos >= 0) {
        if (this._indices[pos] == this._lengths[pos] - 1) {
            // This position wraps; carry into the next one to the left.
            this._indices[pos] = 0;
            if (pos == 0) {
                this._hasNext = false;
            }
            pos--;
        } else {
            this._indices[pos]++;
            break;
        }
    }
    return result;
}
public getMaxIndex():number {
//console.log("MaxIndex=" + this.maxIndex);
return this.maxIndex;
}
} | {
var length = this._lengths[i];
maxIndex*=length;
} | conditional_block |
IntCartesianProduct.tsx | import * as _ from 'lodash';
import * as collections from 'typescript-collections';
export class IntCartesianProduct {
private _lengths:Array<number>=[];
private _indices:Array<number> = [];
private maxIndex:number;
private _hasNext:boolean = true;
constructor(lengths:Array<number>) {
this._lengths = collections.arrays.copy(lengths);
this._indices = new Array<number>(lengths.length);
for (var i: number = 0; i < this._indices.length;i++) {
this._indices[i]=0;
}
this.maxIndex = this.findMaxIndex();
}
/**
 * Computes the total number of tuples in the product: the product of all
 * dimension lengths (1 for an empty lengths array, i.e. the empty
 * product).
 *
 * Fix: dropped the unused local `max`, which was assigned -1 and never
 * read.
 */
public findMaxIndex():number {
    let maxIndex: number = 1;
    for (const length of this._lengths) {
        maxIndex *= length;
    }
    return maxIndex;
}
public hasNext():boolean {
return this._hasNext;
}
public next():Array<number> {
var result: Array<number> = collections.arrays.copy(this._indices);
for(var i:number = this._indices.length - 1; i >= 0; i--) {
if (this._indices[i] == this._lengths[i] - 1) {
this._indices[i] = 0;
if (i == 0) {
this._hasNext = false;
}
} else {
this._indices[i]++;
break;
}
}
return result;
}
public getMaxIndex():number |
} | {
//console.log("MaxIndex=" + this.maxIndex);
return this.maxIndex;
} | identifier_body |
IntCartesianProduct.tsx | import * as _ from 'lodash';
import * as collections from 'typescript-collections';
export class | {
private _lengths:Array<number>=[];
private _indices:Array<number> = [];
private maxIndex:number;
private _hasNext:boolean = true;
constructor(lengths:Array<number>) {
this._lengths = collections.arrays.copy(lengths);
this._indices = new Array<number>(lengths.length);
for (var i: number = 0; i < this._indices.length;i++) {
this._indices[i]=0;
}
this.maxIndex = this.findMaxIndex();
}
public findMaxIndex():number {
let max:number = -1;
let maxIndex:number = 1;
for (var i:number = 0; i < this._lengths.length; i++) {
var length = this._lengths[i];
maxIndex*=length;
}
return maxIndex;
}
public hasNext():boolean {
return this._hasNext;
}
public next():Array<number> {
var result: Array<number> = collections.arrays.copy(this._indices);
for(var i:number = this._indices.length - 1; i >= 0; i--) {
if (this._indices[i] == this._lengths[i] - 1) {
this._indices[i] = 0;
if (i == 0) {
this._hasNext = false;
}
} else {
this._indices[i]++;
break;
}
}
return result;
}
public getMaxIndex():number {
//console.log("MaxIndex=" + this.maxIndex);
return this.maxIndex;
}
} | IntCartesianProduct | identifier_name |
IntCartesianProduct.tsx | import * as _ from 'lodash';
import * as collections from 'typescript-collections';
export class IntCartesianProduct {
private _lengths:Array<number>=[];
private _indices:Array<number> = [];
private maxIndex:number;
private _hasNext:boolean = true;
constructor(lengths:Array<number>) {
this._lengths = collections.arrays.copy(lengths);
this._indices = new Array<number>(lengths.length); | for (var i: number = 0; i < this._indices.length;i++) {
this._indices[i]=0;
}
this.maxIndex = this.findMaxIndex();
}
public findMaxIndex():number {
let max:number = -1;
let maxIndex:number = 1;
for (var i:number = 0; i < this._lengths.length; i++) {
var length = this._lengths[i];
maxIndex*=length;
}
return maxIndex;
}
public hasNext():boolean {
return this._hasNext;
}
public next():Array<number> {
var result: Array<number> = collections.arrays.copy(this._indices);
for(var i:number = this._indices.length - 1; i >= 0; i--) {
if (this._indices[i] == this._lengths[i] - 1) {
this._indices[i] = 0;
if (i == 0) {
this._hasNext = false;
}
} else {
this._indices[i]++;
break;
}
}
return result;
}
public getMaxIndex():number {
//console.log("MaxIndex=" + this.maxIndex);
return this.maxIndex;
}
} | random_line_split | |
add.rs | use std::cmp::Ordering;
use cmp;
use denormalize;
use normalize;
use valid;
pub fn add(num_l: &str, num_r: &str) -> Result<String, String> {
if !valid(num_l) {
Err(format!("Invalid numeral {}", num_l))
} else if !valid(num_r) {
Err(format!("Invalid numeral {}", num_r))
} else {
let sum = merge(denormalize(num_l), denormalize(num_r));
normalize(&sum)
}
}
fn merge(num_l: String, num_r: String) -> String {
let mut digits_l = num_l.chars();
let mut digits_r = num_r.chars();
let mut sum = String::new();
let mut next_l = digits_l.next();
let mut next_r = digits_r.next();
loop {
match (next_l, next_r) {
(Some(l), Some(r)) => {
if cmp(l, r) == Ordering::Greater | else {
sum.push(r);
next_r = digits_r.next();
}
},
(Some(l), None) => {
sum.push(l);
next_l = digits_l.next();
},
(None, Some(r)) => {
sum.push(r);
next_r = digits_r.next();
},
(None, None) => { break }
}
}
sum
}
#[cfg(test)]
mod tests {
use super::add;
#[test]
fn add_i_i() {
assert_eq!("II", add("I", "I").unwrap());
}
#[test]
fn add_i_ii() {
assert_eq!("III", add("I", "II").unwrap());
}
#[test]
fn add_ii_iii_requires_normalization_to_v() {
assert_eq!("V", add("II", "III").unwrap());
}
#[test]
fn add_v_i() {
assert_eq!("VI", add("V", "I").unwrap());
}
#[test]
fn add_i_v_understands_the_relative_order_of_v_and_i() {
assert_eq!("VI", add("I", "V").unwrap());
}
#[test]
fn add_i_iv_denormalizes_before_adding() {
assert_eq!("V", add("I", "IV").unwrap());
}
#[test]
fn add_l_i_supports_l() {
assert_eq!("LI", add("L", "I").unwrap());
}
#[test]
fn add_l_xi_understands_l_x_sort_order() {
assert_eq!("LXI", add("L", "XI").unwrap());
}
#[test]
fn add_fails_when_result_is_too_big_to_be_represented() {
assert!(add("MCMXCIX", "MMCMXCIX").is_err());
}
#[test]
fn add_fails_when_lhs_is_invalid() {
assert!(add("J", "I").is_err());
}
#[test]
fn add_fails_when_rhs_is_invalid() {
assert!(add("I", "").is_err());
}
}
| {
sum.push(l);
next_l = digits_l.next();
} | conditional_block |
add.rs | use std::cmp::Ordering;
use cmp;
use denormalize;
use normalize;
use valid;
/// Adds two Roman numerals, returning the normalized sum or an error
/// naming the first invalid operand (or propagating normalize's error
/// when the sum exceeds the representable range).
pub fn add(num_l: &str, num_r: &str) -> Result<String, String> {
    // Validate both operands up front so the error names the offender.
    if !valid(num_l) {
        return Err(format!("Invalid numeral {}", num_l));
    }
    if !valid(num_r) {
        return Err(format!("Invalid numeral {}", num_r));
    }
    // Work in denormalized (subtractive-free) form, then normalize.
    let sum = merge(denormalize(num_l), denormalize(num_r));
    normalize(&sum)
}
fn merge(num_l: String, num_r: String) -> String {
let mut digits_l = num_l.chars();
let mut digits_r = num_r.chars();
let mut sum = String::new();
let mut next_l = digits_l.next();
let mut next_r = digits_r.next();
loop {
match (next_l, next_r) {
(Some(l), Some(r)) => {
if cmp(l, r) == Ordering::Greater {
sum.push(l);
next_l = digits_l.next();
} else {
sum.push(r);
next_r = digits_r.next();
}
},
(Some(l), None) => {
sum.push(l);
next_l = digits_l.next();
},
(None, Some(r)) => {
sum.push(r);
next_r = digits_r.next();
},
(None, None) => { break }
}
}
sum
}
#[cfg(test)]
mod tests {
use super::add;
#[test]
fn add_i_i() {
assert_eq!("II", add("I", "I").unwrap());
}
#[test]
fn add_i_ii() {
assert_eq!("III", add("I", "II").unwrap());
}
#[test]
fn add_ii_iii_requires_normalization_to_v() {
assert_eq!("V", add("II", "III").unwrap());
}
#[test]
fn add_v_i() {
assert_eq!("VI", add("V", "I").unwrap());
}
#[test]
fn add_i_v_understands_the_relative_order_of_v_and_i() {
assert_eq!("VI", add("I", "V").unwrap());
}
#[test]
fn add_i_iv_denormalizes_before_adding() {
assert_eq!("V", add("I", "IV").unwrap());
}
#[test] | assert_eq!("LI", add("L", "I").unwrap());
}
#[test]
fn add_l_xi_understands_l_x_sort_order() {
assert_eq!("LXI", add("L", "XI").unwrap());
}
#[test]
fn add_fails_when_result_is_too_big_to_be_represented() {
assert!(add("MCMXCIX", "MMCMXCIX").is_err());
}
#[test]
fn add_fails_when_lhs_is_invalid() {
assert!(add("J", "I").is_err());
}
#[test]
fn add_fails_when_rhs_is_invalid() {
assert!(add("I", "").is_err());
}
} | fn add_l_i_supports_l() { | random_line_split |
add.rs | use std::cmp::Ordering;
use cmp;
use denormalize;
use normalize;
use valid;
pub fn add(num_l: &str, num_r: &str) -> Result<String, String> {
if !valid(num_l) {
Err(format!("Invalid numeral {}", num_l))
} else if !valid(num_r) {
Err(format!("Invalid numeral {}", num_r))
} else {
let sum = merge(denormalize(num_l), denormalize(num_r));
normalize(&sum)
}
}
fn merge(num_l: String, num_r: String) -> String {
let mut digits_l = num_l.chars();
let mut digits_r = num_r.chars();
let mut sum = String::new();
let mut next_l = digits_l.next();
let mut next_r = digits_r.next();
loop {
match (next_l, next_r) {
(Some(l), Some(r)) => {
if cmp(l, r) == Ordering::Greater {
sum.push(l);
next_l = digits_l.next();
} else {
sum.push(r);
next_r = digits_r.next();
}
},
(Some(l), None) => {
sum.push(l);
next_l = digits_l.next();
},
(None, Some(r)) => {
sum.push(r);
next_r = digits_r.next();
},
(None, None) => { break }
}
}
sum
}
#[cfg(test)]
mod tests {
use super::add;
#[test]
fn add_i_i() {
assert_eq!("II", add("I", "I").unwrap());
}
#[test]
fn add_i_ii() {
assert_eq!("III", add("I", "II").unwrap());
}
#[test]
fn add_ii_iii_requires_normalization_to_v() {
assert_eq!("V", add("II", "III").unwrap());
}
#[test]
fn add_v_i() {
assert_eq!("VI", add("V", "I").unwrap());
}
#[test]
fn add_i_v_understands_the_relative_order_of_v_and_i() {
assert_eq!("VI", add("I", "V").unwrap());
}
#[test]
fn add_i_iv_denormalizes_before_adding() {
assert_eq!("V", add("I", "IV").unwrap());
}
#[test]
fn add_l_i_supports_l() |
#[test]
fn add_l_xi_understands_l_x_sort_order() {
assert_eq!("LXI", add("L", "XI").unwrap());
}
#[test]
fn add_fails_when_result_is_too_big_to_be_represented() {
assert!(add("MCMXCIX", "MMCMXCIX").is_err());
}
#[test]
fn add_fails_when_lhs_is_invalid() {
assert!(add("J", "I").is_err());
}
#[test]
fn add_fails_when_rhs_is_invalid() {
assert!(add("I", "").is_err());
}
}
| {
assert_eq!("LI", add("L", "I").unwrap());
} | identifier_body |
add.rs | use std::cmp::Ordering;
use cmp;
use denormalize;
use normalize;
use valid;
pub fn add(num_l: &str, num_r: &str) -> Result<String, String> {
if !valid(num_l) {
Err(format!("Invalid numeral {}", num_l))
} else if !valid(num_r) {
Err(format!("Invalid numeral {}", num_r))
} else {
let sum = merge(denormalize(num_l), denormalize(num_r));
normalize(&sum)
}
}
fn merge(num_l: String, num_r: String) -> String {
let mut digits_l = num_l.chars();
let mut digits_r = num_r.chars();
let mut sum = String::new();
let mut next_l = digits_l.next();
let mut next_r = digits_r.next();
loop {
match (next_l, next_r) {
(Some(l), Some(r)) => {
if cmp(l, r) == Ordering::Greater {
sum.push(l);
next_l = digits_l.next();
} else {
sum.push(r);
next_r = digits_r.next();
}
},
(Some(l), None) => {
sum.push(l);
next_l = digits_l.next();
},
(None, Some(r)) => {
sum.push(r);
next_r = digits_r.next();
},
(None, None) => { break }
}
}
sum
}
#[cfg(test)]
mod tests {
use super::add;
#[test]
fn add_i_i() {
assert_eq!("II", add("I", "I").unwrap());
}
#[test]
fn add_i_ii() {
assert_eq!("III", add("I", "II").unwrap());
}
#[test]
fn add_ii_iii_requires_normalization_to_v() {
assert_eq!("V", add("II", "III").unwrap());
}
#[test]
fn add_v_i() {
assert_eq!("VI", add("V", "I").unwrap());
}
#[test]
fn add_i_v_understands_the_relative_order_of_v_and_i() {
assert_eq!("VI", add("I", "V").unwrap());
}
#[test]
fn add_i_iv_denormalizes_before_adding() {
assert_eq!("V", add("I", "IV").unwrap());
}
#[test]
fn add_l_i_supports_l() {
assert_eq!("LI", add("L", "I").unwrap());
}
#[test]
fn | () {
assert_eq!("LXI", add("L", "XI").unwrap());
}
#[test]
fn add_fails_when_result_is_too_big_to_be_represented() {
assert!(add("MCMXCIX", "MMCMXCIX").is_err());
}
#[test]
fn add_fails_when_lhs_is_invalid() {
assert!(add("J", "I").is_err());
}
#[test]
fn add_fails_when_rhs_is_invalid() {
assert!(add("I", "").is_err());
}
}
| add_l_xi_understands_l_x_sort_order | identifier_name |
conf.py | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
# NOTE(review): this import is immediately shadowed by the `config`
# assignment below and appears unused — consider removing (distutils is
# also deprecated since Python 3.10).
from distutils.command.config import config
import guzzle_sphinx_theme
import tomli
from dunamai import Version
# Make the repository root importable so Sphinx autodoc can find the
# package sources.
root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, root)
# -- Project information -----------------------------------------------------
# General project metadata is stored in pyproject.toml
with open(os.path.join(root, "pyproject.toml"), "rb") as f:
    config = tomli.load(f)
project_meta = config["tool"]["poetry"]
# NOTE(review): leftover debug print — consider removing.
print(project_meta)
project = project_meta["name"]
author = project_meta["authors"][0]  # first listed author only
description = project_meta["description"]
url = project_meta["homepage"]
title = project + " Documentation"
# Version metadata is derived from git tags via dunamai.
_version = Version.from_git()
# The full version, including alpha/beta/rc tags
release = _version.serialize(metadata=False)
# The short X.Y.Z version
version = _version.base
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
needs_sphinx = "2.0"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"sphinx_autodoc_typehints",
"guzzle_sphinx_theme",
"sphinxcontrib_dooble",
]
# Include a separate entry for special methods, like __init__, where provided.
autodoc_default_options = {
"member-order": "bysource",
"special-members": True,
"exclude-members": "__dict__,__weakref__",
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_translator_class = "guzzle_sphinx_theme.HTMLTranslator"
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_theme = "guzzle_sphinx_theme"
html_title = title
html_short_title = project + " " + version
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
html_theme_options = {"projectlink": url}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
html_sidebars = {"**": ["logo-text.html", "globaltoc.html", "searchbox.html"]}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = project + "doc"
| #
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [(master_doc, project + ".tex", title, author, "manual")]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, project.lower(), title, [author], 1)]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, project, title, author, project, description, "Miscellaneous")
]
# -- Extension configuration ------------------------------------------------- |
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper'). | random_line_split |
select2_locale_fr.js | /**
* Select2 French translation
*/
(function ($) {
"use strict";
| $.extend($.fn.select2.defaults, {
formatNoMatches: function () { return "Aucun résultat trouvé"; },
formatInputTooShort: function (input, min) { var n = min - input.length; return "Merci de saisir " + n + " caractère" + (n == 1? "" : "s") + " de plus"; },
formatInputTooLong: function (input, max) { var n = input.length - max; return "Merci de supprimer " + n + " caractère" + (n == 1? "" : "s"); },
formatSelectionTooBig: function (limit) { return "Vous pouvez seulement sélectionner " + limit + " élément" + (limit == 1 ? "" : "s"); },
formatLoadMore: function (pageNumber) { return "Chargement de résultats supplémentaires…"; },
formatSearching: function () { return "Recherche en cours…"; }
});
})(jQuery); | random_line_split | |
JsonLogger.ts | import { currentTimestampSeconds } from "util/Time";
import { Loggable, Logger, AbstractLogger, LogLevel } from "logger/Logger";
import { Logging } from "logger/Logging";
| /**
* Logger that outputs messages formatted as JSON objects.
*
* Example output:
* {
* "time": "2019-07-01T21:55:00.342Z",
* "message": "wawawa",
* "level": "4",
* "context": "MyClass"
* }
*/
export class JsonLogger extends AbstractLogger {
private readonly context: Loggable[] = [];
private constructor() {
super();
}
public static create(): JsonLogger {
return new JsonLogger();
}
public withContext(...args: Loggable[]): Logger {
const logger = new JsonLogger();
logger.context.push(...this.context, ...args);
return logger;
}
public log(logLevel: LogLevel, message: string, ...args: Loggable[]): void {
if (logLevel < Logging.getLogLevel()) {
return;
}
let level: string;
switch (logLevel) {
case LogLevel.DEBUG:
level = "DEBUG";
break;
case LogLevel.INFO:
level = "INFO";
break;
case LogLevel.WARN:
level = "WARN";
break;
case LogLevel.ERROR:
level = "ERROR";
break;
default:
throw `Unknown log level '${ logLevel }'`;
}
const time: number = currentTimestampSeconds();
const context: Loggable = {
time: new Date(time * 1000).toISOString(),
message,
level,
...this.getContext(this.context),
...this.getContext(args)
};
const output: string = JSON.stringify(context, null, 2);
Logging.getSink().emit({
level: logLevel,
message: output,
time: time
});
}
} | random_line_split | |
JsonLogger.ts | import { currentTimestampSeconds } from "util/Time";
import { Loggable, Logger, AbstractLogger, LogLevel } from "logger/Logger";
import { Logging } from "logger/Logging";
/**
* Logger that outputs messages formatted as JSON objects.
*
* Example output:
* {
* "time": "2019-07-01T21:55:00.342Z",
* "message": "wawawa",
* "level": "4",
* "context": "MyClass"
* }
*/
export class JsonLogger extends AbstractLogger {
private readonly context: Loggable[] = [];
private constructor() {
super();
}
public static | (): JsonLogger {
return new JsonLogger();
}
public withContext(...args: Loggable[]): Logger {
const logger = new JsonLogger();
logger.context.push(...this.context, ...args);
return logger;
}
public log(logLevel: LogLevel, message: string, ...args: Loggable[]): void {
if (logLevel < Logging.getLogLevel()) {
return;
}
let level: string;
switch (logLevel) {
case LogLevel.DEBUG:
level = "DEBUG";
break;
case LogLevel.INFO:
level = "INFO";
break;
case LogLevel.WARN:
level = "WARN";
break;
case LogLevel.ERROR:
level = "ERROR";
break;
default:
throw `Unknown log level '${ logLevel }'`;
}
const time: number = currentTimestampSeconds();
const context: Loggable = {
time: new Date(time * 1000).toISOString(),
message,
level,
...this.getContext(this.context),
...this.getContext(args)
};
const output: string = JSON.stringify(context, null, 2);
Logging.getSink().emit({
level: logLevel,
message: output,
time: time
});
}
}
| create | identifier_name |
JsonLogger.ts | import { currentTimestampSeconds } from "util/Time";
import { Loggable, Logger, AbstractLogger, LogLevel } from "logger/Logger";
import { Logging } from "logger/Logging";
/**
* Logger that outputs messages formatted as JSON objects.
*
* Example output:
* {
* "time": "2019-07-01T21:55:00.342Z",
* "message": "wawawa",
* "level": "4",
* "context": "MyClass"
* }
*/
export class JsonLogger extends AbstractLogger {
private readonly context: Loggable[] = [];
private constructor() {
super();
}
public static create(): JsonLogger {
return new JsonLogger();
}
public withContext(...args: Loggable[]): Logger |
public log(logLevel: LogLevel, message: string, ...args: Loggable[]): void {
if (logLevel < Logging.getLogLevel()) {
return;
}
let level: string;
switch (logLevel) {
case LogLevel.DEBUG:
level = "DEBUG";
break;
case LogLevel.INFO:
level = "INFO";
break;
case LogLevel.WARN:
level = "WARN";
break;
case LogLevel.ERROR:
level = "ERROR";
break;
default:
throw `Unknown log level '${ logLevel }'`;
}
const time: number = currentTimestampSeconds();
const context: Loggable = {
time: new Date(time * 1000).toISOString(),
message,
level,
...this.getContext(this.context),
...this.getContext(args)
};
const output: string = JSON.stringify(context, null, 2);
Logging.getSink().emit({
level: logLevel,
message: output,
time: time
});
}
}
| {
const logger = new JsonLogger();
logger.context.push(...this.context, ...args);
return logger;
} | identifier_body |
__init__.py | """
run tests against a webserver running in the same reactor
NOTE: this test uses port 8888 on localhost
"""
import os
import ujson as json
import cyclone.httpclient
from twisted.internet import defer
from twisted.application import internet
from twisted.trial.unittest import TestCase
from twisted.python import log
from txbitwrap.api import factory as Api
from txbitwrap.machine import set_pnml_path
import txbitwrap.event
IFACE = '127.0.0.1'
PORT = 8888
OPTIONS = { | 'pg-username': 'bitwrap',
'pg-password': 'bitwrap',
'pg-database': 'bitwrap'
}
class ApiTest(TestCase):
""" setup rpc endpoint and invoke ping method """
def setUp(self):
""" start tcp endpoint """
set_pnml_path(OPTIONS['machine-path'])
self.options = OPTIONS
#pylint: disable=no-member
self.service = internet.TCPServer(PORT, Api(self.options), interface=self.options['listen-ip'])
#pylint: enable=no-member
self.service.startService()
@defer.inlineCallbacks
def tearDown(self):
""" stop tcp endpoint """
self.service.stopService()
yield txbitwrap.event.rdq.stop()
@staticmethod
def url(resource):
""" bulid a url using test endpoint """
return 'http://%s:%s/%s' % (IFACE, PORT, resource)
@staticmethod
def client(resource):
""" rpc client """
return cyclone.httpclient.JsonRPC(ApiTest.url(resource))
@staticmethod
def fetch(resource, **kwargs):
""" async request with httpclient"""
return cyclone.httpclient.fetch(ApiTest.url(resource), **kwargs)
@staticmethod
def dispatch(**event):
""" rpc client """
resource = 'dispatch/%s/%s/%s' % (event['schema'], event['oid'], event['action'])
url = ApiTest.url(resource)
if isinstance(event['payload'], str):
data = event['payload']
else:
data = json.dumps(event['payload'])
return cyclone.httpclient.fetch(url, postdata=data)
@staticmethod
def broadcast(**event):
""" rpc client """
resource = 'broadcast/%s/%s' % (event['schema'], event['id'])
url = ApiTest.url(resource)
data = json.dumps(event)
return cyclone.httpclient.fetch(url, postdata=data) | 'listen-ip': IFACE,
'listen-port': PORT,
'machine-path': os.path.abspath(os.path.dirname(__file__) + '/../../schemata'),
'pg-host': '127.0.0.1',
'pg-port': 5432, | random_line_split |
__init__.py | """
run tests against a webserver running in the same reactor
NOTE: this test uses port 8888 on localhost
"""
import os
import ujson as json
import cyclone.httpclient
from twisted.internet import defer
from twisted.application import internet
from twisted.trial.unittest import TestCase
from twisted.python import log
from txbitwrap.api import factory as Api
from txbitwrap.machine import set_pnml_path
import txbitwrap.event
IFACE = '127.0.0.1'
PORT = 8888
OPTIONS = {
'listen-ip': IFACE,
'listen-port': PORT,
'machine-path': os.path.abspath(os.path.dirname(__file__) + '/../../schemata'),
'pg-host': '127.0.0.1',
'pg-port': 5432,
'pg-username': 'bitwrap',
'pg-password': 'bitwrap',
'pg-database': 'bitwrap'
}
class ApiTest(TestCase):
""" setup rpc endpoint and invoke ping method """
def setUp(self):
""" start tcp endpoint """
set_pnml_path(OPTIONS['machine-path'])
self.options = OPTIONS
#pylint: disable=no-member
self.service = internet.TCPServer(PORT, Api(self.options), interface=self.options['listen-ip'])
#pylint: enable=no-member
self.service.startService()
@defer.inlineCallbacks
def tearDown(self):
""" stop tcp endpoint """
self.service.stopService()
yield txbitwrap.event.rdq.stop()
@staticmethod
def url(resource):
""" bulid a url using test endpoint """
return 'http://%s:%s/%s' % (IFACE, PORT, resource)
@staticmethod
def | (resource):
""" rpc client """
return cyclone.httpclient.JsonRPC(ApiTest.url(resource))
@staticmethod
def fetch(resource, **kwargs):
""" async request with httpclient"""
return cyclone.httpclient.fetch(ApiTest.url(resource), **kwargs)
@staticmethod
def dispatch(**event):
""" rpc client """
resource = 'dispatch/%s/%s/%s' % (event['schema'], event['oid'], event['action'])
url = ApiTest.url(resource)
if isinstance(event['payload'], str):
data = event['payload']
else:
data = json.dumps(event['payload'])
return cyclone.httpclient.fetch(url, postdata=data)
@staticmethod
def broadcast(**event):
""" rpc client """
resource = 'broadcast/%s/%s' % (event['schema'], event['id'])
url = ApiTest.url(resource)
data = json.dumps(event)
return cyclone.httpclient.fetch(url, postdata=data)
| client | identifier_name |
__init__.py | """
run tests against a webserver running in the same reactor
NOTE: this test uses port 8888 on localhost
"""
import os
import ujson as json
import cyclone.httpclient
from twisted.internet import defer
from twisted.application import internet
from twisted.trial.unittest import TestCase
from twisted.python import log
from txbitwrap.api import factory as Api
from txbitwrap.machine import set_pnml_path
import txbitwrap.event
IFACE = '127.0.0.1'
PORT = 8888
OPTIONS = {
'listen-ip': IFACE,
'listen-port': PORT,
'machine-path': os.path.abspath(os.path.dirname(__file__) + '/../../schemata'),
'pg-host': '127.0.0.1',
'pg-port': 5432,
'pg-username': 'bitwrap',
'pg-password': 'bitwrap',
'pg-database': 'bitwrap'
}
class ApiTest(TestCase):
| """ setup rpc endpoint and invoke ping method """
def setUp(self):
""" start tcp endpoint """
set_pnml_path(OPTIONS['machine-path'])
self.options = OPTIONS
#pylint: disable=no-member
self.service = internet.TCPServer(PORT, Api(self.options), interface=self.options['listen-ip'])
#pylint: enable=no-member
self.service.startService()
@defer.inlineCallbacks
def tearDown(self):
""" stop tcp endpoint """
self.service.stopService()
yield txbitwrap.event.rdq.stop()
@staticmethod
def url(resource):
""" bulid a url using test endpoint """
return 'http://%s:%s/%s' % (IFACE, PORT, resource)
@staticmethod
def client(resource):
""" rpc client """
return cyclone.httpclient.JsonRPC(ApiTest.url(resource))
@staticmethod
def fetch(resource, **kwargs):
""" async request with httpclient"""
return cyclone.httpclient.fetch(ApiTest.url(resource), **kwargs)
@staticmethod
def dispatch(**event):
""" rpc client """
resource = 'dispatch/%s/%s/%s' % (event['schema'], event['oid'], event['action'])
url = ApiTest.url(resource)
if isinstance(event['payload'], str):
data = event['payload']
else:
data = json.dumps(event['payload'])
return cyclone.httpclient.fetch(url, postdata=data)
@staticmethod
def broadcast(**event):
""" rpc client """
resource = 'broadcast/%s/%s' % (event['schema'], event['id'])
url = ApiTest.url(resource)
data = json.dumps(event)
return cyclone.httpclient.fetch(url, postdata=data) | identifier_body | |
__init__.py | """
run tests against a webserver running in the same reactor
NOTE: this test uses port 8888 on localhost
"""
import os
import ujson as json
import cyclone.httpclient
from twisted.internet import defer
from twisted.application import internet
from twisted.trial.unittest import TestCase
from twisted.python import log
from txbitwrap.api import factory as Api
from txbitwrap.machine import set_pnml_path
import txbitwrap.event
IFACE = '127.0.0.1'
PORT = 8888
OPTIONS = {
'listen-ip': IFACE,
'listen-port': PORT,
'machine-path': os.path.abspath(os.path.dirname(__file__) + '/../../schemata'),
'pg-host': '127.0.0.1',
'pg-port': 5432,
'pg-username': 'bitwrap',
'pg-password': 'bitwrap',
'pg-database': 'bitwrap'
}
class ApiTest(TestCase):
""" setup rpc endpoint and invoke ping method """
def setUp(self):
""" start tcp endpoint """
set_pnml_path(OPTIONS['machine-path'])
self.options = OPTIONS
#pylint: disable=no-member
self.service = internet.TCPServer(PORT, Api(self.options), interface=self.options['listen-ip'])
#pylint: enable=no-member
self.service.startService()
@defer.inlineCallbacks
def tearDown(self):
""" stop tcp endpoint """
self.service.stopService()
yield txbitwrap.event.rdq.stop()
@staticmethod
def url(resource):
""" bulid a url using test endpoint """
return 'http://%s:%s/%s' % (IFACE, PORT, resource)
@staticmethod
def client(resource):
""" rpc client """
return cyclone.httpclient.JsonRPC(ApiTest.url(resource))
@staticmethod
def fetch(resource, **kwargs):
""" async request with httpclient"""
return cyclone.httpclient.fetch(ApiTest.url(resource), **kwargs)
@staticmethod
def dispatch(**event):
""" rpc client """
resource = 'dispatch/%s/%s/%s' % (event['schema'], event['oid'], event['action'])
url = ApiTest.url(resource)
if isinstance(event['payload'], str):
|
else:
data = json.dumps(event['payload'])
return cyclone.httpclient.fetch(url, postdata=data)
@staticmethod
def broadcast(**event):
""" rpc client """
resource = 'broadcast/%s/%s' % (event['schema'], event['id'])
url = ApiTest.url(resource)
data = json.dumps(event)
return cyclone.httpclient.fetch(url, postdata=data)
| data = event['payload'] | conditional_block |
_y.py | import _plotly_utils.basevalidators
class YValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="y", parent_name="volume.caps", **kwargs):
| super(YValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Y"),
data_docs=kwargs.pop(
"data_docs",
"""
fill
Sets the fill ratio of the `caps`. The default
fill value of the `caps` is 1 meaning that they
are entirely shaded. On the other hand Applying
a `fill` ratio less than one would allow the
creation of openings parallel to the edges.
show
Sets the fill ratio of the `slices`. The
default fill value of the y `slices` is 1
meaning that they are entirely shaded. On the
other hand Applying a `fill` ratio less than
one would allow the creation of openings
parallel to the edges.
""",
),
**kwargs
) | identifier_body | |
_y.py | import _plotly_utils.basevalidators
class | (_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="y", parent_name="volume.caps", **kwargs):
super(YValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Y"),
data_docs=kwargs.pop(
"data_docs",
"""
fill
Sets the fill ratio of the `caps`. The default
fill value of the `caps` is 1 meaning that they
are entirely shaded. On the other hand Applying
a `fill` ratio less than one would allow the
creation of openings parallel to the edges.
show
Sets the fill ratio of the `slices`. The
default fill value of the y `slices` is 1
meaning that they are entirely shaded. On the
other hand Applying a `fill` ratio less than
one would allow the creation of openings
parallel to the edges.
""",
),
**kwargs
)
| YValidator | identifier_name |
_y.py | class YValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="y", parent_name="volume.caps", **kwargs):
super(YValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Y"),
data_docs=kwargs.pop(
"data_docs",
"""
fill
Sets the fill ratio of the `caps`. The default
fill value of the `caps` is 1 meaning that they
are entirely shaded. On the other hand Applying
a `fill` ratio less than one would allow the
creation of openings parallel to the edges.
show
Sets the fill ratio of the `slices`. The
default fill value of the y `slices` is 1
meaning that they are entirely shaded. On the
other hand Applying a `fill` ratio less than
one would allow the creation of openings
parallel to the edges.
""",
),
**kwargs
) | import _plotly_utils.basevalidators
| random_line_split | |
ui.d.ts | import { DeviceService } from './device';
import { SessionService } from './session';
import { ViewService } from './view';
import { RealityService, RealityServiceProvider } from './reality';
/**
* Provides a default UI
*/
export declare class | {
private sessionService;
private viewService;
private realityService;
private realityServiceProvider;
private deviceService;
private element?;
private realityViewerSelectorElement;
private realityViewerListElement;
private menuBackgroundElement;
private realityViewerItemElements;
private menuItems;
private menuOpen;
private openInArgonMenuItem;
private hmdMenuItem;
private realityMenuItem;
private maximizeMenuItem;
constructor(sessionService: SessionService, viewService: ViewService, realityService: RealityService, realityServiceProvider: RealityServiceProvider, deviceService: DeviceService);
private _createMenuItem(icon, hint, onSelect?);
private onSelect(element, cb);
private toggleMenu();
private _hideMenuItem(e);
updateMenu(): void;
}
| DefaultUIService | identifier_name |
ui.d.ts | import { DeviceService } from './device';
import { SessionService } from './session';
import { ViewService } from './view';
import { RealityService, RealityServiceProvider } from './reality';
/**
* Provides a default UI
*/
export declare class DefaultUIService {
private sessionService;
private viewService;
private realityService;
private realityServiceProvider;
private deviceService;
private element?;
private realityViewerSelectorElement;
private realityViewerListElement;
private menuBackgroundElement;
private realityViewerItemElements;
private menuItems;
private menuOpen;
private openInArgonMenuItem;
private hmdMenuItem; | private _createMenuItem(icon, hint, onSelect?);
private onSelect(element, cb);
private toggleMenu();
private _hideMenuItem(e);
updateMenu(): void;
} | private realityMenuItem;
private maximizeMenuItem;
constructor(sessionService: SessionService, viewService: ViewService, realityService: RealityService, realityServiceProvider: RealityServiceProvider, deviceService: DeviceService); | random_line_split |
help-dialog.component.d.ts | import { OnDestroy, OnInit } from '@angular/core';
import { MediaObserver } from '@angular/flex-layout';
import { MimeViewerIntl } from '../core/intl/viewer-intl';
import { MimeResizeService } from '../core/mime-resize-service/mime-resize.service'; | private mimeResizeService;
tabHeight: {};
private mimeHeight;
private subscriptions;
constructor(mediaObserver: MediaObserver, intl: MimeViewerIntl, mimeResizeService: MimeResizeService);
ngOnInit(): void;
ngOnDestroy(): void;
private resizeTabHeight;
static ɵfac: i0.ɵɵFactoryDeclaration<HelpDialogComponent, never>;
static ɵcmp: i0.ɵɵComponentDeclaration<HelpDialogComponent, "mime-help", never, {}, {}, never, never>;
} | import * as i0 from "@angular/core";
export declare class HelpDialogComponent implements OnInit, OnDestroy {
mediaObserver: MediaObserver;
intl: MimeViewerIntl; | random_line_split |
help-dialog.component.d.ts | import { OnDestroy, OnInit } from '@angular/core';
import { MediaObserver } from '@angular/flex-layout';
import { MimeViewerIntl } from '../core/intl/viewer-intl';
import { MimeResizeService } from '../core/mime-resize-service/mime-resize.service';
import * as i0 from "@angular/core";
export declare class | implements OnInit, OnDestroy {
mediaObserver: MediaObserver;
intl: MimeViewerIntl;
private mimeResizeService;
tabHeight: {};
private mimeHeight;
private subscriptions;
constructor(mediaObserver: MediaObserver, intl: MimeViewerIntl, mimeResizeService: MimeResizeService);
ngOnInit(): void;
ngOnDestroy(): void;
private resizeTabHeight;
static ɵfac: i0.ɵɵFactoryDeclaration<HelpDialogComponent, never>;
static ɵcmp: i0.ɵɵComponentDeclaration<HelpDialogComponent, "mime-help", never, {}, {}, never, never>;
}
| HelpDialogComponent | identifier_name |
conf_fixture.py | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# | # under the License.
import os
from oslo_policy import opts
from oslo_service import wsgi
from manila.common import config
CONF = config.CONF
def set_defaults(conf):
_safe_set_of_opts(conf, 'verbose', True)
_safe_set_of_opts(conf, 'state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__),
'..',
'..')))
_safe_set_of_opts(conf, 'connection', "sqlite://", group='database')
_safe_set_of_opts(conf, 'sqlite_synchronous', False)
_POLICY_PATH = os.path.abspath(os.path.join(CONF.state_path,
'manila/tests/policy.json'))
opts.set_defaults(conf, policy_file=_POLICY_PATH)
_safe_set_of_opts(conf, 'share_export_ip', '0.0.0.0')
_safe_set_of_opts(conf, 'service_instance_user', 'fake_user')
_API_PASTE_PATH = os.path.abspath(os.path.join(CONF.state_path,
'etc/manila/api-paste.ini'))
wsgi.register_opts(conf)
_safe_set_of_opts(conf, 'api_paste_config', _API_PASTE_PATH)
_safe_set_of_opts(conf, 'share_driver',
'manila.tests.fake_driver.FakeShareDriver')
_safe_set_of_opts(conf, 'auth_strategy', 'noauth')
_safe_set_of_opts(conf, 'zfs_share_export_ip', '1.1.1.1')
_safe_set_of_opts(conf, 'zfs_service_ip', '2.2.2.2')
_safe_set_of_opts(conf, 'zfs_zpool_list', ['foo', 'bar'])
_safe_set_of_opts(conf, 'zfs_share_helpers', 'NFS=foo.bar.Helper')
_safe_set_of_opts(conf, 'zfs_replica_snapshot_prefix', 'foo_prefix_')
_safe_set_of_opts(conf, 'hitachi_hsp_host', '172.24.47.190')
_safe_set_of_opts(conf, 'hitachi_hsp_username', 'hsp_user')
_safe_set_of_opts(conf, 'hitachi_hsp_password', 'hsp_password')
_safe_set_of_opts(conf, 'qnap_management_url', 'http://1.2.3.4:8080')
_safe_set_of_opts(conf, 'qnap_share_ip', '1.2.3.4')
_safe_set_of_opts(conf, 'qnap_nas_login', 'admin')
_safe_set_of_opts(conf, 'qnap_nas_password', 'qnapadmin')
_safe_set_of_opts(conf, 'qnap_poolname', 'Storage Pool 1')
def _safe_set_of_opts(conf, *args, **kwargs):
try:
conf.set_default(*args, **kwargs)
except config.cfg.NoSuchOptError:
# Assumed that opt is not imported and not used
pass | # Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations | random_line_split |
conf_fixture.py | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_policy import opts
from oslo_service import wsgi
from manila.common import config
CONF = config.CONF
def set_defaults(conf):
|
def _safe_set_of_opts(conf, *args, **kwargs):
try:
conf.set_default(*args, **kwargs)
except config.cfg.NoSuchOptError:
# Assumed that opt is not imported and not used
pass
| _safe_set_of_opts(conf, 'verbose', True)
_safe_set_of_opts(conf, 'state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__),
'..',
'..')))
_safe_set_of_opts(conf, 'connection', "sqlite://", group='database')
_safe_set_of_opts(conf, 'sqlite_synchronous', False)
_POLICY_PATH = os.path.abspath(os.path.join(CONF.state_path,
'manila/tests/policy.json'))
opts.set_defaults(conf, policy_file=_POLICY_PATH)
_safe_set_of_opts(conf, 'share_export_ip', '0.0.0.0')
_safe_set_of_opts(conf, 'service_instance_user', 'fake_user')
_API_PASTE_PATH = os.path.abspath(os.path.join(CONF.state_path,
'etc/manila/api-paste.ini'))
wsgi.register_opts(conf)
_safe_set_of_opts(conf, 'api_paste_config', _API_PASTE_PATH)
_safe_set_of_opts(conf, 'share_driver',
'manila.tests.fake_driver.FakeShareDriver')
_safe_set_of_opts(conf, 'auth_strategy', 'noauth')
_safe_set_of_opts(conf, 'zfs_share_export_ip', '1.1.1.1')
_safe_set_of_opts(conf, 'zfs_service_ip', '2.2.2.2')
_safe_set_of_opts(conf, 'zfs_zpool_list', ['foo', 'bar'])
_safe_set_of_opts(conf, 'zfs_share_helpers', 'NFS=foo.bar.Helper')
_safe_set_of_opts(conf, 'zfs_replica_snapshot_prefix', 'foo_prefix_')
_safe_set_of_opts(conf, 'hitachi_hsp_host', '172.24.47.190')
_safe_set_of_opts(conf, 'hitachi_hsp_username', 'hsp_user')
_safe_set_of_opts(conf, 'hitachi_hsp_password', 'hsp_password')
_safe_set_of_opts(conf, 'qnap_management_url', 'http://1.2.3.4:8080')
_safe_set_of_opts(conf, 'qnap_share_ip', '1.2.3.4')
_safe_set_of_opts(conf, 'qnap_nas_login', 'admin')
_safe_set_of_opts(conf, 'qnap_nas_password', 'qnapadmin')
_safe_set_of_opts(conf, 'qnap_poolname', 'Storage Pool 1') | identifier_body |
conf_fixture.py | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_policy import opts
from oslo_service import wsgi
from manila.common import config
CONF = config.CONF
def set_defaults(conf):
_safe_set_of_opts(conf, 'verbose', True)
_safe_set_of_opts(conf, 'state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__),
'..',
'..')))
_safe_set_of_opts(conf, 'connection', "sqlite://", group='database')
_safe_set_of_opts(conf, 'sqlite_synchronous', False)
_POLICY_PATH = os.path.abspath(os.path.join(CONF.state_path,
'manila/tests/policy.json'))
opts.set_defaults(conf, policy_file=_POLICY_PATH)
_safe_set_of_opts(conf, 'share_export_ip', '0.0.0.0')
_safe_set_of_opts(conf, 'service_instance_user', 'fake_user')
_API_PASTE_PATH = os.path.abspath(os.path.join(CONF.state_path,
'etc/manila/api-paste.ini'))
wsgi.register_opts(conf)
_safe_set_of_opts(conf, 'api_paste_config', _API_PASTE_PATH)
_safe_set_of_opts(conf, 'share_driver',
'manila.tests.fake_driver.FakeShareDriver')
_safe_set_of_opts(conf, 'auth_strategy', 'noauth')
_safe_set_of_opts(conf, 'zfs_share_export_ip', '1.1.1.1')
_safe_set_of_opts(conf, 'zfs_service_ip', '2.2.2.2')
_safe_set_of_opts(conf, 'zfs_zpool_list', ['foo', 'bar'])
_safe_set_of_opts(conf, 'zfs_share_helpers', 'NFS=foo.bar.Helper')
_safe_set_of_opts(conf, 'zfs_replica_snapshot_prefix', 'foo_prefix_')
_safe_set_of_opts(conf, 'hitachi_hsp_host', '172.24.47.190')
_safe_set_of_opts(conf, 'hitachi_hsp_username', 'hsp_user')
_safe_set_of_opts(conf, 'hitachi_hsp_password', 'hsp_password')
_safe_set_of_opts(conf, 'qnap_management_url', 'http://1.2.3.4:8080')
_safe_set_of_opts(conf, 'qnap_share_ip', '1.2.3.4')
_safe_set_of_opts(conf, 'qnap_nas_login', 'admin')
_safe_set_of_opts(conf, 'qnap_nas_password', 'qnapadmin')
_safe_set_of_opts(conf, 'qnap_poolname', 'Storage Pool 1')
def | (conf, *args, **kwargs):
try:
conf.set_default(*args, **kwargs)
except config.cfg.NoSuchOptError:
# Assumed that opt is not imported and not used
pass
| _safe_set_of_opts | identifier_name |
bounds.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{MetaItem, MetaWord, Item};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use ptr::P;
pub fn | (cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Item,
push: |P<Item>|) {
let name = match mitem.node {
MetaWord(ref tname) => {
match tname.get() {
"Copy" => "Copy",
"Send" => "Send",
"Sync" => "Sync",
ref tname => {
cx.span_bug(span,
format!("expected built-in trait name but \
found {}",
*tname).as_slice())
}
}
},
_ => {
return cx.span_err(span, "unexpected value in deriving, expected \
a trait")
}
};
let trait_def = TraitDef {
span: span,
attributes: Vec::new(),
path: Path::new(vec!("std", "kinds", name)),
additional_bounds: Vec::new(),
generics: LifetimeBounds::empty(),
methods: vec!()
};
trait_def.expand(cx, mitem, item, push)
}
| expand_deriving_bound | identifier_name |
bounds.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{MetaItem, MetaWord, Item};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use ptr::P;
pub fn expand_deriving_bound(cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Item,
push: |P<Item>|) {
let name = match mitem.node {
MetaWord(ref tname) => | ,
_ => {
return cx.span_err(span, "unexpected value in deriving, expected \
a trait")
}
};
let trait_def = TraitDef {
span: span,
attributes: Vec::new(),
path: Path::new(vec!("std", "kinds", name)),
additional_bounds: Vec::new(),
generics: LifetimeBounds::empty(),
methods: vec!()
};
trait_def.expand(cx, mitem, item, push)
}
| {
match tname.get() {
"Copy" => "Copy",
"Send" => "Send",
"Sync" => "Sync",
ref tname => {
cx.span_bug(span,
format!("expected built-in trait name but \
found {}",
*tname).as_slice())
}
}
} | conditional_block |
bounds.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{MetaItem, MetaWord, Item};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use ptr::P;
pub fn expand_deriving_bound(cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Item,
push: |P<Item>|) | {
let name = match mitem.node {
MetaWord(ref tname) => {
match tname.get() {
"Copy" => "Copy",
"Send" => "Send",
"Sync" => "Sync",
ref tname => {
cx.span_bug(span,
format!("expected built-in trait name but \
found {}",
*tname).as_slice())
}
}
},
_ => {
return cx.span_err(span, "unexpected value in deriving, expected \
a trait")
}
};
let trait_def = TraitDef {
span: span,
attributes: Vec::new(),
path: Path::new(vec!("std", "kinds", name)),
additional_bounds: Vec::new(),
generics: LifetimeBounds::empty(),
methods: vec!()
};
trait_def.expand(cx, mitem, item, push)
} | identifier_body | |
bounds.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{MetaItem, MetaWord, Item};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use ptr::P;
pub fn expand_deriving_bound(cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Item,
push: |P<Item>|) {
let name = match mitem.node {
MetaWord(ref tname) => {
match tname.get() {
"Copy" => "Copy",
"Send" => "Send",
"Sync" => "Sync",
ref tname => { | }
}
},
_ => {
return cx.span_err(span, "unexpected value in deriving, expected \
a trait")
}
};
let trait_def = TraitDef {
span: span,
attributes: Vec::new(),
path: Path::new(vec!("std", "kinds", name)),
additional_bounds: Vec::new(),
generics: LifetimeBounds::empty(),
methods: vec!()
};
trait_def.expand(cx, mitem, item, push)
} | cx.span_bug(span,
format!("expected built-in trait name but \
found {}",
*tname).as_slice()) | random_line_split |
meteor-methods.js | import { Class as Model } from 'meteor/jagi:astronomy';
import * as Errors from './errors.js';
export function | (config) {
config.collection = new Mongo.Collection(config.collectionName);
config.model = Model.create({
name: config.modelName,
collection: config.collection,
fields: config.modelFields,
});
config.saveMethod = 'save' + config.modelName;
config.removeMethod = 'remove' + config.modelName;
var methods = {};
methods[config.saveMethod] = saveDoc;
methods[config.removeMethod] = removeDoc;
Meteor.methods(methods);
let colFieldsFunc = function () {
result = [];
for (var i = 0; i < config.formFields.length; i++) {
if (config.formFields[i].colClass) {
result[i] = config.formFields[i];
}
}
return result;
}
config.colFields = colFieldsFunc();
}
export function saveDoc (doc) {
if ( !Meteor.userId() ) {
return;
}
try {
doc.save();
} catch (e) {
Errors.handle(e);
}
}
export function removeDoc (doc) {
if ( !Meteor.userId() ) {
return;
}
doc.remove();
} | init | identifier_name |
meteor-methods.js | import { Class as Model } from 'meteor/jagi:astronomy';
import * as Errors from './errors.js';
export function init(config) {
config.collection = new Mongo.Collection(config.collectionName);
config.model = Model.create({
name: config.modelName,
collection: config.collection,
fields: config.modelFields,
});
config.saveMethod = 'save' + config.modelName;
config.removeMethod = 'remove' + config.modelName;
var methods = {};
methods[config.saveMethod] = saveDoc;
methods[config.removeMethod] = removeDoc;
Meteor.methods(methods);
let colFieldsFunc = function () {
result = [];
for (var i = 0; i < config.formFields.length; i++) {
if (config.formFields[i].colClass) |
}
return result;
}
config.colFields = colFieldsFunc();
}
export function saveDoc (doc) {
if ( !Meteor.userId() ) {
return;
}
try {
doc.save();
} catch (e) {
Errors.handle(e);
}
}
export function removeDoc (doc) {
if ( !Meteor.userId() ) {
return;
}
doc.remove();
} | {
result[i] = config.formFields[i];
} | conditional_block |
meteor-methods.js | import { Class as Model } from 'meteor/jagi:astronomy';
import * as Errors from './errors.js';
export function init(config) {
config.collection = new Mongo.Collection(config.collectionName);
config.model = Model.create({
name: config.modelName,
collection: config.collection,
fields: config.modelFields,
});
config.saveMethod = 'save' + config.modelName;
config.removeMethod = 'remove' + config.modelName;
var methods = {};
methods[config.saveMethod] = saveDoc;
methods[config.removeMethod] = removeDoc;
Meteor.methods(methods);
let colFieldsFunc = function () {
result = [];
for (var i = 0; i < config.formFields.length; i++) {
if (config.formFields[i].colClass) {
result[i] = config.formFields[i];
}
}
return result;
}
config.colFields = colFieldsFunc();
}
export function saveDoc (doc) {
if ( !Meteor.userId() ) {
return; | }
try {
doc.save();
} catch (e) {
Errors.handle(e);
}
}
export function removeDoc (doc) {
if ( !Meteor.userId() ) {
return;
}
doc.remove();
} | random_line_split | |
meteor-methods.js | import { Class as Model } from 'meteor/jagi:astronomy';
import * as Errors from './errors.js';
export function init(config) {
config.collection = new Mongo.Collection(config.collectionName);
config.model = Model.create({
name: config.modelName,
collection: config.collection,
fields: config.modelFields,
});
config.saveMethod = 'save' + config.modelName;
config.removeMethod = 'remove' + config.modelName;
var methods = {};
methods[config.saveMethod] = saveDoc;
methods[config.removeMethod] = removeDoc;
Meteor.methods(methods);
let colFieldsFunc = function () {
result = [];
for (var i = 0; i < config.formFields.length; i++) {
if (config.formFields[i].colClass) {
result[i] = config.formFields[i];
}
}
return result;
}
config.colFields = colFieldsFunc();
}
export function saveDoc (doc) {
if ( !Meteor.userId() ) {
return;
}
try {
doc.save();
} catch (e) {
Errors.handle(e);
}
}
export function removeDoc (doc) | {
if ( !Meteor.userId() ) {
return;
}
doc.remove();
} | identifier_body | |
0006_auto__add_field_reference_year.py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Reference.year'
db.add_column(u'citations_reference', 'year',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Reference.year'
db.delete_column(u'citations_reference', 'year')
models = {
u'citations.reference': {
'Meta': {'object_name': 'Reference'},
'abstract': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'edition': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isbn': ('django.db.models.fields.CharField', [], {'max_length': '17', 'null': 'True', 'blank': 'True'}),
'place': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}), | 'volume': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['citations'] | 'series': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'BK'", 'max_length': '3'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), | random_line_split |
0006_auto__add_field_reference_year.py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class | (SchemaMigration):
def forwards(self, orm):
# Adding field 'Reference.year'
db.add_column(u'citations_reference', 'year',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Reference.year'
db.delete_column(u'citations_reference', 'year')
models = {
u'citations.reference': {
'Meta': {'object_name': 'Reference'},
'abstract': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'edition': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isbn': ('django.db.models.fields.CharField', [], {'max_length': '17', 'null': 'True', 'blank': 'True'}),
'place': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'series': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'BK'", 'max_length': '3'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'volume': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['citations'] | Migration | identifier_name |
0006_auto__add_field_reference_year.py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Reference.year'
|
def backwards(self, orm):
# Deleting field 'Reference.year'
db.delete_column(u'citations_reference', 'year')
models = {
u'citations.reference': {
'Meta': {'object_name': 'Reference'},
'abstract': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'edition': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isbn': ('django.db.models.fields.CharField', [], {'max_length': '17', 'null': 'True', 'blank': 'True'}),
'place': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'series': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'BK'", 'max_length': '3'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'volume': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['citations'] | db.add_column(u'citations_reference', 'year',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False) | identifier_body |
IMainInputButtonAction.ts | /*
* Copyright (C) 2017 ZeXtras S.r.l.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, version 2 of
* the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License.
* If not, see <http://www.gnu.org/licenses/>.
*/
import {Action} from "redux";
import {InputToolbarButtonsActionType} from "./OpenChatAction";
| type: InputToolbarButtonsActionType;
} | export interface IInputToolbarButtonAction extends Action {
button?: JSX.Element;
side: "left" | "right"; | random_line_split |
localrepocache_tests.py | # Copyright (C) 2012-2015 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import urllib2
import os
import cliapp
import fs.memoryfs
import morphlib
import morphlib.gitdir_tests
class FakeApplication(object):
def __init__(self):
self.settings = {
'verbose': True
}
def status(self, msg):
pass
class LocalRepoCacheTests(unittest.TestCase):
def setUp(self):
aliases = ['upstream=git://example.com/#example.com:%s.git']
repo_resolver = morphlib.repoaliasresolver.RepoAliasResolver(aliases)
tarball_base_url = 'http://lorry.example.com/tarballs/'
self.reponame = 'upstream:reponame'
self.repourl = 'git://example.com/reponame'
escaped_url = 'git___example_com_reponame'
self.tarball_url = '%s%s.tar' % (tarball_base_url, escaped_url)
self.cachedir = '/cache/dir'
self.cache_path = '%s/%s' % (self.cachedir, escaped_url)
self.remotes = {}
self.fetched = []
self.removed = []
self.lrc = morphlib.localrepocache.LocalRepoCache(
FakeApplication(), self.cachedir, repo_resolver, tarball_base_url)
self.lrc.fs = fs.memoryfs.MemoryFS()
self.lrc._git = self.fake_git
self.lrc._fetch = self.not_found
self.lrc._mkdtemp = self.fake_mkdtemp
self.lrc._new_cached_repo_instance = self.new_cached_repo_instance
self._mkdtemp_count = 0
def fake_git(self, args, **kwargs):
if args[0] == 'clone':
self.assertEqual(len(args), 5)
remote = args[3]
local = args[4]
self.remotes['origin'] = {'url': remote, 'updates': 0}
self.lrc.fs.makedir(local, recursive=True)
elif args[0:2] == ['remote', 'set-url']:
remote = args[2]
url = args[3]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.url']:
remote = 'origin'
url = args[2]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.mirror']:
remote = 'origin'
elif args[0:2] == ['config', 'remote.origin.fetch']:
remote = 'origin'
else:
raise NotImplementedError()
def fake_mkdtemp(self, dirname):
thing = "foo"+str(self._mkdtemp_count)
self._mkdtemp_count += 1
self.lrc.fs.makedir(dirname+"/"+thing)
return thing
def new_cached_repo_instance(self, *args):
with morphlib.gitdir_tests.allow_nonexistant_git_repos():
return morphlib.cachedrepo.CachedRepo( | raise cliapp.AppException('Not found')
def test_has_not_got_shortened_repo_initially(self):
self.assertFalse(self.lrc.has_repo(self.reponame))
def test_has_not_got_absolute_repo_initially(self):
self.assertFalse(self.lrc.has_repo(self.repourl))
def test_caches_shortened_repository_on_request(self):
self.lrc.cache_repo(self.reponame)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_caches_absolute_repository_on_request(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_cachedir_does_not_exist_initially(self):
self.assertFalse(self.lrc.fs.exists(self.cachedir))
def test_creates_cachedir_if_missing(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.fs.exists(self.cachedir))
def test_happily_caches_same_repo_twice(self):
self.lrc.cache_repo(self.repourl)
self.lrc.cache_repo(self.repourl)
def test_fails_to_cache_when_remote_does_not_exist(self):
def fail(args, **kwargs):
self.lrc.fs.makedir(args[4])
raise cliapp.AppException('')
self.lrc._git = fail
self.assertRaises(morphlib.localrepocache.NoRemote,
self.lrc.cache_repo, self.repourl)
def test_does_not_mind_a_missing_tarball(self):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [])
def test_fetches_tarball_when_it_exists(self):
self.lrc._fetch = lambda url, path: self.fetched.append(url)
self.unpacked_tar = ""
self.mkdir_path = ""
with morphlib.gitdir_tests.monkeypatch(
morphlib.cachedrepo.CachedRepo, 'update', lambda self: None):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [self.tarball_url])
self.assertFalse(self.lrc.fs.exists(self.cache_path + '.tar'))
self.assertEqual(self.remotes['origin']['url'], self.repourl)
def test_gets_cached_shortened_repo(self):
self.lrc.cache_repo(self.reponame)
cached = self.lrc.get_repo(self.reponame)
self.assertTrue(cached is not None)
def test_gets_cached_absolute_repo(self):
self.lrc.cache_repo(self.repourl)
cached = self.lrc.get_repo(self.repourl)
self.assertTrue(cached is not None)
def test_get_repo_raises_exception_if_repo_is_not_cached(self):
self.assertRaises(Exception, self.lrc.get_repo, self.repourl)
def test_escapes_repourl_as_filename(self):
escaped = self.lrc._escape(self.repourl)
self.assertFalse('/' in escaped)
def test_noremote_error_message_contains_repo_name(self):
e = morphlib.localrepocache.NoRemote(self.repourl, [])
self.assertTrue(self.repourl in str(e))
def test_avoids_caching_local_repo(self):
self.lrc.fs.makedir('/local/repo', recursive=True)
self.lrc.cache_repo('file:///local/repo')
cached = self.lrc.get_repo('file:///local/repo')
assert cached.path == '/local/repo' | FakeApplication(), *args)
def not_found(self, url, path): | random_line_split |
localrepocache_tests.py | # Copyright (C) 2012-2015 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import urllib2
import os
import cliapp
import fs.memoryfs
import morphlib
import morphlib.gitdir_tests
class FakeApplication(object):
|
class LocalRepoCacheTests(unittest.TestCase):
def setUp(self):
aliases = ['upstream=git://example.com/#example.com:%s.git']
repo_resolver = morphlib.repoaliasresolver.RepoAliasResolver(aliases)
tarball_base_url = 'http://lorry.example.com/tarballs/'
self.reponame = 'upstream:reponame'
self.repourl = 'git://example.com/reponame'
escaped_url = 'git___example_com_reponame'
self.tarball_url = '%s%s.tar' % (tarball_base_url, escaped_url)
self.cachedir = '/cache/dir'
self.cache_path = '%s/%s' % (self.cachedir, escaped_url)
self.remotes = {}
self.fetched = []
self.removed = []
self.lrc = morphlib.localrepocache.LocalRepoCache(
FakeApplication(), self.cachedir, repo_resolver, tarball_base_url)
self.lrc.fs = fs.memoryfs.MemoryFS()
self.lrc._git = self.fake_git
self.lrc._fetch = self.not_found
self.lrc._mkdtemp = self.fake_mkdtemp
self.lrc._new_cached_repo_instance = self.new_cached_repo_instance
self._mkdtemp_count = 0
def fake_git(self, args, **kwargs):
if args[0] == 'clone':
self.assertEqual(len(args), 5)
remote = args[3]
local = args[4]
self.remotes['origin'] = {'url': remote, 'updates': 0}
self.lrc.fs.makedir(local, recursive=True)
elif args[0:2] == ['remote', 'set-url']:
remote = args[2]
url = args[3]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.url']:
remote = 'origin'
url = args[2]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.mirror']:
remote = 'origin'
elif args[0:2] == ['config', 'remote.origin.fetch']:
remote = 'origin'
else:
raise NotImplementedError()
def fake_mkdtemp(self, dirname):
thing = "foo"+str(self._mkdtemp_count)
self._mkdtemp_count += 1
self.lrc.fs.makedir(dirname+"/"+thing)
return thing
def new_cached_repo_instance(self, *args):
with morphlib.gitdir_tests.allow_nonexistant_git_repos():
return morphlib.cachedrepo.CachedRepo(
FakeApplication(), *args)
def not_found(self, url, path):
raise cliapp.AppException('Not found')
def test_has_not_got_shortened_repo_initially(self):
self.assertFalse(self.lrc.has_repo(self.reponame))
def test_has_not_got_absolute_repo_initially(self):
self.assertFalse(self.lrc.has_repo(self.repourl))
def test_caches_shortened_repository_on_request(self):
self.lrc.cache_repo(self.reponame)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_caches_absolute_repository_on_request(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_cachedir_does_not_exist_initially(self):
self.assertFalse(self.lrc.fs.exists(self.cachedir))
def test_creates_cachedir_if_missing(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.fs.exists(self.cachedir))
def test_happily_caches_same_repo_twice(self):
self.lrc.cache_repo(self.repourl)
self.lrc.cache_repo(self.repourl)
def test_fails_to_cache_when_remote_does_not_exist(self):
def fail(args, **kwargs):
self.lrc.fs.makedir(args[4])
raise cliapp.AppException('')
self.lrc._git = fail
self.assertRaises(morphlib.localrepocache.NoRemote,
self.lrc.cache_repo, self.repourl)
def test_does_not_mind_a_missing_tarball(self):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [])
def test_fetches_tarball_when_it_exists(self):
self.lrc._fetch = lambda url, path: self.fetched.append(url)
self.unpacked_tar = ""
self.mkdir_path = ""
with morphlib.gitdir_tests.monkeypatch(
morphlib.cachedrepo.CachedRepo, 'update', lambda self: None):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [self.tarball_url])
self.assertFalse(self.lrc.fs.exists(self.cache_path + '.tar'))
self.assertEqual(self.remotes['origin']['url'], self.repourl)
def test_gets_cached_shortened_repo(self):
self.lrc.cache_repo(self.reponame)
cached = self.lrc.get_repo(self.reponame)
self.assertTrue(cached is not None)
def test_gets_cached_absolute_repo(self):
self.lrc.cache_repo(self.repourl)
cached = self.lrc.get_repo(self.repourl)
self.assertTrue(cached is not None)
def test_get_repo_raises_exception_if_repo_is_not_cached(self):
self.assertRaises(Exception, self.lrc.get_repo, self.repourl)
def test_escapes_repourl_as_filename(self):
escaped = self.lrc._escape(self.repourl)
self.assertFalse('/' in escaped)
def test_noremote_error_message_contains_repo_name(self):
e = morphlib.localrepocache.NoRemote(self.repourl, [])
self.assertTrue(self.repourl in str(e))
def test_avoids_caching_local_repo(self):
self.lrc.fs.makedir('/local/repo', recursive=True)
self.lrc.cache_repo('file:///local/repo')
cached = self.lrc.get_repo('file:///local/repo')
assert cached.path == '/local/repo'
| def __init__(self):
self.settings = {
'verbose': True
}
def status(self, msg):
pass | identifier_body |
localrepocache_tests.py | # Copyright (C) 2012-2015 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import urllib2
import os
import cliapp
import fs.memoryfs
import morphlib
import morphlib.gitdir_tests
class FakeApplication(object):
def __init__(self):
self.settings = {
'verbose': True
}
def status(self, msg):
pass
class LocalRepoCacheTests(unittest.TestCase):
def setUp(self):
aliases = ['upstream=git://example.com/#example.com:%s.git']
repo_resolver = morphlib.repoaliasresolver.RepoAliasResolver(aliases)
tarball_base_url = 'http://lorry.example.com/tarballs/'
self.reponame = 'upstream:reponame'
self.repourl = 'git://example.com/reponame'
escaped_url = 'git___example_com_reponame'
self.tarball_url = '%s%s.tar' % (tarball_base_url, escaped_url)
self.cachedir = '/cache/dir'
self.cache_path = '%s/%s' % (self.cachedir, escaped_url)
self.remotes = {}
self.fetched = []
self.removed = []
self.lrc = morphlib.localrepocache.LocalRepoCache(
FakeApplication(), self.cachedir, repo_resolver, tarball_base_url)
self.lrc.fs = fs.memoryfs.MemoryFS()
self.lrc._git = self.fake_git
self.lrc._fetch = self.not_found
self.lrc._mkdtemp = self.fake_mkdtemp
self.lrc._new_cached_repo_instance = self.new_cached_repo_instance
self._mkdtemp_count = 0
def fake_git(self, args, **kwargs):
if args[0] == 'clone':
self.assertEqual(len(args), 5)
remote = args[3]
local = args[4]
self.remotes['origin'] = {'url': remote, 'updates': 0}
self.lrc.fs.makedir(local, recursive=True)
elif args[0:2] == ['remote', 'set-url']:
remote = args[2]
url = args[3]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.url']:
remote = 'origin'
url = args[2]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.mirror']:
remote = 'origin'
elif args[0:2] == ['config', 'remote.origin.fetch']:
remote = 'origin'
else:
raise NotImplementedError()
def fake_mkdtemp(self, dirname):
thing = "foo"+str(self._mkdtemp_count)
self._mkdtemp_count += 1
self.lrc.fs.makedir(dirname+"/"+thing)
return thing
def new_cached_repo_instance(self, *args):
with morphlib.gitdir_tests.allow_nonexistant_git_repos():
return morphlib.cachedrepo.CachedRepo(
FakeApplication(), *args)
def not_found(self, url, path):
raise cliapp.AppException('Not found')
def | (self):
self.assertFalse(self.lrc.has_repo(self.reponame))
def test_has_not_got_absolute_repo_initially(self):
self.assertFalse(self.lrc.has_repo(self.repourl))
def test_caches_shortened_repository_on_request(self):
self.lrc.cache_repo(self.reponame)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_caches_absolute_repository_on_request(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_cachedir_does_not_exist_initially(self):
self.assertFalse(self.lrc.fs.exists(self.cachedir))
def test_creates_cachedir_if_missing(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.fs.exists(self.cachedir))
def test_happily_caches_same_repo_twice(self):
self.lrc.cache_repo(self.repourl)
self.lrc.cache_repo(self.repourl)
def test_fails_to_cache_when_remote_does_not_exist(self):
def fail(args, **kwargs):
self.lrc.fs.makedir(args[4])
raise cliapp.AppException('')
self.lrc._git = fail
self.assertRaises(morphlib.localrepocache.NoRemote,
self.lrc.cache_repo, self.repourl)
def test_does_not_mind_a_missing_tarball(self):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [])
def test_fetches_tarball_when_it_exists(self):
self.lrc._fetch = lambda url, path: self.fetched.append(url)
self.unpacked_tar = ""
self.mkdir_path = ""
with morphlib.gitdir_tests.monkeypatch(
morphlib.cachedrepo.CachedRepo, 'update', lambda self: None):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [self.tarball_url])
self.assertFalse(self.lrc.fs.exists(self.cache_path + '.tar'))
self.assertEqual(self.remotes['origin']['url'], self.repourl)
def test_gets_cached_shortened_repo(self):
self.lrc.cache_repo(self.reponame)
cached = self.lrc.get_repo(self.reponame)
self.assertTrue(cached is not None)
def test_gets_cached_absolute_repo(self):
self.lrc.cache_repo(self.repourl)
cached = self.lrc.get_repo(self.repourl)
self.assertTrue(cached is not None)
def test_get_repo_raises_exception_if_repo_is_not_cached(self):
self.assertRaises(Exception, self.lrc.get_repo, self.repourl)
def test_escapes_repourl_as_filename(self):
escaped = self.lrc._escape(self.repourl)
self.assertFalse('/' in escaped)
def test_noremote_error_message_contains_repo_name(self):
e = morphlib.localrepocache.NoRemote(self.repourl, [])
self.assertTrue(self.repourl in str(e))
def test_avoids_caching_local_repo(self):
self.lrc.fs.makedir('/local/repo', recursive=True)
self.lrc.cache_repo('file:///local/repo')
cached = self.lrc.get_repo('file:///local/repo')
assert cached.path == '/local/repo'
| test_has_not_got_shortened_repo_initially | identifier_name |
localrepocache_tests.py | # Copyright (C) 2012-2015 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import urllib2
import os
import cliapp
import fs.memoryfs
import morphlib
import morphlib.gitdir_tests
class FakeApplication(object):
def __init__(self):
self.settings = {
'verbose': True
}
def status(self, msg):
pass
class LocalRepoCacheTests(unittest.TestCase):
def setUp(self):
aliases = ['upstream=git://example.com/#example.com:%s.git']
repo_resolver = morphlib.repoaliasresolver.RepoAliasResolver(aliases)
tarball_base_url = 'http://lorry.example.com/tarballs/'
self.reponame = 'upstream:reponame'
self.repourl = 'git://example.com/reponame'
escaped_url = 'git___example_com_reponame'
self.tarball_url = '%s%s.tar' % (tarball_base_url, escaped_url)
self.cachedir = '/cache/dir'
self.cache_path = '%s/%s' % (self.cachedir, escaped_url)
self.remotes = {}
self.fetched = []
self.removed = []
self.lrc = morphlib.localrepocache.LocalRepoCache(
FakeApplication(), self.cachedir, repo_resolver, tarball_base_url)
self.lrc.fs = fs.memoryfs.MemoryFS()
self.lrc._git = self.fake_git
self.lrc._fetch = self.not_found
self.lrc._mkdtemp = self.fake_mkdtemp
self.lrc._new_cached_repo_instance = self.new_cached_repo_instance
self._mkdtemp_count = 0
def fake_git(self, args, **kwargs):
if args[0] == 'clone':
|
elif args[0:2] == ['remote', 'set-url']:
remote = args[2]
url = args[3]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.url']:
remote = 'origin'
url = args[2]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.mirror']:
remote = 'origin'
elif args[0:2] == ['config', 'remote.origin.fetch']:
remote = 'origin'
else:
raise NotImplementedError()
def fake_mkdtemp(self, dirname):
thing = "foo"+str(self._mkdtemp_count)
self._mkdtemp_count += 1
self.lrc.fs.makedir(dirname+"/"+thing)
return thing
def new_cached_repo_instance(self, *args):
with morphlib.gitdir_tests.allow_nonexistant_git_repos():
return morphlib.cachedrepo.CachedRepo(
FakeApplication(), *args)
def not_found(self, url, path):
raise cliapp.AppException('Not found')
def test_has_not_got_shortened_repo_initially(self):
self.assertFalse(self.lrc.has_repo(self.reponame))
def test_has_not_got_absolute_repo_initially(self):
self.assertFalse(self.lrc.has_repo(self.repourl))
def test_caches_shortened_repository_on_request(self):
self.lrc.cache_repo(self.reponame)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_caches_absolute_repository_on_request(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_cachedir_does_not_exist_initially(self):
self.assertFalse(self.lrc.fs.exists(self.cachedir))
def test_creates_cachedir_if_missing(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.fs.exists(self.cachedir))
def test_happily_caches_same_repo_twice(self):
self.lrc.cache_repo(self.repourl)
self.lrc.cache_repo(self.repourl)
def test_fails_to_cache_when_remote_does_not_exist(self):
def fail(args, **kwargs):
self.lrc.fs.makedir(args[4])
raise cliapp.AppException('')
self.lrc._git = fail
self.assertRaises(morphlib.localrepocache.NoRemote,
self.lrc.cache_repo, self.repourl)
def test_does_not_mind_a_missing_tarball(self):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [])
def test_fetches_tarball_when_it_exists(self):
self.lrc._fetch = lambda url, path: self.fetched.append(url)
self.unpacked_tar = ""
self.mkdir_path = ""
with morphlib.gitdir_tests.monkeypatch(
morphlib.cachedrepo.CachedRepo, 'update', lambda self: None):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [self.tarball_url])
self.assertFalse(self.lrc.fs.exists(self.cache_path + '.tar'))
self.assertEqual(self.remotes['origin']['url'], self.repourl)
def test_gets_cached_shortened_repo(self):
self.lrc.cache_repo(self.reponame)
cached = self.lrc.get_repo(self.reponame)
self.assertTrue(cached is not None)
def test_gets_cached_absolute_repo(self):
self.lrc.cache_repo(self.repourl)
cached = self.lrc.get_repo(self.repourl)
self.assertTrue(cached is not None)
def test_get_repo_raises_exception_if_repo_is_not_cached(self):
self.assertRaises(Exception, self.lrc.get_repo, self.repourl)
def test_escapes_repourl_as_filename(self):
escaped = self.lrc._escape(self.repourl)
self.assertFalse('/' in escaped)
def test_noremote_error_message_contains_repo_name(self):
e = morphlib.localrepocache.NoRemote(self.repourl, [])
self.assertTrue(self.repourl in str(e))
def test_avoids_caching_local_repo(self):
self.lrc.fs.makedir('/local/repo', recursive=True)
self.lrc.cache_repo('file:///local/repo')
cached = self.lrc.get_repo('file:///local/repo')
assert cached.path == '/local/repo'
| self.assertEqual(len(args), 5)
remote = args[3]
local = args[4]
self.remotes['origin'] = {'url': remote, 'updates': 0}
self.lrc.fs.makedir(local, recursive=True) | conditional_block |
models.py | from django.db import models
from django.core.validators import validate_email, validate_slug, validate_ipv46_address
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from ava.core.models import TimeStampedModel
from ava.core_group.models import Group
from ava.core_identity.validators import validate_skype, validate_twitter
class Identity(TimeStampedModel):
# An identity is an online persona that can map to a single person, a group
# of people, or an automated service.
GROUP = 'GROUP'
PERSON = 'PERSON'
IDENTITY_TYPE_CHOICES = (
(GROUP, 'Group'),
(PERSON, 'Person'),
)
name = models.CharField(max_length=100, verbose_name='Name', null=True, blank=True)
description = models.TextField(max_length=500, verbose_name='Description', null=True, blank=True)
identity_type = models.CharField(max_length=10,
choices=IDENTITY_TYPE_CHOICES,
default=PERSON,
verbose_name='Identity Type')
groups = models.ManyToManyField(Group,
blank=True,
related_name='identities')
| return self.name or ''
def get_absolute_url(self):
return reverse('identity-detail', kwargs={'pk': self.id})
class Meta:
verbose_name = 'identity'
verbose_name_plural = 'identities'
ordering = ['name']
class Person(TimeStampedModel):
first_name = models.CharField(max_length=75, validators=[validate_slug])
surname = models.CharField(max_length=75, validators=[validate_slug])
identity = models.ManyToManyField('Identity', blank=True)
def __str__(self):
return (self.first_name + " " + self.surname).strip() or ''
def get_absolute_url(self):
return reverse('person-detail', kwargs={'pk': self.id})
class Meta:
verbose_name = 'person'
verbose_name_plural = 'people'
ordering = ['surname', 'first_name']
class Identifier(TimeStampedModel):
"""
TODO: DocString
"""
EMAIL = 'EMAIL'
SKYPE = 'SKYPE'
IP = 'IPADD'
UNAME = 'UNAME'
TWITTER = 'TWITTER'
NAME = 'NAME'
IDENTIFIER_TYPE_CHOICES = (
(EMAIL, 'Email Address'),
(SKYPE, 'Skype ID'),
(IP, 'IP Address'),
(UNAME, 'Username'),
(TWITTER, 'Twitter ID'),
(NAME, 'Other name'),
)
identifier = models.CharField(max_length=100)
identifier_type = models.CharField(max_length=10,
choices=IDENTIFIER_TYPE_CHOICES,
default=EMAIL,
verbose_name='Identifier Type')
identity = models.ForeignKey('Identity', related_name='identifiers')
def __str__(self):
return self.identifier or ''
def get_absolute_url(self):
return reverse('identifier-detail', kwargs={'pk': self.id})
def clean(self):
if self.identifier_type is 'EMAIL':
try:
validate_email(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid email address')
if self.identifier_type is 'IPADD':
try:
validate_ipv46_address(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid IPv4/IPv6 address')
if self.identifier_type is 'UNAME' or self.identifier_type is 'NAME':
try:
validate_slug(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid username or name')
if self.identifier_type is 'SKYPE':
try:
validate_skype(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Skype user name')
if self.identifier_type is 'TWITTER':
try:
validate_twitter(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Twitter user name')
class Meta:
unique_together = ("identifier", "identifier_type", "identity")
ordering = ['identifier', 'identifier_type'] | def __str__(self): | random_line_split |
models.py | from django.db import models
from django.core.validators import validate_email, validate_slug, validate_ipv46_address
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from ava.core.models import TimeStampedModel
from ava.core_group.models import Group
from ava.core_identity.validators import validate_skype, validate_twitter
class Identity(TimeStampedModel):
# An identity is an online persona that can map to a single person, a group
# of people, or an automated service.
GROUP = 'GROUP'
PERSON = 'PERSON'
IDENTITY_TYPE_CHOICES = (
(GROUP, 'Group'),
(PERSON, 'Person'),
)
name = models.CharField(max_length=100, verbose_name='Name', null=True, blank=True)
description = models.TextField(max_length=500, verbose_name='Description', null=True, blank=True)
identity_type = models.CharField(max_length=10,
choices=IDENTITY_TYPE_CHOICES,
default=PERSON,
verbose_name='Identity Type')
groups = models.ManyToManyField(Group,
blank=True,
related_name='identities')
def __str__(self):
return self.name or ''
def get_absolute_url(self):
return reverse('identity-detail', kwargs={'pk': self.id})
class Meta:
verbose_name = 'identity'
verbose_name_plural = 'identities'
ordering = ['name']
class Person(TimeStampedModel):
first_name = models.CharField(max_length=75, validators=[validate_slug])
surname = models.CharField(max_length=75, validators=[validate_slug])
identity = models.ManyToManyField('Identity', blank=True)
def __str__(self):
return (self.first_name + " " + self.surname).strip() or ''
def get_absolute_url(self):
return reverse('person-detail', kwargs={'pk': self.id})
class Meta:
verbose_name = 'person'
verbose_name_plural = 'people'
ordering = ['surname', 'first_name']
class Identifier(TimeStampedModel):
"""
TODO: DocString
"""
EMAIL = 'EMAIL'
SKYPE = 'SKYPE'
IP = 'IPADD'
UNAME = 'UNAME'
TWITTER = 'TWITTER'
NAME = 'NAME'
IDENTIFIER_TYPE_CHOICES = (
(EMAIL, 'Email Address'),
(SKYPE, 'Skype ID'),
(IP, 'IP Address'),
(UNAME, 'Username'),
(TWITTER, 'Twitter ID'),
(NAME, 'Other name'),
)
identifier = models.CharField(max_length=100)
identifier_type = models.CharField(max_length=10,
choices=IDENTIFIER_TYPE_CHOICES,
default=EMAIL,
verbose_name='Identifier Type')
identity = models.ForeignKey('Identity', related_name='identifiers')
def __str__(self):
return self.identifier or ''
def get_absolute_url(self):
return reverse('identifier-detail', kwargs={'pk': self.id})
def clean(self):
if self.identifier_type is 'EMAIL':
try:
validate_email(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid email address')
if self.identifier_type is 'IPADD':
|
if self.identifier_type is 'UNAME' or self.identifier_type is 'NAME':
try:
validate_slug(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid username or name')
if self.identifier_type is 'SKYPE':
try:
validate_skype(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Skype user name')
if self.identifier_type is 'TWITTER':
try:
validate_twitter(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Twitter user name')
class Meta:
unique_together = ("identifier", "identifier_type", "identity")
ordering = ['identifier', 'identifier_type']
| try:
validate_ipv46_address(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid IPv4/IPv6 address') | conditional_block |
models.py | from django.db import models
from django.core.validators import validate_email, validate_slug, validate_ipv46_address
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from ava.core.models import TimeStampedModel
from ava.core_group.models import Group
from ava.core_identity.validators import validate_skype, validate_twitter
class Identity(TimeStampedModel):
# An identity is an online persona that can map to a single person, a group
# of people, or an automated service.
GROUP = 'GROUP'
PERSON = 'PERSON'
IDENTITY_TYPE_CHOICES = (
(GROUP, 'Group'),
(PERSON, 'Person'),
)
name = models.CharField(max_length=100, verbose_name='Name', null=True, blank=True)
description = models.TextField(max_length=500, verbose_name='Description', null=True, blank=True)
identity_type = models.CharField(max_length=10,
choices=IDENTITY_TYPE_CHOICES,
default=PERSON,
verbose_name='Identity Type')
groups = models.ManyToManyField(Group,
blank=True,
related_name='identities')
def __str__(self):
return self.name or ''
def get_absolute_url(self):
return reverse('identity-detail', kwargs={'pk': self.id})
class Meta:
verbose_name = 'identity'
verbose_name_plural = 'identities'
ordering = ['name']
class Person(TimeStampedModel):
first_name = models.CharField(max_length=75, validators=[validate_slug])
surname = models.CharField(max_length=75, validators=[validate_slug])
identity = models.ManyToManyField('Identity', blank=True)
def __str__(self):
return (self.first_name + " " + self.surname).strip() or ''
def get_absolute_url(self):
return reverse('person-detail', kwargs={'pk': self.id})
class Meta:
verbose_name = 'person'
verbose_name_plural = 'people'
ordering = ['surname', 'first_name']
class Identifier(TimeStampedModel):
"""
TODO: DocString
"""
EMAIL = 'EMAIL'
SKYPE = 'SKYPE'
IP = 'IPADD'
UNAME = 'UNAME'
TWITTER = 'TWITTER'
NAME = 'NAME'
IDENTIFIER_TYPE_CHOICES = (
(EMAIL, 'Email Address'),
(SKYPE, 'Skype ID'),
(IP, 'IP Address'),
(UNAME, 'Username'),
(TWITTER, 'Twitter ID'),
(NAME, 'Other name'),
)
identifier = models.CharField(max_length=100)
identifier_type = models.CharField(max_length=10,
choices=IDENTIFIER_TYPE_CHOICES,
default=EMAIL,
verbose_name='Identifier Type')
identity = models.ForeignKey('Identity', related_name='identifiers')
def __str__(self):
return self.identifier or ''
def get_absolute_url(self):
|
def clean(self):
if self.identifier_type is 'EMAIL':
try:
validate_email(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid email address')
if self.identifier_type is 'IPADD':
try:
validate_ipv46_address(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid IPv4/IPv6 address')
if self.identifier_type is 'UNAME' or self.identifier_type is 'NAME':
try:
validate_slug(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid username or name')
if self.identifier_type is 'SKYPE':
try:
validate_skype(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Skype user name')
if self.identifier_type is 'TWITTER':
try:
validate_twitter(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Twitter user name')
class Meta:
unique_together = ("identifier", "identifier_type", "identity")
ordering = ['identifier', 'identifier_type']
| return reverse('identifier-detail', kwargs={'pk': self.id}) | identifier_body |
models.py | from django.db import models
from django.core.validators import validate_email, validate_slug, validate_ipv46_address
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from ava.core.models import TimeStampedModel
from ava.core_group.models import Group
from ava.core_identity.validators import validate_skype, validate_twitter
class Identity(TimeStampedModel):
# An identity is an online persona that can map to a single person, a group
# of people, or an automated service.
GROUP = 'GROUP'
PERSON = 'PERSON'
IDENTITY_TYPE_CHOICES = (
(GROUP, 'Group'),
(PERSON, 'Person'),
)
name = models.CharField(max_length=100, verbose_name='Name', null=True, blank=True)
description = models.TextField(max_length=500, verbose_name='Description', null=True, blank=True)
identity_type = models.CharField(max_length=10,
choices=IDENTITY_TYPE_CHOICES,
default=PERSON,
verbose_name='Identity Type')
groups = models.ManyToManyField(Group,
blank=True,
related_name='identities')
def __str__(self):
return self.name or ''
def get_absolute_url(self):
return reverse('identity-detail', kwargs={'pk': self.id})
class | :
verbose_name = 'identity'
verbose_name_plural = 'identities'
ordering = ['name']
class Person(TimeStampedModel):
first_name = models.CharField(max_length=75, validators=[validate_slug])
surname = models.CharField(max_length=75, validators=[validate_slug])
identity = models.ManyToManyField('Identity', blank=True)
def __str__(self):
return (self.first_name + " " + self.surname).strip() or ''
def get_absolute_url(self):
return reverse('person-detail', kwargs={'pk': self.id})
class Meta:
verbose_name = 'person'
verbose_name_plural = 'people'
ordering = ['surname', 'first_name']
class Identifier(TimeStampedModel):
"""
TODO: DocString
"""
EMAIL = 'EMAIL'
SKYPE = 'SKYPE'
IP = 'IPADD'
UNAME = 'UNAME'
TWITTER = 'TWITTER'
NAME = 'NAME'
IDENTIFIER_TYPE_CHOICES = (
(EMAIL, 'Email Address'),
(SKYPE, 'Skype ID'),
(IP, 'IP Address'),
(UNAME, 'Username'),
(TWITTER, 'Twitter ID'),
(NAME, 'Other name'),
)
identifier = models.CharField(max_length=100)
identifier_type = models.CharField(max_length=10,
choices=IDENTIFIER_TYPE_CHOICES,
default=EMAIL,
verbose_name='Identifier Type')
identity = models.ForeignKey('Identity', related_name='identifiers')
def __str__(self):
return self.identifier or ''
def get_absolute_url(self):
return reverse('identifier-detail', kwargs={'pk': self.id})
def clean(self):
if self.identifier_type is 'EMAIL':
try:
validate_email(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid email address')
if self.identifier_type is 'IPADD':
try:
validate_ipv46_address(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid IPv4/IPv6 address')
if self.identifier_type is 'UNAME' or self.identifier_type is 'NAME':
try:
validate_slug(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid username or name')
if self.identifier_type is 'SKYPE':
try:
validate_skype(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Skype user name')
if self.identifier_type is 'TWITTER':
try:
validate_twitter(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Twitter user name')
class Meta:
unique_together = ("identifier", "identifier_type", "identity")
ordering = ['identifier', 'identifier_type']
| Meta | identifier_name |
SPFormField.tsx | import * as React from 'react';
import { ControlMode } from '../../../../common/datatypes/ControlMode';
import { IFieldSchema } from '../../../../common/services/datatypes/RenderListData';
import FormField from './FormField';
import { IFormFieldProps } from './FormField';
import { IDatePickerStrings } from 'office-ui-fabric-react/lib/DatePicker';
import { TextField } from 'office-ui-fabric-react/lib/TextField';
import { Icon } from 'office-ui-fabric-react/lib/Icon';
import SPFieldTextEdit from './SPFieldTextEdit';
import SPFieldRichTextEdit from './SPFieldRichTextEdit';
import SPFieldLookupEdit from './SPFieldLookupEdit';
import SPFieldChoiceEdit from './SPFieldChoiceEdit';
import SPFieldNumberEdit from './SPFieldNumberEdit';
import SPFieldDateEdit from './SPFieldDateEdit';
import SPFieldBooleanEdit from './SPFieldBooleanEdit';
import SPFieldTextDisplay from './SPFieldTextDisplay';
import SPFieldRichTextDisplay from './SPFieldRichTextDisplay';
import SPFieldLookupDisplay from './SPFieldLookupDisplay';
import SPFieldUserDisplay from './SPFieldUserDisplay';
import SPFieldUrlDisplay from './SPFieldUrlDisplay';
import * as strings from 'FormFieldStrings';
import styles from './SPFormField.module.scss';
const EditFieldTypeMappings: { [fieldType: string]: React.StatelessComponent<ISPFormFieldProps> } = {
Text: SPFieldTextEdit,
RichText: SPFieldRichTextEdit,
Note: SPFieldTextEdit,
Lookup: SPFieldLookupEdit,
LookupMulti: SPFieldLookupEdit,
Choice: SPFieldChoiceEdit,
MultiChoice: SPFieldChoiceEdit,
Number: SPFieldNumberEdit,
Currency: SPFieldNumberEdit,
DateTime: SPFieldDateEdit,
Boolean: SPFieldBooleanEdit,
File: SPFieldTextEdit,
/* The following are known but unsupported types as of now:
User: null,
UserMulti: null,
URL: null,
TaxonomyFieldType: null,
Attachments: null,
TaxonomyFieldTypeMulti: null,
*/
};
const DisplayFieldTypeMappings: {
[fieldType: string]: {
component: React.StatelessComponent<ISPFormFieldProps>,
valuePreProcess?: (value: any) => any
},
} = {
Text: { component: SPFieldTextDisplay },
RichText: { component: SPFieldRichTextDisplay },
Note: { component: SPFieldTextDisplay },
Lookup: { component: SPFieldLookupDisplay },
LookupMulti: { component: SPFieldLookupDisplay },
Choice: { component: SPFieldTextDisplay },
MultiChoice: { component: SPFieldTextDisplay, valuePreProcess: (val) => val ? val.join(', ') : '' },
Number: { component: SPFieldTextDisplay },
Currency: { component: SPFieldTextDisplay },
DateTime: { component: SPFieldTextDisplay },
Boolean: { component: SPFieldTextDisplay },
User: { component: SPFieldUserDisplay },
UserMulti: { component: SPFieldUserDisplay },
URL: { component: SPFieldUrlDisplay },
File: { component: SPFieldTextDisplay },
TaxonomyFieldType: { component: SPFieldTextDisplay, valuePreProcess: (val) => val ? val.Label : '' },
TaxonomyFieldTypeMulti: { component: SPFieldTextDisplay, valuePreProcess: (val) => val ? val.map((v) => v.Label).join(', ') : '' },
/* The following are known but unsupported types as of now:
Attachments: null,
*/
};
export interface ISPFormFieldProps extends IFormFieldProps {
extraData?: any;
fieldSchema: IFieldSchema;
hideIfFieldUnsupported?: boolean;
}
const SPFormField: React.SFC<ISPFormFieldProps> = (props) => {
let fieldControl = null;
const fieldType = props.fieldSchema.FieldType;
const richText = props.fieldSchema.RichText;
if (props.controlMode === ControlMode.Display) {
if (DisplayFieldTypeMappings.hasOwnProperty(fieldType)) {
const fieldMapping = richText ? DisplayFieldTypeMappings['RichText'] : DisplayFieldTypeMappings[fieldType];
const childProps = fieldMapping.valuePreProcess ? { ...props, value: fieldMapping.valuePreProcess(props.value) } : props;
fieldControl = React.createElement(fieldMapping.component, childProps);
} else if (!props.hideIfFieldUnsupported) {
const value = (props.value) ? ((typeof props.value === 'string') ? props.value : JSON.stringify(props.value)) : '';
fieldControl = <div className={`ard-${fieldType}field-display`}>
<span>{value}</span>
<div className={styles.unsupportedFieldMessage}><Icon iconName='Error' />{`${strings.UnsupportedFieldType} "${fieldType}"`}</div>
</div>;
}
} else {
if (EditFieldTypeMappings.hasOwnProperty(fieldType)) {
fieldControl = richText ? React.createElement(EditFieldTypeMappings['RichText'], props) : React.createElement(EditFieldTypeMappings[fieldType], props);
} else if (!props.hideIfFieldUnsupported) |
underlined
/>;
}
}
return (fieldControl)
? <FormField
{...props}
label={props.label || props.fieldSchema.Title}
description={props.description || props.fieldSchema.Description}
required={props.fieldSchema.Required}
errorMessage={props.errorMessage}
>
{fieldControl}
</FormField>
: null;
};
export default SPFormField;
| {
const isObjValue = (props.value) && (typeof props.value !== 'string');
const value = (props.value) ? ((typeof props.value === 'string') ? props.value : JSON.stringify(props.value)) : '';
fieldControl = <TextField
readOnly
multiline={isObjValue}
value={value}
errorMessage={`${strings.UnsupportedFieldType} "${fieldType}"`} | conditional_block |
SPFormField.tsx | import * as React from 'react';
import { ControlMode } from '../../../../common/datatypes/ControlMode';
import { IFieldSchema } from '../../../../common/services/datatypes/RenderListData';
import FormField from './FormField';
import { IFormFieldProps } from './FormField';
import { IDatePickerStrings } from 'office-ui-fabric-react/lib/DatePicker';
import { TextField } from 'office-ui-fabric-react/lib/TextField';
import { Icon } from 'office-ui-fabric-react/lib/Icon';
import SPFieldTextEdit from './SPFieldTextEdit';
import SPFieldRichTextEdit from './SPFieldRichTextEdit';
import SPFieldLookupEdit from './SPFieldLookupEdit';
import SPFieldChoiceEdit from './SPFieldChoiceEdit';
import SPFieldNumberEdit from './SPFieldNumberEdit';
import SPFieldDateEdit from './SPFieldDateEdit';
import SPFieldBooleanEdit from './SPFieldBooleanEdit';
import SPFieldTextDisplay from './SPFieldTextDisplay';
import SPFieldRichTextDisplay from './SPFieldRichTextDisplay';
import SPFieldLookupDisplay from './SPFieldLookupDisplay';
import SPFieldUserDisplay from './SPFieldUserDisplay';
import SPFieldUrlDisplay from './SPFieldUrlDisplay';
import * as strings from 'FormFieldStrings';
import styles from './SPFormField.module.scss';
const EditFieldTypeMappings: { [fieldType: string]: React.StatelessComponent<ISPFormFieldProps> } = {
Text: SPFieldTextEdit,
RichText: SPFieldRichTextEdit,
Note: SPFieldTextEdit,
Lookup: SPFieldLookupEdit,
LookupMulti: SPFieldLookupEdit,
Choice: SPFieldChoiceEdit,
MultiChoice: SPFieldChoiceEdit,
Number: SPFieldNumberEdit,
Currency: SPFieldNumberEdit,
DateTime: SPFieldDateEdit,
Boolean: SPFieldBooleanEdit,
File: SPFieldTextEdit,
/* The following are known but unsupported types as of now:
User: null,
UserMulti: null,
URL: null,
TaxonomyFieldType: null,
Attachments: null,
TaxonomyFieldTypeMulti: null,
*/
};
const DisplayFieldTypeMappings: {
[fieldType: string]: {
component: React.StatelessComponent<ISPFormFieldProps>,
valuePreProcess?: (value: any) => any
},
} = {
Text: { component: SPFieldTextDisplay },
RichText: { component: SPFieldRichTextDisplay },
Note: { component: SPFieldTextDisplay },
Lookup: { component: SPFieldLookupDisplay },
LookupMulti: { component: SPFieldLookupDisplay },
Choice: { component: SPFieldTextDisplay },
MultiChoice: { component: SPFieldTextDisplay, valuePreProcess: (val) => val ? val.join(', ') : '' },
Number: { component: SPFieldTextDisplay },
Currency: { component: SPFieldTextDisplay },
DateTime: { component: SPFieldTextDisplay },
Boolean: { component: SPFieldTextDisplay },
User: { component: SPFieldUserDisplay },
UserMulti: { component: SPFieldUserDisplay },
URL: { component: SPFieldUrlDisplay },
| Attachments: null,
*/
};
export interface ISPFormFieldProps extends IFormFieldProps {
extraData?: any;
fieldSchema: IFieldSchema;
hideIfFieldUnsupported?: boolean;
}
const SPFormField: React.SFC<ISPFormFieldProps> = (props) => {
let fieldControl = null;
const fieldType = props.fieldSchema.FieldType;
const richText = props.fieldSchema.RichText;
if (props.controlMode === ControlMode.Display) {
if (DisplayFieldTypeMappings.hasOwnProperty(fieldType)) {
const fieldMapping = richText ? DisplayFieldTypeMappings['RichText'] : DisplayFieldTypeMappings[fieldType];
const childProps = fieldMapping.valuePreProcess ? { ...props, value: fieldMapping.valuePreProcess(props.value) } : props;
fieldControl = React.createElement(fieldMapping.component, childProps);
} else if (!props.hideIfFieldUnsupported) {
const value = (props.value) ? ((typeof props.value === 'string') ? props.value : JSON.stringify(props.value)) : '';
fieldControl = <div className={`ard-${fieldType}field-display`}>
<span>{value}</span>
<div className={styles.unsupportedFieldMessage}><Icon iconName='Error' />{`${strings.UnsupportedFieldType} "${fieldType}"`}</div>
</div>;
}
} else {
if (EditFieldTypeMappings.hasOwnProperty(fieldType)) {
fieldControl = richText ? React.createElement(EditFieldTypeMappings['RichText'], props) : React.createElement(EditFieldTypeMappings[fieldType], props);
} else if (!props.hideIfFieldUnsupported) {
const isObjValue = (props.value) && (typeof props.value !== 'string');
const value = (props.value) ? ((typeof props.value === 'string') ? props.value : JSON.stringify(props.value)) : '';
fieldControl = <TextField
readOnly
multiline={isObjValue}
value={value}
errorMessage={`${strings.UnsupportedFieldType} "${fieldType}"`}
underlined
/>;
}
}
return (fieldControl)
? <FormField
{...props}
label={props.label || props.fieldSchema.Title}
description={props.description || props.fieldSchema.Description}
required={props.fieldSchema.Required}
errorMessage={props.errorMessage}
>
{fieldControl}
</FormField>
: null;
};
export default SPFormField; | File: { component: SPFieldTextDisplay },
TaxonomyFieldType: { component: SPFieldTextDisplay, valuePreProcess: (val) => val ? val.Label : '' },
TaxonomyFieldTypeMulti: { component: SPFieldTextDisplay, valuePreProcess: (val) => val ? val.map((v) => v.Label).join(', ') : '' },
/* The following are known but unsupported types as of now:
| random_line_split |
db.js | import Dexie from 'dexie'
const db = new Dexie('metaDb')
db.version(1).stores({
player: '++id, name',
game: '++id, team, season, day'
})
db.open().catch(e => {
console.error(`metaDb open failed: ${e.stack}`)
})
export const resetPlayer = () => db.table('player').clear()
.catch(e => console.log(`error resetting player table: ${e}`))
export const resetGame = () => db.table('game').clear()
.catch(e => console.log(`error resetting game table: ${e}`))
export const getPlayer = () => db.table('player').toArray()
.then(players => players[0].name)
.catch(() => null)
export default db
//
// // DB is your Dexie object
// // objectList is your list of objects
// DB.transaction('rw', DB.table, function()
// {
// var numObjects = objectList.length
//
// for ( var i = 0 i < numObjects i++ ) | // {
// DB.table.put( objectList[i] )
// }
// }) | random_line_split | |
webcore.py | # Copyright 2011,2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
Webcore is a basic web server framework based on the SocketServer-based
BaseHTTPServer that comes with Python. The big difference is that this
one can carve up URL-space by prefix, such that "/foo/*" gets handled by
a different request handler than "/bar/*". I refer to this as "splitting".
You should also be able to make a request handler written without splitting
run under Webcore. This may not work for all request handlers, but it
definitely works for some. :) The easiest way to do this is with the
wrapRequestHandler() function, like so:
from CGIHTTPServer import CGIHTTPRequestHandler as CHRH
core.WebServer.set_handler("/foo", wrapRequestHandler(CHRH))
.. now URLs under the /foo/ directory will let you browse through the
filesystem next to pox.py. If you create a cgi-bin directory next to
pox.py, you'll be able to run executables in it.
For this specific purpose, there's actually a SplitCGIRequestHandler
which demonstrates wrapping a normal request handler while also
customizing it a bit -- SplitCGIRequestHandler shoehorns in functionality
to use arbitrary base paths.
BaseHTTPServer is not very fast and needs to run on its own thread.
It'd actually be great to have a version of this written against, say,
CherryPy, but I did want to include a simple, dependency-free web solution.
"""
from SocketServer import ThreadingMixIn
from BaseHTTPServer import *
from time import sleep
import select
import threading
import random
import hashlib
import base64
from pox.core import core
import os
import posixpath
import urllib
import cgi
import errno
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
log = core.getLogger()
weblog = log.getChild("server")
def _setAttribs (parent, child):
attrs = ['command', 'request_version', 'close_connection',
'raw_requestline', 'requestline', 'path', 'headers', 'wfile',
'rfile', 'server', 'client_address']
for a in attrs:
setattr(child, a, getattr(parent, a))
setattr(child, 'parent', parent)
import SimpleHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class SplitRequestHandler (BaseHTTPRequestHandler):
"""
To write HTTP handlers for POX, inherit from this class instead of
BaseHTTPRequestHandler. The interface should be the same -- the same
variables should be set, and the same do_GET(), etc. methods should
be called.
In addition, there will be a self.args which can be specified
when you set_handler() on the server.
"""
# Also a StreamRequestHandler
def __init__ (self, parent, prefix, args):
_setAttribs(parent, self)
self.parent = parent
self.args = args
self.prefix = prefix
self._init()
def _init (self):
"""
This is called by __init__ during initialization. You can
override it to, for example, parse .args.
"""
pass
def handle_one_request (self):
raise RuntimeError("Not supported")
def handle(self):
raise RuntimeError("Not supported")
def _split_dispatch (self, command, handler = None):
if handler is None: handler = self
mname = 'do_' + self.command
if not hasattr(handler, mname):
self.send_error(501, "Unsupported method (%r)" % self.command)
return
method = getattr(handler, mname)
return method()
def log_request (self, code = '-', size = '-'):
weblog.debug(self.prefix + (':"%s" %s %s' %
(self.requestline, str(code), str(size))))
def log_error (self, fmt, *args):
weblog.error(self.prefix + ':' + (fmt % args))
def log_message (self, fmt, *args):
weblog.info(self.prefix + ':' + (fmt % args))
_favicon = ("47494638396110001000c206006a5797927bc18f83ada9a1bfb49ceabda"
+ "4f4ffffffffffff21f904010a0007002c000000001000100000034578badcfe30b20"
+ "1c038d4e27a0f2004e081e2172a4051942abba260309ea6b805ab501581ae3129d90"
+ "1275c6404b80a72f5abcd4a2454cb334dbd9e58e74693b97425e07002003b")
_favicon = ''.join([chr(int(_favicon[n:n+2],16))
for n in xrange(0,len(_favicon),2)])
class CoreHandler (SplitRequestHandler):
"""
A default page to say hi from POX.
"""
def do_GET (self):
"""Serve a GET request."""
self.do_content(True)
def do_HEAD (self):
"""Serve a HEAD request."""
self.do_content(False)
def do_content (self, is_get):
if self.path == "/":
self.send_info(is_get)
elif self.path.startswith("/favicon."):
self.send_favicon(is_get)
else:
self.send_error(404, "File not found on CoreHandler")
def send_favicon (self, is_get = False):
self.send_response(200)
self.send_header("Content-type", "image/gif")
self.send_header("Content-Length", str(len(_favicon)))
self.end_headers()
if is_get:
self.wfile.write(_favicon)
def send_info (self, is_get = False):
r = "<html><head><title>POX</title></head>\n"
r += "<body>\n<h1>POX Webserver</h1>\n<h2>Components</h2>\n"
r += "<ul>"
for k in sorted(core.components):
v = core.components[k]
r += "<li>%s - %s</li>\n" % (cgi.escape(str(k)), cgi.escape(str(v)))
r += "</ul>\n\n<h2>Web Prefixes</h2>"
r += "<ul>"
m = [map(cgi.escape, map(str, [x[0],x[1],x[3]]))
for x in self.args.matches]
m.sort()
for v in m:
r += "<li><a href='{0}'>{0}</a> - {1} {2}</li>\n".format(*v)
r += "</ul></body></html>\n"
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-Length", str(len(r)))
self.end_headers()
if is_get:
self.wfile.write(r)
class StaticContentHandler (SplitRequestHandler, SimpleHTTPRequestHandler):
# We slightly modify SimpleHTTPRequestHandler to serve from given
# directories and inherit from from Python, but
# modified to serve from given directories and to inherit from
# SplitRequestHandler.
"""
A SplitRequestHandler for serving static content
This is largely the same as the Python SimpleHTTPRequestHandler, but
we modify it to serve from arbitrary directories at arbitrary
positions in the URL space.
"""
server_version = "StaticContentHandler/1.0"
def send_head (self):
# We override this and handle the directory redirection case because
# we want to include the per-split prefix.
path = self.translate_path(self.path)
if os.path.isdir(path):
if not self.path.endswith('/'):
self.send_response(301)
self.send_header("Location", self.prefix + self.path + "/")
self.end_headers()
return None
return SimpleHTTPRequestHandler.send_head(self)
def list_directory (self, dirpath):
# dirpath is an OS path
try:
d = os.listdir(dirpath)
except OSError as e:
if e.errno == errno.EACCES:
self.send_error(403, "This directory is not listable")
elif e.errno == errno.ENOENT:
self.send_error(404, "This directory does not exist")
else:
self.send_error(400, "Unknown error")
return None
d.sort(key=str.lower)
r = StringIO()
r.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n")
path = posixpath.join(self.prefix, cgi.escape(self.path).lstrip("/"))
r.write("<html><head><title>" + path + "</title></head>\n")
r.write("<body><pre>")
parts = path.rstrip("/").split("/")
r.write('<a href="/">/</a>')
for i,part in enumerate(parts):
link = urllib.quote("/".join(parts[:i+1]))
if i > 0: part += "/"
r.write('<a href="%s">%s</a>' % (link, cgi.escape(part)))
r.write("\n" + "-" * (0+len(path)) + "\n")
dirs = []
files = []
for f in d:
if f.startswith("."): continue
if os.path.isdir(os.path.join(dirpath, f)):
dirs.append(f)
else:
files.append(f)
def entry (n, rest=''):
link = urllib.quote(n)
name = cgi.escape(n)
r.write('<a href="%s">%s</a>\n' % (link,name+rest))
for f in dirs:
entry(f, "/")
for f in files:
entry(f)
r.write("</pre></body></html>")
r.seek(0)
self.send_response(200)
self.send_header("Content-Type", "text/html")
self.send_header("Content-Length", str(len(r.getvalue())))
self.end_headers()
return r
def translate_path (self, path, include_prefix = True):
"""
Translate a web-path to a local filesystem path
Odd path elements (e.g., ones that contain local filesystem path
separators) are stripped.
"""
def fixpath (p):
o = []
skip = 0
while True:
p,tail = posixpath.split(p)
if p in ('/','') and tail == '': break
if tail in ('','.', os.path.curdir, os.path.pardir): continue
if os.path.sep in tail: continue
if os.path.altsep and os.path.altsep in tail: continue
if os.path.splitdrive(tail)[0] != '': continue
if tail == '..':
skip += 1
continue
if skip:
skip -= 1
continue
o.append(tail)
o.reverse()
return o
# Remove query string / fragment
if "?" in path: path = path[:path.index("?")]
if "#" in path: path = path[:path.index("#")]
path = fixpath(path)
if path:
path = os.path.join(*path)
else:
path = ''
if include_prefix:
path = os.path.join(os.path.abspath(self.args['root']), path)
return path
def wrapRequestHandler (handlerClass):
return type("Split" + handlerClass.__name__,
(SplitRequestHandler, handlerClass, object), {})
from CGIHTTPServer import CGIHTTPRequestHandler
class SplitCGIRequestHandler (SplitRequestHandler,
CGIHTTPRequestHandler, object):
"""
Runs CGIRequestHandler serving from an arbitrary path.
This really should be a feature of CGIRequestHandler and the way of
implementing it here is scary and awful, but it at least sort of works.
"""
__lock = threading.Lock()
def _split_dispatch (self, command):
with self.__lock:
olddir = os.getcwd()
try:
os.chdir(self.args)
return SplitRequestHandler._split_dispatch(self, command)
finally:
os.chdir(olddir)
class SplitterRequestHandler (BaseHTTPRequestHandler):
def __init__ (self, *args, **kw):
#self.rec = Recording(args[0])
#self.args = args
#self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
#BaseHTTPRequestHandler.__init__(self, self.rec, *args[1:], **kw)
BaseHTTPRequestHandler.__init__(self, *args, **kw)
def log_request (self, code = '-', size = '-'):
weblog.debug('splitter:"%s" %s %s',
self.requestline, str(code), str(size))
def log_error (self, fmt, *args):
weblog.error('splitter:' + fmt % args)
def log_message (self, fmt, *args):
weblog.info('splitter:' + fmt % args)
def handle_one_request(self):
self.raw_requestline = self.rfile.readline()
if not self.raw_requestline:
self.close_connection = 1
return
if not self.parse_request(): # An error code has been sent, just exit
return
handler = None
while True:
for m in self.server.matches:
if self.path.startswith(m[0]):
#print m,self.path
handler = m[1](self, m[0], m[3])
#pb = self.rec.getPlayback()
#handler = m[1](pb, *self.args[1:])
_setAttribs(self, handler)
if m[2]:
# Trim. Behavior is not "perfect"
handler.path = self.path[len(m[0]):]
if m[0].endswith('/'):
handler.path = '/' + handler.path
break
if handler is None:
handler = self
if not self.path.endswith('/'):
# Handle splits like directories
self.send_response(301)
self.send_header("Location", self.path + "/")
self.end_headers()
break
break
return handler._split_dispatch(self.command)
class SplitThreadedServer(ThreadingMixIn, HTTPServer):
matches = [] # Tuples of (Prefix, TrimPrefix, Handler)
# def __init__ (self, *args, **kw):
# BaseHTTPRequestHandler.__init__(self, *args, **kw)
# self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def | (self, prefix, handler, args = None, trim_prefix = True):
# Not very efficient
assert (handler is None) or (issubclass(handler, SplitRequestHandler))
self.matches = [m for m in self.matches if m[0] != prefix]
if handler is None: return
self.matches.append((prefix, handler, trim_prefix, args))
self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def add_static_dir (self, www_path, local_path=None, relative=False):
"""
Serves a directory of static content.
www_path is the prefix of the URL that maps to this directory.
local_path is the directory to serve content from. If it's not
specified, it is assume to be a directory with the same name as
www_path.
relative, if True, means that the local path is to be a sibling
of the calling module.
For an example, see the launch() function in this module.
"""
if not www_path.startswith('/'): www_path = '/' + www_path
if local_path is None:
local_path = www_path[1:]
if relative:
local_path = os.path.basename(local_path)
if relative:
import inspect
path = inspect.stack()[1][1]
path = os.path.dirname(path)
local_path = os.path.join(path, local_path)
local_path = os.path.abspath(local_path)
log.debug("Serving %s at %s", local_path, www_path)
self.set_handler(www_path, StaticContentHandler,
{'root':local_path}, True);
def launch (address='', port=8000, static=False):
httpd = SplitThreadedServer((address, int(port)), SplitterRequestHandler)
core.register("WebServer", httpd)
httpd.set_handler("/", CoreHandler, httpd, True)
#httpd.set_handler("/foo", StaticContentHandler, {'root':'.'}, True)
#httpd.set_handler("/f", StaticContentHandler, {'root':'pox'}, True)
#httpd.set_handler("/cgis", SplitCGIRequestHandler, "pox/web/www_root")
if static is True:
httpd.add_static_dir('static', 'www_root', relative=True)
elif static is False:
pass
else:
static = static.split(",")
for entry in static:
if entry.lower() == "":
httpd.add_static_dir('static', 'www_root', relative=True)
continue
if ':' not in entry:
directory = entry
prefix = os.path.split(directory)
if prefix[1] == '':
prefix = os.path.split(prefix[0])
prefix = prefix[1]
assert prefix != ''
else:
prefix,directory = entry.split(":")
directory = os.path.expanduser(directory)
httpd.add_static_dir(prefix, directory, relative=False)
def run ():
try:
log.debug("Listening on %s:%i" % httpd.socket.getsockname())
httpd.serve_forever()
except:
pass
log.info("Server quit")
thread = threading.Thread(target=run)
thread.daemon = True
thread.start()
| set_handler | identifier_name |
webcore.py | # Copyright 2011,2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
Webcore is a basic web server framework based on the SocketServer-based
BaseHTTPServer that comes with Python. The big difference is that this
one can carve up URL-space by prefix, such that "/foo/*" gets handled by
a different request handler than "/bar/*". I refer to this as "splitting".
You should also be able to make a request handler written without splitting
run under Webcore. This may not work for all request handlers, but it
definitely works for some. :) The easiest way to do this is with the
wrapRequestHandler() function, like so:
from CGIHTTPServer import CGIHTTPRequestHandler as CHRH
core.WebServer.set_handler("/foo", wrapRequestHandler(CHRH))
.. now URLs under the /foo/ directory will let you browse through the
filesystem next to pox.py. If you create a cgi-bin directory next to
pox.py, you'll be able to run executables in it.
For this specific purpose, there's actually a SplitCGIRequestHandler
which demonstrates wrapping a normal request handler while also
customizing it a bit -- SplitCGIRequestHandler shoehorns in functionality
to use arbitrary base paths.
BaseHTTPServer is not very fast and needs to run on its own thread.
It'd actually be great to have a version of this written against, say,
CherryPy, but I did want to include a simple, dependency-free web solution.
"""
from SocketServer import ThreadingMixIn
from BaseHTTPServer import *
from time import sleep
import select
import threading
import random
import hashlib
import base64
from pox.core import core
import os
import posixpath
import urllib
import cgi
import errno
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
log = core.getLogger()
weblog = log.getChild("server")
def _setAttribs (parent, child):
attrs = ['command', 'request_version', 'close_connection',
'raw_requestline', 'requestline', 'path', 'headers', 'wfile',
'rfile', 'server', 'client_address']
for a in attrs:
setattr(child, a, getattr(parent, a))
setattr(child, 'parent', parent)
import SimpleHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class SplitRequestHandler (BaseHTTPRequestHandler):
"""
To write HTTP handlers for POX, inherit from this class instead of
BaseHTTPRequestHandler. The interface should be the same -- the same
variables should be set, and the same do_GET(), etc. methods should
be called.
In addition, there will be a self.args which can be specified
when you set_handler() on the server.
"""
# Also a StreamRequestHandler
def __init__ (self, parent, prefix, args):
_setAttribs(parent, self)
self.parent = parent
self.args = args
self.prefix = prefix
self._init()
def _init (self):
"""
This is called by __init__ during initialization. You can
override it to, for example, parse .args.
"""
pass
def handle_one_request (self):
raise RuntimeError("Not supported")
def handle(self):
raise RuntimeError("Not supported")
def _split_dispatch (self, command, handler = None):
if handler is None: handler = self
mname = 'do_' + self.command
if not hasattr(handler, mname):
self.send_error(501, "Unsupported method (%r)" % self.command)
return
method = getattr(handler, mname)
return method()
def log_request (self, code = '-', size = '-'):
weblog.debug(self.prefix + (':"%s" %s %s' %
(self.requestline, str(code), str(size))))
def log_error (self, fmt, *args):
weblog.error(self.prefix + ':' + (fmt % args))
def log_message (self, fmt, *args):
weblog.info(self.prefix + ':' + (fmt % args))
_favicon = ("47494638396110001000c206006a5797927bc18f83ada9a1bfb49ceabda"
+ "4f4ffffffffffff21f904010a0007002c000000001000100000034578badcfe30b20"
+ "1c038d4e27a0f2004e081e2172a4051942abba260309ea6b805ab501581ae3129d90"
+ "1275c6404b80a72f5abcd4a2454cb334dbd9e58e74693b97425e07002003b")
_favicon = ''.join([chr(int(_favicon[n:n+2],16))
for n in xrange(0,len(_favicon),2)])
class CoreHandler (SplitRequestHandler):
"""
A default page to say hi from POX.
"""
def do_GET (self):
"""Serve a GET request."""
self.do_content(True)
def do_HEAD (self):
"""Serve a HEAD request."""
self.do_content(False)
def do_content (self, is_get):
if self.path == "/":
self.send_info(is_get)
elif self.path.startswith("/favicon."):
self.send_favicon(is_get)
else:
self.send_error(404, "File not found on CoreHandler")
def send_favicon (self, is_get = False):
self.send_response(200)
self.send_header("Content-type", "image/gif")
self.send_header("Content-Length", str(len(_favicon)))
self.end_headers()
if is_get:
self.wfile.write(_favicon)
def send_info (self, is_get = False):
r = "<html><head><title>POX</title></head>\n"
r += "<body>\n<h1>POX Webserver</h1>\n<h2>Components</h2>\n"
r += "<ul>"
for k in sorted(core.components):
v = core.components[k]
r += "<li>%s - %s</li>\n" % (cgi.escape(str(k)), cgi.escape(str(v)))
r += "</ul>\n\n<h2>Web Prefixes</h2>"
r += "<ul>"
m = [map(cgi.escape, map(str, [x[0],x[1],x[3]]))
for x in self.args.matches]
m.sort()
for v in m:
r += "<li><a href='{0}'>{0}</a> - {1} {2}</li>\n".format(*v)
r += "</ul></body></html>\n"
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-Length", str(len(r)))
self.end_headers()
if is_get:
self.wfile.write(r)
class StaticContentHandler (SplitRequestHandler, SimpleHTTPRequestHandler):
# We slightly modify SimpleHTTPRequestHandler to serve from given
# directories and inherit from from Python, but
# modified to serve from given directories and to inherit from
# SplitRequestHandler.
"""
A SplitRequestHandler for serving static content
This is largely the same as the Python SimpleHTTPRequestHandler, but
we modify it to serve from arbitrary directories at arbitrary
positions in the URL space.
"""
server_version = "StaticContentHandler/1.0"
def send_head (self):
# We override this and handle the directory redirection case because
# we want to include the per-split prefix.
path = self.translate_path(self.path)
if os.path.isdir(path):
if not self.path.endswith('/'):
self.send_response(301)
self.send_header("Location", self.prefix + self.path + "/")
self.end_headers()
return None
return SimpleHTTPRequestHandler.send_head(self)
def list_directory (self, dirpath):
# dirpath is an OS path
try:
d = os.listdir(dirpath)
except OSError as e:
if e.errno == errno.EACCES:
self.send_error(403, "This directory is not listable")
elif e.errno == errno.ENOENT:
self.send_error(404, "This directory does not exist")
else:
self.send_error(400, "Unknown error")
return None
d.sort(key=str.lower)
r = StringIO()
r.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n")
path = posixpath.join(self.prefix, cgi.escape(self.path).lstrip("/"))
r.write("<html><head><title>" + path + "</title></head>\n")
r.write("<body><pre>")
parts = path.rstrip("/").split("/")
r.write('<a href="/">/</a>')
for i,part in enumerate(parts):
link = urllib.quote("/".join(parts[:i+1]))
if i > 0: part += "/"
r.write('<a href="%s">%s</a>' % (link, cgi.escape(part)))
r.write("\n" + "-" * (0+len(path)) + "\n")
dirs = []
files = []
for f in d:
if f.startswith("."): continue
if os.path.isdir(os.path.join(dirpath, f)):
dirs.append(f)
else:
files.append(f)
def entry (n, rest=''):
link = urllib.quote(n)
name = cgi.escape(n)
r.write('<a href="%s">%s</a>\n' % (link,name+rest))
for f in dirs:
entry(f, "/")
for f in files:
entry(f)
r.write("</pre></body></html>")
r.seek(0)
self.send_response(200)
self.send_header("Content-Type", "text/html")
self.send_header("Content-Length", str(len(r.getvalue())))
self.end_headers()
return r
def translate_path (self, path, include_prefix = True):
"""
Translate a web-path to a local filesystem path
Odd path elements (e.g., ones that contain local filesystem path
separators) are stripped.
"""
def fixpath (p):
o = []
skip = 0
while True:
p,tail = posixpath.split(p)
if p in ('/','') and tail == '': break
if tail in ('','.', os.path.curdir, os.path.pardir): continue
if os.path.sep in tail: continue
if os.path.altsep and os.path.altsep in tail: continue
if os.path.splitdrive(tail)[0] != '': continue
if tail == '..':
skip += 1
continue
if skip:
skip -= 1
continue
o.append(tail)
o.reverse()
return o
# Remove query string / fragment
if "?" in path: path = path[:path.index("?")]
if "#" in path: path = path[:path.index("#")]
path = fixpath(path)
if path:
path = os.path.join(*path)
else:
path = ''
if include_prefix:
path = os.path.join(os.path.abspath(self.args['root']), path)
return path
def wrapRequestHandler (handlerClass):
return type("Split" + handlerClass.__name__,
(SplitRequestHandler, handlerClass, object), {})
from CGIHTTPServer import CGIHTTPRequestHandler
class SplitCGIRequestHandler (SplitRequestHandler,
CGIHTTPRequestHandler, object):
"""
Runs CGIRequestHandler serving from an arbitrary path.
This really should be a feature of CGIRequestHandler and the way of
implementing it here is scary and awful, but it at least sort of works.
"""
__lock = threading.Lock()
def _split_dispatch (self, command):
with self.__lock:
olddir = os.getcwd()
try:
os.chdir(self.args)
return SplitRequestHandler._split_dispatch(self, command)
finally:
os.chdir(olddir)
class SplitterRequestHandler (BaseHTTPRequestHandler):
def __init__ (self, *args, **kw):
#self.rec = Recording(args[0])
#self.args = args
#self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
#BaseHTTPRequestHandler.__init__(self, self.rec, *args[1:], **kw)
BaseHTTPRequestHandler.__init__(self, *args, **kw)
def log_request (self, code = '-', size = '-'):
weblog.debug('splitter:"%s" %s %s',
self.requestline, str(code), str(size))
def log_error (self, fmt, *args):
weblog.error('splitter:' + fmt % args)
def log_message (self, fmt, *args):
weblog.info('splitter:' + fmt % args)
def handle_one_request(self):
self.raw_requestline = self.rfile.readline()
if not self.raw_requestline:
self.close_connection = 1
return
if not self.parse_request(): # An error code has been sent, just exit
return
handler = None
while True:
for m in self.server.matches:
if self.path.startswith(m[0]):
#print m,self.path
handler = m[1](self, m[0], m[3])
#pb = self.rec.getPlayback()
#handler = m[1](pb, *self.args[1:])
_setAttribs(self, handler)
if m[2]:
# Trim. Behavior is not "perfect"
handler.path = self.path[len(m[0]):]
if m[0].endswith('/'):
handler.path = '/' + handler.path
break
if handler is None:
handler = self
if not self.path.endswith('/'):
# Handle splits like directories
self.send_response(301)
self.send_header("Location", self.path + "/")
self.end_headers()
break
break
return handler._split_dispatch(self.command)
class SplitThreadedServer(ThreadingMixIn, HTTPServer):
matches = [] # Tuples of (Prefix, TrimPrefix, Handler)
# def __init__ (self, *args, **kw):
# BaseHTTPRequestHandler.__init__(self, *args, **kw)
# self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def set_handler (self, prefix, handler, args = None, trim_prefix = True):
# Not very efficient
assert (handler is None) or (issubclass(handler, SplitRequestHandler))
self.matches = [m for m in self.matches if m[0] != prefix]
if handler is None: return
self.matches.append((prefix, handler, trim_prefix, args))
self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def add_static_dir (self, www_path, local_path=None, relative=False):
"""
Serves a directory of static content.
www_path is the prefix of the URL that maps to this directory.
local_path is the directory to serve content from. If it's not
specified, it is assume to be a directory with the same name as
www_path.
relative, if True, means that the local path is to be a sibling
of the calling module.
For an example, see the launch() function in this module.
"""
if not www_path.startswith('/'): www_path = '/' + www_path
if local_path is None:
local_path = www_path[1:]
if relative:
local_path = os.path.basename(local_path)
if relative:
import inspect
path = inspect.stack()[1][1]
path = os.path.dirname(path)
local_path = os.path.join(path, local_path)
local_path = os.path.abspath(local_path)
log.debug("Serving %s at %s", local_path, www_path)
self.set_handler(www_path, StaticContentHandler,
{'root':local_path}, True);
def launch (address='', port=8000, static=False):
httpd = SplitThreadedServer((address, int(port)), SplitterRequestHandler)
core.register("WebServer", httpd)
httpd.set_handler("/", CoreHandler, httpd, True)
#httpd.set_handler("/foo", StaticContentHandler, {'root':'.'}, True)
#httpd.set_handler("/f", StaticContentHandler, {'root':'pox'}, True)
#httpd.set_handler("/cgis", SplitCGIRequestHandler, "pox/web/www_root")
if static is True:
httpd.add_static_dir('static', 'www_root', relative=True)
elif static is False:
pass
else:
static = static.split(",")
for entry in static:
if entry.lower() == "":
httpd.add_static_dir('static', 'www_root', relative=True)
continue
if ':' not in entry:
directory = entry
prefix = os.path.split(directory)
if prefix[1] == '':
prefix = os.path.split(prefix[0])
prefix = prefix[1]
assert prefix != ''
else: | httpd.add_static_dir(prefix, directory, relative=False)
def run ():
try:
log.debug("Listening on %s:%i" % httpd.socket.getsockname())
httpd.serve_forever()
except:
pass
log.info("Server quit")
thread = threading.Thread(target=run)
thread.daemon = True
thread.start() | prefix,directory = entry.split(":")
directory = os.path.expanduser(directory) | random_line_split |
webcore.py | # Copyright 2011,2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
Webcore is a basic web server framework based on the SocketServer-based
BaseHTTPServer that comes with Python. The big difference is that this
one can carve up URL-space by prefix, such that "/foo/*" gets handled by
a different request handler than "/bar/*". I refer to this as "splitting".
You should also be able to make a request handler written without splitting
run under Webcore. This may not work for all request handlers, but it
definitely works for some. :) The easiest way to do this is with the
wrapRequestHandler() function, like so:
from CGIHTTPServer import CGIHTTPRequestHandler as CHRH
core.WebServer.set_handler("/foo", wrapRequestHandler(CHRH))
.. now URLs under the /foo/ directory will let you browse through the
filesystem next to pox.py. If you create a cgi-bin directory next to
pox.py, you'll be able to run executables in it.
For this specific purpose, there's actually a SplitCGIRequestHandler
which demonstrates wrapping a normal request handler while also
customizing it a bit -- SplitCGIRequestHandler shoehorns in functionality
to use arbitrary base paths.
BaseHTTPServer is not very fast and needs to run on its own thread.
It'd actually be great to have a version of this written against, say,
CherryPy, but I did want to include a simple, dependency-free web solution.
"""
from SocketServer import ThreadingMixIn
from BaseHTTPServer import *
from time import sleep
import select
import threading
import random
import hashlib
import base64
from pox.core import core
import os
import posixpath
import urllib
import cgi
import errno
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
log = core.getLogger()
weblog = log.getChild("server")
def _setAttribs (parent, child):
attrs = ['command', 'request_version', 'close_connection',
'raw_requestline', 'requestline', 'path', 'headers', 'wfile',
'rfile', 'server', 'client_address']
for a in attrs:
setattr(child, a, getattr(parent, a))
setattr(child, 'parent', parent)
import SimpleHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class SplitRequestHandler (BaseHTTPRequestHandler):
"""
To write HTTP handlers for POX, inherit from this class instead of
BaseHTTPRequestHandler. The interface should be the same -- the same
variables should be set, and the same do_GET(), etc. methods should
be called.
In addition, there will be a self.args which can be specified
when you set_handler() on the server.
"""
# Also a StreamRequestHandler
def __init__ (self, parent, prefix, args):
_setAttribs(parent, self)
self.parent = parent
self.args = args
self.prefix = prefix
self._init()
def _init (self):
"""
This is called by __init__ during initialization. You can
override it to, for example, parse .args.
"""
pass
def handle_one_request (self):
raise RuntimeError("Not supported")
def handle(self):
raise RuntimeError("Not supported")
def _split_dispatch (self, command, handler = None):
if handler is None: handler = self
mname = 'do_' + self.command
if not hasattr(handler, mname):
self.send_error(501, "Unsupported method (%r)" % self.command)
return
method = getattr(handler, mname)
return method()
def log_request (self, code = '-', size = '-'):
weblog.debug(self.prefix + (':"%s" %s %s' %
(self.requestline, str(code), str(size))))
def log_error (self, fmt, *args):
weblog.error(self.prefix + ':' + (fmt % args))
def log_message (self, fmt, *args):
weblog.info(self.prefix + ':' + (fmt % args))
_favicon = ("47494638396110001000c206006a5797927bc18f83ada9a1bfb49ceabda"
+ "4f4ffffffffffff21f904010a0007002c000000001000100000034578badcfe30b20"
+ "1c038d4e27a0f2004e081e2172a4051942abba260309ea6b805ab501581ae3129d90"
+ "1275c6404b80a72f5abcd4a2454cb334dbd9e58e74693b97425e07002003b")
_favicon = ''.join([chr(int(_favicon[n:n+2],16))
for n in xrange(0,len(_favicon),2)])
class CoreHandler (SplitRequestHandler):
"""
A default page to say hi from POX.
"""
def do_GET (self):
"""Serve a GET request."""
self.do_content(True)
def do_HEAD (self):
"""Serve a HEAD request."""
self.do_content(False)
def do_content (self, is_get):
if self.path == "/":
self.send_info(is_get)
elif self.path.startswith("/favicon."):
self.send_favicon(is_get)
else:
self.send_error(404, "File not found on CoreHandler")
def send_favicon (self, is_get = False):
self.send_response(200)
self.send_header("Content-type", "image/gif")
self.send_header("Content-Length", str(len(_favicon)))
self.end_headers()
if is_get:
self.wfile.write(_favicon)
def send_info (self, is_get = False):
r = "<html><head><title>POX</title></head>\n"
r += "<body>\n<h1>POX Webserver</h1>\n<h2>Components</h2>\n"
r += "<ul>"
for k in sorted(core.components):
v = core.components[k]
r += "<li>%s - %s</li>\n" % (cgi.escape(str(k)), cgi.escape(str(v)))
r += "</ul>\n\n<h2>Web Prefixes</h2>"
r += "<ul>"
m = [map(cgi.escape, map(str, [x[0],x[1],x[3]]))
for x in self.args.matches]
m.sort()
for v in m:
r += "<li><a href='{0}'>{0}</a> - {1} {2}</li>\n".format(*v)
r += "</ul></body></html>\n"
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-Length", str(len(r)))
self.end_headers()
if is_get:
self.wfile.write(r)
class StaticContentHandler (SplitRequestHandler, SimpleHTTPRequestHandler):
# We slightly modify SimpleHTTPRequestHandler to serve from given
# directories and inherit from from Python, but
# modified to serve from given directories and to inherit from
# SplitRequestHandler.
"""
A SplitRequestHandler for serving static content
This is largely the same as the Python SimpleHTTPRequestHandler, but
we modify it to serve from arbitrary directories at arbitrary
positions in the URL space.
"""
server_version = "StaticContentHandler/1.0"
def send_head (self):
# We override this and handle the directory redirection case because
# we want to include the per-split prefix.
path = self.translate_path(self.path)
if os.path.isdir(path):
if not self.path.endswith('/'):
self.send_response(301)
self.send_header("Location", self.prefix + self.path + "/")
self.end_headers()
return None
return SimpleHTTPRequestHandler.send_head(self)
def list_directory (self, dirpath):
# dirpath is an OS path
|
def translate_path (self, path, include_prefix = True):
"""
Translate a web-path to a local filesystem path
Odd path elements (e.g., ones that contain local filesystem path
separators) are stripped.
"""
def fixpath (p):
o = []
skip = 0
while True:
p,tail = posixpath.split(p)
if p in ('/','') and tail == '': break
if tail in ('','.', os.path.curdir, os.path.pardir): continue
if os.path.sep in tail: continue
if os.path.altsep and os.path.altsep in tail: continue
if os.path.splitdrive(tail)[0] != '': continue
if tail == '..':
skip += 1
continue
if skip:
skip -= 1
continue
o.append(tail)
o.reverse()
return o
# Remove query string / fragment
if "?" in path: path = path[:path.index("?")]
if "#" in path: path = path[:path.index("#")]
path = fixpath(path)
if path:
path = os.path.join(*path)
else:
path = ''
if include_prefix:
path = os.path.join(os.path.abspath(self.args['root']), path)
return path
def wrapRequestHandler (handlerClass):
return type("Split" + handlerClass.__name__,
(SplitRequestHandler, handlerClass, object), {})
from CGIHTTPServer import CGIHTTPRequestHandler
class SplitCGIRequestHandler (SplitRequestHandler,
CGIHTTPRequestHandler, object):
"""
Runs CGIRequestHandler serving from an arbitrary path.
This really should be a feature of CGIRequestHandler and the way of
implementing it here is scary and awful, but it at least sort of works.
"""
__lock = threading.Lock()
def _split_dispatch (self, command):
with self.__lock:
olddir = os.getcwd()
try:
os.chdir(self.args)
return SplitRequestHandler._split_dispatch(self, command)
finally:
os.chdir(olddir)
class SplitterRequestHandler (BaseHTTPRequestHandler):
def __init__ (self, *args, **kw):
#self.rec = Recording(args[0])
#self.args = args
#self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
#BaseHTTPRequestHandler.__init__(self, self.rec, *args[1:], **kw)
BaseHTTPRequestHandler.__init__(self, *args, **kw)
def log_request (self, code = '-', size = '-'):
weblog.debug('splitter:"%s" %s %s',
self.requestline, str(code), str(size))
def log_error (self, fmt, *args):
weblog.error('splitter:' + fmt % args)
def log_message (self, fmt, *args):
weblog.info('splitter:' + fmt % args)
def handle_one_request(self):
self.raw_requestline = self.rfile.readline()
if not self.raw_requestline:
self.close_connection = 1
return
if not self.parse_request(): # An error code has been sent, just exit
return
handler = None
while True:
for m in self.server.matches:
if self.path.startswith(m[0]):
#print m,self.path
handler = m[1](self, m[0], m[3])
#pb = self.rec.getPlayback()
#handler = m[1](pb, *self.args[1:])
_setAttribs(self, handler)
if m[2]:
# Trim. Behavior is not "perfect"
handler.path = self.path[len(m[0]):]
if m[0].endswith('/'):
handler.path = '/' + handler.path
break
if handler is None:
handler = self
if not self.path.endswith('/'):
# Handle splits like directories
self.send_response(301)
self.send_header("Location", self.path + "/")
self.end_headers()
break
break
return handler._split_dispatch(self.command)
class SplitThreadedServer(ThreadingMixIn, HTTPServer):
matches = [] # Tuples of (Prefix, TrimPrefix, Handler)
# def __init__ (self, *args, **kw):
# BaseHTTPRequestHandler.__init__(self, *args, **kw)
# self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def set_handler (self, prefix, handler, args = None, trim_prefix = True):
# Not very efficient
assert (handler is None) or (issubclass(handler, SplitRequestHandler))
self.matches = [m for m in self.matches if m[0] != prefix]
if handler is None: return
self.matches.append((prefix, handler, trim_prefix, args))
self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def add_static_dir (self, www_path, local_path=None, relative=False):
"""
Serves a directory of static content.
www_path is the prefix of the URL that maps to this directory.
local_path is the directory to serve content from. If it's not
specified, it is assume to be a directory with the same name as
www_path.
relative, if True, means that the local path is to be a sibling
of the calling module.
For an example, see the launch() function in this module.
"""
if not www_path.startswith('/'): www_path = '/' + www_path
if local_path is None:
local_path = www_path[1:]
if relative:
local_path = os.path.basename(local_path)
if relative:
import inspect
path = inspect.stack()[1][1]
path = os.path.dirname(path)
local_path = os.path.join(path, local_path)
local_path = os.path.abspath(local_path)
log.debug("Serving %s at %s", local_path, www_path)
self.set_handler(www_path, StaticContentHandler,
{'root':local_path}, True);
def launch (address='', port=8000, static=False):
httpd = SplitThreadedServer((address, int(port)), SplitterRequestHandler)
core.register("WebServer", httpd)
httpd.set_handler("/", CoreHandler, httpd, True)
#httpd.set_handler("/foo", StaticContentHandler, {'root':'.'}, True)
#httpd.set_handler("/f", StaticContentHandler, {'root':'pox'}, True)
#httpd.set_handler("/cgis", SplitCGIRequestHandler, "pox/web/www_root")
if static is True:
httpd.add_static_dir('static', 'www_root', relative=True)
elif static is False:
pass
else:
static = static.split(",")
for entry in static:
if entry.lower() == "":
httpd.add_static_dir('static', 'www_root', relative=True)
continue
if ':' not in entry:
directory = entry
prefix = os.path.split(directory)
if prefix[1] == '':
prefix = os.path.split(prefix[0])
prefix = prefix[1]
assert prefix != ''
else:
prefix,directory = entry.split(":")
directory = os.path.expanduser(directory)
httpd.add_static_dir(prefix, directory, relative=False)
def run ():
try:
log.debug("Listening on %s:%i" % httpd.socket.getsockname())
httpd.serve_forever()
except:
pass
log.info("Server quit")
thread = threading.Thread(target=run)
thread.daemon = True
thread.start()
| try:
d = os.listdir(dirpath)
except OSError as e:
if e.errno == errno.EACCES:
self.send_error(403, "This directory is not listable")
elif e.errno == errno.ENOENT:
self.send_error(404, "This directory does not exist")
else:
self.send_error(400, "Unknown error")
return None
d.sort(key=str.lower)
r = StringIO()
r.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n")
path = posixpath.join(self.prefix, cgi.escape(self.path).lstrip("/"))
r.write("<html><head><title>" + path + "</title></head>\n")
r.write("<body><pre>")
parts = path.rstrip("/").split("/")
r.write('<a href="/">/</a>')
for i,part in enumerate(parts):
link = urllib.quote("/".join(parts[:i+1]))
if i > 0: part += "/"
r.write('<a href="%s">%s</a>' % (link, cgi.escape(part)))
r.write("\n" + "-" * (0+len(path)) + "\n")
dirs = []
files = []
for f in d:
if f.startswith("."): continue
if os.path.isdir(os.path.join(dirpath, f)):
dirs.append(f)
else:
files.append(f)
def entry (n, rest=''):
link = urllib.quote(n)
name = cgi.escape(n)
r.write('<a href="%s">%s</a>\n' % (link,name+rest))
for f in dirs:
entry(f, "/")
for f in files:
entry(f)
r.write("</pre></body></html>")
r.seek(0)
self.send_response(200)
self.send_header("Content-Type", "text/html")
self.send_header("Content-Length", str(len(r.getvalue())))
self.end_headers()
return r | identifier_body |
webcore.py | # Copyright 2011,2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
Webcore is a basic web server framework based on the SocketServer-based
BaseHTTPServer that comes with Python. The big difference is that this
one can carve up URL-space by prefix, such that "/foo/*" gets handled by
a different request handler than "/bar/*". I refer to this as "splitting".
You should also be able to make a request handler written without splitting
run under Webcore. This may not work for all request handlers, but it
definitely works for some. :) The easiest way to do this is with the
wrapRequestHandler() function, like so:
from CGIHTTPServer import CGIHTTPRequestHandler as CHRH
core.WebServer.set_handler("/foo", wrapRequestHandler(CHRH))
.. now URLs under the /foo/ directory will let you browse through the
filesystem next to pox.py. If you create a cgi-bin directory next to
pox.py, you'll be able to run executables in it.
For this specific purpose, there's actually a SplitCGIRequestHandler
which demonstrates wrapping a normal request handler while also
customizing it a bit -- SplitCGIRequestHandler shoehorns in functionality
to use arbitrary base paths.
BaseHTTPServer is not very fast and needs to run on its own thread.
It'd actually be great to have a version of this written against, say,
CherryPy, but I did want to include a simple, dependency-free web solution.
"""
from SocketServer import ThreadingMixIn
from BaseHTTPServer import *
from time import sleep
import select
import threading
import random
import hashlib
import base64
from pox.core import core
import os
import posixpath
import urllib
import cgi
import errno
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
log = core.getLogger()
weblog = log.getChild("server")
def _setAttribs (parent, child):
attrs = ['command', 'request_version', 'close_connection',
'raw_requestline', 'requestline', 'path', 'headers', 'wfile',
'rfile', 'server', 'client_address']
for a in attrs:
setattr(child, a, getattr(parent, a))
setattr(child, 'parent', parent)
import SimpleHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class SplitRequestHandler (BaseHTTPRequestHandler):
"""
To write HTTP handlers for POX, inherit from this class instead of
BaseHTTPRequestHandler. The interface should be the same -- the same
variables should be set, and the same do_GET(), etc. methods should
be called.
In addition, there will be a self.args which can be specified
when you set_handler() on the server.
"""
# Also a StreamRequestHandler
def __init__ (self, parent, prefix, args):
_setAttribs(parent, self)
self.parent = parent
self.args = args
self.prefix = prefix
self._init()
def _init (self):
"""
This is called by __init__ during initialization. You can
override it to, for example, parse .args.
"""
pass
def handle_one_request (self):
raise RuntimeError("Not supported")
def handle(self):
raise RuntimeError("Not supported")
def _split_dispatch (self, command, handler = None):
if handler is None: handler = self
mname = 'do_' + self.command
if not hasattr(handler, mname):
self.send_error(501, "Unsupported method (%r)" % self.command)
return
method = getattr(handler, mname)
return method()
def log_request (self, code = '-', size = '-'):
weblog.debug(self.prefix + (':"%s" %s %s' %
(self.requestline, str(code), str(size))))
def log_error (self, fmt, *args):
weblog.error(self.prefix + ':' + (fmt % args))
def log_message (self, fmt, *args):
weblog.info(self.prefix + ':' + (fmt % args))
_favicon = ("47494638396110001000c206006a5797927bc18f83ada9a1bfb49ceabda"
+ "4f4ffffffffffff21f904010a0007002c000000001000100000034578badcfe30b20"
+ "1c038d4e27a0f2004e081e2172a4051942abba260309ea6b805ab501581ae3129d90"
+ "1275c6404b80a72f5abcd4a2454cb334dbd9e58e74693b97425e07002003b")
_favicon = ''.join([chr(int(_favicon[n:n+2],16))
for n in xrange(0,len(_favicon),2)])
class CoreHandler (SplitRequestHandler):
"""
A default page to say hi from POX.
"""
def do_GET (self):
"""Serve a GET request."""
self.do_content(True)
def do_HEAD (self):
"""Serve a HEAD request."""
self.do_content(False)
def do_content (self, is_get):
if self.path == "/":
self.send_info(is_get)
elif self.path.startswith("/favicon."):
self.send_favicon(is_get)
else:
self.send_error(404, "File not found on CoreHandler")
def send_favicon (self, is_get = False):
self.send_response(200)
self.send_header("Content-type", "image/gif")
self.send_header("Content-Length", str(len(_favicon)))
self.end_headers()
if is_get:
self.wfile.write(_favicon)
def send_info (self, is_get = False):
r = "<html><head><title>POX</title></head>\n"
r += "<body>\n<h1>POX Webserver</h1>\n<h2>Components</h2>\n"
r += "<ul>"
for k in sorted(core.components):
v = core.components[k]
r += "<li>%s - %s</li>\n" % (cgi.escape(str(k)), cgi.escape(str(v)))
r += "</ul>\n\n<h2>Web Prefixes</h2>"
r += "<ul>"
m = [map(cgi.escape, map(str, [x[0],x[1],x[3]]))
for x in self.args.matches]
m.sort()
for v in m:
r += "<li><a href='{0}'>{0}</a> - {1} {2}</li>\n".format(*v)
r += "</ul></body></html>\n"
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-Length", str(len(r)))
self.end_headers()
if is_get:
self.wfile.write(r)
class StaticContentHandler (SplitRequestHandler, SimpleHTTPRequestHandler):
# We slightly modify SimpleHTTPRequestHandler to serve from given
# directories and inherit from from Python, but
# modified to serve from given directories and to inherit from
# SplitRequestHandler.
"""
A SplitRequestHandler for serving static content
This is largely the same as the Python SimpleHTTPRequestHandler, but
we modify it to serve from arbitrary directories at arbitrary
positions in the URL space.
"""
server_version = "StaticContentHandler/1.0"
def send_head (self):
# We override this and handle the directory redirection case because
# we want to include the per-split prefix.
path = self.translate_path(self.path)
if os.path.isdir(path):
if not self.path.endswith('/'):
self.send_response(301)
self.send_header("Location", self.prefix + self.path + "/")
self.end_headers()
return None
return SimpleHTTPRequestHandler.send_head(self)
def list_directory (self, dirpath):
# dirpath is an OS path
try:
d = os.listdir(dirpath)
except OSError as e:
if e.errno == errno.EACCES:
self.send_error(403, "This directory is not listable")
elif e.errno == errno.ENOENT:
self.send_error(404, "This directory does not exist")
else:
self.send_error(400, "Unknown error")
return None
d.sort(key=str.lower)
r = StringIO()
r.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n")
path = posixpath.join(self.prefix, cgi.escape(self.path).lstrip("/"))
r.write("<html><head><title>" + path + "</title></head>\n")
r.write("<body><pre>")
parts = path.rstrip("/").split("/")
r.write('<a href="/">/</a>')
for i,part in enumerate(parts):
link = urllib.quote("/".join(parts[:i+1]))
if i > 0: part += "/"
r.write('<a href="%s">%s</a>' % (link, cgi.escape(part)))
r.write("\n" + "-" * (0+len(path)) + "\n")
dirs = []
files = []
for f in d:
if f.startswith("."): continue
if os.path.isdir(os.path.join(dirpath, f)):
dirs.append(f)
else:
files.append(f)
def entry (n, rest=''):
link = urllib.quote(n)
name = cgi.escape(n)
r.write('<a href="%s">%s</a>\n' % (link,name+rest))
for f in dirs:
entry(f, "/")
for f in files:
entry(f)
r.write("</pre></body></html>")
r.seek(0)
self.send_response(200)
self.send_header("Content-Type", "text/html")
self.send_header("Content-Length", str(len(r.getvalue())))
self.end_headers()
return r
def translate_path (self, path, include_prefix = True):
"""
Translate a web-path to a local filesystem path
Odd path elements (e.g., ones that contain local filesystem path
separators) are stripped.
"""
def fixpath (p):
o = []
skip = 0
while True:
p,tail = posixpath.split(p)
if p in ('/','') and tail == '': break
if tail in ('','.', os.path.curdir, os.path.pardir): continue
if os.path.sep in tail: continue
if os.path.altsep and os.path.altsep in tail: continue
if os.path.splitdrive(tail)[0] != '': continue
if tail == '..':
skip += 1
continue
if skip:
skip -= 1
continue
o.append(tail)
o.reverse()
return o
# Remove query string / fragment
if "?" in path: path = path[:path.index("?")]
if "#" in path: path = path[:path.index("#")]
path = fixpath(path)
if path:
path = os.path.join(*path)
else:
path = ''
if include_prefix:
path = os.path.join(os.path.abspath(self.args['root']), path)
return path
def wrapRequestHandler (handlerClass):
return type("Split" + handlerClass.__name__,
(SplitRequestHandler, handlerClass, object), {})
from CGIHTTPServer import CGIHTTPRequestHandler
class SplitCGIRequestHandler (SplitRequestHandler,
CGIHTTPRequestHandler, object):
"""
Runs CGIRequestHandler serving from an arbitrary path.
This really should be a feature of CGIRequestHandler and the way of
implementing it here is scary and awful, but it at least sort of works.
"""
__lock = threading.Lock()
def _split_dispatch (self, command):
with self.__lock:
olddir = os.getcwd()
try:
os.chdir(self.args)
return SplitRequestHandler._split_dispatch(self, command)
finally:
os.chdir(olddir)
class SplitterRequestHandler (BaseHTTPRequestHandler):
def __init__ (self, *args, **kw):
#self.rec = Recording(args[0])
#self.args = args
#self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
#BaseHTTPRequestHandler.__init__(self, self.rec, *args[1:], **kw)
BaseHTTPRequestHandler.__init__(self, *args, **kw)
def log_request (self, code = '-', size = '-'):
weblog.debug('splitter:"%s" %s %s',
self.requestline, str(code), str(size))
def log_error (self, fmt, *args):
weblog.error('splitter:' + fmt % args)
def log_message (self, fmt, *args):
weblog.info('splitter:' + fmt % args)
def handle_one_request(self):
self.raw_requestline = self.rfile.readline()
if not self.raw_requestline:
self.close_connection = 1
return
if not self.parse_request(): # An error code has been sent, just exit
return
handler = None
while True:
for m in self.server.matches:
if self.path.startswith(m[0]):
#print m,self.path
handler = m[1](self, m[0], m[3])
#pb = self.rec.getPlayback()
#handler = m[1](pb, *self.args[1:])
_setAttribs(self, handler)
if m[2]:
# Trim. Behavior is not "perfect"
handler.path = self.path[len(m[0]):]
if m[0].endswith('/'):
handler.path = '/' + handler.path
break
if handler is None:
handler = self
if not self.path.endswith('/'):
# Handle splits like directories
self.send_response(301)
self.send_header("Location", self.path + "/")
self.end_headers()
break
break
return handler._split_dispatch(self.command)
class SplitThreadedServer(ThreadingMixIn, HTTPServer):
matches = [] # Tuples of (Prefix, TrimPrefix, Handler)
# def __init__ (self, *args, **kw):
# BaseHTTPRequestHandler.__init__(self, *args, **kw)
# self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def set_handler (self, prefix, handler, args = None, trim_prefix = True):
# Not very efficient
assert (handler is None) or (issubclass(handler, SplitRequestHandler))
self.matches = [m for m in self.matches if m[0] != prefix]
if handler is None: return
self.matches.append((prefix, handler, trim_prefix, args))
self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def add_static_dir (self, www_path, local_path=None, relative=False):
"""
Serves a directory of static content.
www_path is the prefix of the URL that maps to this directory.
local_path is the directory to serve content from. If it's not
specified, it is assume to be a directory with the same name as
www_path.
relative, if True, means that the local path is to be a sibling
of the calling module.
For an example, see the launch() function in this module.
"""
if not www_path.startswith('/'): |
if local_path is None:
local_path = www_path[1:]
if relative:
local_path = os.path.basename(local_path)
if relative:
import inspect
path = inspect.stack()[1][1]
path = os.path.dirname(path)
local_path = os.path.join(path, local_path)
local_path = os.path.abspath(local_path)
log.debug("Serving %s at %s", local_path, www_path)
self.set_handler(www_path, StaticContentHandler,
{'root':local_path}, True);
def launch (address='', port=8000, static=False):
httpd = SplitThreadedServer((address, int(port)), SplitterRequestHandler)
core.register("WebServer", httpd)
httpd.set_handler("/", CoreHandler, httpd, True)
#httpd.set_handler("/foo", StaticContentHandler, {'root':'.'}, True)
#httpd.set_handler("/f", StaticContentHandler, {'root':'pox'}, True)
#httpd.set_handler("/cgis", SplitCGIRequestHandler, "pox/web/www_root")
if static is True:
httpd.add_static_dir('static', 'www_root', relative=True)
elif static is False:
pass
else:
static = static.split(",")
for entry in static:
if entry.lower() == "":
httpd.add_static_dir('static', 'www_root', relative=True)
continue
if ':' not in entry:
directory = entry
prefix = os.path.split(directory)
if prefix[1] == '':
prefix = os.path.split(prefix[0])
prefix = prefix[1]
assert prefix != ''
else:
prefix,directory = entry.split(":")
directory = os.path.expanduser(directory)
httpd.add_static_dir(prefix, directory, relative=False)
def run ():
try:
log.debug("Listening on %s:%i" % httpd.socket.getsockname())
httpd.serve_forever()
except:
pass
log.info("Server quit")
thread = threading.Thread(target=run)
thread.daemon = True
thread.start()
| www_path = '/' + www_path | conditional_block |
EditAdminProductCard.tsx | import { useState, useContext } from 'react';
import * as React from 'react';
import { ApolloError } from '@apollo/client';
import {
ErrorDisplay,
MultipleChoiceInput,
useUniqueId,
BooleanInput,
BootstrapFormSelect,
usePropertySetters,
} from '@neinteractiveliterature/litform';
import AdminProductVariantsTable from './AdminProductVariantsTable';
import LiquidInput from '../../BuiltInFormControls/LiquidInput';
import useAsyncFunction from '../../useAsyncFunction';
import PricingStructureInput from './PricingStructureInput';
import buildProductInput from '../buildProductInput';
import AppRootContext from '../../AppRootContext';
import { AdminProductsQueryData, AdminProductsQueryDocument } from '../queries.generated';
import { useCreateProductMutation, useUpdateProductMutation } from '../mutations.generated';
import { EditingProduct } from './EditingProductTypes';
import { hasRealId } from '../../GeneratedIdUtils';
import { PricingStrategy } from '../../graphqlTypes.generated';
export type EditAdminProductCardProps = {
initialProduct: EditingProduct;
close: () => void;
ticketTypes: AdminProductsQueryData['convention']['ticket_types'];
};
function EditAdminProductCard({ initialProduct, close, ticketTypes }: EditAdminProductCardProps): JSX.Element {
const { ticketName } = useContext(AppRootContext);
const [createProduct] = useCreateProductMutation();
const [updateProduct] = useUpdateProductMutation();
const [product, setProduct] = useState(initialProduct);
const [setAvailable, setDescription, setName, setPaymentOptions, setPricingStructure, setProductVariants] =
usePropertySetters(
setProduct,
'available',
'description',
'name',
'payment_options',
'pricing_structure',
'product_variants',
);
const imageChanged = (event: React.ChangeEvent<HTMLInputElement>) => {
const file = (event.target.files ?? [])[0];
if (!file) {
return;
}
setProduct((prevEditingProduct) => ({
...prevEditingProduct,
imageFile: file,
}));
const reader = new FileReader();
reader.addEventListener('load', () => {
setProduct((prevEditingProduct) => ({
...prevEditingProduct,
image_url: reader.result?.toString(),
}));
});
reader.readAsDataURL(file);
};
const deleteVariant = (variantId: string) => {
setProduct((prevEditingProduct) => ({
...prevEditingProduct,
delete_variant_ids: [...prevEditingProduct.delete_variant_ids, variantId],
}));
};
const saveProduct = async () => {
const productInput = buildProductInput(product);
if (hasRealId(product)) {
await updateProduct({
variables: { id: product.id, product: productInput },
});
} else {
await createProduct({
variables: { product: productInput },
update: (cache, result) => {
const data = cache.readQuery<AdminProductsQueryData>({ query: AdminProductsQueryDocument });
const newProduct = result.data?.createProduct?.product;
if (!data || !newProduct) {
return;
}
cache.writeQuery<AdminProductsQueryData>({
query: AdminProductsQueryDocument,
data: {
...data,
convention: {
...data.convention,
products: [...data.convention.products, newProduct],
},
},
});
},
});
}
close();
};
const [saveClicked, saveError] = useAsyncFunction(saveProduct);
const paymentOptionChoices = [
{
label: (
<span>
<i className="bi-credit-card" /> Stripe
</span>
),
value: 'stripe',
},
{
label: (
<span>
<i className="bi-briefcase-fill" /> Pay at convention
</span>
),
value: 'pay_at_convention',
},
];
const imageInputId = useUniqueId('image-input-');
return (
<div className="mb-4 card bg-light border-dark glow-dark">
<div className="card-header">
<div className="d-flex align-items-center">
<div className="flex-grow-1">
<input
aria-label="Product name"
type="text"
className="form-control"
placeholder="Product name"
name="name"
value={product.name}
onChange={(event) => setName(event.target.value)}
/>
</div>
<div className="ms-2">
<ul className="list-inline m-0">
<li className="list-inline-item">
<button type="button" className="btn btn-sm btn-secondary" onClick={close}>
Cancel
</button>
</li>
<li className="list-inline-item">
<button type="button" className="btn btn-sm btn-primary" onClick={saveClicked}>
Save
</button>
</li>
</ul>
</div>
</div>
<div className="d-flex flex-wrap">
<div className="me-4">
<BooleanInput
name="available"
caption="Available for purchase"
value={product.available}
onChange={setAvailable}
/>
</div>
<div className="me-4">
<MultipleChoiceInput
name="payment_options"
caption="Payment options"
choices={paymentOptionChoices}
multiple
value={product.payment_options}
onChange={(newValue: string[]) => setPaymentOptions(newValue)}
choiceClassName="form-check-inline"
/>
</div>
<div>
<BootstrapFormSelect
label={`Provide ${ticketName} type`}
value={product.provides_ticket_type?.id}
onValueChange={(value) =>
setProduct((prev) => ({
...prev,
provides_ticket_type: ticketTypes.find((tt) => tt.id.toString() === value),
}))
}
>
<option value={undefined}>No {ticketName}</option>
{ticketTypes.map((ticketType) => (
<option value={ticketType.id} key={ticketType.id}>
{ticketType.description}
</option>
))}
</BootstrapFormSelect>
</div>
</div>
</div>
<div className="card-body">
<ErrorDisplay graphQLError={saveError as ApolloError} />
<div className="d-lg-flex justify-content-lg-start align-items-lg-start">
<div className="d-flex flex-column align-items-center">
{product.image && <img src={product.image.url} style={{ maxWidth: '200px' }} alt={product.name} />}
<div className="mt-2" style={{ width: '220px' }}>
<label className="form-label" htmlFor={imageInputId}>
Choose image...
</label>
<input
id={imageInputId}
className="form-control"
type="file"
accept="image/*"
onChange={imageChanged}
aria-label="Choose image..."
/>
</div>
</div>
<div className="ml-lg-4 col-lg">
<div className="d-flex">
<strong className="me-1">Base price:</strong>
<PricingStructureInput
value={
product.pricing_structure ?? {
__typename: 'PricingStructure',
pricing_strategy: PricingStrategy.Fixed,
value: {
__typename: 'Money',
currency_code: 'USD',
fractional: 0,
},
}
}
onChange={setPricingStructure}
/>
</div>
<LiquidInput value={product.description ?? ''} onChange={setDescription} />
<AdminProductVariantsTable
product={product}
editing
onChange={setProductVariants}
deleteVariant={deleteVariant}
/>
</div>
</div>
</div>
</div>
);
} |
export default EditAdminProductCard; | random_line_split | |
EditAdminProductCard.tsx | import { useState, useContext } from 'react';
import * as React from 'react';
import { ApolloError } from '@apollo/client';
import {
ErrorDisplay,
MultipleChoiceInput,
useUniqueId,
BooleanInput,
BootstrapFormSelect,
usePropertySetters,
} from '@neinteractiveliterature/litform';
import AdminProductVariantsTable from './AdminProductVariantsTable';
import LiquidInput from '../../BuiltInFormControls/LiquidInput';
import useAsyncFunction from '../../useAsyncFunction';
import PricingStructureInput from './PricingStructureInput';
import buildProductInput from '../buildProductInput';
import AppRootContext from '../../AppRootContext';
import { AdminProductsQueryData, AdminProductsQueryDocument } from '../queries.generated';
import { useCreateProductMutation, useUpdateProductMutation } from '../mutations.generated';
import { EditingProduct } from './EditingProductTypes';
import { hasRealId } from '../../GeneratedIdUtils';
import { PricingStrategy } from '../../graphqlTypes.generated';
export type EditAdminProductCardProps = {
initialProduct: EditingProduct;
close: () => void;
ticketTypes: AdminProductsQueryData['convention']['ticket_types'];
};
function EditAdminProductCard({ initialProduct, close, ticketTypes }: EditAdminProductCardProps): JSX.Element {
const { ticketName } = useContext(AppRootContext);
const [createProduct] = useCreateProductMutation();
const [updateProduct] = useUpdateProductMutation();
const [product, setProduct] = useState(initialProduct);
const [setAvailable, setDescription, setName, setPaymentOptions, setPricingStructure, setProductVariants] =
usePropertySetters(
setProduct,
'available',
'description',
'name',
'payment_options',
'pricing_structure',
'product_variants',
);
const imageChanged = (event: React.ChangeEvent<HTMLInputElement>) => {
const file = (event.target.files ?? [])[0];
if (!file) {
return;
}
setProduct((prevEditingProduct) => ({
...prevEditingProduct,
imageFile: file,
}));
const reader = new FileReader();
reader.addEventListener('load', () => {
setProduct((prevEditingProduct) => ({
...prevEditingProduct,
image_url: reader.result?.toString(),
}));
});
reader.readAsDataURL(file);
};
const deleteVariant = (variantId: string) => {
setProduct((prevEditingProduct) => ({
...prevEditingProduct,
delete_variant_ids: [...prevEditingProduct.delete_variant_ids, variantId],
}));
};
const saveProduct = async () => {
const productInput = buildProductInput(product);
if (hasRealId(product)) | else {
await createProduct({
variables: { product: productInput },
update: (cache, result) => {
const data = cache.readQuery<AdminProductsQueryData>({ query: AdminProductsQueryDocument });
const newProduct = result.data?.createProduct?.product;
if (!data || !newProduct) {
return;
}
cache.writeQuery<AdminProductsQueryData>({
query: AdminProductsQueryDocument,
data: {
...data,
convention: {
...data.convention,
products: [...data.convention.products, newProduct],
},
},
});
},
});
}
close();
};
const [saveClicked, saveError] = useAsyncFunction(saveProduct);
const paymentOptionChoices = [
{
label: (
<span>
<i className="bi-credit-card" /> Stripe
</span>
),
value: 'stripe',
},
{
label: (
<span>
<i className="bi-briefcase-fill" /> Pay at convention
</span>
),
value: 'pay_at_convention',
},
];
const imageInputId = useUniqueId('image-input-');
return (
<div className="mb-4 card bg-light border-dark glow-dark">
<div className="card-header">
<div className="d-flex align-items-center">
<div className="flex-grow-1">
<input
aria-label="Product name"
type="text"
className="form-control"
placeholder="Product name"
name="name"
value={product.name}
onChange={(event) => setName(event.target.value)}
/>
</div>
<div className="ms-2">
<ul className="list-inline m-0">
<li className="list-inline-item">
<button type="button" className="btn btn-sm btn-secondary" onClick={close}>
Cancel
</button>
</li>
<li className="list-inline-item">
<button type="button" className="btn btn-sm btn-primary" onClick={saveClicked}>
Save
</button>
</li>
</ul>
</div>
</div>
<div className="d-flex flex-wrap">
<div className="me-4">
<BooleanInput
name="available"
caption="Available for purchase"
value={product.available}
onChange={setAvailable}
/>
</div>
<div className="me-4">
<MultipleChoiceInput
name="payment_options"
caption="Payment options"
choices={paymentOptionChoices}
multiple
value={product.payment_options}
onChange={(newValue: string[]) => setPaymentOptions(newValue)}
choiceClassName="form-check-inline"
/>
</div>
<div>
<BootstrapFormSelect
label={`Provide ${ticketName} type`}
value={product.provides_ticket_type?.id}
onValueChange={(value) =>
setProduct((prev) => ({
...prev,
provides_ticket_type: ticketTypes.find((tt) => tt.id.toString() === value),
}))
}
>
<option value={undefined}>No {ticketName}</option>
{ticketTypes.map((ticketType) => (
<option value={ticketType.id} key={ticketType.id}>
{ticketType.description}
</option>
))}
</BootstrapFormSelect>
</div>
</div>
</div>
<div className="card-body">
<ErrorDisplay graphQLError={saveError as ApolloError} />
<div className="d-lg-flex justify-content-lg-start align-items-lg-start">
<div className="d-flex flex-column align-items-center">
{product.image && <img src={product.image.url} style={{ maxWidth: '200px' }} alt={product.name} />}
<div className="mt-2" style={{ width: '220px' }}>
<label className="form-label" htmlFor={imageInputId}>
Choose image...
</label>
<input
id={imageInputId}
className="form-control"
type="file"
accept="image/*"
onChange={imageChanged}
aria-label="Choose image..."
/>
</div>
</div>
<div className="ml-lg-4 col-lg">
<div className="d-flex">
<strong className="me-1">Base price:</strong>
<PricingStructureInput
value={
product.pricing_structure ?? {
__typename: 'PricingStructure',
pricing_strategy: PricingStrategy.Fixed,
value: {
__typename: 'Money',
currency_code: 'USD',
fractional: 0,
},
}
}
onChange={setPricingStructure}
/>
</div>
<LiquidInput value={product.description ?? ''} onChange={setDescription} />
<AdminProductVariantsTable
product={product}
editing
onChange={setProductVariants}
deleteVariant={deleteVariant}
/>
</div>
</div>
</div>
</div>
);
}
export default EditAdminProductCard;
| {
await updateProduct({
variables: { id: product.id, product: productInput },
});
} | conditional_block |
center-ellipses.pipe.ts | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Pipe, PipeTransform } from '@angular/core';
const limit = 72;
@Pipe({
name: 'centerEllipses'
})
export class CenterEllipsesPipe implements PipeTransform {
private trail = '...';
transform(value: any, length?: number): any {
let tLimit = length ? length : limit;
if (!value) {
return '';
}
| ? value.substring(0, tLimit / 2) + this.trail + value.substring(value.length - tLimit / 2, value.length)
: value;
}
} | if (!length) {
return value;
}
return value.length > tLimit | random_line_split |
center-ellipses.pipe.ts | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Pipe, PipeTransform } from '@angular/core';
const limit = 72;
@Pipe({
name: 'centerEllipses'
})
export class CenterEllipsesPipe implements PipeTransform {
private trail = '...';
transform(value: any, length?: number): any {
let tLimit = length ? length : limit;
if (!value) {
return '';
}
if (!length) |
return value.length > tLimit
? value.substring(0, tLimit / 2) + this.trail + value.substring(value.length - tLimit / 2, value.length)
: value;
}
}
| {
return value;
} | conditional_block |
center-ellipses.pipe.ts | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Pipe, PipeTransform } from '@angular/core';
const limit = 72;
@Pipe({
name: 'centerEllipses'
})
export class CenterEllipsesPipe implements PipeTransform {
private trail = '...';
| (value: any, length?: number): any {
let tLimit = length ? length : limit;
if (!value) {
return '';
}
if (!length) {
return value;
}
return value.length > tLimit
? value.substring(0, tLimit / 2) + this.trail + value.substring(value.length - tLimit / 2, value.length)
: value;
}
}
| transform | identifier_name |
public_api.spec.ts | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as filesUtils from './public_api';
describe('FilesUtils', () => {
describe('getImageFileDensity', () => {
const fakeFile = (name: string) => ({ name } as File);
const getImageFileDensity = filesUtils.getImageFileDensity;
it('Should return density info in filename', () => {
const file1 = fakeFile('test@1.5x.jpg');
const density1 = getImageFileDensity(file1);
expect(density1).toEqual(1.5);
const file2 = fakeFile('test@2x.jpg');
const density2 = getImageFileDensity(file2);
expect(density2).toEqual(2);
const file3 = fakeFile('test.image@4x.jpeg');
const density3 = getImageFileDensity(file3);
expect(density3).toEqual(4);
});
it('Should return 1 if density info not present in filename', () => {
const file1 = fakeFile('test.jpg');
const density1 = getImageFileDensity(file1);
expect(density1).toEqual(1); |
const file2 = fakeFile('test.image.jpg');
const density2 = getImageFileDensity(file2);
expect(density2).toEqual(1);
const file3 = fakeFile('test.image');
const density3 = getImageFileDensity(file3);
expect(density3).toEqual(1);
});
it('Should return 1 if density info in filename is malformed', () => {
const file1 = fakeFile('test@.jpg');
const density1 = getImageFileDensity(file1);
expect(density1).toEqual(1);
const file2 = fakeFile('test.image@ex.jpg');
const density2 = getImageFileDensity(file2);
expect(density2).toEqual(1);
const file3 = fakeFile('test.image@2ex.jpg');
const density3 = getImageFileDensity(file3);
expect(density3).toEqual(1);
});
});
}); | random_line_split | |
PortalUIEditorDialog.controller.ts | /// <reference path="../main/Resource.ts" />
/// <reference path="../../../typings/index.d.ts" />
module PortalUIEditor {
export class ResourceEditorController {
private $modalInstance: any;
private $http: any;
private $window: any;
json: string;
validationResult: string;
/** @ngInject */
constructor($modalInstance: any, $http: any, $window: any) {
this.$modalInstance = $modalInstance;
this.$http = $http;
this.$window = $window;
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Open');
}
validate() {
try {
JSON.parse(this.json);
this.validationResult = "Valid JSON!";
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Validate', 'Passed');
} catch (err) {
this.validationResult = "Invalid JSON: " + err.toString();
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Validate', 'Failed: ' + err.toString());
}
}
close() {
this.$modalInstance.dismiss('cancel');
};
preview() {
console.log('preview!'); | var obj: any;
try {
obj = JSON.parse(this.json);
} catch (err) {
this.validationResult = "Invalid JSON: " + err.toString();
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Preview', 'Failed: ' + err.toString());
return null;
}
let url = 'http://armportaluiredirector.azurewebsites.net/?json=POST';
this.$http.post(url, obj).then((response) => {
//console.log('Got response: ' + response);
let cacheUrl = response.data;
let portalUiUrl = 'https://portal.azure.com/#blade/Microsoft_Azure_Compute/CreateMultiVmWizardBlade/internal_bladeCallId/anything/internal_bladeCallerParams/{"initialData":{},"providerConfig":{"createUiDefinition":"{jsonUrl}"}}';
portalUiUrl = portalUiUrl.replace('{jsonUrl}', cacheUrl);
this.$window.open(portalUiUrl);
}, (response) => {
console.error('Not sure what to do: ' + response);
});
}
}
} | random_line_split | |
PortalUIEditorDialog.controller.ts | /// <reference path="../main/Resource.ts" />
/// <reference path="../../../typings/index.d.ts" />
module PortalUIEditor {
export class ResourceEditorController {
private $modalInstance: any;
private $http: any;
private $window: any;
json: string;
validationResult: string;
/** @ngInject */
constructor($modalInstance: any, $http: any, $window: any) |
validate() {
try {
JSON.parse(this.json);
this.validationResult = "Valid JSON!";
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Validate', 'Passed');
} catch (err) {
this.validationResult = "Invalid JSON: " + err.toString();
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Validate', 'Failed: ' + err.toString());
}
}
close() {
this.$modalInstance.dismiss('cancel');
};
preview() {
console.log('preview!');
var obj: any;
try {
obj = JSON.parse(this.json);
} catch (err) {
this.validationResult = "Invalid JSON: " + err.toString();
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Preview', 'Failed: ' + err.toString());
return null;
}
let url = 'http://armportaluiredirector.azurewebsites.net/?json=POST';
this.$http.post(url, obj).then((response) => {
//console.log('Got response: ' + response);
let cacheUrl = response.data;
let portalUiUrl = 'https://portal.azure.com/#blade/Microsoft_Azure_Compute/CreateMultiVmWizardBlade/internal_bladeCallId/anything/internal_bladeCallerParams/{"initialData":{},"providerConfig":{"createUiDefinition":"{jsonUrl}"}}';
portalUiUrl = portalUiUrl.replace('{jsonUrl}', cacheUrl);
this.$window.open(portalUiUrl);
}, (response) => {
console.error('Not sure what to do: ' + response);
});
}
}
}
| {
this.$modalInstance = $modalInstance;
this.$http = $http;
this.$window = $window;
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Open');
} | identifier_body |
PortalUIEditorDialog.controller.ts | /// <reference path="../main/Resource.ts" />
/// <reference path="../../../typings/index.d.ts" />
module PortalUIEditor {
export class ResourceEditorController {
private $modalInstance: any;
private $http: any;
private $window: any;
json: string;
validationResult: string;
/** @ngInject */
constructor($modalInstance: any, $http: any, $window: any) {
this.$modalInstance = $modalInstance;
this.$http = $http;
this.$window = $window;
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Open');
}
validate() {
try {
JSON.parse(this.json);
this.validationResult = "Valid JSON!";
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Validate', 'Passed');
} catch (err) {
this.validationResult = "Invalid JSON: " + err.toString();
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Validate', 'Failed: ' + err.toString());
}
}
| () {
this.$modalInstance.dismiss('cancel');
};
preview() {
console.log('preview!');
var obj: any;
try {
obj = JSON.parse(this.json);
} catch (err) {
this.validationResult = "Invalid JSON: " + err.toString();
ArmViz.Telemetry.sendEvent('PortalUIEditor', 'Preview', 'Failed: ' + err.toString());
return null;
}
let url = 'http://armportaluiredirector.azurewebsites.net/?json=POST';
this.$http.post(url, obj).then((response) => {
//console.log('Got response: ' + response);
let cacheUrl = response.data;
let portalUiUrl = 'https://portal.azure.com/#blade/Microsoft_Azure_Compute/CreateMultiVmWizardBlade/internal_bladeCallId/anything/internal_bladeCallerParams/{"initialData":{},"providerConfig":{"createUiDefinition":"{jsonUrl}"}}';
portalUiUrl = portalUiUrl.replace('{jsonUrl}', cacheUrl);
this.$window.open(portalUiUrl);
}, (response) => {
console.error('Not sure what to do: ' + response);
});
}
}
}
| close | identifier_name |
login.rs | use std::io::prelude::*;
use std::io;
use cargo::ops;
use cargo::core::{SourceId, Source};
use cargo::sources::RegistrySource;
use cargo::util::{CliResult, CliError, Config};
#[derive(RustcDecodable)]
struct | {
flag_host: Option<String>,
arg_token: Option<String>,
flag_verbose: bool,
}
pub const USAGE: &'static str = "
Save an api token from the registry locally
Usage:
cargo login [options] [<token>]
Options:
-h, --help Print this message
--host HOST Host to set the token for
-v, --verbose Use verbose output
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
config.shell().set_verbose(options.flag_verbose);
let token = match options.arg_token.clone() {
Some(token) => token,
None => {
let err = (|| {
let src = try!(SourceId::for_central(config));
let mut src = RegistrySource::new(&src, config);
try!(src.update());
let config = try!(src.config());
let host = options.flag_host.clone().unwrap_or(config.api);
println!("please visit {}me and paste the API Token below",
host);
let mut line = String::new();
let input = io::stdin();
try!(input.lock().read_line(&mut line));
Ok(line)
})();
try!(err.map_err(|e| CliError::from_boxed(e, 101)))
}
};
let token = token.trim().to_string();
try!(ops::registry_login(config, token).map_err(|e| {
CliError::from_boxed(e, 101)
}));
Ok(None)
}
| Options | identifier_name |
login.rs | use std::io::prelude::*;
use std::io;
use cargo::ops;
use cargo::core::{SourceId, Source};
use cargo::sources::RegistrySource;
use cargo::util::{CliResult, CliError, Config};
#[derive(RustcDecodable)]
struct Options {
flag_host: Option<String>,
arg_token: Option<String>,
flag_verbose: bool,
}
pub const USAGE: &'static str = "
Save an api token from the registry locally | cargo login [options] [<token>]
Options:
-h, --help Print this message
--host HOST Host to set the token for
-v, --verbose Use verbose output
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
config.shell().set_verbose(options.flag_verbose);
let token = match options.arg_token.clone() {
Some(token) => token,
None => {
let err = (|| {
let src = try!(SourceId::for_central(config));
let mut src = RegistrySource::new(&src, config);
try!(src.update());
let config = try!(src.config());
let host = options.flag_host.clone().unwrap_or(config.api);
println!("please visit {}me and paste the API Token below",
host);
let mut line = String::new();
let input = io::stdin();
try!(input.lock().read_line(&mut line));
Ok(line)
})();
try!(err.map_err(|e| CliError::from_boxed(e, 101)))
}
};
let token = token.trim().to_string();
try!(ops::registry_login(config, token).map_err(|e| {
CliError::from_boxed(e, 101)
}));
Ok(None)
} |
Usage: | random_line_split |
index.d.ts | /*
* @license Apache-2.0
*
* Copyright (c) 2019 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
|
/// <reference types="@stdlib/types"/>
import { Complex64 } from '@stdlib/types/object';
/**
* Subtracts two single-precision complex floating-point numbers.
*
* @param z1 - complex number
* @param z2 - complex number
* @returns result
*
* @example
* var Complex64 = require( `@stdlib/complex/float32` );
* var real = require( `@stdlib/complex/real` );
* var imag = require( `@stdlib/complex/imag` );
*
* var z1 = new Complex64( 5.0, 3.0 );
* // returns <Complex64>
*
* var z2 = new Complex64( -2.0, 1.0 );
* // returns <Complex64>
*
* var out = csubf( z1, z2 );
* // returns <Complex64>
*
* var re = real( out );
* // returns 7.0
*
* var im = imag( out );
* // returns 2.0
*/
declare function csubf( z1: Complex64, z2: Complex64 ): Complex64;
// EXPORTS //
export = csubf; | // TypeScript Version: 2.0 | random_line_split |
Button.tsx | import * as React from 'react';
import PropTypes from 'prop-types';
import classNames from 'classnames';
import compose from 'recompose/compose';
import SafeAnchor from '../SafeAnchor';
import Ripple from '../Ripple';
import { withStyleProps, getUnhandledProps, defaultProps, prefix, isOneOf } from '../utils';
import { ButtonProps } from './Button.d';
class Button extends React.Component<ButtonProps> {
static propTypes = {
appearance: PropTypes.oneOf(['default', 'primary', 'link', 'subtle', 'ghost']),
active: PropTypes.bool,
componentClass: PropTypes.elementType,
children: PropTypes.node,
block: PropTypes.bool,
loading: PropTypes.bool,
disabled: PropTypes.bool,
ripple: PropTypes.bool
};
static defaultProps = {
appearance: 'default',
ripple: true
};
render() {
const {
active,
disabled,
loading,
block,
className,
classPrefix,
appearance,
children,
ripple,
componentClass: Component,
...props
} = this.props;
const unhandled = getUnhandledProps(Button, props);
const addPrefix = prefix(classPrefix);
const classes = classNames(classPrefix, addPrefix(appearance), className, {
[addPrefix('active')]: active,
[addPrefix('disabled')]: disabled,
[addPrefix('loading')]: loading,
[addPrefix('block')]: block
});
const rippleElement = ripple && !isOneOf(appearance, ['link', 'ghost']) ? <Ripple /> : null;
const spin = <span className={addPrefix('spin')} />;
if (Component === 'button') {
| (unhandled.href) {
return (
<SafeAnchor {...unhandled} aria-disabled={disabled} className={classes}>
{loading && spin}
{children}
{rippleElement}
</SafeAnchor>
);
}
unhandled.type = unhandled.type || 'button';
}
return (
<Component {...unhandled} disabled={disabled} className={classes}>
{loading && spin}
{children}
{rippleElement}
</Component>
);
}
}
export default compose<any, ButtonProps>(
withStyleProps<ButtonProps>({
hasSize: true,
hasColor: true
}),
defaultProps<ButtonProps>({
classPrefix: 'btn',
componentClass: 'button'
})
)(Button);
| if | identifier_name |
Button.tsx | import * as React from 'react';
import PropTypes from 'prop-types';
import classNames from 'classnames';
import compose from 'recompose/compose';
import SafeAnchor from '../SafeAnchor';
import Ripple from '../Ripple';
import { withStyleProps, getUnhandledProps, defaultProps, prefix, isOneOf } from '../utils';
import { ButtonProps } from './Button.d';
class Button extends React.Component<ButtonProps> {
static propTypes = {
appearance: PropTypes.oneOf(['default', 'primary', 'link', 'subtle', 'ghost']),
active: PropTypes.bool,
componentClass: PropTypes.elementType,
children: PropTypes.node,
block: PropTypes.bool,
loading: PropTypes.bool,
disabled: PropTypes.bool,
ripple: PropTypes.bool
};
static defaultProps = {
appearance: 'default',
ripple: true
};
render() {
const {
active,
disabled,
loading,
block,
className,
classPrefix,
appearance,
children,
ripple,
componentClass: Component,
...props
} = this.props;
const unhandled = getUnhandledProps(Button, props);
const addPrefix = prefix(classPrefix);
const classes = classNames(classPrefix, addPrefix(appearance), className, {
[addPrefix('active')]: active,
[addPrefix('disabled')]: disabled,
[addPrefix('loading')]: loading,
[addPrefix('block')]: block
});
const rippleElement = ripple && !isOneOf(appearance, ['link', 'ghost']) ? <Ripple /> : null;
const spin = <span className={addPrefix('spin')} />;
if (Component === 'button') {
if (unhandled.href) |
unhandled.type = unhandled.type || 'button';
}
return (
<Component {...unhandled} disabled={disabled} className={classes}>
{loading && spin}
{children}
{rippleElement}
</Component>
);
}
}
export default compose<any, ButtonProps>(
withStyleProps<ButtonProps>({
hasSize: true,
hasColor: true
}),
defaultProps<ButtonProps>({
classPrefix: 'btn',
componentClass: 'button'
})
)(Button);
| {
return (
<SafeAnchor {...unhandled} aria-disabled={disabled} className={classes}>
{loading && spin}
{children}
{rippleElement}
</SafeAnchor>
);
} | identifier_body |
Button.tsx | import * as React from 'react';
import PropTypes from 'prop-types';
import classNames from 'classnames';
import compose from 'recompose/compose';
import SafeAnchor from '../SafeAnchor';
import Ripple from '../Ripple';
import { withStyleProps, getUnhandledProps, defaultProps, prefix, isOneOf } from '../utils';
import { ButtonProps } from './Button.d';
class Button extends React.Component<ButtonProps> {
static propTypes = {
appearance: PropTypes.oneOf(['default', 'primary', 'link', 'subtle', 'ghost']),
active: PropTypes.bool,
componentClass: PropTypes.elementType,
children: PropTypes.node,
block: PropTypes.bool,
loading: PropTypes.bool,
disabled: PropTypes.bool,
ripple: PropTypes.bool
};
static defaultProps = {
appearance: 'default',
ripple: true
};
render() {
const {
active,
disabled,
loading,
block,
className,
classPrefix,
appearance,
children,
ripple,
componentClass: Component,
...props
} = this.props;
const unhandled = getUnhandledProps(Button, props);
const addPrefix = prefix(classPrefix);
const classes = classNames(classPrefix, addPrefix(appearance), className, {
[addPrefix('active')]: active,
[addPrefix('disabled')]: disabled,
[addPrefix('loading')]: loading,
[addPrefix('block')]: block
});
const rippleElement = ripple && !isOneOf(appearance, ['link', 'ghost']) ? <Ripple /> : null;
const spin = <span className={addPrefix('spin')} />;
if (Component === 'button') {
if (unhandled.href) { | return (
<SafeAnchor {...unhandled} aria-disabled={disabled} className={classes}>
{loading && spin}
{children}
{rippleElement}
</SafeAnchor>
);
}
unhandled.type = unhandled.type || 'button';
}
return (
<Component {...unhandled} disabled={disabled} className={classes}>
{loading && spin}
{children}
{rippleElement}
</Component>
);
}
}
export default compose<any, ButtonProps>(
withStyleProps<ButtonProps>({
hasSize: true,
hasColor: true
}),
defaultProps<ButtonProps>({
classPrefix: 'btn',
componentClass: 'button'
})
)(Button); | random_line_split | |
linear.py | import heapq
import os
import numpy
from smqtk.algorithms.nn_index.hash_index import HashIndex
from smqtk.utils.bit_utils import (
bit_vector_to_int_large,
int_to_bit_vector_large,
)
from smqtk.utils.metrics import hamming_distance
__author__ = "paul.tunison@kitware.com"
class LinearHashIndex (HashIndex):
"""
Basic linear index using heap sort (aka brute force).
"""
@classmethod
def is_usable(cls):
return True
def __init__(self, file_cache=None):
"""
Initialize linear, brute-force hash index
:param file_cache: Optional path to a file to cache our index to.
:type file_cache: str
"""
super(LinearHashIndex, self).__init__()
self.file_cache = file_cache
self.index = numpy.array([], bool)
self.load_cache()
def get_config(self):
return {
'file_cache': self.file_cache,
}
def load_cache(self):
"""
Load from file cache if we have one
"""
if self.file_cache and os.path.isfile(self.file_cache):
self.index = numpy.load(self.file_cache)
def save_cache(self):
"""
save to file cache if configures
"""
if self.file_cache:
numpy.save(self.file_cache, self.index)
def count(self):
return len(self.index)
def build_index(self, hashes):
"""
Build the index with the give hash codes (bit-vectors).
Subsequent calls to this method should rebuild the index, not add to
it, or raise an exception to as to protect the current index.
:raises ValueError: No data available in the given iterable.
:param hashes: Iterable of descriptor elements to build index
over.
:type hashes: collections.Iterable[numpy.ndarray[bool]]
"""
new_index = numpy.array(map(bit_vector_to_int_large, hashes))
if not new_index.size:
raise ValueError("No hashes given to index.")
self.index = new_index
self.save_cache()
def nn(self, h, n=1):
"""
Return the nearest `N` neighbors to the given hash code.
| length as indexed hash codes.
:type h: numpy.ndarray[bool]
:param n: Number of nearest neighbors to find.
:type n: int
:raises ValueError: No index to query from.
:return: Tuple of nearest N hash codes and a tuple of the distance
values to those neighbors.
:rtype: (tuple[numpy.ndarray[bool], tuple[float])
"""
super(LinearHashIndex, self).nn(h, n)
h_int = bit_vector_to_int_large(h)
bits = len(h)
#: :type: list[int|long]
near_codes = \
heapq.nsmallest(n, self.index,
lambda e: hamming_distance(h_int, e)
)
distances = map(hamming_distance, near_codes,
[h_int] * len(near_codes))
return [int_to_bit_vector_large(c, bits) for c in near_codes], \
[d / float(bits) for d in distances] | Distances are in the range [0,1] and are the percent different each
neighbor hash is from the query, based on the number of bits contained
in the query.
:param h: Hash code to compute the neighbors of. Should be the same bit | random_line_split |
linear.py | import heapq
import os
import numpy
from smqtk.algorithms.nn_index.hash_index import HashIndex
from smqtk.utils.bit_utils import (
bit_vector_to_int_large,
int_to_bit_vector_large,
)
from smqtk.utils.metrics import hamming_distance
__author__ = "paul.tunison@kitware.com"
class LinearHashIndex (HashIndex):
"""
Basic linear index using heap sort (aka brute force).
"""
@classmethod
def is_usable(cls):
|
def __init__(self, file_cache=None):
"""
Initialize linear, brute-force hash index
:param file_cache: Optional path to a file to cache our index to.
:type file_cache: str
"""
super(LinearHashIndex, self).__init__()
self.file_cache = file_cache
self.index = numpy.array([], bool)
self.load_cache()
def get_config(self):
return {
'file_cache': self.file_cache,
}
def load_cache(self):
"""
Load from file cache if we have one
"""
if self.file_cache and os.path.isfile(self.file_cache):
self.index = numpy.load(self.file_cache)
def save_cache(self):
"""
save to file cache if configures
"""
if self.file_cache:
numpy.save(self.file_cache, self.index)
def count(self):
return len(self.index)
def build_index(self, hashes):
"""
Build the index with the give hash codes (bit-vectors).
Subsequent calls to this method should rebuild the index, not add to
it, or raise an exception to as to protect the current index.
:raises ValueError: No data available in the given iterable.
:param hashes: Iterable of descriptor elements to build index
over.
:type hashes: collections.Iterable[numpy.ndarray[bool]]
"""
new_index = numpy.array(map(bit_vector_to_int_large, hashes))
if not new_index.size:
raise ValueError("No hashes given to index.")
self.index = new_index
self.save_cache()
def nn(self, h, n=1):
"""
Return the nearest `N` neighbors to the given hash code.
Distances are in the range [0,1] and are the percent different each
neighbor hash is from the query, based on the number of bits contained
in the query.
:param h: Hash code to compute the neighbors of. Should be the same bit
length as indexed hash codes.
:type h: numpy.ndarray[bool]
:param n: Number of nearest neighbors to find.
:type n: int
:raises ValueError: No index to query from.
:return: Tuple of nearest N hash codes and a tuple of the distance
values to those neighbors.
:rtype: (tuple[numpy.ndarray[bool], tuple[float])
"""
super(LinearHashIndex, self).nn(h, n)
h_int = bit_vector_to_int_large(h)
bits = len(h)
#: :type: list[int|long]
near_codes = \
heapq.nsmallest(n, self.index,
lambda e: hamming_distance(h_int, e)
)
distances = map(hamming_distance, near_codes,
[h_int] * len(near_codes))
return [int_to_bit_vector_large(c, bits) for c in near_codes], \
[d / float(bits) for d in distances]
| return True | identifier_body |
linear.py | import heapq
import os
import numpy
from smqtk.algorithms.nn_index.hash_index import HashIndex
from smqtk.utils.bit_utils import (
bit_vector_to_int_large,
int_to_bit_vector_large,
)
from smqtk.utils.metrics import hamming_distance
__author__ = "paul.tunison@kitware.com"
class LinearHashIndex (HashIndex):
"""
Basic linear index using heap sort (aka brute force).
"""
@classmethod
def is_usable(cls):
return True
def __init__(self, file_cache=None):
"""
Initialize linear, brute-force hash index
:param file_cache: Optional path to a file to cache our index to.
:type file_cache: str
"""
super(LinearHashIndex, self).__init__()
self.file_cache = file_cache
self.index = numpy.array([], bool)
self.load_cache()
def get_config(self):
return {
'file_cache': self.file_cache,
}
def load_cache(self):
"""
Load from file cache if we have one
"""
if self.file_cache and os.path.isfile(self.file_cache):
self.index = numpy.load(self.file_cache)
def save_cache(self):
"""
save to file cache if configures
"""
if self.file_cache:
|
def count(self):
return len(self.index)
def build_index(self, hashes):
"""
Build the index with the give hash codes (bit-vectors).
Subsequent calls to this method should rebuild the index, not add to
it, or raise an exception to as to protect the current index.
:raises ValueError: No data available in the given iterable.
:param hashes: Iterable of descriptor elements to build index
over.
:type hashes: collections.Iterable[numpy.ndarray[bool]]
"""
new_index = numpy.array(map(bit_vector_to_int_large, hashes))
if not new_index.size:
raise ValueError("No hashes given to index.")
self.index = new_index
self.save_cache()
def nn(self, h, n=1):
"""
Return the nearest `N` neighbors to the given hash code.
Distances are in the range [0,1] and are the percent different each
neighbor hash is from the query, based on the number of bits contained
in the query.
:param h: Hash code to compute the neighbors of. Should be the same bit
length as indexed hash codes.
:type h: numpy.ndarray[bool]
:param n: Number of nearest neighbors to find.
:type n: int
:raises ValueError: No index to query from.
:return: Tuple of nearest N hash codes and a tuple of the distance
values to those neighbors.
:rtype: (tuple[numpy.ndarray[bool], tuple[float])
"""
super(LinearHashIndex, self).nn(h, n)
h_int = bit_vector_to_int_large(h)
bits = len(h)
#: :type: list[int|long]
near_codes = \
heapq.nsmallest(n, self.index,
lambda e: hamming_distance(h_int, e)
)
distances = map(hamming_distance, near_codes,
[h_int] * len(near_codes))
return [int_to_bit_vector_large(c, bits) for c in near_codes], \
[d / float(bits) for d in distances]
| numpy.save(self.file_cache, self.index) | conditional_block |
linear.py | import heapq
import os
import numpy
from smqtk.algorithms.nn_index.hash_index import HashIndex
from smqtk.utils.bit_utils import (
bit_vector_to_int_large,
int_to_bit_vector_large,
)
from smqtk.utils.metrics import hamming_distance
__author__ = "paul.tunison@kitware.com"
class LinearHashIndex (HashIndex):
"""
Basic linear index using heap sort (aka brute force).
"""
@classmethod
def | (cls):
return True
def __init__(self, file_cache=None):
"""
Initialize linear, brute-force hash index
:param file_cache: Optional path to a file to cache our index to.
:type file_cache: str
"""
super(LinearHashIndex, self).__init__()
self.file_cache = file_cache
self.index = numpy.array([], bool)
self.load_cache()
def get_config(self):
return {
'file_cache': self.file_cache,
}
def load_cache(self):
"""
Load from file cache if we have one
"""
if self.file_cache and os.path.isfile(self.file_cache):
self.index = numpy.load(self.file_cache)
def save_cache(self):
"""
save to file cache if configures
"""
if self.file_cache:
numpy.save(self.file_cache, self.index)
def count(self):
return len(self.index)
def build_index(self, hashes):
"""
Build the index with the give hash codes (bit-vectors).
Subsequent calls to this method should rebuild the index, not add to
it, or raise an exception to as to protect the current index.
:raises ValueError: No data available in the given iterable.
:param hashes: Iterable of descriptor elements to build index
over.
:type hashes: collections.Iterable[numpy.ndarray[bool]]
"""
new_index = numpy.array(map(bit_vector_to_int_large, hashes))
if not new_index.size:
raise ValueError("No hashes given to index.")
self.index = new_index
self.save_cache()
def nn(self, h, n=1):
"""
Return the nearest `N` neighbors to the given hash code.
Distances are in the range [0,1] and are the percent different each
neighbor hash is from the query, based on the number of bits contained
in the query.
:param h: Hash code to compute the neighbors of. Should be the same bit
length as indexed hash codes.
:type h: numpy.ndarray[bool]
:param n: Number of nearest neighbors to find.
:type n: int
:raises ValueError: No index to query from.
:return: Tuple of nearest N hash codes and a tuple of the distance
values to those neighbors.
:rtype: (tuple[numpy.ndarray[bool], tuple[float])
"""
super(LinearHashIndex, self).nn(h, n)
h_int = bit_vector_to_int_large(h)
bits = len(h)
#: :type: list[int|long]
near_codes = \
heapq.nsmallest(n, self.index,
lambda e: hamming_distance(h_int, e)
)
distances = map(hamming_distance, near_codes,
[h_int] * len(near_codes))
return [int_to_bit_vector_large(c, bits) for c in near_codes], \
[d / float(bits) for d in distances]
| is_usable | identifier_name |
views.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Images and Snapshots.
"""
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import api
from horizon import exceptions
from horizon import tables
from horizon import tabs
from .images.tables import ImagesTable
from .snapshots.tables import SnapshotsTable
from .volume_snapshots.tables import VolumeSnapshotsTable
from .volume_snapshots.tabs import SnapshotDetailTabs
LOG = logging.getLogger(__name__)
class IndexView(tables.MultiTableView): | return getattr(self, "_more_%s" % table.name, False)
def get_images_data(self):
marker = self.request.GET.get(ImagesTable._meta.pagination_param, None)
try:
# FIXME(gabriel): The paging is going to be strange here due to
# our filtering after the fact.
(all_images,
self._more_images) = api.image_list_detailed(self.request,
marker=marker)
images = [im for im in all_images
if im.container_format not in ['aki', 'ari'] and
im.properties.get("image_type", '') != "snapshot"]
except:
images = []
exceptions.handle(self.request, _("Unable to retrieve images."))
return images
def get_snapshots_data(self):
req = self.request
marker = req.GET.get(SnapshotsTable._meta.pagination_param, None)
try:
snaps, self._more_snapshots = api.snapshot_list_detailed(req,
marker=marker)
except:
snaps = []
exceptions.handle(req, _("Unable to retrieve snapshots."))
return snaps
def get_volume_snapshots_data(self):
try:
snapshots = api.volume_snapshot_list(self.request)
except:
snapshots = []
exceptions.handle(self.request, _("Unable to retrieve "
"volume snapshots."))
return snapshots
class DetailView(tabs.TabView):
tab_group_class = SnapshotDetailTabs
template_name = 'project/images_and_snapshots/snapshots/detail.html' | table_classes = (ImagesTable, SnapshotsTable, VolumeSnapshotsTable)
template_name = 'project/images_and_snapshots/index.html'
def has_more_data(self, table): | random_line_split |
views.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Images and Snapshots.
"""
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import api
from horizon import exceptions
from horizon import tables
from horizon import tabs
from .images.tables import ImagesTable
from .snapshots.tables import SnapshotsTable
from .volume_snapshots.tables import VolumeSnapshotsTable
from .volume_snapshots.tabs import SnapshotDetailTabs
LOG = logging.getLogger(__name__)
class IndexView(tables.MultiTableView):
|
class DetailView(tabs.TabView):
tab_group_class = SnapshotDetailTabs
template_name = 'project/images_and_snapshots/snapshots/detail.html'
| table_classes = (ImagesTable, SnapshotsTable, VolumeSnapshotsTable)
template_name = 'project/images_and_snapshots/index.html'
def has_more_data(self, table):
return getattr(self, "_more_%s" % table.name, False)
def get_images_data(self):
marker = self.request.GET.get(ImagesTable._meta.pagination_param, None)
try:
# FIXME(gabriel): The paging is going to be strange here due to
# our filtering after the fact.
(all_images,
self._more_images) = api.image_list_detailed(self.request,
marker=marker)
images = [im for im in all_images
if im.container_format not in ['aki', 'ari'] and
im.properties.get("image_type", '') != "snapshot"]
except:
images = []
exceptions.handle(self.request, _("Unable to retrieve images."))
return images
def get_snapshots_data(self):
req = self.request
marker = req.GET.get(SnapshotsTable._meta.pagination_param, None)
try:
snaps, self._more_snapshots = api.snapshot_list_detailed(req,
marker=marker)
except:
snaps = []
exceptions.handle(req, _("Unable to retrieve snapshots."))
return snaps
def get_volume_snapshots_data(self):
try:
snapshots = api.volume_snapshot_list(self.request)
except:
snapshots = []
exceptions.handle(self.request, _("Unable to retrieve "
"volume snapshots."))
return snapshots | identifier_body |
views.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Images and Snapshots.
"""
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import api
from horizon import exceptions
from horizon import tables
from horizon import tabs
from .images.tables import ImagesTable
from .snapshots.tables import SnapshotsTable
from .volume_snapshots.tables import VolumeSnapshotsTable
from .volume_snapshots.tabs import SnapshotDetailTabs
LOG = logging.getLogger(__name__)
class IndexView(tables.MultiTableView):
table_classes = (ImagesTable, SnapshotsTable, VolumeSnapshotsTable)
template_name = 'project/images_and_snapshots/index.html'
def | (self, table):
return getattr(self, "_more_%s" % table.name, False)
def get_images_data(self):
marker = self.request.GET.get(ImagesTable._meta.pagination_param, None)
try:
# FIXME(gabriel): The paging is going to be strange here due to
# our filtering after the fact.
(all_images,
self._more_images) = api.image_list_detailed(self.request,
marker=marker)
images = [im for im in all_images
if im.container_format not in ['aki', 'ari'] and
im.properties.get("image_type", '') != "snapshot"]
except:
images = []
exceptions.handle(self.request, _("Unable to retrieve images."))
return images
def get_snapshots_data(self):
req = self.request
marker = req.GET.get(SnapshotsTable._meta.pagination_param, None)
try:
snaps, self._more_snapshots = api.snapshot_list_detailed(req,
marker=marker)
except:
snaps = []
exceptions.handle(req, _("Unable to retrieve snapshots."))
return snaps
def get_volume_snapshots_data(self):
try:
snapshots = api.volume_snapshot_list(self.request)
except:
snapshots = []
exceptions.handle(self.request, _("Unable to retrieve "
"volume snapshots."))
return snapshots
class DetailView(tabs.TabView):
tab_group_class = SnapshotDetailTabs
template_name = 'project/images_and_snapshots/snapshots/detail.html'
| has_more_data | identifier_name |
main.js | //defining the searched words
var searchWord1 = 'seksuaaliset vähemmistöt';
var searchWord2 = 'lhbt';
$(function () {
// getting the pictures using finna api
function getPictures(pictureSearch1, pictureSearch2) {
console.log('getPictures');
$.ajax({
'url':
'https://api.finna.fi/v1/search?lookfor=' + pictureSearch1 + '+OR+'
+ pictureSearch2 + '&filter[]=online_boolean:"1"&filter[]=format:"0/Image/',
'dataType': 'json',
'success': onGetPictures
});
}
//getting the url:s and adding the pictures to html
function onGetPictures(obj) {
if (obj) {
var records = obj.records;
var pictureAddress = records.map(
function (rec) {
return rec.images;
}
);
console.log(pictureUrl);
} else {
console.log('Not found!');
}
//This function should be in two parts but I did not know how to do it....
var firstPictureUrl = 'https://api.finna.fi' + pictureAddress[0];
$('#carousel').append('<div class="item active" id="item"><img src="'
+ firstPictureUrl + '" alt="pic"></div>');
for (var i = 1; i < pictureAddress.length; i++) {
var pictureUrl = 'https://api.finna.fi' + pictureAddress[i];
console.log(pictureUrl);
$('#carousel').append('<div class="item"><img src="' + pictureUrl
+ '" alt="pic"></div>');
}
| console.log('getPictures');
$.ajax({
'url': 'https://api.finna.fi/v1/search?lookfor=' + searchBook + '&filter[]=online_boolean:"1"&filter[]=format:"0/Book/',
'dataType': 'json',
'success': onGetBooks
});
}
function onGetBooks(obj) {
if (obj) {
var bookRecords = obj.records;
var bookNames = bookRecords.map(
function(rec) {
return rec.title;
}
);
//$('#result').append(bookNames.join(', ') + '<br>');
console.log(bookNames);
} else {
console.log('Not found!');
}
for (var i=0; i<bookNames.length; i++){
var singleBook = bookNames[i];
$('#result').append('<li>' + singleBook + '</li>');
}
}
$('#searchButton').click(function () {
$('#result').empty();
var searchValue = $('#searchInput').val();
console.log(searchValue);
getBooks(searchValue);
});*/
}); | }
getPictures(searchWord1, searchWord2);
/*function getBooks(searchBook) { | random_line_split |
main.js |
//defining the searched words
var searchWord1 = 'seksuaaliset vähemmistöt';
var searchWord2 = 'lhbt';
$(function () {
// getting the pictures using finna api
function getPictures(pictureSearch1, pictureSearch2) {
console.log('getPictures');
$.ajax({
'url':
'https://api.finna.fi/v1/search?lookfor=' + pictureSearch1 + '+OR+'
+ pictureSearch2 + '&filter[]=online_boolean:"1"&filter[]=format:"0/Image/',
'dataType': 'json',
'success': onGetPictures
});
}
//getting the url:s and adding the pictures to html
function onGetPictures(obj) {
if (obj) {
| lse {
console.log('Not found!');
}
//This function should be in two parts but I did not know how to do it....
var firstPictureUrl = 'https://api.finna.fi' + pictureAddress[0];
$('#carousel').append('<div class="item active" id="item"><img src="'
+ firstPictureUrl + '" alt="pic"></div>');
for (var i = 1; i < pictureAddress.length; i++) {
var pictureUrl = 'https://api.finna.fi' + pictureAddress[i];
console.log(pictureUrl);
$('#carousel').append('<div class="item"><img src="' + pictureUrl
+ '" alt="pic"></div>');
}
}
getPictures(searchWord1, searchWord2);
/*function getBooks(searchBook) {
console.log('getPictures');
$.ajax({
'url': 'https://api.finna.fi/v1/search?lookfor=' + searchBook + '&filter[]=online_boolean:"1"&filter[]=format:"0/Book/',
'dataType': 'json',
'success': onGetBooks
});
}
function onGetBooks(obj) {
if (obj) {
var bookRecords = obj.records;
var bookNames = bookRecords.map(
function(rec) {
return rec.title;
}
);
//$('#result').append(bookNames.join(', ') + '<br>');
console.log(bookNames);
} else {
console.log('Not found!');
}
for (var i=0; i<bookNames.length; i++){
var singleBook = bookNames[i];
$('#result').append('<li>' + singleBook + '</li>');
}
}
$('#searchButton').click(function () {
$('#result').empty();
var searchValue = $('#searchInput').val();
console.log(searchValue);
getBooks(searchValue);
});*/
});
| var records = obj.records;
var pictureAddress = records.map(
function (rec) {
return rec.images;
}
);
console.log(pictureUrl);
} e | conditional_block |
main.js |
//defining the searched words
var searchWord1 = 'seksuaaliset vähemmistöt';
var searchWord2 = 'lhbt';
$(function () {
// getting the pictures using finna api
function ge | ictureSearch1, pictureSearch2) {
console.log('getPictures');
$.ajax({
'url':
'https://api.finna.fi/v1/search?lookfor=' + pictureSearch1 + '+OR+'
+ pictureSearch2 + '&filter[]=online_boolean:"1"&filter[]=format:"0/Image/',
'dataType': 'json',
'success': onGetPictures
});
}
//getting the url:s and adding the pictures to html
function onGetPictures(obj) {
if (obj) {
var records = obj.records;
var pictureAddress = records.map(
function (rec) {
return rec.images;
}
);
console.log(pictureUrl);
} else {
console.log('Not found!');
}
//This function should be in two parts but I did not know how to do it....
var firstPictureUrl = 'https://api.finna.fi' + pictureAddress[0];
$('#carousel').append('<div class="item active" id="item"><img src="'
+ firstPictureUrl + '" alt="pic"></div>');
for (var i = 1; i < pictureAddress.length; i++) {
var pictureUrl = 'https://api.finna.fi' + pictureAddress[i];
console.log(pictureUrl);
$('#carousel').append('<div class="item"><img src="' + pictureUrl
+ '" alt="pic"></div>');
}
}
getPictures(searchWord1, searchWord2);
/*function getBooks(searchBook) {
console.log('getPictures');
$.ajax({
'url': 'https://api.finna.fi/v1/search?lookfor=' + searchBook + '&filter[]=online_boolean:"1"&filter[]=format:"0/Book/',
'dataType': 'json',
'success': onGetBooks
});
}
function onGetBooks(obj) {
if (obj) {
var bookRecords = obj.records;
var bookNames = bookRecords.map(
function(rec) {
return rec.title;
}
);
//$('#result').append(bookNames.join(', ') + '<br>');
console.log(bookNames);
} else {
console.log('Not found!');
}
for (var i=0; i<bookNames.length; i++){
var singleBook = bookNames[i];
$('#result').append('<li>' + singleBook + '</li>');
}
}
$('#searchButton').click(function () {
$('#result').empty();
var searchValue = $('#searchInput').val();
console.log(searchValue);
getBooks(searchValue);
});*/
});
| tPictures(p | identifier_name |
main.js |
//defining the searched words
var searchWord1 = 'seksuaaliset vähemmistöt';
var searchWord2 = 'lhbt';
$(function () {
// getting the pictures using finna api
function getPictures(pictureSearch1, pictureSearch2) {
console.log('getPictures');
$.ajax({
'url':
'https://api.finna.fi/v1/search?lookfor=' + pictureSearch1 + '+OR+'
+ pictureSearch2 + '&filter[]=online_boolean:"1"&filter[]=format:"0/Image/',
'dataType': 'json',
'success': onGetPictures
});
}
//getting the url:s and adding the pictures to html
function onGetPictures(obj) {
| getPictures(searchWord1, searchWord2);
/*function getBooks(searchBook) {
console.log('getPictures');
$.ajax({
'url': 'https://api.finna.fi/v1/search?lookfor=' + searchBook + '&filter[]=online_boolean:"1"&filter[]=format:"0/Book/',
'dataType': 'json',
'success': onGetBooks
});
}
function onGetBooks(obj) {
if (obj) {
var bookRecords = obj.records;
var bookNames = bookRecords.map(
function(rec) {
return rec.title;
}
);
//$('#result').append(bookNames.join(', ') + '<br>');
console.log(bookNames);
} else {
console.log('Not found!');
}
for (var i=0; i<bookNames.length; i++){
var singleBook = bookNames[i];
$('#result').append('<li>' + singleBook + '</li>');
}
}
$('#searchButton').click(function () {
$('#result').empty();
var searchValue = $('#searchInput').val();
console.log(searchValue);
getBooks(searchValue);
});*/
});
|
if (obj) {
var records = obj.records;
var pictureAddress = records.map(
function (rec) {
return rec.images;
}
);
console.log(pictureUrl);
} else {
console.log('Not found!');
}
//This function should be in two parts but I did not know how to do it....
var firstPictureUrl = 'https://api.finna.fi' + pictureAddress[0];
$('#carousel').append('<div class="item active" id="item"><img src="'
+ firstPictureUrl + '" alt="pic"></div>');
for (var i = 1; i < pictureAddress.length; i++) {
var pictureUrl = 'https://api.finna.fi' + pictureAddress[i];
console.log(pictureUrl);
$('#carousel').append('<div class="item"><img src="' + pictureUrl
+ '" alt="pic"></div>');
}
}
| identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.