file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
share.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import base64
import json
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from behave import *
@step('I share first element in the history list')
def step_impl(context):
context.execute_steps(u'''
given I open History dialog
''')
history = context.browser.find_element_by_id("HistoryPopup")
entries = history.find_elements_by_xpath('.//li[not(@data-clone-template)]')
assert len(entries) > 0, "There are no entries in the history"
item = entries[0]
item.find_elements_by_xpath('.//*[@data-share-item]')[0].click()
@then('the json to share is shown with url "{url}" and contains the following headers')
def step_impl(context, url):
# Wait for modal to appear
WebDriverWait(context.browser, 10).until(
expected_conditions.visibility_of_element_located(
(By.ID, 'ShareRequestForm')))
output = context.browser.execute_script("return restman.ui.editors.get('#ShareRequestEditor').getValue();")
snippet = json.loads(output)
assert url == snippet["url"], "URL: \"{}\" not in output.\nOutput: {}".format(value, output)
for row in context.table:
|
@step('I click on import request')
def step_impl(context):
context.execute_steps(u'''
given I open History dialog
''')
# Click on import
context.browser.find_element_by_id('ImportHistory').click()
WebDriverWait(context.browser, 10).until(
expected_conditions.visibility_of_element_located(
(By.ID, 'ImportRequestForm')))
@step('I write a shared request for "{url}"')
def step_impl(context, url):
req = json.dumps({
"method": "POST",
"url": url,
"headers": {
"Content-Type": "application/json",
"X-Test-Header": "shared_request"
},
"body": {
"type": "form",
"content": {
"SomeKey": "SomeValue11233",
"SomeOtherKey": "SomeOtherValue019",
}
}
})
context.browser.execute_script("return restman.ui.editors.setValue('#ImportRequestEditor', atob('{}'));".format(base64.b64encode(req)))
@step('I click on load import request')
def step_impl(context):
# Import request
context.browser.find_element_by_xpath("//*[@id='ImportRequestForm']//input[@value='Import']").click()
| assert row['key'] in snippet['headers'], "Header {} is not in output".format(row['key'])
assert row['value'] == snippet['headers'][row['key']], "Header value is not correct. Expected: {}; Actual: {}".format(value, snippet['headers'][name]) | conditional_block |
share.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import base64 | import json
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from behave import *
@step('I share first element in the history list')
def step_impl(context):
context.execute_steps(u'''
given I open History dialog
''')
history = context.browser.find_element_by_id("HistoryPopup")
entries = history.find_elements_by_xpath('.//li[not(@data-clone-template)]')
assert len(entries) > 0, "There are no entries in the history"
item = entries[0]
item.find_elements_by_xpath('.//*[@data-share-item]')[0].click()
@then('the json to share is shown with url "{url}" and contains the following headers')
def step_impl(context, url):
# Wait for modal to appear
WebDriverWait(context.browser, 10).until(
expected_conditions.visibility_of_element_located(
(By.ID, 'ShareRequestForm')))
output = context.browser.execute_script("return restman.ui.editors.get('#ShareRequestEditor').getValue();")
snippet = json.loads(output)
assert url == snippet["url"], "URL: \"{}\" not in output.\nOutput: {}".format(value, output)
for row in context.table:
assert row['key'] in snippet['headers'], "Header {} is not in output".format(row['key'])
assert row['value'] == snippet['headers'][row['key']], "Header value is not correct. Expected: {}; Actual: {}".format(value, snippet['headers'][name])
@step('I click on import request')
def step_impl(context):
context.execute_steps(u'''
given I open History dialog
''')
# Click on import
context.browser.find_element_by_id('ImportHistory').click()
WebDriverWait(context.browser, 10).until(
expected_conditions.visibility_of_element_located(
(By.ID, 'ImportRequestForm')))
@step('I write a shared request for "{url}"')
def step_impl(context, url):
req = json.dumps({
"method": "POST",
"url": url,
"headers": {
"Content-Type": "application/json",
"X-Test-Header": "shared_request"
},
"body": {
"type": "form",
"content": {
"SomeKey": "SomeValue11233",
"SomeOtherKey": "SomeOtherValue019",
}
}
})
context.browser.execute_script("return restman.ui.editors.setValue('#ImportRequestEditor', atob('{}'));".format(base64.b64encode(req)))
@step('I click on load import request')
def step_impl(context):
# Import request
context.browser.find_element_by_xpath("//*[@id='ImportRequestForm']//input[@value='Import']").click() | random_line_split | |
0002_auto_20150708_1158.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class | (migrations.Migration):
dependencies = [
('taskmanager', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('name', models.CharField(verbose_name='name', max_length=100, help_text='Enter the project name')),
('color', models.CharField(verbose_name='color', validators=[django.core.validators.RegexValidator('(^#[0-9a-fA-F]{3}$)|(^#[0-9a-fA-F]{6}$)')], default='#fff', max_length=7, help_text='Enter the hex color code, like #ccc or #cccccc')),
('user', models.ForeignKey(verbose_name='user', related_name='profjects', to='taskmanager.Profile')),
],
options={
'ordering': ('user', 'name'),
'verbose_name': 'Project',
'verbose_name_plural': 'Projects',
},
),
migrations.AlterUniqueTogether(
name='project',
unique_together=set([('user', 'name')]),
),
]
| Migration | identifier_name |
0002_auto_20150708_1158.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
| dependencies = [
('taskmanager', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('name', models.CharField(verbose_name='name', max_length=100, help_text='Enter the project name')),
('color', models.CharField(verbose_name='color', validators=[django.core.validators.RegexValidator('(^#[0-9a-fA-F]{3}$)|(^#[0-9a-fA-F]{6}$)')], default='#fff', max_length=7, help_text='Enter the hex color code, like #ccc or #cccccc')),
('user', models.ForeignKey(verbose_name='user', related_name='profjects', to='taskmanager.Profile')),
],
options={
'ordering': ('user', 'name'),
'verbose_name': 'Project',
'verbose_name_plural': 'Projects',
},
),
migrations.AlterUniqueTogether(
name='project',
unique_together=set([('user', 'name')]),
),
] | identifier_body | |
0002_auto_20150708_1158.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('taskmanager', '0001_initial'), | ]
operations = [
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('name', models.CharField(verbose_name='name', max_length=100, help_text='Enter the project name')),
('color', models.CharField(verbose_name='color', validators=[django.core.validators.RegexValidator('(^#[0-9a-fA-F]{3}$)|(^#[0-9a-fA-F]{6}$)')], default='#fff', max_length=7, help_text='Enter the hex color code, like #ccc or #cccccc')),
('user', models.ForeignKey(verbose_name='user', related_name='profjects', to='taskmanager.Profile')),
],
options={
'ordering': ('user', 'name'),
'verbose_name': 'Project',
'verbose_name_plural': 'Projects',
},
),
migrations.AlterUniqueTogether(
name='project',
unique_together=set([('user', 'name')]),
),
] | random_line_split | |
http_method.rs | /// `HttpMethod` defines supported HTTP methods.
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum HttpMethod {
Delete,
Get,
Head,
Post,
Put,
// pathological
Connect,
Options,
Trace,
// webdav
Copy,
Lock,
MKCol,
Move,
PropFind,
PropPatch,
Search,
Unlock,
// subversion
Report,
MKActivity,
Checkout,
Merge,
// upnp
MSearch,
Notify,
Subscribe,
Unsubscribe,
// RFC-5789
Patch,
Purge,
// CalDAV
MKCalendar,
}
impl ToString for HttpMethod {
fn to_string(&self) -> String |
}
| {
match *self {
HttpMethod::Delete => "DELETE".to_string(),
HttpMethod::Get => "GET".to_string(),
HttpMethod::Head => "HEAD".to_string(),
HttpMethod::Post => "POST".to_string(),
HttpMethod::Put => "Put".to_string(),
HttpMethod::Connect => "CONNECT".to_string(),
HttpMethod::Options => "OPTIONS".to_string(),
HttpMethod::Trace => "TRACE".to_string(),
HttpMethod::Copy => "COPY".to_string(),
HttpMethod::Lock => "LOCK".to_string(),
HttpMethod::MKCol => "MKCOL".to_string(),
HttpMethod::Move => "MOVE".to_string(),
HttpMethod::PropFind => "PROPFIND".to_string(),
HttpMethod::PropPatch => "PROPPATCH".to_string(),
HttpMethod::Search => "SEARCH".to_string(),
HttpMethod::Unlock => "UNLOCK".to_string(),
HttpMethod::Report => "REPORT".to_string(),
HttpMethod::MKActivity => "MKACTIVITY".to_string(),
HttpMethod::Checkout => "CHECKOUT".to_string(),
HttpMethod::Merge => "MERGE".to_string(),
HttpMethod::MSearch => "M-SEARCH".to_string(),
HttpMethod::Notify => "NOTIFY".to_string(),
HttpMethod::Subscribe => "SUBSCRIBE".to_string(),
HttpMethod::Unsubscribe => "UNSUBSCRIBE".to_string(),
HttpMethod::Patch => "PATCH".to_string(),
HttpMethod::Purge => "PURGE".to_string(),
HttpMethod::MKCalendar => "MKCALENDAR".to_string(),
}
} | identifier_body |
http_method.rs | /// `HttpMethod` defines supported HTTP methods.
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum | {
Delete,
Get,
Head,
Post,
Put,
// pathological
Connect,
Options,
Trace,
// webdav
Copy,
Lock,
MKCol,
Move,
PropFind,
PropPatch,
Search,
Unlock,
// subversion
Report,
MKActivity,
Checkout,
Merge,
// upnp
MSearch,
Notify,
Subscribe,
Unsubscribe,
// RFC-5789
Patch,
Purge,
// CalDAV
MKCalendar,
}
impl ToString for HttpMethod {
fn to_string(&self) -> String {
match *self {
HttpMethod::Delete => "DELETE".to_string(),
HttpMethod::Get => "GET".to_string(),
HttpMethod::Head => "HEAD".to_string(),
HttpMethod::Post => "POST".to_string(),
HttpMethod::Put => "Put".to_string(),
HttpMethod::Connect => "CONNECT".to_string(),
HttpMethod::Options => "OPTIONS".to_string(),
HttpMethod::Trace => "TRACE".to_string(),
HttpMethod::Copy => "COPY".to_string(),
HttpMethod::Lock => "LOCK".to_string(),
HttpMethod::MKCol => "MKCOL".to_string(),
HttpMethod::Move => "MOVE".to_string(),
HttpMethod::PropFind => "PROPFIND".to_string(),
HttpMethod::PropPatch => "PROPPATCH".to_string(),
HttpMethod::Search => "SEARCH".to_string(),
HttpMethod::Unlock => "UNLOCK".to_string(),
HttpMethod::Report => "REPORT".to_string(),
HttpMethod::MKActivity => "MKACTIVITY".to_string(),
HttpMethod::Checkout => "CHECKOUT".to_string(),
HttpMethod::Merge => "MERGE".to_string(),
HttpMethod::MSearch => "M-SEARCH".to_string(),
HttpMethod::Notify => "NOTIFY".to_string(),
HttpMethod::Subscribe => "SUBSCRIBE".to_string(),
HttpMethod::Unsubscribe => "UNSUBSCRIBE".to_string(),
HttpMethod::Patch => "PATCH".to_string(),
HttpMethod::Purge => "PURGE".to_string(),
HttpMethod::MKCalendar => "MKCALENDAR".to_string(),
}
}
}
| HttpMethod | identifier_name |
http_method.rs | /// `HttpMethod` defines supported HTTP methods.
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum HttpMethod {
Delete,
Get,
Head,
Post,
Put,
// pathological
Connect,
Options,
Trace,
// webdav
Copy,
Lock,
MKCol,
Move,
PropFind,
PropPatch,
Search,
Unlock,
// subversion | Report,
MKActivity,
Checkout,
Merge,
// upnp
MSearch,
Notify,
Subscribe,
Unsubscribe,
// RFC-5789
Patch,
Purge,
// CalDAV
MKCalendar,
}
impl ToString for HttpMethod {
fn to_string(&self) -> String {
match *self {
HttpMethod::Delete => "DELETE".to_string(),
HttpMethod::Get => "GET".to_string(),
HttpMethod::Head => "HEAD".to_string(),
HttpMethod::Post => "POST".to_string(),
HttpMethod::Put => "Put".to_string(),
HttpMethod::Connect => "CONNECT".to_string(),
HttpMethod::Options => "OPTIONS".to_string(),
HttpMethod::Trace => "TRACE".to_string(),
HttpMethod::Copy => "COPY".to_string(),
HttpMethod::Lock => "LOCK".to_string(),
HttpMethod::MKCol => "MKCOL".to_string(),
HttpMethod::Move => "MOVE".to_string(),
HttpMethod::PropFind => "PROPFIND".to_string(),
HttpMethod::PropPatch => "PROPPATCH".to_string(),
HttpMethod::Search => "SEARCH".to_string(),
HttpMethod::Unlock => "UNLOCK".to_string(),
HttpMethod::Report => "REPORT".to_string(),
HttpMethod::MKActivity => "MKACTIVITY".to_string(),
HttpMethod::Checkout => "CHECKOUT".to_string(),
HttpMethod::Merge => "MERGE".to_string(),
HttpMethod::MSearch => "M-SEARCH".to_string(),
HttpMethod::Notify => "NOTIFY".to_string(),
HttpMethod::Subscribe => "SUBSCRIBE".to_string(),
HttpMethod::Unsubscribe => "UNSUBSCRIBE".to_string(),
HttpMethod::Patch => "PATCH".to_string(),
HttpMethod::Purge => "PURGE".to_string(),
HttpMethod::MKCalendar => "MKCALENDAR".to_string(),
}
}
} | random_line_split | |
lib.rs | //! `bincode` is a crate for encoding and decoding using a tiny binary
//! serialization strategy.
//!
//! There are simple functions for encoding to `Vec<u8>` and decoding from
//! `&[u8]`, but the meat of the library is the `encode_into` and `decode_from`
//! functions which respectively allow encoding into a `std::io::Writer`
//! and decoding from a `std::io::Buffer`.
//!
//! ## Modules
//! There are two ways to encode and decode structs using `bincode`, either using `rustc_serialize`
//! or the `serde` crate. `rustc_serialize` and `serde` are crates and and also the names of their
//! corresponding modules inside of `bincode`. Both modules have exactly equivalant functions, and
//! and the only difference is whether or not the library user wants to use `rustc_serialize` or
//! `serde`.
//!
//! ### Using Basic Functions
//!
//! ```rust
//! #![allow(unstable)]
//! extern crate bincode;
//! use bincode::rustc_serialize::{encode, decode};
//! fn main() {
//! // The object that we will serialize.
//! let target = Some("hello world".to_string());
//! // The maximum size of the encoded message.
//! let limit = bincode::SizeLimit::Bounded(20);
//!
//! let encoded: Vec<u8> = encode(&target, limit).unwrap();
//! let decoded: Option<String> = decode(&encoded[..]).unwrap();
//! assert_eq!(target, decoded);
//! }
//! ```
#![crate_name = "bincode"]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "./icon.png")]
extern crate rustc_serialize as rustc_serialize_crate;
extern crate byteorder;
extern crate num;
extern crate serde as serde_crate;
pub use refbox::{RefBox, StrBox, SliceBox};
mod refbox;
pub mod rustc_serialize;
pub mod serde;
/// A limit on the amount of bytes that can be read or written.
///
/// Size limits are an incredibly important part of both encoding and decoding.
///
/// In order to prevent DOS attacks on a decoder, it is important to limit the
/// amount of bytes that a single encoded message can be; otherwise, if you
/// are decoding bytes right off of a TCP stream for example, it would be
/// possible for an attacker to flood your server with a 3TB vec, causing the
/// decoder to run out of memory and crash your application!
/// Because of this, you can provide a maximum-number-of-bytes that can be read
/// during decoding, and the decoder will explicitly fail if it has to read
/// any more than that.
///
/// On the other side, you want to make sure that you aren't encoding a message
/// that is larger than your decoder expects. By supplying a size limit to an
/// encoding function, the encoder will verify that the structure can be encoded
/// within that limit. This verification occurs before any bytes are written to
/// the Writer, so recovering from an the error is easy.
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)]
pub enum | {
Infinite,
Bounded(u64)
}
| SizeLimit | identifier_name |
lib.rs | //! `bincode` is a crate for encoding and decoding using a tiny binary
//! serialization strategy.
//!
//! There are simple functions for encoding to `Vec<u8>` and decoding from
//! `&[u8]`, but the meat of the library is the `encode_into` and `decode_from`
//! functions which respectively allow encoding into a `std::io::Writer`
//! and decoding from a `std::io::Buffer`.
//!
//! ## Modules
//! There are two ways to encode and decode structs using `bincode`, either using `rustc_serialize`
//! or the `serde` crate. `rustc_serialize` and `serde` are crates and and also the names of their
//! corresponding modules inside of `bincode`. Both modules have exactly equivalant functions, and
//! and the only difference is whether or not the library user wants to use `rustc_serialize` or
//! `serde`.
//!
//! ### Using Basic Functions
//!
//! ```rust
//! #![allow(unstable)]
//! extern crate bincode;
//! use bincode::rustc_serialize::{encode, decode};
//! fn main() {
//! // The object that we will serialize.
//! let target = Some("hello world".to_string());
//! // The maximum size of the encoded message.
//! let limit = bincode::SizeLimit::Bounded(20);
//!
//! let encoded: Vec<u8> = encode(&target, limit).unwrap();
//! let decoded: Option<String> = decode(&encoded[..]).unwrap();
//! assert_eq!(target, decoded);
//! }
//! ```
#![crate_name = "bincode"]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "./icon.png")]
extern crate rustc_serialize as rustc_serialize_crate;
extern crate byteorder;
extern crate num;
extern crate serde as serde_crate;
| pub use refbox::{RefBox, StrBox, SliceBox};
mod refbox;
pub mod rustc_serialize;
pub mod serde;
/// A limit on the amount of bytes that can be read or written.
///
/// Size limits are an incredibly important part of both encoding and decoding.
///
/// In order to prevent DOS attacks on a decoder, it is important to limit the
/// amount of bytes that a single encoded message can be; otherwise, if you
/// are decoding bytes right off of a TCP stream for example, it would be
/// possible for an attacker to flood your server with a 3TB vec, causing the
/// decoder to run out of memory and crash your application!
/// Because of this, you can provide a maximum-number-of-bytes that can be read
/// during decoding, and the decoder will explicitly fail if it has to read
/// any more than that.
///
/// On the other side, you want to make sure that you aren't encoding a message
/// that is larger than your decoder expects. By supplying a size limit to an
/// encoding function, the encoder will verify that the structure can be encoded
/// within that limit. This verification occurs before any bytes are written to
/// the Writer, so recovering from an the error is easy.
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)]
pub enum SizeLimit {
Infinite,
Bounded(u64)
} | random_line_split | |
base.command.ts | import { Observable } from 'rxjs/Observable';
import { CommandPayload } from './payloads/base.command.payload';
import { Gateway } from '../gateways/base.gateway';
import { Observer } from 'rxjs/Observer';
export enum CommandState {
IDLE,
EXECUTING,
INVOKED
};
export interface CommandResult {
command: Command;
payload: any;
}
export abstract class Command {
static _id: number = 0;
protected _state: CommandState;
protected _payload: CommandPayload;
protected _commands: Command[] = [];
protected _method: any;
protected _gateway: Gateway;
protected _id: number = 0;
constructor(payload?: CommandPayload) {
this._payload = payload;
Command._id += 1;
this._id = Command._id;
}
get id(): number {
return this._id;
}
get payload(): CommandPayload {
return this._payload;
}
set payload(value: CommandPayload) {
this._payload = value;
}
get method(): any {
return this._method;
}
set method(value: any) {
this._method = value;
}
set gateway(value: Gateway) {
this._gateway = value;
}
get mimeType() |
concat(command: Command): void {
this._payload.concat(command.payload);
}
serialize(): string | Blob | ArrayBuffer {
return this._payload.serialize();
}
parse(response: any): any {
return this._payload.parse(response);
};
invoke(context?: Command): Observable<CommandResult> {
context = context || this;
context.state = CommandState.EXECUTING;
let result = Observable.create((observer: Observer<CommandResult>) => {
this._gateway.send(context).subscribe((response: Observer<any>) => {
context.state = CommandState.INVOKED;
observer.next({
command: context,
payload: context.parse(response)
});
}, (error: any) => observer.error(context.parse(error)),
() => observer.complete());
});
return result;
}
set state(value: CommandState) {
this._state = value;
}
get state(): CommandState {
return this._state;
}
}
| {
return this._payload.mimeType;
} | identifier_body |
base.command.ts | import { Observable } from 'rxjs/Observable';
import { CommandPayload } from './payloads/base.command.payload';
import { Gateway } from '../gateways/base.gateway';
import { Observer } from 'rxjs/Observer';
export enum CommandState {
IDLE,
EXECUTING,
INVOKED
};
export interface CommandResult {
command: Command;
payload: any;
}
export abstract class Command {
static _id: number = 0;
protected _state: CommandState;
protected _payload: CommandPayload;
protected _commands: Command[] = [];
protected _method: any;
protected _gateway: Gateway;
protected _id: number = 0;
constructor(payload?: CommandPayload) {
this._payload = payload;
Command._id += 1;
this._id = Command._id;
}
get id(): number {
return this._id;
}
get payload(): CommandPayload {
return this._payload;
}
set payload(value: CommandPayload) {
this._payload = value;
}
get | (): any {
return this._method;
}
set method(value: any) {
this._method = value;
}
set gateway(value: Gateway) {
this._gateway = value;
}
get mimeType() {
return this._payload.mimeType;
}
concat(command: Command): void {
this._payload.concat(command.payload);
}
serialize(): string | Blob | ArrayBuffer {
return this._payload.serialize();
}
parse(response: any): any {
return this._payload.parse(response);
};
invoke(context?: Command): Observable<CommandResult> {
context = context || this;
context.state = CommandState.EXECUTING;
let result = Observable.create((observer: Observer<CommandResult>) => {
this._gateway.send(context).subscribe((response: Observer<any>) => {
context.state = CommandState.INVOKED;
observer.next({
command: context,
payload: context.parse(response)
});
}, (error: any) => observer.error(context.parse(error)),
() => observer.complete());
});
return result;
}
set state(value: CommandState) {
this._state = value;
}
get state(): CommandState {
return this._state;
}
}
| method | identifier_name |
base.command.ts | import { Observable } from 'rxjs/Observable';
import { CommandPayload } from './payloads/base.command.payload';
import { Gateway } from '../gateways/base.gateway';
import { Observer } from 'rxjs/Observer';
export enum CommandState {
IDLE,
EXECUTING,
INVOKED
};
export interface CommandResult {
command: Command;
payload: any;
} | static _id: number = 0;
protected _state: CommandState;
protected _payload: CommandPayload;
protected _commands: Command[] = [];
protected _method: any;
protected _gateway: Gateway;
protected _id: number = 0;
constructor(payload?: CommandPayload) {
this._payload = payload;
Command._id += 1;
this._id = Command._id;
}
get id(): number {
return this._id;
}
get payload(): CommandPayload {
return this._payload;
}
set payload(value: CommandPayload) {
this._payload = value;
}
get method(): any {
return this._method;
}
set method(value: any) {
this._method = value;
}
set gateway(value: Gateway) {
this._gateway = value;
}
get mimeType() {
return this._payload.mimeType;
}
concat(command: Command): void {
this._payload.concat(command.payload);
}
serialize(): string | Blob | ArrayBuffer {
return this._payload.serialize();
}
parse(response: any): any {
return this._payload.parse(response);
};
invoke(context?: Command): Observable<CommandResult> {
context = context || this;
context.state = CommandState.EXECUTING;
let result = Observable.create((observer: Observer<CommandResult>) => {
this._gateway.send(context).subscribe((response: Observer<any>) => {
context.state = CommandState.INVOKED;
observer.next({
command: context,
payload: context.parse(response)
});
}, (error: any) => observer.error(context.parse(error)),
() => observer.complete());
});
return result;
}
set state(value: CommandState) {
this._state = value;
}
get state(): CommandState {
return this._state;
}
} |
export abstract class Command { | random_line_split |
index.js | module.exports = function(el, state, container) {
var ul = el.getElementsByTagName('ul')[0]
var lastFlags = []
var controlsTouch = -1
var containerTouch = {"id":-1, "x":-1, "y":-1}
el.addEventListener('touchstart', startTouchControls)
el.addEventListener('touchmove', handleTouchControls)
el.addEventListener('touchend', unTouchControls)
container.addEventListener('touchstart', startTouchContainer)
container.addEventListener('touchmove', handleTouchContainer)
container.addEventListener('touchend', unTouchContainer)
function startTouchControls(event) {
if (controlsTouch === -1) {
controlsTouch = event.targetTouches[0].identifier
}
handleTouchControls(event)
}
function | (event) {
event.preventDefault()
var touch = null
if (event.targetTouches.length > 1) {
for (t in event.targetTouches) {
if (event.targetTouches[t].identifier === controlsTouch) {
touch = event.targetTouches[t]
break
}
}
} else {
touch = event.targetTouches[0]
}
if (touch === null) return
var top=touch.clientY-el.offsetTop
var left=touch.clientX-el.offsetLeft
var flags=[]
if (top < 50) flags.push('forward')
if (left < 50 && top < 100) flags.push('left')
if (left > 100 && top < 100) flags.push('right')
if (top > 100 && left > 50 && left < 100) flags.push('backward')
if (top > 50 && top < 100 && left > 50 && left < 100) flags.push('jump')
if (flags.indexOf('jump') === -1) {
for (flag in lastFlags) {
if (flags.indexOf(lastFlags[flag]) !== -1) {
lastFlags.splice(flag, 1)
}
}
setState(lastFlags, 0)
setState(flags, 1)
lastFlags = flags
} else if (lastFlags.indexOf('jump') === -1) {
// Start jumping (in additional to existing movement)
lastFlags.push('jump')
setState(['jump'], 1)
}
}
function unTouchControls() {
setState(lastFlags, 0)
lastFlags = []
controlsTouch = -1
}
function setState(states, value) {
var delta = {}
for(s in states) {
delta[states[s]] = value
}
state.write(delta)
}
function startTouchContainer(event) {
if (containerTouch.id === -1) {
containerTouch.id = event.targetTouches[0].identifier
containerTouch.x = event.targetTouches[0].clientX
containerTouch.y = event.targetTouches[0].clientY
}
handleTouchContainer(event)
}
function handleTouchContainer(event) {
event.preventDefault()
var touch = null, x = y = -1, delta = {}
for (t in event.targetTouches) {
if (event.targetTouches[t].identifier === containerTouch.id) {
touch = event.targetTouches[t]
break
}
}
if (touch === null) return
dx = containerTouch.x - touch.clientX
dy = containerTouch.y - touch.clientY
delta.x_rotation_accum = dy * 2
delta.y_rotation_accum = dx * 8
state.write(delta)
containerTouch.x = touch.clientX
containerTouch.y = touch.clientY
}
function unTouchContainer(event) {
containerTouch = {"id":-1, "x":-1, "y":-1}
}
} | handleTouchControls | identifier_name |
index.js | module.exports = function(el, state, container) {
var ul = el.getElementsByTagName('ul')[0]
var lastFlags = []
var controlsTouch = -1
var containerTouch = {"id":-1, "x":-1, "y":-1}
el.addEventListener('touchstart', startTouchControls)
el.addEventListener('touchmove', handleTouchControls)
el.addEventListener('touchend', unTouchControls)
container.addEventListener('touchstart', startTouchContainer) | controlsTouch = event.targetTouches[0].identifier
}
handleTouchControls(event)
}
function handleTouchControls(event) {
event.preventDefault()
var touch = null
if (event.targetTouches.length > 1) {
for (t in event.targetTouches) {
if (event.targetTouches[t].identifier === controlsTouch) {
touch = event.targetTouches[t]
break
}
}
} else {
touch = event.targetTouches[0]
}
if (touch === null) return
var top=touch.clientY-el.offsetTop
var left=touch.clientX-el.offsetLeft
var flags=[]
if (top < 50) flags.push('forward')
if (left < 50 && top < 100) flags.push('left')
if (left > 100 && top < 100) flags.push('right')
if (top > 100 && left > 50 && left < 100) flags.push('backward')
if (top > 50 && top < 100 && left > 50 && left < 100) flags.push('jump')
if (flags.indexOf('jump') === -1) {
for (flag in lastFlags) {
if (flags.indexOf(lastFlags[flag]) !== -1) {
lastFlags.splice(flag, 1)
}
}
setState(lastFlags, 0)
setState(flags, 1)
lastFlags = flags
} else if (lastFlags.indexOf('jump') === -1) {
// Start jumping (in additional to existing movement)
lastFlags.push('jump')
setState(['jump'], 1)
}
}
function unTouchControls() {
setState(lastFlags, 0)
lastFlags = []
controlsTouch = -1
}
function setState(states, value) {
var delta = {}
for(s in states) {
delta[states[s]] = value
}
state.write(delta)
}
function startTouchContainer(event) {
if (containerTouch.id === -1) {
containerTouch.id = event.targetTouches[0].identifier
containerTouch.x = event.targetTouches[0].clientX
containerTouch.y = event.targetTouches[0].clientY
}
handleTouchContainer(event)
}
function handleTouchContainer(event) {
event.preventDefault()
var touch = null, x = y = -1, delta = {}
for (t in event.targetTouches) {
if (event.targetTouches[t].identifier === containerTouch.id) {
touch = event.targetTouches[t]
break
}
}
if (touch === null) return
dx = containerTouch.x - touch.clientX
dy = containerTouch.y - touch.clientY
delta.x_rotation_accum = dy * 2
delta.y_rotation_accum = dx * 8
state.write(delta)
containerTouch.x = touch.clientX
containerTouch.y = touch.clientY
}
function unTouchContainer(event) {
containerTouch = {"id":-1, "x":-1, "y":-1}
}
} | container.addEventListener('touchmove', handleTouchContainer)
container.addEventListener('touchend', unTouchContainer)
function startTouchControls(event) {
if (controlsTouch === -1) { | random_line_split |
index.js | module.exports = function(el, state, container) {
var ul = el.getElementsByTagName('ul')[0]
var lastFlags = []
var controlsTouch = -1
var containerTouch = {"id":-1, "x":-1, "y":-1}
el.addEventListener('touchstart', startTouchControls)
el.addEventListener('touchmove', handleTouchControls)
el.addEventListener('touchend', unTouchControls)
container.addEventListener('touchstart', startTouchContainer)
container.addEventListener('touchmove', handleTouchContainer)
container.addEventListener('touchend', unTouchContainer)
function startTouchControls(event) {
if (controlsTouch === -1) {
controlsTouch = event.targetTouches[0].identifier
}
handleTouchControls(event)
}
function handleTouchControls(event) {
event.preventDefault()
var touch = null
if (event.targetTouches.length > 1) {
for (t in event.targetTouches) {
if (event.targetTouches[t].identifier === controlsTouch) {
touch = event.targetTouches[t]
break
}
}
} else {
touch = event.targetTouches[0]
}
if (touch === null) return
var top=touch.clientY-el.offsetTop
var left=touch.clientX-el.offsetLeft
var flags=[]
if (top < 50) flags.push('forward')
if (left < 50 && top < 100) flags.push('left')
if (left > 100 && top < 100) flags.push('right')
if (top > 100 && left > 50 && left < 100) flags.push('backward')
if (top > 50 && top < 100 && left > 50 && left < 100) flags.push('jump')
if (flags.indexOf('jump') === -1) {
for (flag in lastFlags) {
if (flags.indexOf(lastFlags[flag]) !== -1) {
lastFlags.splice(flag, 1)
}
}
setState(lastFlags, 0)
setState(flags, 1)
lastFlags = flags
} else if (lastFlags.indexOf('jump') === -1) |
}
function unTouchControls() {
setState(lastFlags, 0)
lastFlags = []
controlsTouch = -1
}
function setState(states, value) {
var delta = {}
for(s in states) {
delta[states[s]] = value
}
state.write(delta)
}
function startTouchContainer(event) {
if (containerTouch.id === -1) {
containerTouch.id = event.targetTouches[0].identifier
containerTouch.x = event.targetTouches[0].clientX
containerTouch.y = event.targetTouches[0].clientY
}
handleTouchContainer(event)
}
function handleTouchContainer(event) {
event.preventDefault()
var touch = null, x = y = -1, delta = {}
for (t in event.targetTouches) {
if (event.targetTouches[t].identifier === containerTouch.id) {
touch = event.targetTouches[t]
break
}
}
if (touch === null) return
dx = containerTouch.x - touch.clientX
dy = containerTouch.y - touch.clientY
delta.x_rotation_accum = dy * 2
delta.y_rotation_accum = dx * 8
state.write(delta)
containerTouch.x = touch.clientX
containerTouch.y = touch.clientY
}
function unTouchContainer(event) {
containerTouch = {"id":-1, "x":-1, "y":-1}
}
} | {
// Start jumping (in additional to existing movement)
lastFlags.push('jump')
setState(['jump'], 1)
} | conditional_block |
index.js | module.exports = function(el, state, container) {
var ul = el.getElementsByTagName('ul')[0]
var lastFlags = []
var controlsTouch = -1
var containerTouch = {"id":-1, "x":-1, "y":-1}
el.addEventListener('touchstart', startTouchControls)
el.addEventListener('touchmove', handleTouchControls)
el.addEventListener('touchend', unTouchControls)
container.addEventListener('touchstart', startTouchContainer)
container.addEventListener('touchmove', handleTouchContainer)
container.addEventListener('touchend', unTouchContainer)
function startTouchControls(event) {
if (controlsTouch === -1) {
controlsTouch = event.targetTouches[0].identifier
}
handleTouchControls(event)
}
function handleTouchControls(event) {
event.preventDefault()
var touch = null
if (event.targetTouches.length > 1) {
for (t in event.targetTouches) {
if (event.targetTouches[t].identifier === controlsTouch) {
touch = event.targetTouches[t]
break
}
}
} else {
touch = event.targetTouches[0]
}
if (touch === null) return
var top=touch.clientY-el.offsetTop
var left=touch.clientX-el.offsetLeft
var flags=[]
if (top < 50) flags.push('forward')
if (left < 50 && top < 100) flags.push('left')
if (left > 100 && top < 100) flags.push('right')
if (top > 100 && left > 50 && left < 100) flags.push('backward')
if (top > 50 && top < 100 && left > 50 && left < 100) flags.push('jump')
if (flags.indexOf('jump') === -1) {
for (flag in lastFlags) {
if (flags.indexOf(lastFlags[flag]) !== -1) {
lastFlags.splice(flag, 1)
}
}
setState(lastFlags, 0)
setState(flags, 1)
lastFlags = flags
} else if (lastFlags.indexOf('jump') === -1) {
// Start jumping (in additional to existing movement)
lastFlags.push('jump')
setState(['jump'], 1)
}
}
function unTouchControls() {
setState(lastFlags, 0)
lastFlags = []
controlsTouch = -1
}
function setState(states, value) {
var delta = {}
for(s in states) {
delta[states[s]] = value
}
state.write(delta)
}
function startTouchContainer(event) |
function handleTouchContainer(event) {
event.preventDefault()
var touch = null, x = y = -1, delta = {}
for (t in event.targetTouches) {
if (event.targetTouches[t].identifier === containerTouch.id) {
touch = event.targetTouches[t]
break
}
}
if (touch === null) return
dx = containerTouch.x - touch.clientX
dy = containerTouch.y - touch.clientY
delta.x_rotation_accum = dy * 2
delta.y_rotation_accum = dx * 8
state.write(delta)
containerTouch.x = touch.clientX
containerTouch.y = touch.clientY
}
function unTouchContainer(event) {
containerTouch = {"id":-1, "x":-1, "y":-1}
}
} | {
if (containerTouch.id === -1) {
containerTouch.id = event.targetTouches[0].identifier
containerTouch.x = event.targetTouches[0].clientX
containerTouch.y = event.targetTouches[0].clientY
}
handleTouchContainer(event)
} | identifier_body |
trait-attributes.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name = "foo"]
// ignore-tidy-linelength
pub trait Foo {
// @has foo/trait.Foo.html '//h3[@id="tymethod.foo"]//div[@class="docblock attributes"]' '#[must_use]'
#[must_use]
fn foo();
}
#[must_use]
pub struct Bar;
impl Bar {
// @has foo/struct.Bar.html '//h4[@id="method.bar"]//div[@class="docblock attributes"]' '#[must_use]'
#[must_use]
pub fn bar() {}
// @has foo/struct.Bar.html '//h4[@id="method.bar2"]//div[@class="docblock attributes"]' '#[must_use]'
#[must_use]
pub fn bar2() {}
} | // file at the top-level directory of this distribution and at | random_line_split |
trait-attributes.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name = "foo"]
// ignore-tidy-linelength
pub trait Foo {
// @has foo/trait.Foo.html '//h3[@id="tymethod.foo"]//div[@class="docblock attributes"]' '#[must_use]'
#[must_use]
fn foo();
}
#[must_use]
pub struct Bar;
impl Bar {
// @has foo/struct.Bar.html '//h4[@id="method.bar"]//div[@class="docblock attributes"]' '#[must_use]'
#[must_use]
pub fn bar() {}
// @has foo/struct.Bar.html '//h4[@id="method.bar2"]//div[@class="docblock attributes"]' '#[must_use]'
#[must_use]
pub fn bar2() |
}
| {} | identifier_body |
trait-attributes.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name = "foo"]
// ignore-tidy-linelength
pub trait Foo {
// @has foo/trait.Foo.html '//h3[@id="tymethod.foo"]//div[@class="docblock attributes"]' '#[must_use]'
#[must_use]
fn foo();
}
#[must_use]
pub struct | ;
impl Bar {
// @has foo/struct.Bar.html '//h4[@id="method.bar"]//div[@class="docblock attributes"]' '#[must_use]'
#[must_use]
pub fn bar() {}
// @has foo/struct.Bar.html '//h4[@id="method.bar2"]//div[@class="docblock attributes"]' '#[must_use]'
#[must_use]
pub fn bar2() {}
}
| Bar | identifier_name |
locale.js | /**
* Shopware 4.0
* Copyright © 2012 shopware AG
*
* According to our dual licensing model, this program can be used either
* under the terms of the GNU Affero General Public License, version 3,
* or under a proprietary license.
*
* The texts of the GNU Affero General Public License with an additional
* permission and of our proprietary license can be found at and
* in the LICENSE file you have received along with this program.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* "Shopware" is a registered trademark of shopware AG.
* The licensing of the program under the AGPLv3 does not imply a
* trademark license. Therefore any rights, title and interest in
* our trademarks remain entirely with us.
*
* @category Shopware
* @package Login
* @subpackage Model
* @copyright Copyright (c) 2012, shopware AG (http://www.shopware.de) | /**
* Shopware Backend - ErrorReporter Main Model
*
* todo@all: Documentation
*/
Ext.define('Shopware.apps.UserManager.model.Locale', {
extend: 'Ext.data.Model',
fields: [ 'id', 'name' ]
}); | * @version $Id$
* @author shopware AG
*/
| random_line_split |
attributes.rs | #!/she-bang line
//inner attributes
#![crate_type = "lib"]
#![crate_name = "rary"]
mod empty {}
fn main() {
#![crate_type = "lib"]
}
enum E {
#[cfg(test)] F(#[cfg(test)] i32)
}
#[empty_attr()]
const T: i32 = 92;
fn attrs_on_statements() |
struct S<#[foo]'a, #[may_dangle] T> {}
#[macro_export]
macro_rules! give_me_struct {
($name:ident) => {
#[allow(non_camel_case_types)]
struct $name;
}
}
#[cfg(not(test))]
give_me_struct! {
hello_world
}
#[post("/", data = "<todo_form>")]
fn string_value() {}
| {
#[cfg(test)]
let x = 92;
#[cfg(test)]
loop {}
#[cfg(test)]
1 + 1;
S { #[foo] foo: 92 };
} | identifier_body |
attributes.rs | #!/she-bang line
//inner attributes | #![crate_name = "rary"]
mod empty {}
fn main() {
#![crate_type = "lib"]
}
enum E {
#[cfg(test)] F(#[cfg(test)] i32)
}
#[empty_attr()]
const T: i32 = 92;
fn attrs_on_statements() {
#[cfg(test)]
let x = 92;
#[cfg(test)]
loop {}
#[cfg(test)]
1 + 1;
S { #[foo] foo: 92 };
}
struct S<#[foo]'a, #[may_dangle] T> {}
#[macro_export]
macro_rules! give_me_struct {
($name:ident) => {
#[allow(non_camel_case_types)]
struct $name;
}
}
#[cfg(not(test))]
give_me_struct! {
hello_world
}
#[post("/", data = "<todo_form>")]
fn string_value() {} | #![crate_type = "lib"] | random_line_split |
attributes.rs | #!/she-bang line
//inner attributes
#![crate_type = "lib"]
#![crate_name = "rary"]
mod empty {}
fn | () {
#![crate_type = "lib"]
}
enum E {
#[cfg(test)] F(#[cfg(test)] i32)
}
#[empty_attr()]
const T: i32 = 92;
fn attrs_on_statements() {
#[cfg(test)]
let x = 92;
#[cfg(test)]
loop {}
#[cfg(test)]
1 + 1;
S { #[foo] foo: 92 };
}
struct S<#[foo]'a, #[may_dangle] T> {}
#[macro_export]
macro_rules! give_me_struct {
($name:ident) => {
#[allow(non_camel_case_types)]
struct $name;
}
}
#[cfg(not(test))]
give_me_struct! {
hello_world
}
#[post("/", data = "<todo_form>")]
fn string_value() {}
| main | identifier_name |
rule.js | /* Authors:
* Endi Sukma Dewata <edewata@redhat.com>
*
* Copyright (C) 2010 Red Hat
* see file 'COPYING' for use and warranty information
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
define([
'./ipa',
'./jquery',
'./phases',
'./reg',
'./rpc',
'./text',
'./details',
'./search',
'./association',
'./entity'],
function(IPA, $, phases, reg, rpc, text) {
IPA.rule_details_widget = function(spec) {
spec = spec || {};
var that = IPA.composite_widget(spec);
that.radio_name = spec.radio_name;
that.options = spec.options || [];
that.tables = spec.tables || [];
that.columns = spec.columns;
that.note = spec.note;
that.init = function() {
that.enable_radio = IPA.rule_radio_widget({
name: that.radio_name,
options: that.options,
entity: that.entity,
css_class: 'rule-enable-radio',
note: that.note
});
that.widgets.add_widget(that.enable_radio);
that.enable_radio.value_changed.attach(that.on_enable_radio_changed);
};
that.on_enable_radio_changed = function() {
var value = that.enable_radio.save();
if(value.length > 0) {
var enabled = ('' === value[0]);
for (var i=0; i<that.tables.length; i++) {
var table = that.tables[i];
var table_widget = that.widgets.get_widget(table.name);
table_widget.set_enabled(enabled);
}
}
};
that.init();
return that;
};
/**
* Rule radio widget
*
* Intended to be used especially by rule widget.
*
* @class IPA.rule_radio_widget
* @extends IPA.radio_widget
*/
IPA.rule_radio_widget = function(spec) {
spec = spec || {};
var that = IPA.radio_widget(spec);
/**
* The text from note will be displayed after radio buttons.
*/
that.note = spec.note || '';
/** @inheritDoc */
that.create = function(container) {
var param_info = IPA.get_entity_param(that.entity.name, that.name);
var title = param_info ? param_info.doc : that.name;
container.append(document.createTextNode(title + ': '));
that.widget_create(container);
that.owb_create(container);
if (that.undo) {
that.create_undo(container);
}
if (that.note) {
$('<div />', {
text: text.get(that.note),
'class': 'rule-radio-note'
}).appendTo(container);
}
};
return that;
};
IPA.rule_association_table_widget = function(spec) {
spec = spec || {};
spec.footer = spec.footer === undefined ? false : spec.footer;
var that = IPA.association_table_widget(spec);
that.external = spec.external;
that.setup_column = function(column, div, record) {
var suppress_link = false;
if (that.external) {
suppress_link = record[that.external] === 'true';
}
column.setup(div, record, suppress_link);
};
that.create_columns = function() {
if (!that.columns.length) {
that.association_table_widget_create_columns();
if (that.external) {
that.create_column({
name: that.external,
label: '@i18n:objects.sudorule.external',
entity: that.other_entity,
formatter: 'boolean',
width: '200px'
});
}
}
};
that.create_add_dialog = function() {
var entity_label = that.entity.metadata.label_singular;
var pkey = that.facet.get_pkey();
var other_entity_label = that.other_entity.metadata.label;
|
var exclude = that.values;
if (that.external) {
exclude = [];
for (var i=0; i<that.values.length; i++) {
exclude.push(that.values[i][that.name]);
}
}
return IPA.rule_association_adder_dialog({
title: title,
pkey: pkey,
other_entity: that.other_entity,
attribute_member: that.attribute_member,
entity: that.entity,
external: that.external,
exclude: exclude
});
};
return that;
};
IPA.rule_association_table_field = function(spec) {
spec = spec || {};
var that = IPA.association_table_field(spec);
that.external = spec.external;
that.set_values_external = function(values, external) {
for (var i=0; i<values.length; i++) {
var record = values[i];
if (typeof record !== 'object') {
record = {};
record[that.param] = values[i];
}
record[that.external] = external;
values[i] = record;
}
};
that.load = function(data) {
that.values = that.adapter.load(data);
if (that.external) {
that.set_values_external(that.values, '');
var external_values = that.adapter.load(data, that.external, []);
that.set_values_external(external_values, 'true');
$.merge(that.values, external_values);
}
that.widget.update(that.values);
that.widget.unselect_all();
};
that.get_update_info = function() {
var update_info = IPA.update_info_builder.new_update_info();
//association_table_widget performs basic add and remove operation
//immediately. Rule association field test if its enabled and if not it
//performs delete operation.
if (!that.widget.enabled) {
var values = that.widget.save();
if (values.length > 0) { //no need to delete if has no values
var command = rpc.command({
entity: that.entity.name,
method: that.widget.remove_method,
args: that.facet.get_pkeys()
});
command.set_option(that.widget.other_entity.name, values);
update_info.append_command(command, that.priority);
}
}
return update_info;
};
return that;
};
IPA.rule_association_adder_dialog = function(spec) {
spec = spec || {};
var that = IPA.association_adder_dialog(spec);
that.external = spec.external;
that.add = function() {
var rows = that.available_table.remove_selected_rows();
that.selected_table.add_rows(rows);
if (that.external) {
var pkey_name = that.other_entity.metadata.primary_key;
var value = that.external_field.val();
if (!value) return;
var record = {};
record[pkey_name] = value;
that.selected_table.add_record(record);
that.external_field.val('');
}
};
return that;
};
phases.on('registration', function() {
var w = reg.widget;
var f = reg.field;
w.register('rule_association_table', IPA.rule_association_table_widget);
f.register('rule_association_table', IPA.rule_association_table_field);
});
return {};
}); | var title = that.add_title;
title = title.replace('${entity}', entity_label);
title = title.replace('${primary_key}', pkey);
title = title.replace('${other_entity}', other_entity_label); | random_line_split |
rule.js | /* Authors:
* Endi Sukma Dewata <edewata@redhat.com>
*
* Copyright (C) 2010 Red Hat
* see file 'COPYING' for use and warranty information
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
define([
'./ipa',
'./jquery',
'./phases',
'./reg',
'./rpc',
'./text',
'./details',
'./search',
'./association',
'./entity'],
function(IPA, $, phases, reg, rpc, text) {
IPA.rule_details_widget = function(spec) {
spec = spec || {};
var that = IPA.composite_widget(spec);
that.radio_name = spec.radio_name;
that.options = spec.options || [];
that.tables = spec.tables || [];
that.columns = spec.columns;
that.note = spec.note;
that.init = function() {
that.enable_radio = IPA.rule_radio_widget({
name: that.radio_name,
options: that.options,
entity: that.entity,
css_class: 'rule-enable-radio',
note: that.note
});
that.widgets.add_widget(that.enable_radio);
that.enable_radio.value_changed.attach(that.on_enable_radio_changed);
};
that.on_enable_radio_changed = function() {
var value = that.enable_radio.save();
if(value.length > 0) {
var enabled = ('' === value[0]);
for (var i=0; i<that.tables.length; i++) |
}
};
that.init();
return that;
};
/**
* Rule radio widget
*
* Intended to be used especially by rule widget.
*
* @class IPA.rule_radio_widget
* @extends IPA.radio_widget
*/
IPA.rule_radio_widget = function(spec) {
spec = spec || {};
var that = IPA.radio_widget(spec);
/**
* The text from note will be displayed after radio buttons.
*/
that.note = spec.note || '';
/** @inheritDoc */
that.create = function(container) {
var param_info = IPA.get_entity_param(that.entity.name, that.name);
var title = param_info ? param_info.doc : that.name;
container.append(document.createTextNode(title + ': '));
that.widget_create(container);
that.owb_create(container);
if (that.undo) {
that.create_undo(container);
}
if (that.note) {
$('<div />', {
text: text.get(that.note),
'class': 'rule-radio-note'
}).appendTo(container);
}
};
return that;
};
IPA.rule_association_table_widget = function(spec) {
spec = spec || {};
spec.footer = spec.footer === undefined ? false : spec.footer;
var that = IPA.association_table_widget(spec);
that.external = spec.external;
that.setup_column = function(column, div, record) {
var suppress_link = false;
if (that.external) {
suppress_link = record[that.external] === 'true';
}
column.setup(div, record, suppress_link);
};
that.create_columns = function() {
if (!that.columns.length) {
that.association_table_widget_create_columns();
if (that.external) {
that.create_column({
name: that.external,
label: '@i18n:objects.sudorule.external',
entity: that.other_entity,
formatter: 'boolean',
width: '200px'
});
}
}
};
that.create_add_dialog = function() {
var entity_label = that.entity.metadata.label_singular;
var pkey = that.facet.get_pkey();
var other_entity_label = that.other_entity.metadata.label;
var title = that.add_title;
title = title.replace('${entity}', entity_label);
title = title.replace('${primary_key}', pkey);
title = title.replace('${other_entity}', other_entity_label);
var exclude = that.values;
if (that.external) {
exclude = [];
for (var i=0; i<that.values.length; i++) {
exclude.push(that.values[i][that.name]);
}
}
return IPA.rule_association_adder_dialog({
title: title,
pkey: pkey,
other_entity: that.other_entity,
attribute_member: that.attribute_member,
entity: that.entity,
external: that.external,
exclude: exclude
});
};
return that;
};
IPA.rule_association_table_field = function(spec) {
spec = spec || {};
var that = IPA.association_table_field(spec);
that.external = spec.external;
that.set_values_external = function(values, external) {
for (var i=0; i<values.length; i++) {
var record = values[i];
if (typeof record !== 'object') {
record = {};
record[that.param] = values[i];
}
record[that.external] = external;
values[i] = record;
}
};
that.load = function(data) {
that.values = that.adapter.load(data);
if (that.external) {
that.set_values_external(that.values, '');
var external_values = that.adapter.load(data, that.external, []);
that.set_values_external(external_values, 'true');
$.merge(that.values, external_values);
}
that.widget.update(that.values);
that.widget.unselect_all();
};
that.get_update_info = function() {
var update_info = IPA.update_info_builder.new_update_info();
//association_table_widget performs basic add and remove operation
//immediately. Rule association field test if its enabled and if not it
//performs delete operation.
if (!that.widget.enabled) {
var values = that.widget.save();
if (values.length > 0) { //no need to delete if has no values
var command = rpc.command({
entity: that.entity.name,
method: that.widget.remove_method,
args: that.facet.get_pkeys()
});
command.set_option(that.widget.other_entity.name, values);
update_info.append_command(command, that.priority);
}
}
return update_info;
};
return that;
};
IPA.rule_association_adder_dialog = function(spec) {
spec = spec || {};
var that = IPA.association_adder_dialog(spec);
that.external = spec.external;
that.add = function() {
var rows = that.available_table.remove_selected_rows();
that.selected_table.add_rows(rows);
if (that.external) {
var pkey_name = that.other_entity.metadata.primary_key;
var value = that.external_field.val();
if (!value) return;
var record = {};
record[pkey_name] = value;
that.selected_table.add_record(record);
that.external_field.val('');
}
};
return that;
};
phases.on('registration', function() {
var w = reg.widget;
var f = reg.field;
w.register('rule_association_table', IPA.rule_association_table_widget);
f.register('rule_association_table', IPA.rule_association_table_field);
});
return {};
});
| {
var table = that.tables[i];
var table_widget = that.widgets.get_widget(table.name);
table_widget.set_enabled(enabled);
} | conditional_block |
imageadmin.py | # -*- coding: utf-8 -*-
from django import forms
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.utils.translation import ugettext as _
from filer.admin.fileadmin import FileAdmin
from filer.models import Image
class ImageAdminForm(forms.ModelForm):
subject_location = forms.CharField(
max_length=64, required=False,
label=_('Subject location'),
help_text=_('Location of the main subject of the scene.'))
def sidebar_image_ratio(self):
if self.instance:
# this is very important. It forces the value to be returned as a
# string and always with a "." as seperator. If the conversion
# from float to string is done in the template, the locale will
# be used and in some cases there would be a "," instead of ".".
# javascript would parse that to an integer.
return '%.6F' % self.instance.sidebar_image_ratio()
else:
return ''
class Meta:
model = Image
exclude = ()
class Media:
|
class ImageAdmin(FileAdmin):
form = ImageAdminForm
ImageAdmin.fieldsets = ImageAdmin.build_fieldsets(
extra_main_fields=('author', 'default_alt_text', 'default_caption',),
extra_fieldsets=(
('Subject Location', {
'fields': ('subject_location',),
'classes': ('collapse',),
}),
)
)
| css = {
# 'all': (settings.MEDIA_URL + 'filer/css/focal_point.css',)
}
js = (
static('filer/js/raphael.js'),
static('filer/js/focal_point.js'),
) | identifier_body |
imageadmin.py | # -*- coding: utf-8 -*-
from django import forms
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.utils.translation import ugettext as _ | from filer.models import Image
class ImageAdminForm(forms.ModelForm):
subject_location = forms.CharField(
max_length=64, required=False,
label=_('Subject location'),
help_text=_('Location of the main subject of the scene.'))
def sidebar_image_ratio(self):
if self.instance:
# this is very important. It forces the value to be returned as a
# string and always with a "." as seperator. If the conversion
# from float to string is done in the template, the locale will
# be used and in some cases there would be a "," instead of ".".
# javascript would parse that to an integer.
return '%.6F' % self.instance.sidebar_image_ratio()
else:
return ''
class Meta:
model = Image
exclude = ()
class Media:
css = {
# 'all': (settings.MEDIA_URL + 'filer/css/focal_point.css',)
}
js = (
static('filer/js/raphael.js'),
static('filer/js/focal_point.js'),
)
class ImageAdmin(FileAdmin):
form = ImageAdminForm
ImageAdmin.fieldsets = ImageAdmin.build_fieldsets(
extra_main_fields=('author', 'default_alt_text', 'default_caption',),
extra_fieldsets=(
('Subject Location', {
'fields': ('subject_location',),
'classes': ('collapse',),
}),
)
) |
from filer.admin.fileadmin import FileAdmin | random_line_split |
imageadmin.py | # -*- coding: utf-8 -*-
from django import forms
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.utils.translation import ugettext as _
from filer.admin.fileadmin import FileAdmin
from filer.models import Image
class | (forms.ModelForm):
subject_location = forms.CharField(
max_length=64, required=False,
label=_('Subject location'),
help_text=_('Location of the main subject of the scene.'))
def sidebar_image_ratio(self):
if self.instance:
# this is very important. It forces the value to be returned as a
# string and always with a "." as seperator. If the conversion
# from float to string is done in the template, the locale will
# be used and in some cases there would be a "," instead of ".".
# javascript would parse that to an integer.
return '%.6F' % self.instance.sidebar_image_ratio()
else:
return ''
class Meta:
model = Image
exclude = ()
class Media:
css = {
# 'all': (settings.MEDIA_URL + 'filer/css/focal_point.css',)
}
js = (
static('filer/js/raphael.js'),
static('filer/js/focal_point.js'),
)
class ImageAdmin(FileAdmin):
form = ImageAdminForm
ImageAdmin.fieldsets = ImageAdmin.build_fieldsets(
extra_main_fields=('author', 'default_alt_text', 'default_caption',),
extra_fieldsets=(
('Subject Location', {
'fields': ('subject_location',),
'classes': ('collapse',),
}),
)
)
| ImageAdminForm | identifier_name |
imageadmin.py | # -*- coding: utf-8 -*-
from django import forms
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.utils.translation import ugettext as _
from filer.admin.fileadmin import FileAdmin
from filer.models import Image
class ImageAdminForm(forms.ModelForm):
subject_location = forms.CharField(
max_length=64, required=False,
label=_('Subject location'),
help_text=_('Location of the main subject of the scene.'))
def sidebar_image_ratio(self):
if self.instance:
# this is very important. It forces the value to be returned as a
# string and always with a "." as seperator. If the conversion
# from float to string is done in the template, the locale will
# be used and in some cases there would be a "," instead of ".".
# javascript would parse that to an integer.
|
else:
return ''
class Meta:
model = Image
exclude = ()
class Media:
css = {
# 'all': (settings.MEDIA_URL + 'filer/css/focal_point.css',)
}
js = (
static('filer/js/raphael.js'),
static('filer/js/focal_point.js'),
)
class ImageAdmin(FileAdmin):
form = ImageAdminForm
ImageAdmin.fieldsets = ImageAdmin.build_fieldsets(
extra_main_fields=('author', 'default_alt_text', 'default_caption',),
extra_fieldsets=(
('Subject Location', {
'fields': ('subject_location',),
'classes': ('collapse',),
}),
)
)
| return '%.6F' % self.instance.sidebar_image_ratio() | conditional_block |
reporter.ts | /**
* Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {Config} from '@jest/types';
import {AssertionResult, TestResult} from '@jest/test-result';
import {formatResultsErrors} from 'jest-message-util';
import {SpecResult} from './jasmine/Spec';
import {SuiteResult} from './jasmine/Suite';
import {Reporter, RunDetails} from './types';
type Microseconds = number;
export default class Jasmine2Reporter implements Reporter {
private _testResults: Array<AssertionResult>;
private _globalConfig: Config.GlobalConfig;
private _config: Config.ProjectConfig;
private _currentSuites: Array<string>;
private _resolve: any;
private _resultsPromise: Promise<TestResult>;
private _startTimes: Map<string, Microseconds>;
private _testPath: Config.Path;
constructor(
globalConfig: Config.GlobalConfig,
config: Config.ProjectConfig,
testPath: Config.Path,
) {
this._globalConfig = globalConfig;
this._config = config;
this._testPath = testPath;
this._testResults = [];
this._currentSuites = [];
this._resolve = null;
this._resultsPromise = new Promise(resolve => (this._resolve = resolve));
this._startTimes = new Map();
}
jasmineStarted(_runDetails: RunDetails) {}
specStarted(spec: SpecResult) {
this._startTimes.set(spec.id, Date.now());
}
specDone(result: SpecResult): void {
this._testResults.push(
this._extractSpecResults(result, this._currentSuites.slice(0)),
);
}
suiteStarted(suite: SuiteResult): void {
this._currentSuites.push(suite.description);
}
suiteDone(_result: SuiteResult): void {
this._currentSuites.pop();
}
jasmineDone(_runDetails: RunDetails): void |
getResults(): Promise<TestResult> {
return this._resultsPromise;
}
private _addMissingMessageToStack(stack: string, message?: string) {
// Some errors (e.g. Angular injection error) don't prepend error.message
// to stack, instead the first line of the stack is just plain 'Error'
const ERROR_REGEX = /^Error\s*\n/;
if (
stack &&
message &&
ERROR_REGEX.test(stack) &&
stack.indexOf(message) === -1
) {
return message + stack.replace(ERROR_REGEX, '\n');
}
return stack;
}
private _extractSpecResults(
specResult: SpecResult,
ancestorTitles: Array<string>,
): AssertionResult {
const start = this._startTimes.get(specResult.id);
const duration = start ? Date.now() - start : undefined;
const status =
specResult.status === 'disabled' ? 'pending' : specResult.status;
const location = specResult.__callsite
? {
column: specResult.__callsite.getColumnNumber(),
line: specResult.__callsite.getLineNumber(),
}
: null;
const results: AssertionResult = {
ancestorTitles,
duration,
failureMessages: [],
fullName: specResult.fullName,
location,
numPassingAsserts: 0, // Jasmine2 only returns an array of failed asserts.
status,
title: specResult.description,
};
specResult.failedExpectations.forEach(failed => {
const message =
!failed.matcherName && failed.stack
? this._addMissingMessageToStack(failed.stack, failed.message)
: failed.message || '';
results.failureMessages.push(message);
});
return results;
}
}
| {
let numFailingTests = 0;
let numPassingTests = 0;
let numPendingTests = 0;
let numTodoTests = 0;
const testResults = this._testResults;
testResults.forEach(testResult => {
if (testResult.status === 'failed') {
numFailingTests++;
} else if (testResult.status === 'pending') {
numPendingTests++;
} else if (testResult.status === 'todo') {
numTodoTests++;
} else {
numPassingTests++;
}
});
const testResult = {
console: null,
failureMessage: formatResultsErrors(
testResults,
this._config,
this._globalConfig,
this._testPath,
),
numFailingTests,
numPassingTests,
numPendingTests,
numTodoTests,
perfStats: {
end: 0,
start: 0,
},
snapshot: {
added: 0,
fileDeleted: false,
matched: 0,
unchecked: 0,
unmatched: 0,
updated: 0,
},
testFilePath: this._testPath,
testResults,
};
this._resolve(testResult);
} | identifier_body |
reporter.ts | /**
* Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {Config} from '@jest/types';
import {AssertionResult, TestResult} from '@jest/test-result';
import {formatResultsErrors} from 'jest-message-util';
import {SpecResult} from './jasmine/Spec';
import {SuiteResult} from './jasmine/Suite';
import {Reporter, RunDetails} from './types';
type Microseconds = number;
export default class Jasmine2Reporter implements Reporter {
private _testResults: Array<AssertionResult>;
private _globalConfig: Config.GlobalConfig;
private _config: Config.ProjectConfig;
private _currentSuites: Array<string>;
private _resolve: any;
private _resultsPromise: Promise<TestResult>;
private _startTimes: Map<string, Microseconds>;
private _testPath: Config.Path;
| this._globalConfig = globalConfig;
this._config = config;
this._testPath = testPath;
this._testResults = [];
this._currentSuites = [];
this._resolve = null;
this._resultsPromise = new Promise(resolve => (this._resolve = resolve));
this._startTimes = new Map();
}
jasmineStarted(_runDetails: RunDetails) {}
specStarted(spec: SpecResult) {
this._startTimes.set(spec.id, Date.now());
}
specDone(result: SpecResult): void {
this._testResults.push(
this._extractSpecResults(result, this._currentSuites.slice(0)),
);
}
suiteStarted(suite: SuiteResult): void {
this._currentSuites.push(suite.description);
}
suiteDone(_result: SuiteResult): void {
this._currentSuites.pop();
}
jasmineDone(_runDetails: RunDetails): void {
let numFailingTests = 0;
let numPassingTests = 0;
let numPendingTests = 0;
let numTodoTests = 0;
const testResults = this._testResults;
testResults.forEach(testResult => {
if (testResult.status === 'failed') {
numFailingTests++;
} else if (testResult.status === 'pending') {
numPendingTests++;
} else if (testResult.status === 'todo') {
numTodoTests++;
} else {
numPassingTests++;
}
});
const testResult = {
console: null,
failureMessage: formatResultsErrors(
testResults,
this._config,
this._globalConfig,
this._testPath,
),
numFailingTests,
numPassingTests,
numPendingTests,
numTodoTests,
perfStats: {
end: 0,
start: 0,
},
snapshot: {
added: 0,
fileDeleted: false,
matched: 0,
unchecked: 0,
unmatched: 0,
updated: 0,
},
testFilePath: this._testPath,
testResults,
};
this._resolve(testResult);
}
getResults(): Promise<TestResult> {
return this._resultsPromise;
}
private _addMissingMessageToStack(stack: string, message?: string) {
// Some errors (e.g. Angular injection error) don't prepend error.message
// to stack, instead the first line of the stack is just plain 'Error'
const ERROR_REGEX = /^Error\s*\n/;
if (
stack &&
message &&
ERROR_REGEX.test(stack) &&
stack.indexOf(message) === -1
) {
return message + stack.replace(ERROR_REGEX, '\n');
}
return stack;
}
private _extractSpecResults(
specResult: SpecResult,
ancestorTitles: Array<string>,
): AssertionResult {
const start = this._startTimes.get(specResult.id);
const duration = start ? Date.now() - start : undefined;
const status =
specResult.status === 'disabled' ? 'pending' : specResult.status;
const location = specResult.__callsite
? {
column: specResult.__callsite.getColumnNumber(),
line: specResult.__callsite.getLineNumber(),
}
: null;
const results: AssertionResult = {
ancestorTitles,
duration,
failureMessages: [],
fullName: specResult.fullName,
location,
numPassingAsserts: 0, // Jasmine2 only returns an array of failed asserts.
status,
title: specResult.description,
};
specResult.failedExpectations.forEach(failed => {
const message =
!failed.matcherName && failed.stack
? this._addMissingMessageToStack(failed.stack, failed.message)
: failed.message || '';
results.failureMessages.push(message);
});
return results;
}
} | constructor(
globalConfig: Config.GlobalConfig,
config: Config.ProjectConfig,
testPath: Config.Path,
) { | random_line_split |
reporter.ts | /**
* Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {Config} from '@jest/types';
import {AssertionResult, TestResult} from '@jest/test-result';
import {formatResultsErrors} from 'jest-message-util';
import {SpecResult} from './jasmine/Spec';
import {SuiteResult} from './jasmine/Suite';
import {Reporter, RunDetails} from './types';
type Microseconds = number;
export default class | implements Reporter {
private _testResults: Array<AssertionResult>;
private _globalConfig: Config.GlobalConfig;
private _config: Config.ProjectConfig;
private _currentSuites: Array<string>;
private _resolve: any;
private _resultsPromise: Promise<TestResult>;
private _startTimes: Map<string, Microseconds>;
private _testPath: Config.Path;
  // Capture the per-run configuration and prepare the deferred TestResult:
  // _resultsPromise is handed out by getResults() and fulfilled via _resolve
  // when jasmineDone() fires.
  constructor(
    globalConfig: Config.GlobalConfig,
    config: Config.ProjectConfig,
    testPath: Config.Path,
  ) {
    this._globalConfig = globalConfig;
    this._config = config;
    this._testPath = testPath;
    this._testResults = [];
    this._currentSuites = [];
    this._resolve = null;
    // Stash the promise's resolve callback so jasmineDone() can settle it later.
    this._resultsPromise = new Promise(resolve => (this._resolve = resolve));
    this._startTimes = new Map();
  }
jasmineStarted(_runDetails: RunDetails) {}
specStarted(spec: SpecResult) {
this._startTimes.set(spec.id, Date.now());
}
specDone(result: SpecResult): void {
this._testResults.push(
this._extractSpecResults(result, this._currentSuites.slice(0)),
);
}
suiteStarted(suite: SuiteResult): void {
this._currentSuites.push(suite.description);
}
suiteDone(_result: SuiteResult): void {
this._currentSuites.pop();
}
jasmineDone(_runDetails: RunDetails): void {
let numFailingTests = 0;
let numPassingTests = 0;
let numPendingTests = 0;
let numTodoTests = 0;
const testResults = this._testResults;
testResults.forEach(testResult => {
if (testResult.status === 'failed') {
numFailingTests++;
} else if (testResult.status === 'pending') {
numPendingTests++;
} else if (testResult.status === 'todo') {
numTodoTests++;
} else {
numPassingTests++;
}
});
const testResult = {
console: null,
failureMessage: formatResultsErrors(
testResults,
this._config,
this._globalConfig,
this._testPath,
),
numFailingTests,
numPassingTests,
numPendingTests,
numTodoTests,
perfStats: {
end: 0,
start: 0,
},
snapshot: {
added: 0,
fileDeleted: false,
matched: 0,
unchecked: 0,
unmatched: 0,
updated: 0,
},
testFilePath: this._testPath,
testResults,
};
this._resolve(testResult);
}
  // Promise of the aggregated TestResult; resolves once jasmineDone() runs.
  getResults(): Promise<TestResult> {
    return this._resultsPromise;
  }
private _addMissingMessageToStack(stack: string, message?: string) {
// Some errors (e.g. Angular injection error) don't prepend error.message
// to stack, instead the first line of the stack is just plain 'Error'
const ERROR_REGEX = /^Error\s*\n/;
if (
stack &&
message &&
ERROR_REGEX.test(stack) &&
stack.indexOf(message) === -1
) {
return message + stack.replace(ERROR_REGEX, '\n');
}
return stack;
}
  // Convert a raw Jasmine spec result into Jest's AssertionResult shape.
  private _extractSpecResults(
    specResult: SpecResult,
    ancestorTitles: Array<string>,
  ): AssertionResult {
    // Duration is only known if specStarted() recorded a start time.
    const start = this._startTimes.get(specResult.id);
    const duration = start ? Date.now() - start : undefined;
    // Jest has no 'disabled' status; report such specs as 'pending'.
    const status =
      specResult.status === 'disabled' ? 'pending' : specResult.status;
    const location = specResult.__callsite
      ? {
          column: specResult.__callsite.getColumnNumber(),
          line: specResult.__callsite.getLineNumber(),
        }
      : null;
    const results: AssertionResult = {
      ancestorTitles,
      duration,
      failureMessages: [],
      fullName: specResult.fullName,
      location,
      numPassingAsserts: 0, // Jasmine2 only returns an array of failed asserts.
      status,
      title: specResult.description,
    };
    specResult.failedExpectations.forEach(failed => {
      // Without a matcherName the failure came from a thrown error; repair
      // stacks whose first line is a bare 'Error' header.
      const message =
        !failed.matcherName && failed.stack
          ? this._addMissingMessageToStack(failed.stack, failed.message)
          : failed.message || '';
      results.failureMessages.push(message);
    });
    return results;
  }
}
| Jasmine2Reporter | identifier_name |
lib.rs | // The MIT License (MIT)
//
// Copyright (c) 2014 Jeremy Letang
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE. | #![crate_type = "dylib"]
#![allow(dead_code, non_camel_case_types, missing_doc)]
#![feature(struct_variant)]
#![feature(globs)]
#![unstable]
extern crate libc;
pub use self::window::Window;
pub use self::video_mode::VideoMode;
pub use self::window_builder::WindowBuilder;
pub use self::context_settings::ContextSettings;
#[cfg(target_os = "macos")]
#[path = "mac_os/mod.rs"]
mod imp;
// Fixed: `target_os = "wind32"` is not a valid target value, so the Windows
// backend was never compiled on any platform. The correct cfg is "windows".
#[cfg(target_os = "windows")]
#[path = "windows/mod.rs"]
mod imp;
#[cfg(target_os = "linux")]
#[path = "linux/mod.rs"]
mod imp;
mod native;
mod window;
mod video_mode;
mod window_builder;
pub mod context_settings;
pub mod window_style;
pub mod event;
pub mod inputs;
pub mod gl; |
#![crate_name = "verdigris"]
#![desc = "Multi plateform opengl windowing for Rust"]
#![license = "MIT"]
#![crate_type = "rlib"] | random_line_split |
angular-locale_en-lc.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"Sunday",
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday"
],
"ERANAMES": [
"Before Christ",
"Anno Domini"
],
"ERAS": [
"BC",
"AD"
],
"FIRSTDAYOFWEEK": 0,
"MONTH": [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
],
"SHORTDAY": [
"Sun",
"Mon",
"Tue",
"Wed",
"Thu",
"Fri",
"Sat"
],
"SHORTMONTH": [
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec"
],
"WEEKENDRANGE": [
5,
6
],
"fullDate": "EEEE, d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y h:mm:ss a",
"mediumDate": "d MMM y",
"mediumTime": "h:mm:ss a",
"short": "dd/MM/y h:mm a",
"shortDate": "dd/MM/y",
"shortTime": "h:mm a"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "$",
"DECIMAL_SEP": ".",
"GROUP_SEP": ",",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-\u00a4",
"negSuf": "",
"posPre": "\u00a4",
"posSuf": ""
}
]
},
"id": "en-lc",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) | return PLURAL_CATEGORY.OTHER;}
});
}]);
| { return PLURAL_CATEGORY.ONE; } | conditional_block |
angular-locale_en-lc.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"Sunday",
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday"
],
"ERANAMES": [
"Before Christ",
"Anno Domini"
],
"ERAS": [
"BC",
| "January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
],
"SHORTDAY": [
"Sun",
"Mon",
"Tue",
"Wed",
"Thu",
"Fri",
"Sat"
],
"SHORTMONTH": [
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec"
],
"WEEKENDRANGE": [
5,
6
],
"fullDate": "EEEE, d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y h:mm:ss a",
"mediumDate": "d MMM y",
"mediumTime": "h:mm:ss a",
"short": "dd/MM/y h:mm a",
"shortDate": "dd/MM/y",
"shortTime": "h:mm a"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "$",
"DECIMAL_SEP": ".",
"GROUP_SEP": ",",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-\u00a4",
"negSuf": "",
"posPre": "\u00a4",
"posSuf": ""
}
]
},
"id": "en-lc",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]); | "AD"
],
"FIRSTDAYOFWEEK": 0,
"MONTH": [
| random_line_split |
angular-locale_en-lc.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) |
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"Sunday",
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday"
],
"ERANAMES": [
"Before Christ",
"Anno Domini"
],
"ERAS": [
"BC",
"AD"
],
"FIRSTDAYOFWEEK": 0,
"MONTH": [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
],
"SHORTDAY": [
"Sun",
"Mon",
"Tue",
"Wed",
"Thu",
"Fri",
"Sat"
],
"SHORTMONTH": [
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec"
],
"WEEKENDRANGE": [
5,
6
],
"fullDate": "EEEE, d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y h:mm:ss a",
"mediumDate": "d MMM y",
"mediumTime": "h:mm:ss a",
"short": "dd/MM/y h:mm a",
"shortDate": "dd/MM/y",
"shortTime": "h:mm a"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "$",
"DECIMAL_SEP": ".",
"GROUP_SEP": ",",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-\u00a4",
"negSuf": "",
"posPre": "\u00a4",
"posSuf": ""
}
]
},
"id": "en-lc",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]);
| {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
} | identifier_body |
angular-locale_en-lc.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function | (n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"Sunday",
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday"
],
"ERANAMES": [
"Before Christ",
"Anno Domini"
],
"ERAS": [
"BC",
"AD"
],
"FIRSTDAYOFWEEK": 0,
"MONTH": [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
],
"SHORTDAY": [
"Sun",
"Mon",
"Tue",
"Wed",
"Thu",
"Fri",
"Sat"
],
"SHORTMONTH": [
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec"
],
"WEEKENDRANGE": [
5,
6
],
"fullDate": "EEEE, d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y h:mm:ss a",
"mediumDate": "d MMM y",
"mediumTime": "h:mm:ss a",
"short": "dd/MM/y h:mm a",
"shortDate": "dd/MM/y",
"shortTime": "h:mm a"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "$",
"DECIMAL_SEP": ".",
"GROUP_SEP": ",",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-\u00a4",
"negSuf": "",
"posPre": "\u00a4",
"posSuf": ""
}
]
},
"id": "en-lc",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]);
| getDecimals | identifier_name |
signer_client.rs | use client::{Rpc, RpcError};
use rpc::v1::types::{ConfirmationRequest, TransactionModification, U256, BlockNumber};
use serde_json::{Value as JsonValue, to_value};
use std::path::PathBuf;
use futures::{BoxFuture, Canceled};
pub struct SignerRpc {
rpc: Rpc,
}
impl SignerRpc {
pub fn new(url: &str, authfile: &PathBuf) -> Result<Self, RpcError> {
Ok(SignerRpc { rpc: Rpc::new(&url, authfile)? })
}
	/// Fetch the list of confirmation requests currently pending in the signer.
	pub fn requests_to_confirm(&mut self) ->
		BoxFuture<Result<Vec<ConfirmationRequest>, RpcError>, Canceled>
	{
		self.rpc.request("signer_requestsToConfirm", vec![])
	}
pub fn | (
&mut self,
id: U256,
new_gas: Option<U256>,
new_gas_price: Option<U256>,
new_min_block: Option<Option<BlockNumber>>,
pwd: &str
) -> BoxFuture<Result<U256, RpcError>, Canceled>
{
self.rpc.request("signer_confirmRequest", vec![
to_value(&format!("{:#x}", id)),
to_value(&TransactionModification { gas_price: new_gas_price, gas: new_gas, min_block: new_min_block }),
to_value(&pwd),
])
}
pub fn reject_request(&mut self, id: U256) ->
BoxFuture<Result<bool, RpcError>, Canceled>
{
self.rpc.request("signer_rejectRequest", vec![
JsonValue::String(format!("{:#x}", id))
])
}
}
| confirm_request | identifier_name |
signer_client.rs | use client::{Rpc, RpcError};
use rpc::v1::types::{ConfirmationRequest, TransactionModification, U256, BlockNumber};
use serde_json::{Value as JsonValue, to_value};
use std::path::PathBuf;
use futures::{BoxFuture, Canceled};
pub struct SignerRpc {
rpc: Rpc,
}
impl SignerRpc {
pub fn new(url: &str, authfile: &PathBuf) -> Result<Self, RpcError> {
Ok(SignerRpc { rpc: Rpc::new(&url, authfile)? })
}
pub fn requests_to_confirm(&mut self) ->
BoxFuture<Result<Vec<ConfirmationRequest>, RpcError>, Canceled>
{
self.rpc.request("signer_requestsToConfirm", vec![])
}
pub fn confirm_request(
&mut self,
id: U256,
new_gas: Option<U256>,
new_gas_price: Option<U256>,
new_min_block: Option<Option<BlockNumber>>,
pwd: &str
) -> BoxFuture<Result<U256, RpcError>, Canceled>
|
pub fn reject_request(&mut self, id: U256) ->
BoxFuture<Result<bool, RpcError>, Canceled>
{
self.rpc.request("signer_rejectRequest", vec![
JsonValue::String(format!("{:#x}", id))
])
}
}
| {
self.rpc.request("signer_confirmRequest", vec![
to_value(&format!("{:#x}", id)),
to_value(&TransactionModification { gas_price: new_gas_price, gas: new_gas, min_block: new_min_block }),
to_value(&pwd),
])
} | identifier_body |
signer_client.rs | use client::{Rpc, RpcError};
use rpc::v1::types::{ConfirmationRequest, TransactionModification, U256, BlockNumber};
use serde_json::{Value as JsonValue, to_value};
use std::path::PathBuf;
use futures::{BoxFuture, Canceled};
pub struct SignerRpc {
rpc: Rpc,
}
impl SignerRpc {
pub fn new(url: &str, authfile: &PathBuf) -> Result<Self, RpcError> {
Ok(SignerRpc { rpc: Rpc::new(&url, authfile)? })
}
pub fn requests_to_confirm(&mut self) ->
BoxFuture<Result<Vec<ConfirmationRequest>, RpcError>, Canceled>
{
self.rpc.request("signer_requestsToConfirm", vec![]) | id: U256,
new_gas: Option<U256>,
new_gas_price: Option<U256>,
new_min_block: Option<Option<BlockNumber>>,
pwd: &str
) -> BoxFuture<Result<U256, RpcError>, Canceled>
{
self.rpc.request("signer_confirmRequest", vec![
to_value(&format!("{:#x}", id)),
to_value(&TransactionModification { gas_price: new_gas_price, gas: new_gas, min_block: new_min_block }),
to_value(&pwd),
])
}
pub fn reject_request(&mut self, id: U256) ->
BoxFuture<Result<bool, RpcError>, Canceled>
{
self.rpc.request("signer_rejectRequest", vec![
JsonValue::String(format!("{:#x}", id))
])
}
} | }
pub fn confirm_request(
&mut self, | random_line_split |
lazytox.py | #!/usr/bin/env python3
"""
Lazy 'tox' to quickly check if branch is up to PR standards.
This is NOT a tox replacement, only a quick check during development.
"""
import os
import asyncio
import sys
import re
import shlex
from collections import namedtuple
try:
from colorlog.escape_codes import escape_codes
except ImportError:
escape_codes = None
RE_ASCII = re.compile(r"\033\[[^m]*m")
Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"])
PASS = "green"
FAIL = "bold_red"
def printc(the_color, *args):
    """Print ``args`` joined by spaces, colorized when colorlog is available.

    Raises ValueError if ``the_color`` is not a known escape-code name.
    """
    text = " ".join(args)
    if escape_codes:
        try:
            print(escape_codes[the_color] + text + escape_codes["reset"])
        except KeyError:
            # Unknown color: still show the message, then complain loudly.
            print(text)
            raise ValueError("Invalid color {}".format(the_color))
    else:
        # colorlog is not installed; fall back to plain output.
        print(text)
def validate_requirements_ok():
    """Validate requirements, returns True of ok."""
    # Imported lazily: gen_requirements_all lives next to this script.
    from gen_requirements_all import main as req_main

    exit_code = req_main(True)
    return exit_code == 0
async def read_stream(stream, display):
    """Drain *stream* line by line until EOF.

    Each line is echoed (decoded) through *display* as it arrives; the raw
    bytes of every line are returned joined together.
    """
    captured = []
    line = await stream.readline()
    while line:
        captured.append(line)
        display(line.decode())  # assume it doesn't block
        line = await stream.readline()
    return b"".join(captured)
async def async_exec(*args, display=False):
    """Run *args* as a subprocess and return ``(exit_code, stdout_text)``.

    By default stderr is merged into the captured stdout.  When *display* is
    True the child's stdout and stderr are echoed live while being captured.
    Raises FileNotFoundError if the executable is missing.
    """
    argsp = []
    for arg in args:
        if os.path.isfile(arg):
            argsp.append("\\\n {}".format(shlex.quote(arg)))
        else:
            argsp.append(shlex.quote(arg))
    printc("cyan", *argsp)
    try:
        # NOTE: the explicit ``"loop": LOOP`` kwarg was removed here.  The
        # ``loop`` parameter of asyncio APIs was deprecated in Python 3.8 and
        # removed in 3.10 (it raised TypeError); the running event loop (LOOP,
        # driving this coroutine) is picked up automatically.
        kwargs = {
            "stdout": asyncio.subprocess.PIPE,
            "stderr": asyncio.subprocess.STDOUT,
        }
        if display:
            # Keep stderr separate so it can be streamed alongside stdout.
            kwargs["stderr"] = asyncio.subprocess.PIPE
        proc = await asyncio.create_subprocess_exec(*args, **kwargs)
    except FileNotFoundError as err:
        printc(
            FAIL,
            "Could not execute {}. Did you install test requirements?".format(args[0]),
        )
        raise err
    if not display:
        # Read stdout (with stderr merged in) into the log.
        stdout, _ = await proc.communicate()
    else:
        # read child's stdout/stderr concurrently (capture and display)
        stdout, _ = await asyncio.gather(
            read_stream(proc.stdout, sys.stdout.write),
            read_stream(proc.stderr, sys.stderr.write),
        )
    exit_code = await proc.wait()
    stdout = stdout.decode("utf-8")
    return exit_code, stdout
async def git():
    """Return the changed files relative to upstream/dev.

    A ``-- file1 file2 ...`` command line overrides git entirely.
    """
    argv = sys.argv
    if len(argv) > 2 and argv[1] == "--":
        return argv[2:]
    _, base_log = await async_exec("git", "merge-base", "upstream/dev", "HEAD")
    merge_base = base_log.splitlines()[0]
    _, diff_log = await async_exec("git", "diff", merge_base, "--name-only")
    return diff_log.splitlines()
async def pylint(files):
    """Run pylint over *files* and parse its output into Error tuples.

    Errors under tests/ are marked skippable (lint is advisory there).
    """
    _, log = await async_exec("pylint", "-f", "parseable", "--persistent=n", *files)
    errors = []
    for raw in log.splitlines():
        parts = raw.split(":")
        if len(parts) >= 3:
            fname = parts[0].replace("\\", "/")
            errors.append(
                Error(fname, parts[1], "", parts[2].strip(), fname.startswith("tests/"))
            )
    return errors
async def flake8(files):
    """Run flake8 over *files* and parse its output into Error tuples."""
    _, log = await async_exec("flake8", "--doctests", *files)
    errors = []
    for raw in log.splitlines():
        parts = raw.split(":")
        if len(parts) >= 4:
            fname = parts[0].replace("\\", "/")
            # flake8 errors are never skippable.
            errors.append(Error(fname, parts[1], parts[2], parts[3].strip(), False))
    return errors
async def lint(files):
    """Run flake8 and pylint concurrently on the existing *files*.

    Returns True when no non-skippable errors were reported.
    """
    existing = [file for file in files if os.path.isfile(file)]
    flake_errors, pylint_errors = await asyncio.gather(
        flake8(existing), pylint(existing)
    )
    errors = sorted(flake_errors + pylint_errors, key=lambda err: err.file)
    if errors:
        print("Pylint & Flake8 errors:")
    else:
        printc(PASS, "Pylint and Flake8 passed")
    lint_ok = True
    for err in errors:
        err_msg = "{} {}:{} {}".format(err.file, err.line, err.col, err.msg)
        # tests/* does not have to pass lint
        if err.skip:
            print(err_msg)
        else:
            printc(FAIL, err_msg)
            lint_ok = False
    return lint_ok
| files = await git()
if not files:
print(
"No changed files found. Please ensure you have added your "
"changes with git add & git commit"
)
return
pyfile = re.compile(r".+\.py$")
pyfiles = [file for file in files if pyfile.match(file)]
print("=============================")
printc("bold", "CHANGED FILES:\n", "\n ".join(pyfiles))
print("=============================")
skip_lint = len(sys.argv) > 1 and sys.argv[1] == "--skiplint"
if skip_lint:
printc(FAIL, "LINT DISABLED")
elif not await lint(pyfiles):
printc(FAIL, "Please fix your lint issues before continuing")
return
test_files = set()
gen_req = False
for fname in pyfiles:
if fname.startswith("homeassistant/components/"):
gen_req = True # requirements script for components
# Find test files...
if fname.startswith("tests/"):
if "/test_" in fname and os.path.isfile(fname):
# All test helpers should be excluded
test_files.add(fname)
else:
parts = fname.split("/")
parts[0] = "tests"
if parts[-1] == "__init__.py":
parts[-1] = "test_init.py"
elif parts[-1] == "__main__.py":
parts[-1] = "test_main.py"
else:
parts[-1] = "test_" + parts[-1]
fname = "/".join(parts)
if os.path.isfile(fname):
test_files.add(fname)
if gen_req:
print("=============================")
if validate_requirements_ok():
printc(PASS, "script/gen_requirements.py passed")
else:
printc(FAIL, "Please run script/gen_requirements.py")
return
print("=============================")
if not test_files:
print("No test files identified, ideally you should run tox")
return
code, _ = await async_exec(
"pytest", "-vv", "--force-sugar", "--", *test_files, display=True
)
print("=============================")
if code == 0:
printc(PASS, "Yay! This will most likely pass tox")
else:
printc(FAIL, "Tests not passing")
if skip_lint:
printc(FAIL, "LINT DISABLED")
if __name__ == "__main__":
LOOP = (
asyncio.ProactorEventLoop()
if sys.platform == "win32"
else asyncio.get_event_loop()
)
try:
LOOP.run_until_complete(main())
except (FileNotFoundError, KeyboardInterrupt):
pass
finally:
LOOP.close() | async def main():
"""Run the main loop."""
# Ensure we are in the homeassistant root
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
| random_line_split |
lazytox.py | #!/usr/bin/env python3
"""
Lazy 'tox' to quickly check if branch is up to PR standards.
This is NOT a tox replacement, only a quick check during development.
"""
import os
import asyncio
import sys
import re
import shlex
from collections import namedtuple
try:
from colorlog.escape_codes import escape_codes
except ImportError:
escape_codes = None
RE_ASCII = re.compile(r"\033\[[^m]*m")
Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"])
PASS = "green"
FAIL = "bold_red"
def printc(the_color, *args):
"""Color print helper."""
msg = " ".join(args)
if not escape_codes:
print(msg)
return
try:
print(escape_codes[the_color] + msg + escape_codes["reset"])
except KeyError:
print(msg)
raise ValueError("Invalid color {}".format(the_color))
def validate_requirements_ok():
"""Validate requirements, returns True of ok."""
from gen_requirements_all import main as req_main
return req_main(True) == 0
async def | (stream, display):
"""Read from stream line by line until EOF, display, and capture lines."""
output = []
while True:
line = await stream.readline()
if not line:
break
output.append(line)
display(line.decode()) # assume it doesn't block
return b"".join(output)
async def async_exec(*args, display=False):
"""Execute, return code & log."""
argsp = []
for arg in args:
if os.path.isfile(arg):
argsp.append("\\\n {}".format(shlex.quote(arg)))
else:
argsp.append(shlex.quote(arg))
printc("cyan", *argsp)
try:
kwargs = {
"loop": LOOP,
"stdout": asyncio.subprocess.PIPE,
"stderr": asyncio.subprocess.STDOUT,
}
if display:
kwargs["stderr"] = asyncio.subprocess.PIPE
proc = await asyncio.create_subprocess_exec(*args, **kwargs)
except FileNotFoundError as err:
printc(
FAIL,
"Could not execute {}. Did you install test requirements?".format(args[0]),
)
raise err
if not display:
# Readin stdout into log
stdout, _ = await proc.communicate()
else:
# read child's stdout/stderr concurrently (capture and display)
stdout, _ = await asyncio.gather(
read_stream(proc.stdout, sys.stdout.write),
read_stream(proc.stderr, sys.stderr.write),
)
exit_code = await proc.wait()
stdout = stdout.decode("utf-8")
return exit_code, stdout
async def git():
"""Exec git."""
if len(sys.argv) > 2 and sys.argv[1] == "--":
return sys.argv[2:]
_, log = await async_exec("git", "merge-base", "upstream/dev", "HEAD")
merge_base = log.splitlines()[0]
_, log = await async_exec("git", "diff", merge_base, "--name-only")
return log.splitlines()
async def pylint(files):
"""Exec pylint."""
_, log = await async_exec("pylint", "-f", "parseable", "--persistent=n", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 3:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], "", line[2].strip(), _fn.startswith("tests/")))
return res
async def flake8(files):
"""Exec flake8."""
_, log = await async_exec("flake8", "--doctests", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 4:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], line[2], line[3].strip(), False))
return res
async def lint(files):
"""Perform lint."""
files = [file for file in files if os.path.isfile(file)]
fres, pres = await asyncio.gather(flake8(files), pylint(files))
res = fres + pres
res.sort(key=lambda item: item.file)
if res:
print("Pylint & Flake8 errors:")
else:
printc(PASS, "Pylint and Flake8 passed")
lint_ok = True
for err in res:
err_msg = "{} {}:{} {}".format(err.file, err.line, err.col, err.msg)
# tests/* does not have to pass lint
if err.skip:
print(err_msg)
else:
printc(FAIL, err_msg)
lint_ok = False
return lint_ok
async def main():
"""Run the main loop."""
# Ensure we are in the homeassistant root
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
files = await git()
if not files:
print(
"No changed files found. Please ensure you have added your "
"changes with git add & git commit"
)
return
pyfile = re.compile(r".+\.py$")
pyfiles = [file for file in files if pyfile.match(file)]
print("=============================")
printc("bold", "CHANGED FILES:\n", "\n ".join(pyfiles))
print("=============================")
skip_lint = len(sys.argv) > 1 and sys.argv[1] == "--skiplint"
if skip_lint:
printc(FAIL, "LINT DISABLED")
elif not await lint(pyfiles):
printc(FAIL, "Please fix your lint issues before continuing")
return
test_files = set()
gen_req = False
for fname in pyfiles:
if fname.startswith("homeassistant/components/"):
gen_req = True # requirements script for components
# Find test files...
if fname.startswith("tests/"):
if "/test_" in fname and os.path.isfile(fname):
# All test helpers should be excluded
test_files.add(fname)
else:
parts = fname.split("/")
parts[0] = "tests"
if parts[-1] == "__init__.py":
parts[-1] = "test_init.py"
elif parts[-1] == "__main__.py":
parts[-1] = "test_main.py"
else:
parts[-1] = "test_" + parts[-1]
fname = "/".join(parts)
if os.path.isfile(fname):
test_files.add(fname)
if gen_req:
print("=============================")
if validate_requirements_ok():
printc(PASS, "script/gen_requirements.py passed")
else:
printc(FAIL, "Please run script/gen_requirements.py")
return
print("=============================")
if not test_files:
print("No test files identified, ideally you should run tox")
return
code, _ = await async_exec(
"pytest", "-vv", "--force-sugar", "--", *test_files, display=True
)
print("=============================")
if code == 0:
printc(PASS, "Yay! This will most likely pass tox")
else:
printc(FAIL, "Tests not passing")
if skip_lint:
printc(FAIL, "LINT DISABLED")
if __name__ == "__main__":
LOOP = (
asyncio.ProactorEventLoop()
if sys.platform == "win32"
else asyncio.get_event_loop()
)
try:
LOOP.run_until_complete(main())
except (FileNotFoundError, KeyboardInterrupt):
pass
finally:
LOOP.close()
| read_stream | identifier_name |
lazytox.py | #!/usr/bin/env python3
"""
Lazy 'tox' to quickly check if branch is up to PR standards.
This is NOT a tox replacement, only a quick check during development.
"""
import os
import asyncio
import sys
import re
import shlex
from collections import namedtuple
try:
from colorlog.escape_codes import escape_codes
except ImportError:
escape_codes = None
RE_ASCII = re.compile(r"\033\[[^m]*m")
Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"])
PASS = "green"
FAIL = "bold_red"
def printc(the_color, *args):
"""Color print helper."""
msg = " ".join(args)
if not escape_codes:
print(msg)
return
try:
print(escape_codes[the_color] + msg + escape_codes["reset"])
except KeyError:
print(msg)
raise ValueError("Invalid color {}".format(the_color))
def validate_requirements_ok():
"""Validate requirements, returns True of ok."""
from gen_requirements_all import main as req_main
return req_main(True) == 0
async def read_stream(stream, display):
"""Read from stream line by line until EOF, display, and capture lines."""
output = []
while True:
line = await stream.readline()
if not line:
break
output.append(line)
display(line.decode()) # assume it doesn't block
return b"".join(output)
async def async_exec(*args, display=False):
"""Execute, return code & log."""
argsp = []
for arg in args:
if os.path.isfile(arg):
argsp.append("\\\n {}".format(shlex.quote(arg)))
else:
argsp.append(shlex.quote(arg))
printc("cyan", *argsp)
try:
kwargs = {
"loop": LOOP,
"stdout": asyncio.subprocess.PIPE,
"stderr": asyncio.subprocess.STDOUT,
}
if display:
kwargs["stderr"] = asyncio.subprocess.PIPE
proc = await asyncio.create_subprocess_exec(*args, **kwargs)
except FileNotFoundError as err:
printc(
FAIL,
"Could not execute {}. Did you install test requirements?".format(args[0]),
)
raise err
if not display:
# Readin stdout into log
stdout, _ = await proc.communicate()
else:
# read child's stdout/stderr concurrently (capture and display)
stdout, _ = await asyncio.gather(
read_stream(proc.stdout, sys.stdout.write),
read_stream(proc.stderr, sys.stderr.write),
)
exit_code = await proc.wait()
stdout = stdout.decode("utf-8")
return exit_code, stdout
async def git():
"""Exec git."""
if len(sys.argv) > 2 and sys.argv[1] == "--":
return sys.argv[2:]
_, log = await async_exec("git", "merge-base", "upstream/dev", "HEAD")
merge_base = log.splitlines()[0]
_, log = await async_exec("git", "diff", merge_base, "--name-only")
return log.splitlines()
async def pylint(files):
"""Exec pylint."""
_, log = await async_exec("pylint", "-f", "parseable", "--persistent=n", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 3:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], "", line[2].strip(), _fn.startswith("tests/")))
return res
async def flake8(files):
"""Exec flake8."""
_, log = await async_exec("flake8", "--doctests", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 4:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], line[2], line[3].strip(), False))
return res
async def lint(files):
"""Perform lint."""
files = [file for file in files if os.path.isfile(file)]
fres, pres = await asyncio.gather(flake8(files), pylint(files))
res = fres + pres
res.sort(key=lambda item: item.file)
if res:
print("Pylint & Flake8 errors:")
else:
|
lint_ok = True
for err in res:
err_msg = "{} {}:{} {}".format(err.file, err.line, err.col, err.msg)
# tests/* does not have to pass lint
if err.skip:
print(err_msg)
else:
printc(FAIL, err_msg)
lint_ok = False
return lint_ok
async def main():
"""Run the main loop."""
# Ensure we are in the homeassistant root
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
files = await git()
if not files:
print(
"No changed files found. Please ensure you have added your "
"changes with git add & git commit"
)
return
pyfile = re.compile(r".+\.py$")
pyfiles = [file for file in files if pyfile.match(file)]
print("=============================")
printc("bold", "CHANGED FILES:\n", "\n ".join(pyfiles))
print("=============================")
skip_lint = len(sys.argv) > 1 and sys.argv[1] == "--skiplint"
if skip_lint:
printc(FAIL, "LINT DISABLED")
elif not await lint(pyfiles):
printc(FAIL, "Please fix your lint issues before continuing")
return
test_files = set()
gen_req = False
for fname in pyfiles:
if fname.startswith("homeassistant/components/"):
gen_req = True # requirements script for components
# Find test files...
if fname.startswith("tests/"):
if "/test_" in fname and os.path.isfile(fname):
# All test helpers should be excluded
test_files.add(fname)
else:
parts = fname.split("/")
parts[0] = "tests"
if parts[-1] == "__init__.py":
parts[-1] = "test_init.py"
elif parts[-1] == "__main__.py":
parts[-1] = "test_main.py"
else:
parts[-1] = "test_" + parts[-1]
fname = "/".join(parts)
if os.path.isfile(fname):
test_files.add(fname)
if gen_req:
print("=============================")
if validate_requirements_ok():
printc(PASS, "script/gen_requirements.py passed")
else:
printc(FAIL, "Please run script/gen_requirements.py")
return
print("=============================")
if not test_files:
print("No test files identified, ideally you should run tox")
return
code, _ = await async_exec(
"pytest", "-vv", "--force-sugar", "--", *test_files, display=True
)
print("=============================")
if code == 0:
printc(PASS, "Yay! This will most likely pass tox")
else:
printc(FAIL, "Tests not passing")
if skip_lint:
printc(FAIL, "LINT DISABLED")
if __name__ == "__main__":
LOOP = (
asyncio.ProactorEventLoop()
if sys.platform == "win32"
else asyncio.get_event_loop()
)
try:
LOOP.run_until_complete(main())
except (FileNotFoundError, KeyboardInterrupt):
pass
finally:
LOOP.close()
| printc(PASS, "Pylint and Flake8 passed") | conditional_block |
lazytox.py | #!/usr/bin/env python3
"""
Lazy 'tox' to quickly check if branch is up to PR standards.
This is NOT a tox replacement, only a quick check during development.
"""
import os
import asyncio
import sys
import re
import shlex
from collections import namedtuple
try:
from colorlog.escape_codes import escape_codes
except ImportError:
escape_codes = None
RE_ASCII = re.compile(r"\033\[[^m]*m")
Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"])
PASS = "green"
FAIL = "bold_red"
def printc(the_color, *args):
"""Color print helper."""
msg = " ".join(args)
if not escape_codes:
print(msg)
return
try:
print(escape_codes[the_color] + msg + escape_codes["reset"])
except KeyError:
print(msg)
raise ValueError("Invalid color {}".format(the_color))
def validate_requirements_ok():
"""Validate requirements, returns True of ok."""
from gen_requirements_all import main as req_main
return req_main(True) == 0
async def read_stream(stream, display):
"""Read from stream line by line until EOF, display, and capture lines."""
output = []
while True:
line = await stream.readline()
if not line:
break
output.append(line)
display(line.decode()) # assume it doesn't block
return b"".join(output)
async def async_exec(*args, display=False):
"""Execute, return code & log."""
argsp = []
for arg in args:
if os.path.isfile(arg):
argsp.append("\\\n {}".format(shlex.quote(arg)))
else:
argsp.append(shlex.quote(arg))
printc("cyan", *argsp)
try:
kwargs = {
"loop": LOOP,
"stdout": asyncio.subprocess.PIPE,
"stderr": asyncio.subprocess.STDOUT,
}
if display:
kwargs["stderr"] = asyncio.subprocess.PIPE
proc = await asyncio.create_subprocess_exec(*args, **kwargs)
except FileNotFoundError as err:
printc(
FAIL,
"Could not execute {}. Did you install test requirements?".format(args[0]),
)
raise err
if not display:
# Readin stdout into log
stdout, _ = await proc.communicate()
else:
# read child's stdout/stderr concurrently (capture and display)
stdout, _ = await asyncio.gather(
read_stream(proc.stdout, sys.stdout.write),
read_stream(proc.stderr, sys.stderr.write),
)
exit_code = await proc.wait()
stdout = stdout.decode("utf-8")
return exit_code, stdout
async def git():
"""Exec git."""
if len(sys.argv) > 2 and sys.argv[1] == "--":
return sys.argv[2:]
_, log = await async_exec("git", "merge-base", "upstream/dev", "HEAD")
merge_base = log.splitlines()[0]
_, log = await async_exec("git", "diff", merge_base, "--name-only")
return log.splitlines()
async def pylint(files):
|
async def flake8(files):
"""Exec flake8."""
_, log = await async_exec("flake8", "--doctests", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 4:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], line[2], line[3].strip(), False))
return res
async def lint(files):
"""Perform lint."""
files = [file for file in files if os.path.isfile(file)]
fres, pres = await asyncio.gather(flake8(files), pylint(files))
res = fres + pres
res.sort(key=lambda item: item.file)
if res:
print("Pylint & Flake8 errors:")
else:
printc(PASS, "Pylint and Flake8 passed")
lint_ok = True
for err in res:
err_msg = "{} {}:{} {}".format(err.file, err.line, err.col, err.msg)
# tests/* does not have to pass lint
if err.skip:
print(err_msg)
else:
printc(FAIL, err_msg)
lint_ok = False
return lint_ok
async def main():
"""Run the main loop."""
# Ensure we are in the homeassistant root
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
files = await git()
if not files:
print(
"No changed files found. Please ensure you have added your "
"changes with git add & git commit"
)
return
pyfile = re.compile(r".+\.py$")
pyfiles = [file for file in files if pyfile.match(file)]
print("=============================")
printc("bold", "CHANGED FILES:\n", "\n ".join(pyfiles))
print("=============================")
skip_lint = len(sys.argv) > 1 and sys.argv[1] == "--skiplint"
if skip_lint:
printc(FAIL, "LINT DISABLED")
elif not await lint(pyfiles):
printc(FAIL, "Please fix your lint issues before continuing")
return
test_files = set()
gen_req = False
for fname in pyfiles:
if fname.startswith("homeassistant/components/"):
gen_req = True # requirements script for components
# Find test files...
if fname.startswith("tests/"):
if "/test_" in fname and os.path.isfile(fname):
# All test helpers should be excluded
test_files.add(fname)
else:
parts = fname.split("/")
parts[0] = "tests"
if parts[-1] == "__init__.py":
parts[-1] = "test_init.py"
elif parts[-1] == "__main__.py":
parts[-1] = "test_main.py"
else:
parts[-1] = "test_" + parts[-1]
fname = "/".join(parts)
if os.path.isfile(fname):
test_files.add(fname)
if gen_req:
print("=============================")
if validate_requirements_ok():
printc(PASS, "script/gen_requirements.py passed")
else:
printc(FAIL, "Please run script/gen_requirements.py")
return
print("=============================")
if not test_files:
print("No test files identified, ideally you should run tox")
return
code, _ = await async_exec(
"pytest", "-vv", "--force-sugar", "--", *test_files, display=True
)
print("=============================")
if code == 0:
printc(PASS, "Yay! This will most likely pass tox")
else:
printc(FAIL, "Tests not passing")
if skip_lint:
printc(FAIL, "LINT DISABLED")
if __name__ == "__main__":
LOOP = (
asyncio.ProactorEventLoop()
if sys.platform == "win32"
else asyncio.get_event_loop()
)
try:
LOOP.run_until_complete(main())
except (FileNotFoundError, KeyboardInterrupt):
pass
finally:
LOOP.close()
| """Exec pylint."""
_, log = await async_exec("pylint", "-f", "parseable", "--persistent=n", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 3:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], "", line[2].strip(), _fn.startswith("tests/")))
return res | identifier_body |
flat-map-observable-scalar.js | var oldFlatMapWithCurrentThreadScheduler = RxOld.Observable.range(0, 25, RxOld.Scheduler.currentThread).flatMap(RxOld.Observable.return(0, RxOld.Scheduler.currentThread));
var newFlatMapWithCurrentThreadScheduler = RxNew.Observable.range(0, 25, RxNew.Scheduler.immediate).flatMapTo(RxNew.Observable.return(0, RxNew.Scheduler.immediate));
return suite
.add('old flatMap (scalar Observable) with current thread scheduler', function () {
oldFlatMapWithCurrentThreadScheduler.subscribe(_next, _error, _complete);
})
.add('new flatMap (scalar Observable) with current thread scheduler', function () {
newFlatMapWithCurrentThreadScheduler.subscribe(_next, _error, _complete);
});
function _next(x) { }
function _error(e){ }
function _complete(){ }
}; | var RxOld = require("rx");
var RxNew = require("../../../../index");
module.exports = function (suite) {
| random_line_split | |
flat-map-observable-scalar.js | var RxOld = require("rx");
var RxNew = require("../../../../index");
module.exports = function (suite) {
var oldFlatMapWithCurrentThreadScheduler = RxOld.Observable.range(0, 25, RxOld.Scheduler.currentThread).flatMap(RxOld.Observable.return(0, RxOld.Scheduler.currentThread));
var newFlatMapWithCurrentThreadScheduler = RxNew.Observable.range(0, 25, RxNew.Scheduler.immediate).flatMapTo(RxNew.Observable.return(0, RxNew.Scheduler.immediate));
return suite
.add('old flatMap (scalar Observable) with current thread scheduler', function () {
oldFlatMapWithCurrentThreadScheduler.subscribe(_next, _error, _complete);
})
.add('new flatMap (scalar Observable) with current thread scheduler', function () {
newFlatMapWithCurrentThreadScheduler.subscribe(_next, _error, _complete);
});
function _next(x) { }
function _error(e){ }
function | (){ }
}; | _complete | identifier_name |
flat-map-observable-scalar.js | var RxOld = require("rx");
var RxNew = require("../../../../index");
module.exports = function (suite) {
var oldFlatMapWithCurrentThreadScheduler = RxOld.Observable.range(0, 25, RxOld.Scheduler.currentThread).flatMap(RxOld.Observable.return(0, RxOld.Scheduler.currentThread));
var newFlatMapWithCurrentThreadScheduler = RxNew.Observable.range(0, 25, RxNew.Scheduler.immediate).flatMapTo(RxNew.Observable.return(0, RxNew.Scheduler.immediate));
return suite
.add('old flatMap (scalar Observable) with current thread scheduler', function () {
oldFlatMapWithCurrentThreadScheduler.subscribe(_next, _error, _complete);
})
.add('new flatMap (scalar Observable) with current thread scheduler', function () {
newFlatMapWithCurrentThreadScheduler.subscribe(_next, _error, _complete);
});
function _next(x) { }
function _error(e) |
function _complete(){ }
}; | { } | identifier_body |
__init__.py | # -*- coding: utf-8 -*-
import json
from vilya.libs import api_errors
from vilya.models.project import CodeDoubanProject
from vilya.views.api.utils import RestAPIUI, api_require_login, jsonize
from vilya.views.api.repos.product import ProductUI
from vilya.views.api.repos.summary import SummaryUI
from vilya.views.api.repos.intern import InternUI
from vilya.views.api.repos.default_branch import DefaultBranchUI
from vilya.views.api.repos.commits import CommitsUI
from vilya.views.api.repos.post_receive import PostReceiveUI
from vilya.views.api.repos.git2svn import GIT2SVNUI
from vilya.views.api.repos.svn2git import SVN2GITUI
from vilya.views.api.repos.pulls import PullsUI
from vilya.views.api.repos.issues import IssuesUI
from vilya.views.api.repos.contents import ContentsUI
from vilya.views.api.repos.push import PushUI
from vilya.views.api.repos.watchers import WatchersUI
_q_exports = []
def _q_lookup(request, name):
return RepositoryUI(name)
def _q_access(request):
request.response.set_content_type('application/json; charset=utf-8')
class RepositoryUI(object):
_q_exports = [
'lang_stats', 'forks', 'pulls', 'summary',
'committers', 'name', 'owner', 'product',
'intern_banned', 'default_branch', 'commits',
'post_receive', 'svn2git', 'git2svn', 'issues',
'contents', 'can_push', 'watchers'
]
def __init__(self, name):
self.name = name
self.repo = CodeDoubanProject.get_by_name(self.name)
def __call__(self, request):
return self._q_index(request)
@jsonize
def _q_index(self, request):
if not self.repo:
raise api_errors.NotFoundError("repo")
return {}
def _q_access(self, request):
self.method = request.method
def _q_lookup(self, request, part):
name = "%s/%s" % (self.name, part)
if not CodeDoubanProject.exists(name):
raise api_errors.NotFoundError("repo") | def lang_stats(self, request):
if not self.repo:
raise api_errors.NotFoundError
if self.method == 'POST':
language = request.get_form_var('language', '')
languages = request.get_form_var('languages', '[]')
try:
languages = json.loads(languages)
except ValueError:
raise api_errors.NotJSONError
self.repo.language = language
self.repo.languages = languages
return {}
else:
return dict(language=self.repo.language,
languages=self.repo.languages)
@property
def forks(self):
return ForksUI(self.repo)
@property
def pulls(self):
return PullsUI(self.repo)
@property
def product(self):
return ProductUI(self.repo)
@property
def summary(self):
return SummaryUI(self.repo)
@property
def intern_banned(self):
return InternUI(self.repo)
@property
def can_push(self):
return PushUI(self.repo)
@property
def default_branch(self):
return DefaultBranchUI(self.repo)
@property
def commits(self):
return CommitsUI(self.repo)
@property
def post_receive(self):
return PostReceiveUI(self.repo)
@property
def svn2git(self):
return SVN2GITUI(self.repo)
@property
def git2svn(self):
return GIT2SVNUI(self.repo)
@property
def issues(self):
return IssuesUI(self.repo)
@property
def contents(self):
return ContentsUI(self.repo)
@property
def watchers(self):
return WatchersUI(self.repo)
class ForksUI(RestAPIUI):
_q_exports = []
_q_methods = ['get', 'post']
def __init__(self, repo):
self.repo = repo
@api_require_login
def post(self, request):
repo = self.repo
fork_repo = repo.new_fork(self.user.name)
if not fork_repo:
# FIXME: repository exists
return []
return fork_repo.as_dict()
def get(self, request):
fork_repos = self.repo.get_forked_projects()
return [project.get_info(without_commits=True)
for project in fork_repos] | return RepositoryUI(name)
@jsonize | random_line_split |
__init__.py | # -*- coding: utf-8 -*-
import json
from vilya.libs import api_errors
from vilya.models.project import CodeDoubanProject
from vilya.views.api.utils import RestAPIUI, api_require_login, jsonize
from vilya.views.api.repos.product import ProductUI
from vilya.views.api.repos.summary import SummaryUI
from vilya.views.api.repos.intern import InternUI
from vilya.views.api.repos.default_branch import DefaultBranchUI
from vilya.views.api.repos.commits import CommitsUI
from vilya.views.api.repos.post_receive import PostReceiveUI
from vilya.views.api.repos.git2svn import GIT2SVNUI
from vilya.views.api.repos.svn2git import SVN2GITUI
from vilya.views.api.repos.pulls import PullsUI
from vilya.views.api.repos.issues import IssuesUI
from vilya.views.api.repos.contents import ContentsUI
from vilya.views.api.repos.push import PushUI
from vilya.views.api.repos.watchers import WatchersUI
_q_exports = []
def _q_lookup(request, name):
return RepositoryUI(name)
def _q_access(request):
request.response.set_content_type('application/json; charset=utf-8')
class RepositoryUI(object):
_q_exports = [
'lang_stats', 'forks', 'pulls', 'summary',
'committers', 'name', 'owner', 'product',
'intern_banned', 'default_branch', 'commits',
'post_receive', 'svn2git', 'git2svn', 'issues',
'contents', 'can_push', 'watchers'
]
def __init__(self, name):
self.name = name
self.repo = CodeDoubanProject.get_by_name(self.name)
def __call__(self, request):
return self._q_index(request)
@jsonize
def _q_index(self, request):
if not self.repo:
raise api_errors.NotFoundError("repo")
return {}
def _q_access(self, request):
self.method = request.method
def _q_lookup(self, request, part):
name = "%s/%s" % (self.name, part)
if not CodeDoubanProject.exists(name):
raise api_errors.NotFoundError("repo")
return RepositoryUI(name)
@jsonize
def lang_stats(self, request):
if not self.repo:
raise api_errors.NotFoundError
if self.method == 'POST':
language = request.get_form_var('language', '')
languages = request.get_form_var('languages', '[]')
try:
languages = json.loads(languages)
except ValueError:
raise api_errors.NotJSONError
self.repo.language = language
self.repo.languages = languages
return {}
else:
return dict(language=self.repo.language,
languages=self.repo.languages)
@property
def forks(self):
return ForksUI(self.repo)
@property
def pulls(self):
return PullsUI(self.repo)
@property
def product(self):
return ProductUI(self.repo)
@property
def summary(self):
return SummaryUI(self.repo)
@property
def intern_banned(self):
return InternUI(self.repo)
@property
def can_push(self):
return PushUI(self.repo)
@property
def default_branch(self):
return DefaultBranchUI(self.repo)
@property
def | (self):
return CommitsUI(self.repo)
@property
def post_receive(self):
return PostReceiveUI(self.repo)
@property
def svn2git(self):
return SVN2GITUI(self.repo)
@property
def git2svn(self):
return GIT2SVNUI(self.repo)
@property
def issues(self):
return IssuesUI(self.repo)
@property
def contents(self):
return ContentsUI(self.repo)
@property
def watchers(self):
return WatchersUI(self.repo)
class ForksUI(RestAPIUI):
_q_exports = []
_q_methods = ['get', 'post']
def __init__(self, repo):
self.repo = repo
@api_require_login
def post(self, request):
repo = self.repo
fork_repo = repo.new_fork(self.user.name)
if not fork_repo:
# FIXME: repository exists
return []
return fork_repo.as_dict()
def get(self, request):
fork_repos = self.repo.get_forked_projects()
return [project.get_info(without_commits=True)
for project in fork_repos]
| commits | identifier_name |
__init__.py | # -*- coding: utf-8 -*-
import json
from vilya.libs import api_errors
from vilya.models.project import CodeDoubanProject
from vilya.views.api.utils import RestAPIUI, api_require_login, jsonize
from vilya.views.api.repos.product import ProductUI
from vilya.views.api.repos.summary import SummaryUI
from vilya.views.api.repos.intern import InternUI
from vilya.views.api.repos.default_branch import DefaultBranchUI
from vilya.views.api.repos.commits import CommitsUI
from vilya.views.api.repos.post_receive import PostReceiveUI
from vilya.views.api.repos.git2svn import GIT2SVNUI
from vilya.views.api.repos.svn2git import SVN2GITUI
from vilya.views.api.repos.pulls import PullsUI
from vilya.views.api.repos.issues import IssuesUI
from vilya.views.api.repos.contents import ContentsUI
from vilya.views.api.repos.push import PushUI
from vilya.views.api.repos.watchers import WatchersUI
_q_exports = []
def _q_lookup(request, name):
return RepositoryUI(name)
def _q_access(request):
request.response.set_content_type('application/json; charset=utf-8')
class RepositoryUI(object):
_q_exports = [
'lang_stats', 'forks', 'pulls', 'summary',
'committers', 'name', 'owner', 'product',
'intern_banned', 'default_branch', 'commits',
'post_receive', 'svn2git', 'git2svn', 'issues',
'contents', 'can_push', 'watchers'
]
def __init__(self, name):
self.name = name
self.repo = CodeDoubanProject.get_by_name(self.name)
def __call__(self, request):
return self._q_index(request)
@jsonize
def _q_index(self, request):
if not self.repo:
raise api_errors.NotFoundError("repo")
return {}
def _q_access(self, request):
self.method = request.method
def _q_lookup(self, request, part):
name = "%s/%s" % (self.name, part)
if not CodeDoubanProject.exists(name):
raise api_errors.NotFoundError("repo")
return RepositoryUI(name)
@jsonize
def lang_stats(self, request):
if not self.repo:
raise api_errors.NotFoundError
if self.method == 'POST':
|
else:
return dict(language=self.repo.language,
languages=self.repo.languages)
@property
def forks(self):
return ForksUI(self.repo)
@property
def pulls(self):
return PullsUI(self.repo)
@property
def product(self):
return ProductUI(self.repo)
@property
def summary(self):
return SummaryUI(self.repo)
@property
def intern_banned(self):
return InternUI(self.repo)
@property
def can_push(self):
return PushUI(self.repo)
@property
def default_branch(self):
return DefaultBranchUI(self.repo)
@property
def commits(self):
return CommitsUI(self.repo)
@property
def post_receive(self):
return PostReceiveUI(self.repo)
@property
def svn2git(self):
return SVN2GITUI(self.repo)
@property
def git2svn(self):
return GIT2SVNUI(self.repo)
@property
def issues(self):
return IssuesUI(self.repo)
@property
def contents(self):
return ContentsUI(self.repo)
@property
def watchers(self):
return WatchersUI(self.repo)
class ForksUI(RestAPIUI):
_q_exports = []
_q_methods = ['get', 'post']
def __init__(self, repo):
self.repo = repo
@api_require_login
def post(self, request):
repo = self.repo
fork_repo = repo.new_fork(self.user.name)
if not fork_repo:
# FIXME: repository exists
return []
return fork_repo.as_dict()
def get(self, request):
fork_repos = self.repo.get_forked_projects()
return [project.get_info(without_commits=True)
for project in fork_repos]
| language = request.get_form_var('language', '')
languages = request.get_form_var('languages', '[]')
try:
languages = json.loads(languages)
except ValueError:
raise api_errors.NotJSONError
self.repo.language = language
self.repo.languages = languages
return {} | conditional_block |
__init__.py | # -*- coding: utf-8 -*-
import json
from vilya.libs import api_errors
from vilya.models.project import CodeDoubanProject
from vilya.views.api.utils import RestAPIUI, api_require_login, jsonize
from vilya.views.api.repos.product import ProductUI
from vilya.views.api.repos.summary import SummaryUI
from vilya.views.api.repos.intern import InternUI
from vilya.views.api.repos.default_branch import DefaultBranchUI
from vilya.views.api.repos.commits import CommitsUI
from vilya.views.api.repos.post_receive import PostReceiveUI
from vilya.views.api.repos.git2svn import GIT2SVNUI
from vilya.views.api.repos.svn2git import SVN2GITUI
from vilya.views.api.repos.pulls import PullsUI
from vilya.views.api.repos.issues import IssuesUI
from vilya.views.api.repos.contents import ContentsUI
from vilya.views.api.repos.push import PushUI
from vilya.views.api.repos.watchers import WatchersUI
_q_exports = []
def _q_lookup(request, name):
|
def _q_access(request):
request.response.set_content_type('application/json; charset=utf-8')
class RepositoryUI(object):
_q_exports = [
'lang_stats', 'forks', 'pulls', 'summary',
'committers', 'name', 'owner', 'product',
'intern_banned', 'default_branch', 'commits',
'post_receive', 'svn2git', 'git2svn', 'issues',
'contents', 'can_push', 'watchers'
]
def __init__(self, name):
self.name = name
self.repo = CodeDoubanProject.get_by_name(self.name)
def __call__(self, request):
return self._q_index(request)
@jsonize
def _q_index(self, request):
if not self.repo:
raise api_errors.NotFoundError("repo")
return {}
def _q_access(self, request):
self.method = request.method
def _q_lookup(self, request, part):
name = "%s/%s" % (self.name, part)
if not CodeDoubanProject.exists(name):
raise api_errors.NotFoundError("repo")
return RepositoryUI(name)
@jsonize
def lang_stats(self, request):
if not self.repo:
raise api_errors.NotFoundError
if self.method == 'POST':
language = request.get_form_var('language', '')
languages = request.get_form_var('languages', '[]')
try:
languages = json.loads(languages)
except ValueError:
raise api_errors.NotJSONError
self.repo.language = language
self.repo.languages = languages
return {}
else:
return dict(language=self.repo.language,
languages=self.repo.languages)
@property
def forks(self):
return ForksUI(self.repo)
@property
def pulls(self):
return PullsUI(self.repo)
@property
def product(self):
return ProductUI(self.repo)
@property
def summary(self):
return SummaryUI(self.repo)
@property
def intern_banned(self):
return InternUI(self.repo)
@property
def can_push(self):
return PushUI(self.repo)
@property
def default_branch(self):
return DefaultBranchUI(self.repo)
@property
def commits(self):
return CommitsUI(self.repo)
@property
def post_receive(self):
return PostReceiveUI(self.repo)
@property
def svn2git(self):
return SVN2GITUI(self.repo)
@property
def git2svn(self):
return GIT2SVNUI(self.repo)
@property
def issues(self):
return IssuesUI(self.repo)
@property
def contents(self):
return ContentsUI(self.repo)
@property
def watchers(self):
return WatchersUI(self.repo)
class ForksUI(RestAPIUI):
_q_exports = []
_q_methods = ['get', 'post']
def __init__(self, repo):
self.repo = repo
@api_require_login
def post(self, request):
repo = self.repo
fork_repo = repo.new_fork(self.user.name)
if not fork_repo:
# FIXME: repository exists
return []
return fork_repo.as_dict()
def get(self, request):
fork_repos = self.repo.get_forked_projects()
return [project.get_info(without_commits=True)
for project in fork_repos]
| return RepositoryUI(name) | identifier_body |
models.py | from collections import defaultdict
from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.db import models
from django.db.models import Q, Count, Sum, Max, Min
from django.db.models.signals import pre_save
from django.dispatch import receiver
from hashlib import sha1
from proso.dict import group_keys_by_value_lists
from proso.django.cache import cache_pure
from proso.list import flatten
from proso_common.models import get_config
from proso_models.models import Answer, Item, get_environment, get_mastery_trashold, get_predictive_model, get_time_for_knowledge_overview
from time import time as time_lib
import json
import logging
LOGGER = logging.getLogger('django.request')
class TagManager(models.Manager):
def prepare_related(self):
return self.prefetch_related('concepts')
class Tag(models.Model):
"""
Arbitrary tag for concepts.
"""
type = models.CharField(max_length=50)
value = models.CharField(max_length=200)
lang = models.CharField(max_length=2)
type_name = models.CharField(max_length=100)
value_name = models.CharField(max_length=100)
objects = TagManager()
class Meta:
unique_together = ("type", "value", "lang")
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "tag",
"type": self.type,
"value": self.value,
"lang": self.lang,
"type_name": self.type_name,
"value_name": self.value_name,
}
if not nested:
data["concepts"] = [concept.to_json(nested=True) for concept in self.concepts.all()]
return data
def __str__(self):
return "{}: {}".format(self.type, self.value)
class ConceptManager(models.Manager):
def prepare_related(self):
return self.prefetch_related('tags', 'actions')
@cache_pure()
def get_concept_item_mapping(self, concepts=None, lang=None):
"""
Get mapping of concepts to items belonging to concept.
Args:
concepts (list of Concept): Defaults to None meaning all concepts
lang (str): language of concepts, if None use language of concepts
Returns:
dict: concept (int) -> list of item ids (int)
"""
if concepts is None:
concepts = self.filter(active=True)
if lang is not None:
concepts = concepts.filter(lang=lang)
if lang is None:
languages = set([concept.lang for concept in concepts])
if len(languages) > 1:
raise Exception('Concepts has multiple languages')
lang = list(languages)[0]
item_lists = Item.objects.filter_all_reachable_leaves_many([json.loads(concept.query)
for concept in concepts], lang)
return dict(zip([c.pk for c in concepts], item_lists))
@cache_pure()
def get_item_concept_mapping(self, lang):
""" Get mapping of items_ids to concepts containing these items
Args:
lang (str): language of concepts
Returns:
dict: item (int) -> set of concepts (int)
"""
concepts = self.filter(active=True, lang=lang)
return group_keys_by_value_lists(Concept.objects.get_concept_item_mapping(concepts, lang))
def get_concepts_to_recalculate(self, users, lang, concepts=None):
"""
Get concept which have same changes and have to be recalculated
Args:
users (list of users or user): users whose user stats we are interesting in
lang (str): language of used concepts
concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
Defaults to None meaning all concepts.
Returns:
dict: user -> set of concepts (int) - in case of list of users
list of stats (str) - in case of one user
"""
only_one_user = False
if not isinstance(users, list):
only_one_user = True
users = [users]
mapping = self.get_item_concept_mapping(lang)
current_user_stats = defaultdict(lambda: {})
user_stats_qs = UserStat.objects.filter(user__in=users, stat="answer_count") # we need only one type
if concepts is not None:
user_stats_qs = user_stats_qs.filter(concept__in=concepts)
for user_stat in user_stats_qs:
current_user_stats[user_stat.user_id][user_stat.concept_id] = user_stat
concepts_to_recalculate = defaultdict(lambda: set())
for user, item, time in Answer.objects.filter(user__in=users)\
.values_list("user_id", "item").annotate(Max("time")):
if item not in mapping:
# in reality this should by corner case, so it is efficient to not filter Answers
continue # item is not in concept
time_expiration_lower_bound = get_config('proso_models', 'knowledge_overview.time_shift_hours', default=4)
time_expiration_factor = get_config('proso_models', 'knowledge_overview.time_expiration_factor', default=2)
for concept in mapping[item]:
if user in current_user_stats and concept in current_user_stats[user] \
and current_user_stats[user][concept].time > time:
if not self.has_time_expired(current_user_stats[user][concept].time, time, time_expiration_lower_bound, time_expiration_factor):
continue # cache is up to date
if concepts is None or concept in ([c.pk for c in concepts] if type(concepts[0]) == Concept else Concept):
concepts_to_recalculate[user].add(concept)
if only_one_user:
return concepts_to_recalculate[users[0]]
return concepts_to_recalculate
def has_time_expired(self, cache_time, last_answer_time, lower_bound, expiration_factor):
cache_timedelta = cache_time - last_answer_time
if cache_timedelta > timedelta(days=365):
return False
if cache_timedelta < timedelta(hours=lower_bound):
return False
return cache_timedelta < expiration_factor * (datetime.now() - cache_time)
class Concept(models.Model):
|
class ActionManager(models.Manager):
def prepare_related(self):
return self.select_related('concept')
class Action(models.Model):
"""
Actions which can be done with concept
"""
concept = models.ForeignKey(Concept, related_name="actions")
identifier = models.CharField(max_length=50)
name = models.CharField(max_length=200)
url = models.CharField(max_length=200)
objects = ActionManager()
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "action",
"identifier": self.identifier,
"name": self.name,
"url": self.url,
}
if not nested:
data["concept"] = self.concept.to_json(nested=True)
return data
def __str__(self):
return "{} - {}".format(self.concept, self.name)
class UserStatManager(models.Manager):
def prepare_related(self):
return self.select_related('concept')
def recalculate_concepts(self, concepts, lang=None):
"""
Recalculated given concepts for given users
Args:
concepts (dict): user id (int -> set of concepts to recalculate)
lang(Optional[str]): language used to get items in all concepts (cached).
Defaults to None, in that case are get items only in used concepts
"""
if len(concepts) == 0:
return
if lang is None:
items = Concept.objects.get_concept_item_mapping(concepts=Concept.objects.filter(pk__in=set(flatten(concepts.values()))))
else:
items = Concept.objects.get_concept_item_mapping(lang=lang)
environment = get_environment()
mastery_threshold = get_mastery_trashold()
for user, concepts in concepts.items():
all_items = list(set(flatten([items[c] for c in concepts])))
answer_counts = environment.number_of_answers_more_items(all_items, user)
correct_answer_counts = environment.number_of_correct_answers_more_items(all_items, user)
predictions = dict(list(zip(all_items, get_predictive_model().
predict_more_items(environment, user, all_items, time=get_time_for_knowledge_overview()))))
new_user_stats = []
stats_to_delete_condition = Q()
for concept in concepts:
answer_aggregates = Answer.objects.filter(user=user, item__in=items[concept]).aggregate(
time_spent=Sum("response_time"),
sessions=Count("session", True),
time_first=Min("time"),
time_last=Max("time"),
)
stats = {
"answer_count": sum(answer_counts[i] for i in items[concept]),
"correct_answer_count": sum(correct_answer_counts[i] for i in items[concept]),
"item_count": len(items[concept]),
"practiced_items_count": sum([answer_counts[i] > 0 for i in items[concept]]),
"mastered_items_count": sum([predictions[i] >= mastery_threshold for i in items[concept]]),
"prediction": sum([predictions[i] for i in items[concept]]) / len(items[concept]),
"time_spent": answer_aggregates["time_spent"] / 1000,
"session_count": answer_aggregates["sessions"],
"time_first": answer_aggregates["time_first"].timestamp(),
"time_last": answer_aggregates["time_last"].timestamp(),
}
stats_to_delete_condition |= Q(user=user, concept=concept)
for stat_name, value in stats.items():
new_user_stats.append(UserStat(user_id=user, concept_id=concept, stat=stat_name, value=value))
self.filter(stats_to_delete_condition).delete()
self.bulk_create(new_user_stats)
def get_user_stats(self, users, lang=None, concepts=None, since=None, recalculate=True):
"""
Finds all UserStats of given concepts and users.
Recompute UserStats if necessary
Args:
users (Optional[list of users] or [user]): list of primary keys of user or users
Defaults to None meaning all users.
lang (string): use only concepts witch the lang. Defaults to None meaning all languages.
concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
Defaults to None meaning all concepts.
Returns:
dict: user_id -> dict (concept_identifier - > (stat_name -> value)) -- for more users
dict: concept_identifier - > (stat_name -> value) -- for one user
"""
only_one_user = False
if not isinstance(users, list):
users = [users]
only_one_user = True
if recalculate:
if lang is None:
raise ValueError('Recalculation without lang is not supported.')
time_start = time_lib()
concepts_to_recalculate = Concept.objects.get_concepts_to_recalculate(users, lang, concepts)
LOGGER.debug("user_stats - getting identifying concepts to recalculate: %ss", (time_lib() - time_start))
time_start = time_lib()
self.recalculate_concepts(concepts_to_recalculate, lang)
LOGGER.debug("user_stats - recalculating concepts: %ss", (time_lib() - time_start))
qs = self.prepare_related().filter(user__in=users, concept__active=True)
if concepts is not None:
qs = qs.filter(concept__in=concepts)
if lang is not None:
qs = qs.filter(concept__lang=lang)
if since is not None:
qs = qs.filter(time__gte=since)
data = defaultdict(lambda: defaultdict(lambda: {}))
for user_stat in qs:
data[user_stat.user_id][user_stat.concept.identifier][user_stat.stat] = user_stat.value
if only_one_user:
return data[users[0].pk if type(users[0]) == User else users[0]]
return data
class UserStat(models.Model):
"""
Represent arbitrary statistic (float) of the user on concept
"""
concept = models.ForeignKey(Concept)
user = models.ForeignKey(User, related_name="stats")
stat = models.CharField(max_length=50)
time = models.DateTimeField(auto_now=True)
value = models.FloatField()
objects = UserStatManager()
class Meta:
unique_together = ("concept", "user", "stat")
def __str__(self):
return "{} - {}: {}".format(self.stat, self.concept, self.value)
@receiver(pre_save, sender=Concept)
def generate_identifier(sender, instance, **kwargs):
"""
Generate and set identifier of concept before saving object to DB
Args:
sender (class): should be Concept
instance (Concept): saving concept
"""
identifier = Concept.create_identifier(instance.query)
qs = Concept.objects.filter(identifier=identifier, lang=instance.lang)
if instance.pk:
qs = qs.exclude(pk=instance.pk)
if qs.count() > 0:
raise ValueError("Concept identifier conflict")
instance.identifier = identifier
| """
Model concepts for open learner model
"""
identifier = models.CharField(max_length=20, blank=True)
query = models.TextField()
name = models.CharField(max_length=200)
lang = models.CharField(max_length=2)
tags = models.ManyToManyField(Tag, related_name="concepts", blank=True)
active = models.BooleanField(default=True)
objects = ConceptManager()
class Meta:
unique_together = ("identifier", "lang")
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "concept",
"identifier": self.identifier,
"name": self.name,
"query": self.query,
"lang": self.lang,
}
if not nested:
data["tags"] = [tag.to_json(nested=True) for tag in self.tags.all()]
data["actions"] = [action.to_json(nested=True) for action in self.actions.all()]
return data
@staticmethod
def create_identifier(query):
"""
Crete identifier of concept
Args:
query (str): query defining concept
Returns:
str: identifier of length 20
"""
return sha1(query.encode()).hexdigest()[:20]
def __str__(self):
return self.name
def __repr__(self):
return "{}-{}".format(self.identifier, self.lang) | identifier_body |
models.py | from collections import defaultdict
from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.db import models
from django.db.models import Q, Count, Sum, Max, Min
from django.db.models.signals import pre_save
from django.dispatch import receiver
from hashlib import sha1
from proso.dict import group_keys_by_value_lists
from proso.django.cache import cache_pure
from proso.list import flatten
from proso_common.models import get_config
from proso_models.models import Answer, Item, get_environment, get_mastery_trashold, get_predictive_model, get_time_for_knowledge_overview
from time import time as time_lib
import json
import logging
LOGGER = logging.getLogger('django.request')
class TagManager(models.Manager):
def prepare_related(self):
return self.prefetch_related('concepts')
class Tag(models.Model):
"""
Arbitrary tag for concepts.
"""
type = models.CharField(max_length=50)
value = models.CharField(max_length=200)
lang = models.CharField(max_length=2)
type_name = models.CharField(max_length=100)
value_name = models.CharField(max_length=100)
objects = TagManager()
class Meta:
unique_together = ("type", "value", "lang")
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "tag",
"type": self.type,
"value": self.value,
"lang": self.lang,
"type_name": self.type_name,
"value_name": self.value_name,
}
if not nested:
data["concepts"] = [concept.to_json(nested=True) for concept in self.concepts.all()]
return data
def __str__(self):
return "{}: {}".format(self.type, self.value)
class ConceptManager(models.Manager):
def prepare_related(self):
return self.prefetch_related('tags', 'actions')
@cache_pure()
def get_concept_item_mapping(self, concepts=None, lang=None):
"""
Get mapping of concepts to items belonging to concept.
Args:
concepts (list of Concept): Defaults to None meaning all concepts
lang (str): language of concepts, if None use language of concepts
Returns:
dict: concept (int) -> list of item ids (int)
"""
if concepts is None:
concepts = self.filter(active=True)
if lang is not None:
concepts = concepts.filter(lang=lang)
if lang is None:
languages = set([concept.lang for concept in concepts])
if len(languages) > 1:
raise Exception('Concepts has multiple languages')
lang = list(languages)[0]
item_lists = Item.objects.filter_all_reachable_leaves_many([json.loads(concept.query)
for concept in concepts], lang)
return dict(zip([c.pk for c in concepts], item_lists))
@cache_pure()
def get_item_concept_mapping(self, lang):
""" Get mapping of items_ids to concepts containing these items
Args:
lang (str): language of concepts
Returns:
dict: item (int) -> set of concepts (int)
"""
concepts = self.filter(active=True, lang=lang)
return group_keys_by_value_lists(Concept.objects.get_concept_item_mapping(concepts, lang))
def get_concepts_to_recalculate(self, users, lang, concepts=None):
"""
Get concept which have same changes and have to be recalculated
Args:
users (list of users or user): users whose user stats we are interesting in
lang (str): language of used concepts
concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
Defaults to None meaning all concepts.
Returns:
dict: user -> set of concepts (int) - in case of list of users
list of stats (str) - in case of one user
"""
only_one_user = False
if not isinstance(users, list):
only_one_user = True
users = [users]
mapping = self.get_item_concept_mapping(lang)
current_user_stats = defaultdict(lambda: {})
user_stats_qs = UserStat.objects.filter(user__in=users, stat="answer_count") # we need only one type
if concepts is not None:
user_stats_qs = user_stats_qs.filter(concept__in=concepts)
for user_stat in user_stats_qs:
current_user_stats[user_stat.user_id][user_stat.concept_id] = user_stat
concepts_to_recalculate = defaultdict(lambda: set())
for user, item, time in Answer.objects.filter(user__in=users)\
.values_list("user_id", "item").annotate(Max("time")):
if item not in mapping:
# in reality this should by corner case, so it is efficient to not filter Answers
continue # item is not in concept
time_expiration_lower_bound = get_config('proso_models', 'knowledge_overview.time_shift_hours', default=4)
time_expiration_factor = get_config('proso_models', 'knowledge_overview.time_expiration_factor', default=2)
for concept in mapping[item]:
if user in current_user_stats and concept in current_user_stats[user] \
and current_user_stats[user][concept].time > time:
if not self.has_time_expired(current_user_stats[user][concept].time, time, time_expiration_lower_bound, time_expiration_factor):
continue # cache is up to date
if concepts is None or concept in ([c.pk for c in concepts] if type(concepts[0]) == Concept else Concept):
concepts_to_recalculate[user].add(concept)
if only_one_user:
return concepts_to_recalculate[users[0]]
return concepts_to_recalculate
def has_time_expired(self, cache_time, last_answer_time, lower_bound, expiration_factor):
cache_timedelta = cache_time - last_answer_time
if cache_timedelta > timedelta(days=365):
return False
if cache_timedelta < timedelta(hours=lower_bound):
return False
return cache_timedelta < expiration_factor * (datetime.now() - cache_time)
class Concept(models.Model):
"""
Model concepts for open learner model
"""
identifier = models.CharField(max_length=20, blank=True)
query = models.TextField()
name = models.CharField(max_length=200)
lang = models.CharField(max_length=2)
tags = models.ManyToManyField(Tag, related_name="concepts", blank=True)
active = models.BooleanField(default=True)
objects = ConceptManager()
class Meta:
unique_together = ("identifier", "lang")
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "concept",
"identifier": self.identifier,
"name": self.name,
"query": self.query,
"lang": self.lang,
}
if not nested:
data["tags"] = [tag.to_json(nested=True) for tag in self.tags.all()]
data["actions"] = [action.to_json(nested=True) for action in self.actions.all()]
return data
@staticmethod
def create_identifier(query):
"""
Crete identifier of concept
Args:
query (str): query defining concept
Returns:
str: identifier of length 20
"""
return sha1(query.encode()).hexdigest()[:20]
def __str__(self):
return self.name
def __repr__(self):
return "{}-{}".format(self.identifier, self.lang)
class ActionManager(models.Manager):
def prepare_related(self):
return self.select_related('concept')
class Action(models.Model):
"""
Actions which can be done with concept
"""
concept = models.ForeignKey(Concept, related_name="actions")
identifier = models.CharField(max_length=50)
name = models.CharField(max_length=200)
url = models.CharField(max_length=200)
objects = ActionManager()
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "action",
"identifier": self.identifier,
"name": self.name,
"url": self.url,
}
if not nested:
data["concept"] = self.concept.to_json(nested=True)
return data
def __str__(self):
return "{} - {}".format(self.concept, self.name)
class UserStatManager(models.Manager):
def prepare_related(self):
return self.select_related('concept')
def recalculate_concepts(self, concepts, lang=None):
"""
Recalculated given concepts for given users
Args:
concepts (dict): user id (int -> set of concepts to recalculate)
lang(Optional[str]): language used to get items in all concepts (cached).
Defaults to None, in that case are get items only in used concepts
"""
if len(concepts) == 0:
return
if lang is None:
|
else:
items = Concept.objects.get_concept_item_mapping(lang=lang)
environment = get_environment()
mastery_threshold = get_mastery_trashold()
for user, concepts in concepts.items():
all_items = list(set(flatten([items[c] for c in concepts])))
answer_counts = environment.number_of_answers_more_items(all_items, user)
correct_answer_counts = environment.number_of_correct_answers_more_items(all_items, user)
predictions = dict(list(zip(all_items, get_predictive_model().
predict_more_items(environment, user, all_items, time=get_time_for_knowledge_overview()))))
new_user_stats = []
stats_to_delete_condition = Q()
for concept in concepts:
answer_aggregates = Answer.objects.filter(user=user, item__in=items[concept]).aggregate(
time_spent=Sum("response_time"),
sessions=Count("session", True),
time_first=Min("time"),
time_last=Max("time"),
)
stats = {
"answer_count": sum(answer_counts[i] for i in items[concept]),
"correct_answer_count": sum(correct_answer_counts[i] for i in items[concept]),
"item_count": len(items[concept]),
"practiced_items_count": sum([answer_counts[i] > 0 for i in items[concept]]),
"mastered_items_count": sum([predictions[i] >= mastery_threshold for i in items[concept]]),
"prediction": sum([predictions[i] for i in items[concept]]) / len(items[concept]),
"time_spent": answer_aggregates["time_spent"] / 1000,
"session_count": answer_aggregates["sessions"],
"time_first": answer_aggregates["time_first"].timestamp(),
"time_last": answer_aggregates["time_last"].timestamp(),
}
stats_to_delete_condition |= Q(user=user, concept=concept)
for stat_name, value in stats.items():
new_user_stats.append(UserStat(user_id=user, concept_id=concept, stat=stat_name, value=value))
self.filter(stats_to_delete_condition).delete()
self.bulk_create(new_user_stats)
def get_user_stats(self, users, lang=None, concepts=None, since=None, recalculate=True):
"""
Finds all UserStats of given concepts and users.
Recompute UserStats if necessary
Args:
users (Optional[list of users] or [user]): list of primary keys of user or users
Defaults to None meaning all users.
lang (string): use only concepts witch the lang. Defaults to None meaning all languages.
concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
Defaults to None meaning all concepts.
Returns:
dict: user_id -> dict (concept_identifier - > (stat_name -> value)) -- for more users
dict: concept_identifier - > (stat_name -> value) -- for one user
"""
only_one_user = False
if not isinstance(users, list):
users = [users]
only_one_user = True
if recalculate:
if lang is None:
raise ValueError('Recalculation without lang is not supported.')
time_start = time_lib()
concepts_to_recalculate = Concept.objects.get_concepts_to_recalculate(users, lang, concepts)
LOGGER.debug("user_stats - getting identifying concepts to recalculate: %ss", (time_lib() - time_start))
time_start = time_lib()
self.recalculate_concepts(concepts_to_recalculate, lang)
LOGGER.debug("user_stats - recalculating concepts: %ss", (time_lib() - time_start))
qs = self.prepare_related().filter(user__in=users, concept__active=True)
if concepts is not None:
qs = qs.filter(concept__in=concepts)
if lang is not None:
qs = qs.filter(concept__lang=lang)
if since is not None:
qs = qs.filter(time__gte=since)
data = defaultdict(lambda: defaultdict(lambda: {}))
for user_stat in qs:
data[user_stat.user_id][user_stat.concept.identifier][user_stat.stat] = user_stat.value
if only_one_user:
return data[users[0].pk if type(users[0]) == User else users[0]]
return data
class UserStat(models.Model):
"""
Represent arbitrary statistic (float) of the user on concept
"""
concept = models.ForeignKey(Concept)
user = models.ForeignKey(User, related_name="stats")
stat = models.CharField(max_length=50)
time = models.DateTimeField(auto_now=True)
value = models.FloatField()
objects = UserStatManager()
class Meta:
unique_together = ("concept", "user", "stat")
def __str__(self):
return "{} - {}: {}".format(self.stat, self.concept, self.value)
@receiver(pre_save, sender=Concept)
def generate_identifier(sender, instance, **kwargs):
"""
Generate and set identifier of concept before saving object to DB
Args:
sender (class): should be Concept
instance (Concept): saving concept
"""
identifier = Concept.create_identifier(instance.query)
qs = Concept.objects.filter(identifier=identifier, lang=instance.lang)
if instance.pk:
qs = qs.exclude(pk=instance.pk)
if qs.count() > 0:
raise ValueError("Concept identifier conflict")
instance.identifier = identifier
| items = Concept.objects.get_concept_item_mapping(concepts=Concept.objects.filter(pk__in=set(flatten(concepts.values())))) | conditional_block |
models.py | from collections import defaultdict
from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.db import models
from django.db.models import Q, Count, Sum, Max, Min
from django.db.models.signals import pre_save
from django.dispatch import receiver
from hashlib import sha1
from proso.dict import group_keys_by_value_lists
from proso.django.cache import cache_pure
from proso.list import flatten
from proso_common.models import get_config
from proso_models.models import Answer, Item, get_environment, get_mastery_trashold, get_predictive_model, get_time_for_knowledge_overview
from time import time as time_lib
import json
import logging
LOGGER = logging.getLogger('django.request')
class TagManager(models.Manager):
def prepare_related(self):
return self.prefetch_related('concepts')
class Tag(models.Model):
"""
Arbitrary tag for concepts.
"""
type = models.CharField(max_length=50)
value = models.CharField(max_length=200)
lang = models.CharField(max_length=2)
type_name = models.CharField(max_length=100)
value_name = models.CharField(max_length=100)
objects = TagManager()
class Meta:
unique_together = ("type", "value", "lang")
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "tag",
"type": self.type,
"value": self.value,
"lang": self.lang,
"type_name": self.type_name,
"value_name": self.value_name,
}
if not nested:
data["concepts"] = [concept.to_json(nested=True) for concept in self.concepts.all()]
return data
def __str__(self):
return "{}: {}".format(self.type, self.value)
class | (models.Manager):
def prepare_related(self):
return self.prefetch_related('tags', 'actions')
@cache_pure()
def get_concept_item_mapping(self, concepts=None, lang=None):
"""
Get mapping of concepts to items belonging to concept.
Args:
concepts (list of Concept): Defaults to None meaning all concepts
lang (str): language of concepts, if None use language of concepts
Returns:
dict: concept (int) -> list of item ids (int)
"""
if concepts is None:
concepts = self.filter(active=True)
if lang is not None:
concepts = concepts.filter(lang=lang)
if lang is None:
languages = set([concept.lang for concept in concepts])
if len(languages) > 1:
raise Exception('Concepts has multiple languages')
lang = list(languages)[0]
item_lists = Item.objects.filter_all_reachable_leaves_many([json.loads(concept.query)
for concept in concepts], lang)
return dict(zip([c.pk for c in concepts], item_lists))
@cache_pure()
def get_item_concept_mapping(self, lang):
""" Get mapping of items_ids to concepts containing these items
Args:
lang (str): language of concepts
Returns:
dict: item (int) -> set of concepts (int)
"""
concepts = self.filter(active=True, lang=lang)
return group_keys_by_value_lists(Concept.objects.get_concept_item_mapping(concepts, lang))
def get_concepts_to_recalculate(self, users, lang, concepts=None):
"""
Get concept which have same changes and have to be recalculated
Args:
users (list of users or user): users whose user stats we are interesting in
lang (str): language of used concepts
concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
Defaults to None meaning all concepts.
Returns:
dict: user -> set of concepts (int) - in case of list of users
list of stats (str) - in case of one user
"""
only_one_user = False
if not isinstance(users, list):
only_one_user = True
users = [users]
mapping = self.get_item_concept_mapping(lang)
current_user_stats = defaultdict(lambda: {})
user_stats_qs = UserStat.objects.filter(user__in=users, stat="answer_count") # we need only one type
if concepts is not None:
user_stats_qs = user_stats_qs.filter(concept__in=concepts)
for user_stat in user_stats_qs:
current_user_stats[user_stat.user_id][user_stat.concept_id] = user_stat
concepts_to_recalculate = defaultdict(lambda: set())
for user, item, time in Answer.objects.filter(user__in=users)\
.values_list("user_id", "item").annotate(Max("time")):
if item not in mapping:
# in reality this should by corner case, so it is efficient to not filter Answers
continue # item is not in concept
time_expiration_lower_bound = get_config('proso_models', 'knowledge_overview.time_shift_hours', default=4)
time_expiration_factor = get_config('proso_models', 'knowledge_overview.time_expiration_factor', default=2)
for concept in mapping[item]:
if user in current_user_stats and concept in current_user_stats[user] \
and current_user_stats[user][concept].time > time:
if not self.has_time_expired(current_user_stats[user][concept].time, time, time_expiration_lower_bound, time_expiration_factor):
continue # cache is up to date
if concepts is None or concept in ([c.pk for c in concepts] if type(concepts[0]) == Concept else Concept):
concepts_to_recalculate[user].add(concept)
if only_one_user:
return concepts_to_recalculate[users[0]]
return concepts_to_recalculate
def has_time_expired(self, cache_time, last_answer_time, lower_bound, expiration_factor):
cache_timedelta = cache_time - last_answer_time
if cache_timedelta > timedelta(days=365):
return False
if cache_timedelta < timedelta(hours=lower_bound):
return False
return cache_timedelta < expiration_factor * (datetime.now() - cache_time)
class Concept(models.Model):
"""
Model concepts for open learner model
"""
identifier = models.CharField(max_length=20, blank=True)
query = models.TextField()
name = models.CharField(max_length=200)
lang = models.CharField(max_length=2)
tags = models.ManyToManyField(Tag, related_name="concepts", blank=True)
active = models.BooleanField(default=True)
objects = ConceptManager()
class Meta:
unique_together = ("identifier", "lang")
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "concept",
"identifier": self.identifier,
"name": self.name,
"query": self.query,
"lang": self.lang,
}
if not nested:
data["tags"] = [tag.to_json(nested=True) for tag in self.tags.all()]
data["actions"] = [action.to_json(nested=True) for action in self.actions.all()]
return data
@staticmethod
def create_identifier(query):
"""
Crete identifier of concept
Args:
query (str): query defining concept
Returns:
str: identifier of length 20
"""
return sha1(query.encode()).hexdigest()[:20]
def __str__(self):
return self.name
def __repr__(self):
return "{}-{}".format(self.identifier, self.lang)
class ActionManager(models.Manager):
def prepare_related(self):
return self.select_related('concept')
class Action(models.Model):
"""
Actions which can be done with concept
"""
concept = models.ForeignKey(Concept, related_name="actions")
identifier = models.CharField(max_length=50)
name = models.CharField(max_length=200)
url = models.CharField(max_length=200)
objects = ActionManager()
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "action",
"identifier": self.identifier,
"name": self.name,
"url": self.url,
}
if not nested:
data["concept"] = self.concept.to_json(nested=True)
return data
def __str__(self):
return "{} - {}".format(self.concept, self.name)
class UserStatManager(models.Manager):
def prepare_related(self):
return self.select_related('concept')
def recalculate_concepts(self, concepts, lang=None):
"""
Recalculated given concepts for given users
Args:
concepts (dict): user id (int -> set of concepts to recalculate)
lang(Optional[str]): language used to get items in all concepts (cached).
Defaults to None, in that case are get items only in used concepts
"""
if len(concepts) == 0:
return
if lang is None:
items = Concept.objects.get_concept_item_mapping(concepts=Concept.objects.filter(pk__in=set(flatten(concepts.values()))))
else:
items = Concept.objects.get_concept_item_mapping(lang=lang)
environment = get_environment()
mastery_threshold = get_mastery_trashold()
for user, concepts in concepts.items():
all_items = list(set(flatten([items[c] for c in concepts])))
answer_counts = environment.number_of_answers_more_items(all_items, user)
correct_answer_counts = environment.number_of_correct_answers_more_items(all_items, user)
predictions = dict(list(zip(all_items, get_predictive_model().
predict_more_items(environment, user, all_items, time=get_time_for_knowledge_overview()))))
new_user_stats = []
stats_to_delete_condition = Q()
for concept in concepts:
answer_aggregates = Answer.objects.filter(user=user, item__in=items[concept]).aggregate(
time_spent=Sum("response_time"),
sessions=Count("session", True),
time_first=Min("time"),
time_last=Max("time"),
)
stats = {
"answer_count": sum(answer_counts[i] for i in items[concept]),
"correct_answer_count": sum(correct_answer_counts[i] for i in items[concept]),
"item_count": len(items[concept]),
"practiced_items_count": sum([answer_counts[i] > 0 for i in items[concept]]),
"mastered_items_count": sum([predictions[i] >= mastery_threshold for i in items[concept]]),
"prediction": sum([predictions[i] for i in items[concept]]) / len(items[concept]),
"time_spent": answer_aggregates["time_spent"] / 1000,
"session_count": answer_aggregates["sessions"],
"time_first": answer_aggregates["time_first"].timestamp(),
"time_last": answer_aggregates["time_last"].timestamp(),
}
stats_to_delete_condition |= Q(user=user, concept=concept)
for stat_name, value in stats.items():
new_user_stats.append(UserStat(user_id=user, concept_id=concept, stat=stat_name, value=value))
self.filter(stats_to_delete_condition).delete()
self.bulk_create(new_user_stats)
def get_user_stats(self, users, lang=None, concepts=None, since=None, recalculate=True):
"""
Finds all UserStats of given concepts and users.
Recompute UserStats if necessary
Args:
users (Optional[list of users] or [user]): list of primary keys of user or users
Defaults to None meaning all users.
lang (string): use only concepts witch the lang. Defaults to None meaning all languages.
concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
Defaults to None meaning all concepts.
Returns:
dict: user_id -> dict (concept_identifier - > (stat_name -> value)) -- for more users
dict: concept_identifier - > (stat_name -> value) -- for one user
"""
only_one_user = False
if not isinstance(users, list):
users = [users]
only_one_user = True
if recalculate:
if lang is None:
raise ValueError('Recalculation without lang is not supported.')
time_start = time_lib()
concepts_to_recalculate = Concept.objects.get_concepts_to_recalculate(users, lang, concepts)
LOGGER.debug("user_stats - getting identifying concepts to recalculate: %ss", (time_lib() - time_start))
time_start = time_lib()
self.recalculate_concepts(concepts_to_recalculate, lang)
LOGGER.debug("user_stats - recalculating concepts: %ss", (time_lib() - time_start))
qs = self.prepare_related().filter(user__in=users, concept__active=True)
if concepts is not None:
qs = qs.filter(concept__in=concepts)
if lang is not None:
qs = qs.filter(concept__lang=lang)
if since is not None:
qs = qs.filter(time__gte=since)
data = defaultdict(lambda: defaultdict(lambda: {}))
for user_stat in qs:
data[user_stat.user_id][user_stat.concept.identifier][user_stat.stat] = user_stat.value
if only_one_user:
return data[users[0].pk if type(users[0]) == User else users[0]]
return data
class UserStat(models.Model):
"""
Represent arbitrary statistic (float) of the user on concept
"""
concept = models.ForeignKey(Concept)
user = models.ForeignKey(User, related_name="stats")
stat = models.CharField(max_length=50)
time = models.DateTimeField(auto_now=True)
value = models.FloatField()
objects = UserStatManager()
class Meta:
unique_together = ("concept", "user", "stat")
def __str__(self):
return "{} - {}: {}".format(self.stat, self.concept, self.value)
@receiver(pre_save, sender=Concept)
def generate_identifier(sender, instance, **kwargs):
"""
Generate and set identifier of concept before saving object to DB
Args:
sender (class): should be Concept
instance (Concept): saving concept
"""
identifier = Concept.create_identifier(instance.query)
qs = Concept.objects.filter(identifier=identifier, lang=instance.lang)
if instance.pk:
qs = qs.exclude(pk=instance.pk)
if qs.count() > 0:
raise ValueError("Concept identifier conflict")
instance.identifier = identifier
| ConceptManager | identifier_name |
models.py | from collections import defaultdict
from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.db import models
from django.db.models import Q, Count, Sum, Max, Min
from django.db.models.signals import pre_save
from django.dispatch import receiver
from hashlib import sha1
from proso.dict import group_keys_by_value_lists
from proso.django.cache import cache_pure
from proso.list import flatten
from proso_common.models import get_config
from proso_models.models import Answer, Item, get_environment, get_mastery_trashold, get_predictive_model, get_time_for_knowledge_overview
from time import time as time_lib
import json
import logging
LOGGER = logging.getLogger('django.request')
class TagManager(models.Manager):
def prepare_related(self):
return self.prefetch_related('concepts')
class Tag(models.Model):
"""
Arbitrary tag for concepts.
"""
type = models.CharField(max_length=50)
value = models.CharField(max_length=200)
lang = models.CharField(max_length=2)
type_name = models.CharField(max_length=100)
value_name = models.CharField(max_length=100)
objects = TagManager()
class Meta:
unique_together = ("type", "value", "lang")
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "tag",
"type": self.type,
"value": self.value,
"lang": self.lang,
"type_name": self.type_name,
"value_name": self.value_name,
}
if not nested:
data["concepts"] = [concept.to_json(nested=True) for concept in self.concepts.all()]
return data
def __str__(self):
return "{}: {}".format(self.type, self.value)
class ConceptManager(models.Manager):
def prepare_related(self):
return self.prefetch_related('tags', 'actions')
@cache_pure()
def get_concept_item_mapping(self, concepts=None, lang=None):
"""
Get mapping of concepts to items belonging to concept.
Args:
concepts (list of Concept): Defaults to None meaning all concepts
lang (str): language of concepts, if None use language of concepts
Returns:
dict: concept (int) -> list of item ids (int)
"""
if concepts is None:
concepts = self.filter(active=True)
if lang is not None:
concepts = concepts.filter(lang=lang)
if lang is None:
languages = set([concept.lang for concept in concepts])
if len(languages) > 1:
raise Exception('Concepts has multiple languages')
lang = list(languages)[0]
item_lists = Item.objects.filter_all_reachable_leaves_many([json.loads(concept.query)
for concept in concepts], lang)
return dict(zip([c.pk for c in concepts], item_lists))
@cache_pure()
def get_item_concept_mapping(self, lang):
""" Get mapping of items_ids to concepts containing these items
Args:
lang (str): language of concepts
Returns:
dict: item (int) -> set of concepts (int)
"""
concepts = self.filter(active=True, lang=lang)
return group_keys_by_value_lists(Concept.objects.get_concept_item_mapping(concepts, lang))
def get_concepts_to_recalculate(self, users, lang, concepts=None):
"""
Get concept which have same changes and have to be recalculated
Args:
users (list of users or user): users whose user stats we are interesting in
lang (str): language of used concepts
concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
Defaults to None meaning all concepts.
Returns:
dict: user -> set of concepts (int) - in case of list of users
list of stats (str) - in case of one user
"""
only_one_user = False
if not isinstance(users, list):
only_one_user = True
users = [users]
mapping = self.get_item_concept_mapping(lang)
current_user_stats = defaultdict(lambda: {})
user_stats_qs = UserStat.objects.filter(user__in=users, stat="answer_count") # we need only one type
if concepts is not None:
user_stats_qs = user_stats_qs.filter(concept__in=concepts)
for user_stat in user_stats_qs:
current_user_stats[user_stat.user_id][user_stat.concept_id] = user_stat
concepts_to_recalculate = defaultdict(lambda: set())
for user, item, time in Answer.objects.filter(user__in=users)\
.values_list("user_id", "item").annotate(Max("time")):
if item not in mapping:
# in reality this should by corner case, so it is efficient to not filter Answers
continue # item is not in concept
time_expiration_lower_bound = get_config('proso_models', 'knowledge_overview.time_shift_hours', default=4)
time_expiration_factor = get_config('proso_models', 'knowledge_overview.time_expiration_factor', default=2)
for concept in mapping[item]:
if user in current_user_stats and concept in current_user_stats[user] \
and current_user_stats[user][concept].time > time:
if not self.has_time_expired(current_user_stats[user][concept].time, time, time_expiration_lower_bound, time_expiration_factor):
continue # cache is up to date
if concepts is None or concept in ([c.pk for c in concepts] if type(concepts[0]) == Concept else Concept):
concepts_to_recalculate[user].add(concept)
if only_one_user:
return concepts_to_recalculate[users[0]]
return concepts_to_recalculate
def has_time_expired(self, cache_time, last_answer_time, lower_bound, expiration_factor):
cache_timedelta = cache_time - last_answer_time
if cache_timedelta > timedelta(days=365):
return False
if cache_timedelta < timedelta(hours=lower_bound):
return False
return cache_timedelta < expiration_factor * (datetime.now() - cache_time)
class Concept(models.Model):
"""
Model concepts for open learner model
"""
identifier = models.CharField(max_length=20, blank=True)
query = models.TextField()
name = models.CharField(max_length=200)
lang = models.CharField(max_length=2)
tags = models.ManyToManyField(Tag, related_name="concepts", blank=True)
active = models.BooleanField(default=True)
objects = ConceptManager()
class Meta:
unique_together = ("identifier", "lang")
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "concept",
"identifier": self.identifier,
"name": self.name,
"query": self.query,
"lang": self.lang,
}
if not nested:
data["tags"] = [tag.to_json(nested=True) for tag in self.tags.all()]
data["actions"] = [action.to_json(nested=True) for action in self.actions.all()]
return data
@staticmethod
def create_identifier(query):
"""
Crete identifier of concept
Args:
query (str): query defining concept
Returns:
str: identifier of length 20
"""
return sha1(query.encode()).hexdigest()[:20]
def __str__(self):
return self.name
def __repr__(self):
return "{}-{}".format(self.identifier, self.lang)
class ActionManager(models.Manager):
def prepare_related(self):
return self.select_related('concept')
class Action(models.Model):
"""
Actions which can be done with concept
"""
concept = models.ForeignKey(Concept, related_name="actions")
identifier = models.CharField(max_length=50)
name = models.CharField(max_length=200)
url = models.CharField(max_length=200)
objects = ActionManager()
def to_json(self, nested=False):
data = {
"id": self.pk,
"object_type": "action",
"identifier": self.identifier,
"name": self.name,
"url": self.url,
}
if not nested:
data["concept"] = self.concept.to_json(nested=True)
return data
def __str__(self):
return "{} - {}".format(self.concept, self.name)
class UserStatManager(models.Manager):
def prepare_related(self):
return self.select_related('concept')
def recalculate_concepts(self, concepts, lang=None):
"""
Recalculated given concepts for given users
Args:
concepts (dict): user id (int -> set of concepts to recalculate)
lang(Optional[str]): language used to get items in all concepts (cached).
Defaults to None, in that case are get items only in used concepts
"""
if len(concepts) == 0:
return
if lang is None:
items = Concept.objects.get_concept_item_mapping(concepts=Concept.objects.filter(pk__in=set(flatten(concepts.values()))))
else:
items = Concept.objects.get_concept_item_mapping(lang=lang)
environment = get_environment()
mastery_threshold = get_mastery_trashold()
for user, concepts in concepts.items():
all_items = list(set(flatten([items[c] for c in concepts])))
answer_counts = environment.number_of_answers_more_items(all_items, user)
correct_answer_counts = environment.number_of_correct_answers_more_items(all_items, user)
predictions = dict(list(zip(all_items, get_predictive_model().
predict_more_items(environment, user, all_items, time=get_time_for_knowledge_overview()))))
new_user_stats = []
stats_to_delete_condition = Q()
for concept in concepts:
answer_aggregates = Answer.objects.filter(user=user, item__in=items[concept]).aggregate(
time_spent=Sum("response_time"),
sessions=Count("session", True),
time_first=Min("time"),
time_last=Max("time"),
)
stats = {
"answer_count": sum(answer_counts[i] for i in items[concept]),
"correct_answer_count": sum(correct_answer_counts[i] for i in items[concept]),
"item_count": len(items[concept]),
"practiced_items_count": sum([answer_counts[i] > 0 for i in items[concept]]),
"mastered_items_count": sum([predictions[i] >= mastery_threshold for i in items[concept]]),
"prediction": sum([predictions[i] for i in items[concept]]) / len(items[concept]),
"time_spent": answer_aggregates["time_spent"] / 1000,
"session_count": answer_aggregates["sessions"],
"time_first": answer_aggregates["time_first"].timestamp(),
"time_last": answer_aggregates["time_last"].timestamp(),
}
stats_to_delete_condition |= Q(user=user, concept=concept)
for stat_name, value in stats.items():
new_user_stats.append(UserStat(user_id=user, concept_id=concept, stat=stat_name, value=value))
self.filter(stats_to_delete_condition).delete()
self.bulk_create(new_user_stats)
def get_user_stats(self, users, lang=None, concepts=None, since=None, recalculate=True):
"""
Finds all UserStats of given concepts and users.
Recompute UserStats if necessary
Args:
users (Optional[list of users] or [user]): list of primary keys of user or users
Defaults to None meaning all users.
lang (string): use only concepts witch the lang. Defaults to None meaning all languages.
concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
Defaults to None meaning all concepts.
Returns:
dict: user_id -> dict (concept_identifier - > (stat_name -> value)) -- for more users
dict: concept_identifier - > (stat_name -> value) -- for one user
"""
only_one_user = False
if not isinstance(users, list):
users = [users]
only_one_user = True
if recalculate:
if lang is None:
raise ValueError('Recalculation without lang is not supported.')
time_start = time_lib()
concepts_to_recalculate = Concept.objects.get_concepts_to_recalculate(users, lang, concepts)
LOGGER.debug("user_stats - getting identifying concepts to recalculate: %ss", (time_lib() - time_start))
time_start = time_lib()
self.recalculate_concepts(concepts_to_recalculate, lang)
LOGGER.debug("user_stats - recalculating concepts: %ss", (time_lib() - time_start))
qs = self.prepare_related().filter(user__in=users, concept__active=True)
if concepts is not None:
qs = qs.filter(concept__in=concepts)
if lang is not None:
qs = qs.filter(concept__lang=lang)
if since is not None:
qs = qs.filter(time__gte=since)
data = defaultdict(lambda: defaultdict(lambda: {}))
for user_stat in qs:
data[user_stat.user_id][user_stat.concept.identifier][user_stat.stat] = user_stat.value
if only_one_user:
return data[users[0].pk if type(users[0]) == User else users[0]]
return data
| concept = models.ForeignKey(Concept)
user = models.ForeignKey(User, related_name="stats")
stat = models.CharField(max_length=50)
time = models.DateTimeField(auto_now=True)
value = models.FloatField()
objects = UserStatManager()
class Meta:
unique_together = ("concept", "user", "stat")
def __str__(self):
return "{} - {}: {}".format(self.stat, self.concept, self.value)
@receiver(pre_save, sender=Concept)
def generate_identifier(sender, instance, **kwargs):
"""
Generate and set identifier of concept before saving object to DB
Args:
sender (class): should be Concept
instance (Concept): saving concept
"""
identifier = Concept.create_identifier(instance.query)
qs = Concept.objects.filter(identifier=identifier, lang=instance.lang)
if instance.pk:
qs = qs.exclude(pk=instance.pk)
if qs.count() > 0:
raise ValueError("Concept identifier conflict")
instance.identifier = identifier | class UserStat(models.Model):
"""
Represent arbitrary statistic (float) of the user on concept
""" | random_line_split |
logout.controller.tests.js | /*
This file is part of MyConference.
MyConference is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License version 3
as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should find a copy of the GNU Affero General Public License in the
root directory along with this program.
If not, see http://www.gnu.org/licenses/agpl-3.0.html.
*/
describe('LogoutCtrl: ', function () {
var scope,
backendServiceMock,
translateMock,
ionicPopupMock,
stateMock,
logoutDfd,
translateDfd,
alertDfd,
ctrl;
beforeEach(module('starter'));
beforeEach(module(function ($provide, $urlRouterProvider) {
$provide.value('$ionicTemplateCache', function () {
});
$urlRouterProvider.deferIntercept();
}));
beforeEach(inject(function ($rootScope, $controller, $q, $httpBackend) {
$httpBackend.whenGET('locales/de.json').respond(200, '');
scope = $rootScope.$new();
logoutDfd = $q.defer();
translateDfd = $q.defer();
alertDfd = $q.defer();
backendServiceMock = {
logout: jasmine.createSpy('logout spy').and.returnValue(logoutDfd.promise)
} | ctrl = $controller('LogoutCtrl', {
$scope: scope,
backendService: backendServiceMock,
$state: stateMock,
$ionicPopup: ionicPopupMock,
$translate: translateMock,
})
}))
// tests
it('should call backendService.logout function', function () {
expect(backendServiceMock.logout).toHaveBeenCalled();
})
describe('after logout, ', function () {
beforeEach(function () {
//simulate logout
logoutDfd.resolve([]);
scope.$digest();
})
it('should call $translate service with "Done!" message', function () {
expect(translateMock).toHaveBeenCalledWith('Done!');
})
it('should call alert about successful logout', function () {
translateDfd.resolve('Done!');
scope.$digest();
expect(ionicPopupMock.alert).toHaveBeenCalledWith({
title: 'Done!',
template: "{{'You are logged out' | translate}}"
})
})
it('should redirect to the start page', function () {
translateDfd.resolve('Done!');
scope.$digest();
alertDfd.resolve();
scope.$digest();
expect(stateMock.go).toHaveBeenCalledWith('app.start');
})
})
}) | translateMock = jasmine.createSpy('$translate spy').and.returnValue(translateDfd.promise)
ionicPopupMock = jasmine.createSpyObj('$ionicPopup spy', ['alert'])
ionicPopupMock.alert.and.returnValue(alertDfd.promise)
stateMock = jasmine.createSpyObj('$state spy', ['go']) | random_line_split |
feature-gate-unboxed-closures.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Test;
impl FnOnce<(u32, u32)> for Test {
type Output = u32;
extern "rust-call" fn call_once(self, (a, b): (u32, u32)) -> u32 |
//~^^^ ERROR rust-call ABI is subject to change (see issue #29625)
}
fn main() {
assert_eq!(Test(1u32, 2u32), 3u32);
}
| {
a + b
} | identifier_body |
feature-gate-unboxed-closures.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Test;
impl FnOnce<(u32, u32)> for Test {
type Output = u32;
extern "rust-call" fn call_once(self, (a, b): (u32, u32)) -> u32 {
a + b
}
//~^^^ ERROR rust-call ABI is subject to change (see issue #29625)
}
fn | () {
assert_eq!(Test(1u32, 2u32), 3u32);
}
| main | identifier_name |
feature-gate-unboxed-closures.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Test;
impl FnOnce<(u32, u32)> for Test {
type Output = u32;
extern "rust-call" fn call_once(self, (a, b): (u32, u32)) -> u32 {
a + b
}
//~^^^ ERROR rust-call ABI is subject to change (see issue #29625)
}
fn main() {
assert_eq!(Test(1u32, 2u32), 3u32);
} | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | random_line_split |
fastqc.py | ##
# Copyright 2009-2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
General EasyBuild support for installing FastQC
@author: Emilio Palumbo
"""
import os
import stat
from easybuild.tools.filetools import run_cmd
from easybuild.easyblocks.generic.packedbinary import PackedBinary
class EB_FastQC(PackedBinary):
"""Easyblock implementing the build step for FastQC,
this is just give execution permission to the `fastqc` binary before installing.
"""
def install_step(self):
| """Overwrite install_step from PackedBinary"""
os.chdir(self.builddir)
os.chmod("FastQC/fastqc", os.stat("FastQC/fastqc").st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
super(EB_FastQC, self).install_step() | identifier_body | |
fastqc.py | ##
# Copyright 2009-2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
General EasyBuild support for installing FastQC
@author: Emilio Palumbo
"""
import os
import stat
from easybuild.tools.filetools import run_cmd
from easybuild.easyblocks.generic.packedbinary import PackedBinary
| this is just give execution permission to the `fastqc` binary before installing.
"""
def install_step(self):
"""Overwrite install_step from PackedBinary"""
os.chdir(self.builddir)
os.chmod("FastQC/fastqc", os.stat("FastQC/fastqc").st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
super(EB_FastQC, self).install_step() |
class EB_FastQC(PackedBinary):
"""Easyblock implementing the build step for FastQC, | random_line_split |
fastqc.py | ##
# Copyright 2009-2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
General EasyBuild support for installing FastQC
@author: Emilio Palumbo
"""
import os
import stat
from easybuild.tools.filetools import run_cmd
from easybuild.easyblocks.generic.packedbinary import PackedBinary
class | (PackedBinary):
"""Easyblock implementing the build step for FastQC,
this is just give execution permission to the `fastqc` binary before installing.
"""
def install_step(self):
"""Overwrite install_step from PackedBinary"""
os.chdir(self.builddir)
os.chmod("FastQC/fastqc", os.stat("FastQC/fastqc").st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
super(EB_FastQC, self).install_step()
| EB_FastQC | identifier_name |
commands_clear_tests.py | import os
from unittest import TestCase
from click.testing import CliRunner
from regparser.commands.clear import clear
from regparser.index import entry
class CommandsClearTests(TestCase):
def setUp(self):
self.cli = CliRunner()
def test_no_errors_when_clear(self):
"""Should raise no errors when no cached files are present"""
with self.cli.isolated_filesystem():
self.cli.invoke(clear)
def test_deletes_fr_cache(self):
with self.cli.isolated_filesystem():
open('fr_cache.sqlite', 'w').close()
self.assertTrue(os.path.exists('fr_cache.sqlite'))
# flag must be present
self.cli.invoke(clear) |
def test_deletes_index(self):
with self.cli.isolated_filesystem():
entry.Entry('aaa', 'bbb').write('ccc')
entry.Entry('bbb', 'ccc').write('ddd')
self.assertEqual(1, len(entry.Entry("aaa")))
self.assertEqual(1, len(entry.Entry("bbb")))
self.cli.invoke(clear)
self.assertEqual(0, len(entry.Entry("aaa")))
self.assertEqual(0, len(entry.Entry("bbb")))
def test_deletes_can_be_focused(self):
"""If params are provided to delete certain directories, only those
directories should get removed"""
with self.cli.isolated_filesystem():
to_delete = ['delroot/aaa/bbb', 'delroot/aaa/ccc',
'root/delsub/aaa', 'root/delsub/bbb']
to_keep = ['root/othersub/aaa', 'root/aaa',
'top-level-file', 'other-root/aaa']
for path in to_delete + to_keep:
entry.Entry(*path.split('/')).write('')
self.cli.invoke(clear, ['delroot', 'root/delsub'])
self.assertItemsEqual(['top-level-file', 'root', 'other-root'],
list(entry.Entry()))
self.assertItemsEqual(['othersub', 'aaa'],
list(entry.Entry('root')))
self.assertItemsEqual(['aaa'],
list(entry.Entry('other-root'))) | self.assertTrue(os.path.exists('fr_cache.sqlite'))
self.cli.invoke(clear, ['--http-cache'])
self.assertFalse(os.path.exists('fr_cache.sqlite')) | random_line_split |
commands_clear_tests.py | import os
from unittest import TestCase
from click.testing import CliRunner
from regparser.commands.clear import clear
from regparser.index import entry
class CommandsClearTests(TestCase):
def setUp(self):
self.cli = CliRunner()
def test_no_errors_when_clear(self):
"""Should raise no errors when no cached files are present"""
with self.cli.isolated_filesystem():
self.cli.invoke(clear)
def test_deletes_fr_cache(self):
|
def test_deletes_index(self):
with self.cli.isolated_filesystem():
entry.Entry('aaa', 'bbb').write('ccc')
entry.Entry('bbb', 'ccc').write('ddd')
self.assertEqual(1, len(entry.Entry("aaa")))
self.assertEqual(1, len(entry.Entry("bbb")))
self.cli.invoke(clear)
self.assertEqual(0, len(entry.Entry("aaa")))
self.assertEqual(0, len(entry.Entry("bbb")))
def test_deletes_can_be_focused(self):
"""If params are provided to delete certain directories, only those
directories should get removed"""
with self.cli.isolated_filesystem():
to_delete = ['delroot/aaa/bbb', 'delroot/aaa/ccc',
'root/delsub/aaa', 'root/delsub/bbb']
to_keep = ['root/othersub/aaa', 'root/aaa',
'top-level-file', 'other-root/aaa']
for path in to_delete + to_keep:
entry.Entry(*path.split('/')).write('')
self.cli.invoke(clear, ['delroot', 'root/delsub'])
self.assertItemsEqual(['top-level-file', 'root', 'other-root'],
list(entry.Entry()))
self.assertItemsEqual(['othersub', 'aaa'],
list(entry.Entry('root')))
self.assertItemsEqual(['aaa'],
list(entry.Entry('other-root')))
| with self.cli.isolated_filesystem():
open('fr_cache.sqlite', 'w').close()
self.assertTrue(os.path.exists('fr_cache.sqlite'))
# flag must be present
self.cli.invoke(clear)
self.assertTrue(os.path.exists('fr_cache.sqlite'))
self.cli.invoke(clear, ['--http-cache'])
self.assertFalse(os.path.exists('fr_cache.sqlite')) | identifier_body |
commands_clear_tests.py | import os
from unittest import TestCase
from click.testing import CliRunner
from regparser.commands.clear import clear
from regparser.index import entry
class CommandsClearTests(TestCase):
def setUp(self):
self.cli = CliRunner()
def test_no_errors_when_clear(self):
"""Should raise no errors when no cached files are present"""
with self.cli.isolated_filesystem():
self.cli.invoke(clear)
def test_deletes_fr_cache(self):
with self.cli.isolated_filesystem():
open('fr_cache.sqlite', 'w').close()
self.assertTrue(os.path.exists('fr_cache.sqlite'))
# flag must be present
self.cli.invoke(clear)
self.assertTrue(os.path.exists('fr_cache.sqlite'))
self.cli.invoke(clear, ['--http-cache'])
self.assertFalse(os.path.exists('fr_cache.sqlite'))
def test_deletes_index(self):
with self.cli.isolated_filesystem():
entry.Entry('aaa', 'bbb').write('ccc')
entry.Entry('bbb', 'ccc').write('ddd')
self.assertEqual(1, len(entry.Entry("aaa")))
self.assertEqual(1, len(entry.Entry("bbb")))
self.cli.invoke(clear)
self.assertEqual(0, len(entry.Entry("aaa")))
self.assertEqual(0, len(entry.Entry("bbb")))
def test_deletes_can_be_focused(self):
"""If params are provided to delete certain directories, only those
directories should get removed"""
with self.cli.isolated_filesystem():
to_delete = ['delroot/aaa/bbb', 'delroot/aaa/ccc',
'root/delsub/aaa', 'root/delsub/bbb']
to_keep = ['root/othersub/aaa', 'root/aaa',
'top-level-file', 'other-root/aaa']
for path in to_delete + to_keep:
|
self.cli.invoke(clear, ['delroot', 'root/delsub'])
self.assertItemsEqual(['top-level-file', 'root', 'other-root'],
list(entry.Entry()))
self.assertItemsEqual(['othersub', 'aaa'],
list(entry.Entry('root')))
self.assertItemsEqual(['aaa'],
list(entry.Entry('other-root')))
| entry.Entry(*path.split('/')).write('') | conditional_block |
commands_clear_tests.py | import os
from unittest import TestCase
from click.testing import CliRunner
from regparser.commands.clear import clear
from regparser.index import entry
class | (TestCase):
def setUp(self):
self.cli = CliRunner()
def test_no_errors_when_clear(self):
"""Should raise no errors when no cached files are present"""
with self.cli.isolated_filesystem():
self.cli.invoke(clear)
def test_deletes_fr_cache(self):
with self.cli.isolated_filesystem():
open('fr_cache.sqlite', 'w').close()
self.assertTrue(os.path.exists('fr_cache.sqlite'))
# flag must be present
self.cli.invoke(clear)
self.assertTrue(os.path.exists('fr_cache.sqlite'))
self.cli.invoke(clear, ['--http-cache'])
self.assertFalse(os.path.exists('fr_cache.sqlite'))
def test_deletes_index(self):
with self.cli.isolated_filesystem():
entry.Entry('aaa', 'bbb').write('ccc')
entry.Entry('bbb', 'ccc').write('ddd')
self.assertEqual(1, len(entry.Entry("aaa")))
self.assertEqual(1, len(entry.Entry("bbb")))
self.cli.invoke(clear)
self.assertEqual(0, len(entry.Entry("aaa")))
self.assertEqual(0, len(entry.Entry("bbb")))
def test_deletes_can_be_focused(self):
"""If params are provided to delete certain directories, only those
directories should get removed"""
with self.cli.isolated_filesystem():
to_delete = ['delroot/aaa/bbb', 'delroot/aaa/ccc',
'root/delsub/aaa', 'root/delsub/bbb']
to_keep = ['root/othersub/aaa', 'root/aaa',
'top-level-file', 'other-root/aaa']
for path in to_delete + to_keep:
entry.Entry(*path.split('/')).write('')
self.cli.invoke(clear, ['delroot', 'root/delsub'])
self.assertItemsEqual(['top-level-file', 'root', 'other-root'],
list(entry.Entry()))
self.assertItemsEqual(['othersub', 'aaa'],
list(entry.Entry('root')))
self.assertItemsEqual(['aaa'],
list(entry.Entry('other-root')))
| CommandsClearTests | identifier_name |
tests.rs | use super::rocket;
use diesel::prelude::*;
use rocket::http::Status;
use rocket::local::Client;
use schema::preferences::dsl::*;
#[test]
/// Tests connection to the database through the pool managed by rocket.
fn | () {
let conn = rocket().1;
let expected_keys = vec!["session-key"];
let actual_keys: Vec<String> = preferences.select(key).load(&*conn).unwrap();
assert_eq!(expected_keys, actual_keys);
}
#[test]
/// Compares the session hash in the database to the one returned by /session
fn session_hash() {
let (rocket, conn, _) = rocket();
let client = Client::new(rocket).expect("valid rocket instance");
let mut response = client.get("/oration/session").dispatch();
let session_key: Vec<String> = preferences
.filter(key.eq("session-key"))
.select(value)
.load(&*conn)
.unwrap();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.body_string().unwrap(), session_key[0]);
}
| db_connection | identifier_name |
tests.rs | use super::rocket;
use diesel::prelude::*;
use rocket::http::Status;
use rocket::local::Client;
use schema::preferences::dsl::*;
#[test]
/// Tests connection to the database through the pool managed by rocket.
fn db_connection() {
let conn = rocket().1;
| assert_eq!(expected_keys, actual_keys);
}
#[test]
/// Compares the session hash in the database to the one returned by /session
fn session_hash() {
let (rocket, conn, _) = rocket();
let client = Client::new(rocket).expect("valid rocket instance");
let mut response = client.get("/oration/session").dispatch();
let session_key: Vec<String> = preferences
.filter(key.eq("session-key"))
.select(value)
.load(&*conn)
.unwrap();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.body_string().unwrap(), session_key[0]);
} | let expected_keys = vec!["session-key"];
let actual_keys: Vec<String> = preferences.select(key).load(&*conn).unwrap();
| random_line_split |
tests.rs | use super::rocket;
use diesel::prelude::*;
use rocket::http::Status;
use rocket::local::Client;
use schema::preferences::dsl::*;
#[test]
/// Tests connection to the database through the pool managed by rocket.
fn db_connection() |
#[test]
/// Compares the session hash in the database to the one returned by /session
fn session_hash() {
let (rocket, conn, _) = rocket();
let client = Client::new(rocket).expect("valid rocket instance");
let mut response = client.get("/oration/session").dispatch();
let session_key: Vec<String> = preferences
.filter(key.eq("session-key"))
.select(value)
.load(&*conn)
.unwrap();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.body_string().unwrap(), session_key[0]);
}
| {
let conn = rocket().1;
let expected_keys = vec!["session-key"];
let actual_keys: Vec<String> = preferences.select(key).load(&*conn).unwrap();
assert_eq!(expected_keys, actual_keys);
} | identifier_body |
PowerInputRounded.js | import * as React from 'react';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon( | <path d="M3 10c0 .55.45 1 1 1h17c.55 0 1-.45 1-1s-.45-1-1-1H4c-.55 0-1 .45-1 1zm1 5h3c.55 0 1-.45 1-1s-.45-1-1-1H4c-.55 0-1 .45-1 1s.45 1 1 1zm7 0h3c.55 0 1-.45 1-1s-.45-1-1-1h-3c-.55 0-1 .45-1 1s.45 1 1 1zm7 0h3c.55 0 1-.45 1-1s-.45-1-1-1h-3c-.55 0-1 .45-1 1s.45 1 1 1z" />
, 'PowerInputRounded'); | random_line_split | |
CoverageDescription.ts | /*
* This file is part of rasdaman community.
*
* Rasdaman community is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Rasdaman community is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with rasdaman community. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015 Peter Baumann /
rasdaman GmbH.
*
* For more information please see <http://www.rasdaman.org>
* or contact Peter Baumann via <baumann@rasdaman.com>.
*/
///<reference path="../gml/_gml.ts"/>
///<reference path="../gmlcov/_gmlcov.ts"/>
///<reference path="ServiceParameters.ts"/>
module wcs {
export class CoverageDescription extends gml.AbstractFeature {
public CoverageId:string;
public CoverageFunction:gml.CoverageFunction;
public Metadata:gmlcov.Metadata[];
public DomainSet:gml.DomainSet;
public RangeType:gmlcov.RangeType;
public ServiceParameters:wcs.ServiceParameters;
public constructor(source:rasdaman.common.ISerializedObject) |
}
} | {
super(source);
rasdaman.common.ArgumentValidator.isNotNull(source, "source");
this.CoverageId = source.getChildAsSerializedObject("wcs:CoverageId").getValueAsString();
if (source.doesElementExist("gml:coverageFunction")) {
this.CoverageFunction = new gml.CoverageFunction(source.getChildAsSerializedObject("gml:coverageFunction"));
}
this.Metadata = [];
source.getChildrenAsSerializedObjects("gmlcov:metadata").forEach(o=> {
this.Metadata.push(new gmlcov.Metadata(o));
});
this.DomainSet = new gml.DomainSet(source.getChildAsSerializedObject("gml:domainSet"));
this.RangeType = new gmlcov.RangeType(source.getChildAsSerializedObject("gmlcov:rangeType"));
this.ServiceParameters = new wcs.ServiceParameters(source.getChildAsSerializedObject("wcs:ServiceParameters"));
} | identifier_body |
CoverageDescription.ts | /*
* This file is part of rasdaman community.
*
* Rasdaman community is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Rasdaman community is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with rasdaman community. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015 Peter Baumann /
rasdaman GmbH.
*
* For more information please see <http://www.rasdaman.org>
* or contact Peter Baumann via <baumann@rasdaman.com>.
*/
///<reference path="../gml/_gml.ts"/>
///<reference path="../gmlcov/_gmlcov.ts"/>
///<reference path="ServiceParameters.ts"/>
module wcs {
export class CoverageDescription extends gml.AbstractFeature {
public CoverageId:string;
public CoverageFunction:gml.CoverageFunction;
public Metadata:gmlcov.Metadata[];
public DomainSet:gml.DomainSet;
public RangeType:gmlcov.RangeType;
public ServiceParameters:wcs.ServiceParameters;
public | (source:rasdaman.common.ISerializedObject) {
super(source);
rasdaman.common.ArgumentValidator.isNotNull(source, "source");
this.CoverageId = source.getChildAsSerializedObject("wcs:CoverageId").getValueAsString();
if (source.doesElementExist("gml:coverageFunction")) {
this.CoverageFunction = new gml.CoverageFunction(source.getChildAsSerializedObject("gml:coverageFunction"));
}
this.Metadata = [];
source.getChildrenAsSerializedObjects("gmlcov:metadata").forEach(o=> {
this.Metadata.push(new gmlcov.Metadata(o));
});
this.DomainSet = new gml.DomainSet(source.getChildAsSerializedObject("gml:domainSet"));
this.RangeType = new gmlcov.RangeType(source.getChildAsSerializedObject("gmlcov:rangeType"));
this.ServiceParameters = new wcs.ServiceParameters(source.getChildAsSerializedObject("wcs:ServiceParameters"));
}
}
} | constructor | identifier_name |
CoverageDescription.ts | /*
* This file is part of rasdaman community.
*
* Rasdaman community is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Rasdaman community is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with rasdaman community. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015 Peter Baumann /
rasdaman GmbH.
*
* For more information please see <http://www.rasdaman.org>
* or contact Peter Baumann via <baumann@rasdaman.com>.
*/
///<reference path="../gml/_gml.ts"/>
///<reference path="../gmlcov/_gmlcov.ts"/>
///<reference path="ServiceParameters.ts"/>
module wcs {
export class CoverageDescription extends gml.AbstractFeature {
public CoverageId:string;
public CoverageFunction:gml.CoverageFunction;
public Metadata:gmlcov.Metadata[];
public DomainSet:gml.DomainSet;
public RangeType:gmlcov.RangeType;
public ServiceParameters:wcs.ServiceParameters;
public constructor(source:rasdaman.common.ISerializedObject) {
super(source);
rasdaman.common.ArgumentValidator.isNotNull(source, "source");
this.CoverageId = source.getChildAsSerializedObject("wcs:CoverageId").getValueAsString();
if (source.doesElementExist("gml:coverageFunction")) {
this.CoverageFunction = new gml.CoverageFunction(source.getChildAsSerializedObject("gml:coverageFunction"));
}
this.Metadata = []; | source.getChildrenAsSerializedObjects("gmlcov:metadata").forEach(o=> {
this.Metadata.push(new gmlcov.Metadata(o));
});
this.DomainSet = new gml.DomainSet(source.getChildAsSerializedObject("gml:domainSet"));
this.RangeType = new gmlcov.RangeType(source.getChildAsSerializedObject("gmlcov:rangeType"));
this.ServiceParameters = new wcs.ServiceParameters(source.getChildAsSerializedObject("wcs:ServiceParameters"));
}
}
} | random_line_split | |
CoverageDescription.ts | /*
* This file is part of rasdaman community.
*
* Rasdaman community is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Rasdaman community is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with rasdaman community. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015 Peter Baumann /
rasdaman GmbH.
*
* For more information please see <http://www.rasdaman.org>
* or contact Peter Baumann via <baumann@rasdaman.com>.
*/
///<reference path="../gml/_gml.ts"/>
///<reference path="../gmlcov/_gmlcov.ts"/>
///<reference path="ServiceParameters.ts"/>
module wcs {
export class CoverageDescription extends gml.AbstractFeature {
public CoverageId:string;
public CoverageFunction:gml.CoverageFunction;
public Metadata:gmlcov.Metadata[];
public DomainSet:gml.DomainSet;
public RangeType:gmlcov.RangeType;
public ServiceParameters:wcs.ServiceParameters;
public constructor(source:rasdaman.common.ISerializedObject) {
super(source);
rasdaman.common.ArgumentValidator.isNotNull(source, "source");
this.CoverageId = source.getChildAsSerializedObject("wcs:CoverageId").getValueAsString();
if (source.doesElementExist("gml:coverageFunction")) |
this.Metadata = [];
source.getChildrenAsSerializedObjects("gmlcov:metadata").forEach(o=> {
this.Metadata.push(new gmlcov.Metadata(o));
});
this.DomainSet = new gml.DomainSet(source.getChildAsSerializedObject("gml:domainSet"));
this.RangeType = new gmlcov.RangeType(source.getChildAsSerializedObject("gmlcov:rangeType"));
this.ServiceParameters = new wcs.ServiceParameters(source.getChildAsSerializedObject("wcs:ServiceParameters"));
}
}
} | {
this.CoverageFunction = new gml.CoverageFunction(source.getChildAsSerializedObject("gml:coverageFunction"));
} | conditional_block |
plugin.js | /**
* @license Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
* For licensing, see LICENSE.md or http://ckeditor.com/license
*/
/**
* @fileOverview The "colorbutton" plugin that makes it possible to assign
* text and background colors to editor contents.
*
*/
CKEDITOR.plugins.add( 'colorbutton', {
requires: 'panelbutton,floatpanel',
// jscs:disable maximumLineLength
lang: 'af,ar,az,bg,bn,bs,ca,cs,cy,da,de,de-ch,el,en,en-au,en-ca,en-gb,eo,es,es-mx,et,eu,fa,fi,fo,fr,fr-ca,gl,gu,he,hi,hr,hu,id,is,it,ja,ka,km,ko,ku,lt,lv,mk,mn,ms,nb,nl,no,oc,pl,pt,pt-br,ro,ru,si,sk,sl,sq,sr,sr-latn,sv,th,tr,tt,ug,uk,vi,zh,zh-cn', // %REMOVE_LINE_CORE%
// jscs:enable maximumLineLength
icons: 'bgcolor,textcolor', // %REMOVE_LINE_CORE%
hidpi: true, // %REMOVE_LINE_CORE%
init: function( editor ) {
var config = editor.config,
lang = editor.lang.colorbutton;
if ( !CKEDITOR.env.hc ) {
addButton( 'TextColor', 'fore', lang.textColorTitle, 10, {
contentTransformations: [
[
{
element: 'font',
check: 'span{color}',
left: function( element ) {
return !!element.attributes.color;
},
right: function( element ) {
element.name = 'span';
element.attributes.color && ( element.styles.color = element.attributes.color );
delete element.attributes.color;
}
}
]
]
} );
var bgOptions = {},
normalizeBackground = editor.config.colorButton_normalizeBackground;
if ( normalizeBackground === undefined || normalizeBackground ) {
// If background contains only color, then we want to convert it into background-color so that it's
// correctly picked by colorbutton plugin.
bgOptions.contentTransformations = [
[
{
// Transform span that specify background with color only to background-color.
element: 'span',
left: function( element ) {
var tools = CKEDITOR.tools;
if ( element.name != 'span' || !element.styles || !element.styles.background ) {
return false;
}
var background = tools.style.parse.background( element.styles.background );
// We return true only if background specifies **only** color property, and there's only one background directive.
return background.color && tools.objectKeys( background ).length === 1;
},
right: function( element ) {
var style = new CKEDITOR.style( editor.config.colorButton_backStyle, {
color: element.styles.background
} ),
definition = style.getDefinition();
// Align the output object with the template used in config.
element.name = definition.element;
element.styles = definition.styles;
element.attributes = definition.attributes || {};
return element;
}
}
]
];
}
addButton( 'BGColor', 'back', lang.bgColorTitle, 20, bgOptions );
}
function addButton( name, type, title, order, options ) {
var style = new CKEDITOR.style( config[ 'colorButton_' + type + 'Style' ] ),
colorBoxId = CKEDITOR.tools.getNextId() + '_colorBox',
panelBlock;
options = options || {};
editor.ui.add( name, CKEDITOR.UI_PANELBUTTON, {
label: title,
title: title,
modes: { wysiwyg: 1 },
editorFocus: 0,
toolbar: 'colors,' + order,
allowedContent: style,
requiredContent: style,
contentTransformations: options.contentTransformations,
panel: {
css: CKEDITOR.skin.getPath( 'editor' ),
attributes: { role: 'listbox', 'aria-label': lang.panelTitle }
},
onBlock: function( panel, block ) {
panelBlock = block;
block.autoSize = true;
block.element.addClass( 'cke_colorblock' );
block.element.setHtml( renderColors( panel, type, colorBoxId ) );
// The block should not have scrollbars (http://dev.ckeditor.com/ticket/5933, http://dev.ckeditor.com/ticket/6056)
block.element.getDocument().getBody().setStyle( 'overflow', 'hidden' );
CKEDITOR.ui.fire( 'ready', this );
var keys = block.keys;
var rtl = editor.lang.dir == 'rtl';
keys[ rtl ? 37 : 39 ] = 'next'; // ARROW-RIGHT
keys[ 40 ] = 'next'; // ARROW-DOWN
keys[ 9 ] = 'next'; // TAB
keys[ rtl ? 39 : 37 ] = 'prev'; // ARROW-LEFT
keys[ 38 ] = 'prev'; // ARROW-UP
keys[ CKEDITOR.SHIFT + 9 ] = 'prev'; // SHIFT + TAB
keys[ 32 ] = 'click'; // SPACE
},
refresh: function() {
if ( !editor.activeFilter.check( style ) )
this.setState( CKEDITOR.TRISTATE_DISABLED );
},
// The automatic colorbox should represent the real color (http://dev.ckeditor.com/ticket/6010)
onOpen: function() {
var selection = editor.getSelection(),
block = selection && selection.getStartElement(),
path = editor.elementPath( block ),
automaticColor;
if ( !path )
return;
// Find the closest block element.
block = path.block || path.blockLimit || editor.document.getBody();
// The background color might be transparent. In that case, look up the color in the DOM tree.
do {
automaticColor = block && block.getComputedStyle( type == 'back' ? 'background-color' : 'color' ) || 'transparent';
}
while ( type == 'back' && automaticColor == 'transparent' && block && ( block = block.getParent() ) );
// The box should never be transparent.
if ( !automaticColor || automaticColor == 'transparent' )
automaticColor = '#ffffff';
if ( config.colorButton_enableAutomatic !== false ) {
this._.panel._.iframe.getFrameDocument().getById( colorBoxId ).setStyle( 'background-color', automaticColor );
}
var range = selection && selection.getRanges()[ 0 ];
if ( range ) {
var walker = new CKEDITOR.dom.walker( range ),
element = range.collapsed ? range.startContainer : walker.next(),
finalColor = '',
currentColor;
while ( element ) {
if ( element.type === CKEDITOR.NODE_TEXT ) {
element = element.getParent();
}
currentColor = normalizeColor( element.getComputedStyle( type == 'back' ? 'background-color' : 'color' ) );
finalColor = finalColor || currentColor;
if ( finalColor !== currentColor ) {
finalColor = '';
break;
}
element = walker.next();
}
selectColor( panelBlock, finalColor );
}
return automaticColor;
}
} );
}
function renderColors( panel, type, colorBoxId ) {
| function isUnstylable( ele ) {
return ( ele.getAttribute( 'contentEditable' ) == 'false' ) || ele.getAttribute( 'data-nostyle' );
}
/*
* Selects the specified color in the specified panel block.
*
* @private
* @member CKEDITOR.plugins.colorbutton
* @param {CKEDITOR.ui.panel.block} block
* @param {String} color
*/
function selectColor( block, color ) {
var items = block._.getItems();
for ( var i = 0; i < items.count(); i++ ) {
var item = items.getItem( i );
item.removeAttribute( 'aria-selected' );
if ( color && color == normalizeColor( item.getAttribute( 'data-value' ) ) ) {
item.setAttribute( 'aria-selected', true );
}
}
}
/*
* Converts a CSS color value to an easily comparable form.
*
* @private
* @member CKEDITOR.plugins.colorbutton
* @param {String} color
* @returns {String}
*/
function normalizeColor( color ) {
return CKEDITOR.tools.convertRgbToHex( color || '' ).replace( /#/, '' ).toLowerCase();
}
}
} );
/**
* Whether to enable the **More Colors** button in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_enableMore = false;
*
* @cfg {Boolean} [colorButton_enableMore=true]
* @member CKEDITOR.config
*/
/**
* Defines the colors to be displayed in the color selectors. This is a string
* containing hexadecimal notation for HTML colors, without the `'#'` prefix.
*
* **Since 3.3:** A color name may optionally be defined by prefixing the entries with
* a name and the slash character. For example, `'FontColor1/FF9900'` will be
* displayed as the color `#FF9900` in the selector, but will be output as `'FontColor1'`.
*
* **Since 4.6.2:** The default color palette has changed. It contains fewer colors in more
* pastel shades than the previous one.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // Brazil colors only.
* config.colorButton_colors = '00923E,F8C100,28166F';
*
* config.colorButton_colors = 'FontColor1/FF9900,FontColor2/0066CC,FontColor3/F00';
*
* // CKEditor color palette available before version 4.6.2.
* config.colorButton_colors =
* '000,800000,8B4513,2F4F4F,008080,000080,4B0082,696969,' +
* 'B22222,A52A2A,DAA520,006400,40E0D0,0000CD,800080,808080,' +
* 'F00,FF8C00,FFD700,008000,0FF,00F,EE82EE,A9A9A9,' +
* 'FFA07A,FFA500,FFFF00,00FF00,AFEEEE,ADD8E6,DDA0DD,D3D3D3,' +
* 'FFF0F5,FAEBD7,FFFFE0,F0FFF0,F0FFFF,F0F8FF,E6E6FA,FFF';
*
* @cfg {String} [colorButton_colors=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_colors = '1ABC9C,2ECC71,3498DB,9B59B6,4E5F70,F1C40F,' +
'16A085,27AE60,2980B9,8E44AD,2C3E50,F39C12,' +
'E67E22,E74C3C,ECF0F1,95A5A6,DDD,FFF,' +
'D35400,C0392B,BDC3C7,7F8C8D,999,000';
/**
* Stores the style definition that applies the text foreground color.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // This is actually the default value.
* config.colorButton_foreStyle = {
* element: 'span',
* styles: { color: '#(color)' }
* };
*
* @cfg [colorButton_foreStyle=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_foreStyle = {
element: 'span',
styles: { 'color': '#(color)' },
overrides: [ {
element: 'font', attributes: { 'color': null }
} ]
};
/**
* Stores the style definition that applies the text background color.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // This is actually the default value.
* config.colorButton_backStyle = {
* element: 'span',
* styles: { 'background-color': '#(color)' }
* };
*
* @cfg [colorButton_backStyle=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_backStyle = {
element: 'span',
styles: { 'background-color': '#(color)' }
};
/**
* Whether to enable the **Automatic** button in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_enableAutomatic = false;
*
* @cfg {Boolean} [colorButton_enableAutomatic=true]
* @member CKEDITOR.config
*/
/**
* Defines how many colors will be shown per row in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_colorsPerRow = 8;
*
* @since 4.6.2
* @cfg {Number} [colorButton_colorsPerRow=6]
* @member CKEDITOR.config
*/
/**
* Whether the plugin should convert `background` CSS properties with color only, to a `background-color` property,
* allowing the [Color Button](http://ckeditor.com/addon/colorbutton) plugin to edit these styles.
*
* config.colorButton_normalizeBackground = false;
*
* @since 4.6.1
* @cfg {Boolean} [colorButton_normalizeBackground=true]
* @member CKEDITOR.config
*/
| var output = [],
colors = config.colorButton_colors.split( ',' ),
colorsPerRow = config.colorButton_colorsPerRow || 6,
// Tells if we should include "More Colors..." button.
moreColorsEnabled = editor.plugins.colordialog && config.colorButton_enableMore !== false,
// aria-setsize and aria-posinset attributes are used to indicate size of options, because
// screen readers doesn't play nice with table, based layouts (http://dev.ckeditor.com/ticket/12097).
total = colors.length + ( moreColorsEnabled ? 2 : 1 );
var clickFn = CKEDITOR.tools.addFunction( function applyColorStyle( color, type ) {
function onColorDialogClose( evt ) {
this.removeListener( 'ok', onColorDialogClose );
this.removeListener( 'cancel', onColorDialogClose );
evt.name == 'ok' && applyColorStyle( this.getContentElement( 'picker', 'selectedColor' ).getValue(), type );
}
if ( color == '?' ) {
editor.openDialog( 'colordialog', function() {
this.on( 'ok', onColorDialogClose );
this.on( 'cancel', onColorDialogClose );
} );
return;
}
editor.focus();
panel.hide();
editor.fire( 'saveSnapshot' );
// Clean up any conflicting style within the range.
editor.removeStyle( new CKEDITOR.style( config[ 'colorButton_' + type + 'Style' ], { color: 'inherit' } ) );
if ( color ) {
var colorStyle = config[ 'colorButton_' + type + 'Style' ];
colorStyle.childRule = type == 'back' ?
function( element ) {
// It's better to apply background color as the innermost style. (http://dev.ckeditor.com/ticket/3599)
// Except for "unstylable elements". (http://dev.ckeditor.com/ticket/6103)
return isUnstylable( element );
} : function( element ) {
// Fore color style must be applied inside links instead of around it. (http://dev.ckeditor.com/ticket/4772,http://dev.ckeditor.com/ticket/6908)
return !( element.is( 'a' ) || element.getElementsByTag( 'a' ).count() ) || isUnstylable( element );
};
editor.applyStyle( new CKEDITOR.style( colorStyle, { color: color } ) );
}
editor.fire( 'saveSnapshot' );
} );
if ( config.colorButton_enableAutomatic !== false ) {
// Render the "Automatic" button.
output.push( '<a class="cke_colorauto" _cke_focus=1 hidefocus=true' +
' title="', lang.auto, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',null,\'', type, '\');return false;"' +
' href="javascript:void(\'', lang.auto, '\')"' +
' role="option" aria-posinset="1" aria-setsize="', total, '">' +
'<table role="presentation" cellspacing=0 cellpadding=0 width="100%">' +
'<tr>' +
'<td colspan="' + colorsPerRow + '" align="center"><span class="cke_colorbox" id="', colorBoxId, '"></span>', lang.auto, '</td>' +
'</tr>' +
'</table>' +
'</a>' );
}
output.push( '<table role="presentation" cellspacing=0 cellpadding=0 width="100%">' );
// Render the color boxes.
for ( var i = 0; i < colors.length; i++ ) {
if ( ( i % colorsPerRow ) === 0 )
output.push( '</tr><tr>' );
var parts = colors[ i ].split( '/' ),
colorName = parts[ 0 ],
colorCode = parts[ 1 ] || colorName;
// The data can be only a color code (without #) or colorName + color code
// If only a color code is provided, then the colorName is the color with the hash
// Convert the color from RGB to RRGGBB for better compatibility with IE and <font>. See http://dev.ckeditor.com/ticket/5676
if ( !parts[ 1 ] )
colorName = '#' + colorName.replace( /^(.)(.)(.)$/, '$1$1$2$2$3$3' );
var colorLabel = editor.lang.colorbutton.colors[ colorCode ] || colorCode;
output.push( '<td>' +
'<a class="cke_colorbox" _cke_focus=1 hidefocus=true' +
' title="', colorLabel, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',\'', colorName, '\',\'', type, '\'); return false;"' +
' href="javascript:void(\'', colorLabel, '\')"' +
' data-value="' + colorCode + '"' +
' role="option" aria-posinset="', ( i + 2 ), '" aria-setsize="', total, '">' +
'<span class="cke_colorbox" style="background-color:#', colorCode, '"></span>' +
'</a>' +
'</td>' );
}
// Render the "More Colors" button.
if ( moreColorsEnabled ) {
output.push( '</tr>' +
'<tr>' +
'<td colspan="' + colorsPerRow + '" align="center">' +
'<a class="cke_colormore" _cke_focus=1 hidefocus=true' +
' title="', lang.more, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',\'?\',\'', type, '\');return false;"' +
' href="javascript:void(\'', lang.more, '\')"', ' role="option" aria-posinset="', total, '" aria-setsize="', total, '">', lang.more, '</a>' +
'</td>' ); // tr is later in the code.
}
output.push( '</tr></table>' );
return output.join( '' );
}
| identifier_body |
plugin.js | /**
* @license Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
* For licensing, see LICENSE.md or http://ckeditor.com/license
*/
/**
* @fileOverview The "colorbutton" plugin that makes it possible to assign
* text and background colors to editor contents.
*
*/
CKEDITOR.plugins.add( 'colorbutton', {
requires: 'panelbutton,floatpanel',
// jscs:disable maximumLineLength
lang: 'af,ar,az,bg,bn,bs,ca,cs,cy,da,de,de-ch,el,en,en-au,en-ca,en-gb,eo,es,es-mx,et,eu,fa,fi,fo,fr,fr-ca,gl,gu,he,hi,hr,hu,id,is,it,ja,ka,km,ko,ku,lt,lv,mk,mn,ms,nb,nl,no,oc,pl,pt,pt-br,ro,ru,si,sk,sl,sq,sr,sr-latn,sv,th,tr,tt,ug,uk,vi,zh,zh-cn', // %REMOVE_LINE_CORE%
// jscs:enable maximumLineLength
icons: 'bgcolor,textcolor', // %REMOVE_LINE_CORE%
hidpi: true, // %REMOVE_LINE_CORE%
init: function( editor ) {
var config = editor.config,
lang = editor.lang.colorbutton;
if ( !CKEDITOR.env.hc ) {
addButton( 'TextColor', 'fore', lang.textColorTitle, 10, {
contentTransformations: [
[
{
element: 'font',
check: 'span{color}',
left: function( element ) {
return !!element.attributes.color;
},
right: function( element ) {
element.name = 'span';
element.attributes.color && ( element.styles.color = element.attributes.color );
delete element.attributes.color;
}
}
]
]
} );
var bgOptions = {},
normalizeBackground = editor.config.colorButton_normalizeBackground;
if ( normalizeBackground === undefined || normalizeBackground ) {
// If background contains only color, then we want to convert it into background-color so that it's
// correctly picked by colorbutton plugin.
bgOptions.contentTransformations = [
[
{
// Transform span that specify background with color only to background-color.
element: 'span',
left: function( element ) {
var tools = CKEDITOR.tools;
if ( element.name != 'span' || !element.styles || !element.styles.background ) {
return false;
}
var background = tools.style.parse.background( element.styles.background );
// We return true only if background specifies **only** color property, and there's only one background directive.
return background.color && tools.objectKeys( background ).length === 1;
},
right: function( element ) {
var style = new CKEDITOR.style( editor.config.colorButton_backStyle, {
color: element.styles.background
} ),
definition = style.getDefinition();
// Align the output object with the template used in config.
element.name = definition.element;
element.styles = definition.styles;
element.attributes = definition.attributes || {};
return element;
}
}
]
];
}
addButton( 'BGColor', 'back', lang.bgColorTitle, 20, bgOptions );
}
function addButton( name, type, title, order, options ) {
var style = new CKEDITOR.style( config[ 'colorButton_' + type + 'Style' ] ),
colorBoxId = CKEDITOR.tools.getNextId() + '_colorBox',
panelBlock;
options = options || {};
editor.ui.add( name, CKEDITOR.UI_PANELBUTTON, {
label: title,
title: title,
modes: { wysiwyg: 1 },
editorFocus: 0,
toolbar: 'colors,' + order,
allowedContent: style,
requiredContent: style,
contentTransformations: options.contentTransformations,
panel: {
css: CKEDITOR.skin.getPath( 'editor' ),
attributes: { role: 'listbox', 'aria-label': lang.panelTitle }
},
onBlock: function( panel, block ) {
panelBlock = block;
block.autoSize = true;
block.element.addClass( 'cke_colorblock' );
block.element.setHtml( renderColors( panel, type, colorBoxId ) );
// The block should not have scrollbars (http://dev.ckeditor.com/ticket/5933, http://dev.ckeditor.com/ticket/6056)
block.element.getDocument().getBody().setStyle( 'overflow', 'hidden' );
CKEDITOR.ui.fire( 'ready', this );
var keys = block.keys;
var rtl = editor.lang.dir == 'rtl';
keys[ rtl ? 37 : 39 ] = 'next'; // ARROW-RIGHT
keys[ 40 ] = 'next'; // ARROW-DOWN
keys[ 9 ] = 'next'; // TAB
keys[ rtl ? 39 : 37 ] = 'prev'; // ARROW-LEFT
keys[ 38 ] = 'prev'; // ARROW-UP
keys[ CKEDITOR.SHIFT + 9 ] = 'prev'; // SHIFT + TAB
keys[ 32 ] = 'click'; // SPACE
},
refresh: function() {
if ( !editor.activeFilter.check( style ) )
this.setState( CKEDITOR.TRISTATE_DISABLED );
},
// The automatic colorbox should represent the real color (http://dev.ckeditor.com/ticket/6010)
onOpen: function() {
var selection = editor.getSelection(),
block = selection && selection.getStartElement(),
path = editor.elementPath( block ),
automaticColor;
if ( !path )
return;
// Find the closest block element.
block = path.block || path.blockLimit || editor.document.getBody();
// The background color might be transparent. In that case, look up the color in the DOM tree.
do {
automaticColor = block && block.getComputedStyle( type == 'back' ? 'background-color' : 'color' ) || 'transparent';
}
while ( type == 'back' && automaticColor == 'transparent' && block && ( block = block.getParent() ) );
// The box should never be transparent.
if ( !automaticColor || automaticColor == 'transparent' )
automaticColor = '#ffffff';
if ( config.colorButton_enableAutomatic !== false ) {
this._.panel._.iframe.getFrameDocument().getById( colorBoxId ).setStyle( 'background-color', automaticColor );
}
var range = selection && selection.getRanges()[ 0 ];
if ( range ) {
var walker = new CKEDITOR.dom.walker( range ),
element = range.collapsed ? range.startContainer : walker.next(),
finalColor = '',
currentColor;
while ( element ) {
if ( element.type === CKEDITOR.NODE_TEXT ) {
element = element.getParent();
}
currentColor = normalizeColor( element.getComputedStyle( type == 'back' ? 'background-color' : 'color' ) );
finalColor = finalColor || currentColor;
if ( finalColor !== currentColor ) {
finalColor = '';
break;
}
element = walker.next();
}
selectColor( panelBlock, finalColor );
}
return automaticColor;
}
} );
}
function renderColors( panel, type, colorBoxId ) {
var output = [],
colors = config.colorButton_colors.split( ',' ),
colorsPerRow = config.colorButton_colorsPerRow || 6,
// Tells if we should include "More Colors..." button.
moreColorsEnabled = editor.plugins.colordialog && config.colorButton_enableMore !== false,
// aria-setsize and aria-posinset attributes are used to indicate size of options, because
// screen readers doesn't play nice with table, based layouts (http://dev.ckeditor.com/ticket/12097).
total = colors.length + ( moreColorsEnabled ? 2 : 1 );
var clickFn = CKEDITOR.tools.addFunction( function applyColorStyle( color, type ) {
function onColorDialogClose( evt ) {
this.removeListener( 'ok', onColorDialogClose );
this.removeListener( 'cancel', onColorDialogClose );
evt.name == 'ok' && applyColorStyle( this.getContentElement( 'picker', 'selectedColor' ).getValue(), type );
}
if ( color == '?' ) {
editor.openDialog( 'colordialog', function() {
this.on( 'ok', onColorDialogClose );
this.on( 'cancel', onColorDialogClose );
} );
return;
}
editor.focus();
panel.hide();
editor.fire( 'saveSnapshot' );
// Clean up any conflicting style within the range.
editor.removeStyle( new CKEDITOR.style( config[ 'colorButton_' + type + 'Style' ], { color: 'inherit' } ) );
if ( color ) {
var colorStyle = config[ 'colorButton_' + type + 'Style' ];
colorStyle.childRule = type == 'back' ?
function( element ) {
// It's better to apply background color as the innermost style. (http://dev.ckeditor.com/ticket/3599)
// Except for "unstylable elements". (http://dev.ckeditor.com/ticket/6103)
return isUnstylable( element );
} : function( element ) {
// Fore color style must be applied inside links instead of around it. (http://dev.ckeditor.com/ticket/4772,http://dev.ckeditor.com/ticket/6908)
return !( element.is( 'a' ) || element.getElementsByTag( 'a' ).count() ) || isUnstylable( element );
};
editor.applyStyle( new CKEDITOR.style( colorStyle, { color: color } ) );
}
editor.fire( 'saveSnapshot' );
} );
if ( config.colorButton_enableAutomatic !== false ) {
// Render the "Automatic" button.
output.push( '<a class="cke_colorauto" _cke_focus=1 hidefocus=true' +
' title="', lang.auto, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',null,\'', type, '\');return false;"' +
' href="javascript:void(\'', lang.auto, '\')"' +
' role="option" aria-posinset="1" aria-setsize="', total, '">' +
'<table role="presentation" cellspacing=0 cellpadding=0 width="100%">' +
'<tr>' +
'<td colspan="' + colorsPerRow + '" align="center"><span class="cke_colorbox" id="', colorBoxId, '"></span>', lang.auto, '</td>' +
'</tr>' +
'</table>' +
'</a>' );
}
output.push( '<table role="presentation" cellspacing=0 cellpadding=0 width="100%">' );
// Render the color boxes.
for ( var i = 0; i < colors.length; i++ ) {
if ( ( i % colorsPerRow ) === 0 )
output.push( '</tr><tr>' );
var parts = colors[ i ].split( '/' ),
colorName = parts[ 0 ],
colorCode = parts[ 1 ] || colorName;
// The data can be only a color code (without #) or colorName + color code
// If only a color code is provided, then the colorName is the color with the hash
// Convert the color from RGB to RRGGBB for better compatibility with IE and <font>. See http://dev.ckeditor.com/ticket/5676
if ( !parts[ 1 ] )
colorName = '#' + colorName.replace( /^(.)(.)(.)$/, '$1$1$2$2$3$3' ); | ' title="', colorLabel, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',\'', colorName, '\',\'', type, '\'); return false;"' +
' href="javascript:void(\'', colorLabel, '\')"' +
' data-value="' + colorCode + '"' +
' role="option" aria-posinset="', ( i + 2 ), '" aria-setsize="', total, '">' +
'<span class="cke_colorbox" style="background-color:#', colorCode, '"></span>' +
'</a>' +
'</td>' );
}
// Render the "More Colors" button.
if ( moreColorsEnabled ) {
output.push( '</tr>' +
'<tr>' +
'<td colspan="' + colorsPerRow + '" align="center">' +
'<a class="cke_colormore" _cke_focus=1 hidefocus=true' +
' title="', lang.more, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',\'?\',\'', type, '\');return false;"' +
' href="javascript:void(\'', lang.more, '\')"', ' role="option" aria-posinset="', total, '" aria-setsize="', total, '">', lang.more, '</a>' +
'</td>' ); // tr is later in the code.
}
output.push( '</tr></table>' );
return output.join( '' );
}
function isUnstylable( ele ) {
return ( ele.getAttribute( 'contentEditable' ) == 'false' ) || ele.getAttribute( 'data-nostyle' );
}
/*
* Selects the specified color in the specified panel block.
*
* @private
* @member CKEDITOR.plugins.colorbutton
* @param {CKEDITOR.ui.panel.block} block
* @param {String} color
*/
function selectColor( block, color ) {
var items = block._.getItems();
for ( var i = 0; i < items.count(); i++ ) {
var item = items.getItem( i );
item.removeAttribute( 'aria-selected' );
if ( color && color == normalizeColor( item.getAttribute( 'data-value' ) ) ) {
item.setAttribute( 'aria-selected', true );
}
}
}
/*
* Converts a CSS color value to an easily comparable form.
*
* @private
* @member CKEDITOR.plugins.colorbutton
* @param {String} color
* @returns {String}
*/
function normalizeColor( color ) {
return CKEDITOR.tools.convertRgbToHex( color || '' ).replace( /#/, '' ).toLowerCase();
}
}
} );
/**
* Whether to enable the **More Colors** button in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_enableMore = false;
*
* @cfg {Boolean} [colorButton_enableMore=true]
* @member CKEDITOR.config
*/
/**
* Defines the colors to be displayed in the color selectors. This is a string
* containing hexadecimal notation for HTML colors, without the `'#'` prefix.
*
* **Since 3.3:** A color name may optionally be defined by prefixing the entries with
* a name and the slash character. For example, `'FontColor1/FF9900'` will be
* displayed as the color `#FF9900` in the selector, but will be output as `'FontColor1'`.
*
* **Since 4.6.2:** The default color palette has changed. It contains fewer colors in more
* pastel shades than the previous one.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // Brazil colors only.
* config.colorButton_colors = '00923E,F8C100,28166F';
*
* config.colorButton_colors = 'FontColor1/FF9900,FontColor2/0066CC,FontColor3/F00';
*
* // CKEditor color palette available before version 4.6.2.
* config.colorButton_colors =
* '000,800000,8B4513,2F4F4F,008080,000080,4B0082,696969,' +
* 'B22222,A52A2A,DAA520,006400,40E0D0,0000CD,800080,808080,' +
* 'F00,FF8C00,FFD700,008000,0FF,00F,EE82EE,A9A9A9,' +
* 'FFA07A,FFA500,FFFF00,00FF00,AFEEEE,ADD8E6,DDA0DD,D3D3D3,' +
* 'FFF0F5,FAEBD7,FFFFE0,F0FFF0,F0FFFF,F0F8FF,E6E6FA,FFF';
*
* @cfg {String} [colorButton_colors=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_colors = '1ABC9C,2ECC71,3498DB,9B59B6,4E5F70,F1C40F,' +
'16A085,27AE60,2980B9,8E44AD,2C3E50,F39C12,' +
'E67E22,E74C3C,ECF0F1,95A5A6,DDD,FFF,' +
'D35400,C0392B,BDC3C7,7F8C8D,999,000';
/**
* Stores the style definition that applies the text foreground color.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // This is actually the default value.
* config.colorButton_foreStyle = {
* element: 'span',
* styles: { color: '#(color)' }
* };
*
* @cfg [colorButton_foreStyle=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_foreStyle = {
element: 'span',
styles: { 'color': '#(color)' },
overrides: [ {
element: 'font', attributes: { 'color': null }
} ]
};
/**
* Stores the style definition that applies the text background color.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // This is actually the default value.
* config.colorButton_backStyle = {
* element: 'span',
* styles: { 'background-color': '#(color)' }
* };
*
* @cfg [colorButton_backStyle=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_backStyle = {
element: 'span',
styles: { 'background-color': '#(color)' }
};
/**
* Whether to enable the **Automatic** button in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_enableAutomatic = false;
*
* @cfg {Boolean} [colorButton_enableAutomatic=true]
* @member CKEDITOR.config
*/
/**
* Defines how many colors will be shown per row in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_colorsPerRow = 8;
*
* @since 4.6.2
* @cfg {Number} [colorButton_colorsPerRow=6]
* @member CKEDITOR.config
*/
/**
* Whether the plugin should convert `background` CSS properties with color only, to a `background-color` property,
* allowing the [Color Button](http://ckeditor.com/addon/colorbutton) plugin to edit these styles.
*
* config.colorButton_normalizeBackground = false;
*
* @since 4.6.1
* @cfg {Boolean} [colorButton_normalizeBackground=true]
* @member CKEDITOR.config
*/ |
var colorLabel = editor.lang.colorbutton.colors[ colorCode ] || colorCode;
output.push( '<td>' +
'<a class="cke_colorbox" _cke_focus=1 hidefocus=true' + | random_line_split |
plugin.js | /**
* @license Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
* For licensing, see LICENSE.md or http://ckeditor.com/license
*/
/**
* @fileOverview The "colorbutton" plugin that makes it possible to assign
* text and background colors to editor contents.
*
*/
CKEDITOR.plugins.add( 'colorbutton', {
requires: 'panelbutton,floatpanel',
// jscs:disable maximumLineLength
lang: 'af,ar,az,bg,bn,bs,ca,cs,cy,da,de,de-ch,el,en,en-au,en-ca,en-gb,eo,es,es-mx,et,eu,fa,fi,fo,fr,fr-ca,gl,gu,he,hi,hr,hu,id,is,it,ja,ka,km,ko,ku,lt,lv,mk,mn,ms,nb,nl,no,oc,pl,pt,pt-br,ro,ru,si,sk,sl,sq,sr,sr-latn,sv,th,tr,tt,ug,uk,vi,zh,zh-cn', // %REMOVE_LINE_CORE%
// jscs:enable maximumLineLength
icons: 'bgcolor,textcolor', // %REMOVE_LINE_CORE%
hidpi: true, // %REMOVE_LINE_CORE%
init: function( editor ) {
var config = editor.config,
lang = editor.lang.colorbutton;
if ( !CKEDITOR.env.hc ) {
addButton( 'TextColor', 'fore', lang.textColorTitle, 10, {
contentTransformations: [
[
{
element: 'font',
check: 'span{color}',
left: function( element ) {
return !!element.attributes.color;
},
right: function( element ) {
element.name = 'span';
element.attributes.color && ( element.styles.color = element.attributes.color );
delete element.attributes.color;
}
}
]
]
} );
var bgOptions = {},
normalizeBackground = editor.config.colorButton_normalizeBackground;
if ( normalizeBackground === undefined || normalizeBackground ) {
// If background contains only color, then we want to convert it into background-color so that it's
// correctly picked by colorbutton plugin.
bgOptions.contentTransformations = [
[
{
// Transform span that specify background with color only to background-color.
element: 'span',
left: function( element ) {
var tools = CKEDITOR.tools;
if ( element.name != 'span' || !element.styles || !element.styles.background ) {
return false;
}
var background = tools.style.parse.background( element.styles.background );
// We return true only if background specifies **only** color property, and there's only one background directive.
return background.color && tools.objectKeys( background ).length === 1;
},
right: function( element ) {
var style = new CKEDITOR.style( editor.config.colorButton_backStyle, {
color: element.styles.background
} ),
definition = style.getDefinition();
// Align the output object with the template used in config.
element.name = definition.element;
element.styles = definition.styles;
element.attributes = definition.attributes || {};
return element;
}
}
]
];
}
addButton( 'BGColor', 'back', lang.bgColorTitle, 20, bgOptions );
}
function addButton( name, type, title, order, options ) {
var style = new CKEDITOR.style( config[ 'colorButton_' + type + 'Style' ] ),
colorBoxId = CKEDITOR.tools.getNextId() + '_colorBox',
panelBlock;
options = options || {};
editor.ui.add( name, CKEDITOR.UI_PANELBUTTON, {
label: title,
title: title,
modes: { wysiwyg: 1 },
editorFocus: 0,
toolbar: 'colors,' + order,
allowedContent: style,
requiredContent: style,
contentTransformations: options.contentTransformations,
panel: {
css: CKEDITOR.skin.getPath( 'editor' ),
attributes: { role: 'listbox', 'aria-label': lang.panelTitle }
},
onBlock: function( panel, block ) {
panelBlock = block;
block.autoSize = true;
block.element.addClass( 'cke_colorblock' );
block.element.setHtml( renderColors( panel, type, colorBoxId ) );
// The block should not have scrollbars (http://dev.ckeditor.com/ticket/5933, http://dev.ckeditor.com/ticket/6056)
block.element.getDocument().getBody().setStyle( 'overflow', 'hidden' );
CKEDITOR.ui.fire( 'ready', this );
var keys = block.keys;
var rtl = editor.lang.dir == 'rtl';
keys[ rtl ? 37 : 39 ] = 'next'; // ARROW-RIGHT
keys[ 40 ] = 'next'; // ARROW-DOWN
keys[ 9 ] = 'next'; // TAB
keys[ rtl ? 39 : 37 ] = 'prev'; // ARROW-LEFT
keys[ 38 ] = 'prev'; // ARROW-UP
keys[ CKEDITOR.SHIFT + 9 ] = 'prev'; // SHIFT + TAB
keys[ 32 ] = 'click'; // SPACE
},
refresh: function() {
if ( !editor.activeFilter.check( style ) )
this.setState( CKEDITOR.TRISTATE_DISABLED );
},
// The automatic colorbox should represent the real color (http://dev.ckeditor.com/ticket/6010)
onOpen: function() {
var selection = editor.getSelection(),
block = selection && selection.getStartElement(),
path = editor.elementPath( block ),
automaticColor;
if ( !path )
return;
// Find the closest block element.
block = path.block || path.blockLimit || editor.document.getBody();
// The background color might be transparent. In that case, look up the color in the DOM tree.
do {
| while ( type == 'back' && automaticColor == 'transparent' && block && ( block = block.getParent() ) );
// The box should never be transparent.
if ( !automaticColor || automaticColor == 'transparent' )
automaticColor = '#ffffff';
if ( config.colorButton_enableAutomatic !== false ) {
this._.panel._.iframe.getFrameDocument().getById( colorBoxId ).setStyle( 'background-color', automaticColor );
}
var range = selection && selection.getRanges()[ 0 ];
if ( range ) {
var walker = new CKEDITOR.dom.walker( range ),
element = range.collapsed ? range.startContainer : walker.next(),
finalColor = '',
currentColor;
while ( element ) {
if ( element.type === CKEDITOR.NODE_TEXT ) {
element = element.getParent();
}
currentColor = normalizeColor( element.getComputedStyle( type == 'back' ? 'background-color' : 'color' ) );
finalColor = finalColor || currentColor;
if ( finalColor !== currentColor ) {
finalColor = '';
break;
}
element = walker.next();
}
selectColor( panelBlock, finalColor );
}
return automaticColor;
}
} );
}
function renderColors( panel, type, colorBoxId ) {
var output = [],
colors = config.colorButton_colors.split( ',' ),
colorsPerRow = config.colorButton_colorsPerRow || 6,
// Tells if we should include "More Colors..." button.
moreColorsEnabled = editor.plugins.colordialog && config.colorButton_enableMore !== false,
// aria-setsize and aria-posinset attributes are used to indicate size of options, because
// screen readers doesn't play nice with table, based layouts (http://dev.ckeditor.com/ticket/12097).
total = colors.length + ( moreColorsEnabled ? 2 : 1 );
var clickFn = CKEDITOR.tools.addFunction( function applyColorStyle( color, type ) {
function onColorDialogClose( evt ) {
this.removeListener( 'ok', onColorDialogClose );
this.removeListener( 'cancel', onColorDialogClose );
evt.name == 'ok' && applyColorStyle( this.getContentElement( 'picker', 'selectedColor' ).getValue(), type );
}
if ( color == '?' ) {
editor.openDialog( 'colordialog', function() {
this.on( 'ok', onColorDialogClose );
this.on( 'cancel', onColorDialogClose );
} );
return;
}
editor.focus();
panel.hide();
editor.fire( 'saveSnapshot' );
// Clean up any conflicting style within the range.
editor.removeStyle( new CKEDITOR.style( config[ 'colorButton_' + type + 'Style' ], { color: 'inherit' } ) );
if ( color ) {
var colorStyle = config[ 'colorButton_' + type + 'Style' ];
colorStyle.childRule = type == 'back' ?
function( element ) {
// It's better to apply background color as the innermost style. (http://dev.ckeditor.com/ticket/3599)
// Except for "unstylable elements". (http://dev.ckeditor.com/ticket/6103)
return isUnstylable( element );
} : function( element ) {
// Fore color style must be applied inside links instead of around it. (http://dev.ckeditor.com/ticket/4772,http://dev.ckeditor.com/ticket/6908)
return !( element.is( 'a' ) || element.getElementsByTag( 'a' ).count() ) || isUnstylable( element );
};
editor.applyStyle( new CKEDITOR.style( colorStyle, { color: color } ) );
}
editor.fire( 'saveSnapshot' );
} );
if ( config.colorButton_enableAutomatic !== false ) {
// Render the "Automatic" button.
output.push( '<a class="cke_colorauto" _cke_focus=1 hidefocus=true' +
' title="', lang.auto, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',null,\'', type, '\');return false;"' +
' href="javascript:void(\'', lang.auto, '\')"' +
' role="option" aria-posinset="1" aria-setsize="', total, '">' +
'<table role="presentation" cellspacing=0 cellpadding=0 width="100%">' +
'<tr>' +
'<td colspan="' + colorsPerRow + '" align="center"><span class="cke_colorbox" id="', colorBoxId, '"></span>', lang.auto, '</td>' +
'</tr>' +
'</table>' +
'</a>' );
}
output.push( '<table role="presentation" cellspacing=0 cellpadding=0 width="100%">' );
// Render the color boxes.
for ( var i = 0; i < colors.length; i++ ) {
if ( ( i % colorsPerRow ) === 0 )
output.push( '</tr><tr>' );
var parts = colors[ i ].split( '/' ),
colorName = parts[ 0 ],
colorCode = parts[ 1 ] || colorName;
// The data can be only a color code (without #) or colorName + color code
// If only a color code is provided, then the colorName is the color with the hash
// Convert the color from RGB to RRGGBB for better compatibility with IE and <font>. See http://dev.ckeditor.com/ticket/5676
if ( !parts[ 1 ] )
colorName = '#' + colorName.replace( /^(.)(.)(.)$/, '$1$1$2$2$3$3' );
var colorLabel = editor.lang.colorbutton.colors[ colorCode ] || colorCode;
output.push( '<td>' +
'<a class="cke_colorbox" _cke_focus=1 hidefocus=true' +
' title="', colorLabel, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',\'', colorName, '\',\'', type, '\'); return false;"' +
' href="javascript:void(\'', colorLabel, '\')"' +
' data-value="' + colorCode + '"' +
' role="option" aria-posinset="', ( i + 2 ), '" aria-setsize="', total, '">' +
'<span class="cke_colorbox" style="background-color:#', colorCode, '"></span>' +
'</a>' +
'</td>' );
}
// Render the "More Colors" button.
if ( moreColorsEnabled ) {
output.push( '</tr>' +
'<tr>' +
'<td colspan="' + colorsPerRow + '" align="center">' +
'<a class="cke_colormore" _cke_focus=1 hidefocus=true' +
' title="', lang.more, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',\'?\',\'', type, '\');return false;"' +
' href="javascript:void(\'', lang.more, '\')"', ' role="option" aria-posinset="', total, '" aria-setsize="', total, '">', lang.more, '</a>' +
'</td>' ); // tr is later in the code.
}
output.push( '</tr></table>' );
return output.join( '' );
}
function isUnstylable( ele ) {
return ( ele.getAttribute( 'contentEditable' ) == 'false' ) || ele.getAttribute( 'data-nostyle' );
}
/*
* Selects the specified color in the specified panel block.
*
* @private
* @member CKEDITOR.plugins.colorbutton
* @param {CKEDITOR.ui.panel.block} block
* @param {String} color
*/
function selectColor( block, color ) {
var items = block._.getItems();
for ( var i = 0; i < items.count(); i++ ) {
var item = items.getItem( i );
item.removeAttribute( 'aria-selected' );
if ( color && color == normalizeColor( item.getAttribute( 'data-value' ) ) ) {
item.setAttribute( 'aria-selected', true );
}
}
}
/*
* Converts a CSS color value to an easily comparable form.
*
* @private
* @member CKEDITOR.plugins.colorbutton
* @param {String} color
* @returns {String}
*/
function normalizeColor( color ) {
return CKEDITOR.tools.convertRgbToHex( color || '' ).replace( /#/, '' ).toLowerCase();
}
}
} );
/**
* Whether to enable the **More Colors** button in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_enableMore = false;
*
* @cfg {Boolean} [colorButton_enableMore=true]
* @member CKEDITOR.config
*/
/**
* Defines the colors to be displayed in the color selectors. This is a string
* containing hexadecimal notation for HTML colors, without the `'#'` prefix.
*
* **Since 3.3:** A color name may optionally be defined by prefixing the entries with
* a name and the slash character. For example, `'FontColor1/FF9900'` will be
* displayed as the color `#FF9900` in the selector, but will be output as `'FontColor1'`.
*
* **Since 4.6.2:** The default color palette has changed. It contains fewer colors in more
* pastel shades than the previous one.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // Brazil colors only.
* config.colorButton_colors = '00923E,F8C100,28166F';
*
* config.colorButton_colors = 'FontColor1/FF9900,FontColor2/0066CC,FontColor3/F00';
*
* // CKEditor color palette available before version 4.6.2.
* config.colorButton_colors =
* '000,800000,8B4513,2F4F4F,008080,000080,4B0082,696969,' +
* 'B22222,A52A2A,DAA520,006400,40E0D0,0000CD,800080,808080,' +
* 'F00,FF8C00,FFD700,008000,0FF,00F,EE82EE,A9A9A9,' +
* 'FFA07A,FFA500,FFFF00,00FF00,AFEEEE,ADD8E6,DDA0DD,D3D3D3,' +
* 'FFF0F5,FAEBD7,FFFFE0,F0FFF0,F0FFFF,F0F8FF,E6E6FA,FFF';
*
* @cfg {String} [colorButton_colors=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_colors = '1ABC9C,2ECC71,3498DB,9B59B6,4E5F70,F1C40F,' +
'16A085,27AE60,2980B9,8E44AD,2C3E50,F39C12,' +
'E67E22,E74C3C,ECF0F1,95A5A6,DDD,FFF,' +
'D35400,C0392B,BDC3C7,7F8C8D,999,000';
/**
* Stores the style definition that applies the text foreground color.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // This is actually the default value.
* config.colorButton_foreStyle = {
* element: 'span',
* styles: { color: '#(color)' }
* };
*
* @cfg [colorButton_foreStyle=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_foreStyle = {
element: 'span',
styles: { 'color': '#(color)' },
overrides: [ {
element: 'font', attributes: { 'color': null }
} ]
};
/**
* Stores the style definition that applies the text background color.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // This is actually the default value.
* config.colorButton_backStyle = {
* element: 'span',
* styles: { 'background-color': '#(color)' }
* };
*
* @cfg [colorButton_backStyle=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_backStyle = {
element: 'span',
styles: { 'background-color': '#(color)' }
};
/**
* Whether to enable the **Automatic** button in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_enableAutomatic = false;
*
* @cfg {Boolean} [colorButton_enableAutomatic=true]
* @member CKEDITOR.config
*/
/**
* Defines how many colors will be shown per row in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_colorsPerRow = 8;
*
* @since 4.6.2
* @cfg {Number} [colorButton_colorsPerRow=6]
* @member CKEDITOR.config
*/
/**
* Whether the plugin should convert `background` CSS properties with color only, to a `background-color` property,
* allowing the [Color Button](http://ckeditor.com/addon/colorbutton) plugin to edit these styles.
*
* config.colorButton_normalizeBackground = false;
*
* @since 4.6.1
* @cfg {Boolean} [colorButton_normalizeBackground=true]
* @member CKEDITOR.config
*/
| automaticColor = block && block.getComputedStyle( type == 'back' ? 'background-color' : 'color' ) || 'transparent';
}
| conditional_block |
plugin.js | /**
* @license Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
* For licensing, see LICENSE.md or http://ckeditor.com/license
*/
/**
* @fileOverview The "colorbutton" plugin that makes it possible to assign
* text and background colors to editor contents.
*
*/
CKEDITOR.plugins.add( 'colorbutton', {
requires: 'panelbutton,floatpanel',
// jscs:disable maximumLineLength
lang: 'af,ar,az,bg,bn,bs,ca,cs,cy,da,de,de-ch,el,en,en-au,en-ca,en-gb,eo,es,es-mx,et,eu,fa,fi,fo,fr,fr-ca,gl,gu,he,hi,hr,hu,id,is,it,ja,ka,km,ko,ku,lt,lv,mk,mn,ms,nb,nl,no,oc,pl,pt,pt-br,ro,ru,si,sk,sl,sq,sr,sr-latn,sv,th,tr,tt,ug,uk,vi,zh,zh-cn', // %REMOVE_LINE_CORE%
// jscs:enable maximumLineLength
icons: 'bgcolor,textcolor', // %REMOVE_LINE_CORE%
hidpi: true, // %REMOVE_LINE_CORE%
init: function( editor ) {
var config = editor.config,
lang = editor.lang.colorbutton;
if ( !CKEDITOR.env.hc ) {
addButton( 'TextColor', 'fore', lang.textColorTitle, 10, {
contentTransformations: [
[
{
element: 'font',
check: 'span{color}',
left: function( element ) {
return !!element.attributes.color;
},
right: function( element ) {
element.name = 'span';
element.attributes.color && ( element.styles.color = element.attributes.color );
delete element.attributes.color;
}
}
]
]
} );
var bgOptions = {},
normalizeBackground = editor.config.colorButton_normalizeBackground;
if ( normalizeBackground === undefined || normalizeBackground ) {
// If background contains only color, then we want to convert it into background-color so that it's
// correctly picked by colorbutton plugin.
bgOptions.contentTransformations = [
[
{
// Transform span that specify background with color only to background-color.
element: 'span',
left: function( element ) {
var tools = CKEDITOR.tools;
if ( element.name != 'span' || !element.styles || !element.styles.background ) {
return false;
}
var background = tools.style.parse.background( element.styles.background );
// We return true only if background specifies **only** color property, and there's only one background directive.
return background.color && tools.objectKeys( background ).length === 1;
},
right: function( element ) {
var style = new CKEDITOR.style( editor.config.colorButton_backStyle, {
color: element.styles.background
} ),
definition = style.getDefinition();
// Align the output object with the template used in config.
element.name = definition.element;
element.styles = definition.styles;
element.attributes = definition.attributes || {};
return element;
}
}
]
];
}
addButton( 'BGColor', 'back', lang.bgColorTitle, 20, bgOptions );
}
function addButton( name, type, title, order, options ) {
var style = new CKEDITOR.style( config[ 'colorButton_' + type + 'Style' ] ),
colorBoxId = CKEDITOR.tools.getNextId() + '_colorBox',
panelBlock;
options = options || {};
editor.ui.add( name, CKEDITOR.UI_PANELBUTTON, {
label: title,
title: title,
modes: { wysiwyg: 1 },
editorFocus: 0,
toolbar: 'colors,' + order,
allowedContent: style,
requiredContent: style,
contentTransformations: options.contentTransformations,
panel: {
css: CKEDITOR.skin.getPath( 'editor' ),
attributes: { role: 'listbox', 'aria-label': lang.panelTitle }
},
onBlock: function( panel, block ) {
panelBlock = block;
block.autoSize = true;
block.element.addClass( 'cke_colorblock' );
block.element.setHtml( renderColors( panel, type, colorBoxId ) );
// The block should not have scrollbars (http://dev.ckeditor.com/ticket/5933, http://dev.ckeditor.com/ticket/6056)
block.element.getDocument().getBody().setStyle( 'overflow', 'hidden' );
CKEDITOR.ui.fire( 'ready', this );
var keys = block.keys;
var rtl = editor.lang.dir == 'rtl';
keys[ rtl ? 37 : 39 ] = 'next'; // ARROW-RIGHT
keys[ 40 ] = 'next'; // ARROW-DOWN
keys[ 9 ] = 'next'; // TAB
keys[ rtl ? 39 : 37 ] = 'prev'; // ARROW-LEFT
keys[ 38 ] = 'prev'; // ARROW-UP
keys[ CKEDITOR.SHIFT + 9 ] = 'prev'; // SHIFT + TAB
keys[ 32 ] = 'click'; // SPACE
},
refresh: function() {
if ( !editor.activeFilter.check( style ) )
this.setState( CKEDITOR.TRISTATE_DISABLED );
},
// The automatic colorbox should represent the real color (http://dev.ckeditor.com/ticket/6010)
onOpen: function() {
var selection = editor.getSelection(),
block = selection && selection.getStartElement(),
path = editor.elementPath( block ),
automaticColor;
if ( !path )
return;
// Find the closest block element.
block = path.block || path.blockLimit || editor.document.getBody();
// The background color might be transparent. In that case, look up the color in the DOM tree.
do {
automaticColor = block && block.getComputedStyle( type == 'back' ? 'background-color' : 'color' ) || 'transparent';
}
while ( type == 'back' && automaticColor == 'transparent' && block && ( block = block.getParent() ) );
// The box should never be transparent.
if ( !automaticColor || automaticColor == 'transparent' )
automaticColor = '#ffffff';
if ( config.colorButton_enableAutomatic !== false ) {
this._.panel._.iframe.getFrameDocument().getById( colorBoxId ).setStyle( 'background-color', automaticColor );
}
var range = selection && selection.getRanges()[ 0 ];
if ( range ) {
var walker = new CKEDITOR.dom.walker( range ),
element = range.collapsed ? range.startContainer : walker.next(),
finalColor = '',
currentColor;
while ( element ) {
if ( element.type === CKEDITOR.NODE_TEXT ) {
element = element.getParent();
}
currentColor = normalizeColor( element.getComputedStyle( type == 'back' ? 'background-color' : 'color' ) );
finalColor = finalColor || currentColor;
if ( finalColor !== currentColor ) {
finalColor = '';
break;
}
element = walker.next();
}
selectColor( panelBlock, finalColor );
}
return automaticColor;
}
} );
}
function re | panel, type, colorBoxId ) {
var output = [],
colors = config.colorButton_colors.split( ',' ),
colorsPerRow = config.colorButton_colorsPerRow || 6,
// Tells if we should include "More Colors..." button.
moreColorsEnabled = editor.plugins.colordialog && config.colorButton_enableMore !== false,
// aria-setsize and aria-posinset attributes are used to indicate size of options, because
// screen readers doesn't play nice with table, based layouts (http://dev.ckeditor.com/ticket/12097).
total = colors.length + ( moreColorsEnabled ? 2 : 1 );
var clickFn = CKEDITOR.tools.addFunction( function applyColorStyle( color, type ) {
function onColorDialogClose( evt ) {
this.removeListener( 'ok', onColorDialogClose );
this.removeListener( 'cancel', onColorDialogClose );
evt.name == 'ok' && applyColorStyle( this.getContentElement( 'picker', 'selectedColor' ).getValue(), type );
}
if ( color == '?' ) {
editor.openDialog( 'colordialog', function() {
this.on( 'ok', onColorDialogClose );
this.on( 'cancel', onColorDialogClose );
} );
return;
}
editor.focus();
panel.hide();
editor.fire( 'saveSnapshot' );
// Clean up any conflicting style within the range.
editor.removeStyle( new CKEDITOR.style( config[ 'colorButton_' + type + 'Style' ], { color: 'inherit' } ) );
if ( color ) {
var colorStyle = config[ 'colorButton_' + type + 'Style' ];
colorStyle.childRule = type == 'back' ?
function( element ) {
// It's better to apply background color as the innermost style. (http://dev.ckeditor.com/ticket/3599)
// Except for "unstylable elements". (http://dev.ckeditor.com/ticket/6103)
return isUnstylable( element );
} : function( element ) {
// Fore color style must be applied inside links instead of around it. (http://dev.ckeditor.com/ticket/4772,http://dev.ckeditor.com/ticket/6908)
return !( element.is( 'a' ) || element.getElementsByTag( 'a' ).count() ) || isUnstylable( element );
};
editor.applyStyle( new CKEDITOR.style( colorStyle, { color: color } ) );
}
editor.fire( 'saveSnapshot' );
} );
if ( config.colorButton_enableAutomatic !== false ) {
// Render the "Automatic" button.
output.push( '<a class="cke_colorauto" _cke_focus=1 hidefocus=true' +
' title="', lang.auto, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',null,\'', type, '\');return false;"' +
' href="javascript:void(\'', lang.auto, '\')"' +
' role="option" aria-posinset="1" aria-setsize="', total, '">' +
'<table role="presentation" cellspacing=0 cellpadding=0 width="100%">' +
'<tr>' +
'<td colspan="' + colorsPerRow + '" align="center"><span class="cke_colorbox" id="', colorBoxId, '"></span>', lang.auto, '</td>' +
'</tr>' +
'</table>' +
'</a>' );
}
output.push( '<table role="presentation" cellspacing=0 cellpadding=0 width="100%">' );
// Render the color boxes.
for ( var i = 0; i < colors.length; i++ ) {
if ( ( i % colorsPerRow ) === 0 )
output.push( '</tr><tr>' );
var parts = colors[ i ].split( '/' ),
colorName = parts[ 0 ],
colorCode = parts[ 1 ] || colorName;
// The data can be only a color code (without #) or colorName + color code
// If only a color code is provided, then the colorName is the color with the hash
// Convert the color from RGB to RRGGBB for better compatibility with IE and <font>. See http://dev.ckeditor.com/ticket/5676
if ( !parts[ 1 ] )
colorName = '#' + colorName.replace( /^(.)(.)(.)$/, '$1$1$2$2$3$3' );
var colorLabel = editor.lang.colorbutton.colors[ colorCode ] || colorCode;
output.push( '<td>' +
'<a class="cke_colorbox" _cke_focus=1 hidefocus=true' +
' title="', colorLabel, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',\'', colorName, '\',\'', type, '\'); return false;"' +
' href="javascript:void(\'', colorLabel, '\')"' +
' data-value="' + colorCode + '"' +
' role="option" aria-posinset="', ( i + 2 ), '" aria-setsize="', total, '">' +
'<span class="cke_colorbox" style="background-color:#', colorCode, '"></span>' +
'</a>' +
'</td>' );
}
// Render the "More Colors" button.
if ( moreColorsEnabled ) {
output.push( '</tr>' +
'<tr>' +
'<td colspan="' + colorsPerRow + '" align="center">' +
'<a class="cke_colormore" _cke_focus=1 hidefocus=true' +
' title="', lang.more, '"' +
' onclick="CKEDITOR.tools.callFunction(', clickFn, ',\'?\',\'', type, '\');return false;"' +
' href="javascript:void(\'', lang.more, '\')"', ' role="option" aria-posinset="', total, '" aria-setsize="', total, '">', lang.more, '</a>' +
'</td>' ); // tr is later in the code.
}
output.push( '</tr></table>' );
return output.join( '' );
}
function isUnstylable( ele ) {
return ( ele.getAttribute( 'contentEditable' ) == 'false' ) || ele.getAttribute( 'data-nostyle' );
}
/*
* Selects the specified color in the specified panel block.
*
* @private
* @member CKEDITOR.plugins.colorbutton
* @param {CKEDITOR.ui.panel.block} block
* @param {String} color
*/
function selectColor( block, color ) {
var items = block._.getItems();
for ( var i = 0; i < items.count(); i++ ) {
var item = items.getItem( i );
item.removeAttribute( 'aria-selected' );
if ( color && color == normalizeColor( item.getAttribute( 'data-value' ) ) ) {
item.setAttribute( 'aria-selected', true );
}
}
}
/*
* Converts a CSS color value to an easily comparable form.
*
* @private
* @member CKEDITOR.plugins.colorbutton
* @param {String} color
* @returns {String}
*/
function normalizeColor( color ) {
return CKEDITOR.tools.convertRgbToHex( color || '' ).replace( /#/, '' ).toLowerCase();
}
}
} );
/**
* Whether to enable the **More Colors** button in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_enableMore = false;
*
* @cfg {Boolean} [colorButton_enableMore=true]
* @member CKEDITOR.config
*/
/**
* Defines the colors to be displayed in the color selectors. This is a string
* containing hexadecimal notation for HTML colors, without the `'#'` prefix.
*
* **Since 3.3:** A color name may optionally be defined by prefixing the entries with
* a name and the slash character. For example, `'FontColor1/FF9900'` will be
* displayed as the color `#FF9900` in the selector, but will be output as `'FontColor1'`.
*
* **Since 4.6.2:** The default color palette has changed. It contains fewer colors in more
* pastel shades than the previous one.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // Brazil colors only.
* config.colorButton_colors = '00923E,F8C100,28166F';
*
* config.colorButton_colors = 'FontColor1/FF9900,FontColor2/0066CC,FontColor3/F00';
*
* // CKEditor color palette available before version 4.6.2.
* config.colorButton_colors =
* '000,800000,8B4513,2F4F4F,008080,000080,4B0082,696969,' +
* 'B22222,A52A2A,DAA520,006400,40E0D0,0000CD,800080,808080,' +
* 'F00,FF8C00,FFD700,008000,0FF,00F,EE82EE,A9A9A9,' +
* 'FFA07A,FFA500,FFFF00,00FF00,AFEEEE,ADD8E6,DDA0DD,D3D3D3,' +
* 'FFF0F5,FAEBD7,FFFFE0,F0FFF0,F0FFFF,F0F8FF,E6E6FA,FFF';
*
* @cfg {String} [colorButton_colors=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_colors = '1ABC9C,2ECC71,3498DB,9B59B6,4E5F70,F1C40F,' +
'16A085,27AE60,2980B9,8E44AD,2C3E50,F39C12,' +
'E67E22,E74C3C,ECF0F1,95A5A6,DDD,FFF,' +
'D35400,C0392B,BDC3C7,7F8C8D,999,000';
/**
* Stores the style definition that applies the text foreground color.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // This is actually the default value.
* config.colorButton_foreStyle = {
* element: 'span',
* styles: { color: '#(color)' }
* };
*
* @cfg [colorButton_foreStyle=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_foreStyle = {
element: 'span',
styles: { 'color': '#(color)' },
overrides: [ {
element: 'font', attributes: { 'color': null }
} ]
};
/**
* Stores the style definition that applies the text background color.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* // This is actually the default value.
* config.colorButton_backStyle = {
* element: 'span',
* styles: { 'background-color': '#(color)' }
* };
*
* @cfg [colorButton_backStyle=see source]
* @member CKEDITOR.config
*/
CKEDITOR.config.colorButton_backStyle = {
element: 'span',
styles: { 'background-color': '#(color)' }
};
/**
* Whether to enable the **Automatic** button in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_enableAutomatic = false;
*
* @cfg {Boolean} [colorButton_enableAutomatic=true]
* @member CKEDITOR.config
*/
/**
* Defines how many colors will be shown per row in the color selectors.
*
* Read more in the [documentation](#!/guide/dev_colorbutton)
* and see the [SDK sample](http://sdk.ckeditor.com/samples/colorbutton.html).
*
* config.colorButton_colorsPerRow = 8;
*
* @since 4.6.2
* @cfg {Number} [colorButton_colorsPerRow=6]
* @member CKEDITOR.config
*/
/**
* Whether the plugin should convert `background` CSS properties with color only, to a `background-color` property,
* allowing the [Color Button](http://ckeditor.com/addon/colorbutton) plugin to edit these styles.
*
* config.colorButton_normalizeBackground = false;
*
* @since 4.6.1
* @cfg {Boolean} [colorButton_normalizeBackground=true]
* @member CKEDITOR.config
*/
| nderColors( | identifier_name |
borrowck-let-suggestion-suffixes.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn f() {
let old = ['o']; // statement 0
let mut v1 = Vec::new(); // statement 1
let mut v2 = Vec::new(); // statement 2
//~^ NOTE reference must be valid for the block suffix following statement 2
let young = ['y']; // statement 3
//~^ NOTE ...but borrowed value is only valid for the block suffix following statement 3
v2.push(&young[0]); // statement 4
//~^ ERROR `young[..]` does not live long enough
let mut v3 = Vec::new(); // statement 5
//~^ NOTE reference must be valid for the block suffix following statement 5
v3.push(&'x'); // statement 6
//~^ ERROR borrowed value does not live long enough
//~| NOTE ...but borrowed value is only valid for the statement
//~| HELP consider using a `let` binding to increase its lifetime
{
let mut v4 = Vec::new(); // (sub) statement 0
//~^ NOTE reference must be valid for the block suffix following statement 0
v4.push(&'y');
//~^ ERROR borrowed value does not live long enough
//~| NOTE ...but borrowed value is only valid for the statement
//~| HELP consider using a `let` binding to increase its lifetime
} // (statement 7)
let mut v5 = Vec::new(); // statement 8
//~^ NOTE reference must be valid for the block suffix following statement 8
v5.push(&'z');
//~^ ERROR borrowed value does not live long enough
//~| NOTE ...but borrowed value is only valid for the statement
//~| HELP consider using a `let` binding to increase its lifetime
v1.push(&old[0]);
}
fn | () {
f();
}
| main | identifier_name |
borrowck-let-suggestion-suffixes.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn f() {
let old = ['o']; // statement 0
let mut v1 = Vec::new(); // statement 1
let mut v2 = Vec::new(); // statement 2 | let young = ['y']; // statement 3
//~^ NOTE ...but borrowed value is only valid for the block suffix following statement 3
v2.push(&young[0]); // statement 4
//~^ ERROR `young[..]` does not live long enough
let mut v3 = Vec::new(); // statement 5
//~^ NOTE reference must be valid for the block suffix following statement 5
v3.push(&'x'); // statement 6
//~^ ERROR borrowed value does not live long enough
//~| NOTE ...but borrowed value is only valid for the statement
//~| HELP consider using a `let` binding to increase its lifetime
{
let mut v4 = Vec::new(); // (sub) statement 0
//~^ NOTE reference must be valid for the block suffix following statement 0
v4.push(&'y');
//~^ ERROR borrowed value does not live long enough
//~| NOTE ...but borrowed value is only valid for the statement
//~| HELP consider using a `let` binding to increase its lifetime
} // (statement 7)
let mut v5 = Vec::new(); // statement 8
//~^ NOTE reference must be valid for the block suffix following statement 8
v5.push(&'z');
//~^ ERROR borrowed value does not live long enough
//~| NOTE ...but borrowed value is only valid for the statement
//~| HELP consider using a `let` binding to increase its lifetime
v1.push(&old[0]);
}
fn main() {
f();
} | //~^ NOTE reference must be valid for the block suffix following statement 2
| random_line_split |
borrowck-let-suggestion-suffixes.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn f() {
let old = ['o']; // statement 0
let mut v1 = Vec::new(); // statement 1
let mut v2 = Vec::new(); // statement 2
//~^ NOTE reference must be valid for the block suffix following statement 2
let young = ['y']; // statement 3
//~^ NOTE ...but borrowed value is only valid for the block suffix following statement 3
v2.push(&young[0]); // statement 4
//~^ ERROR `young[..]` does not live long enough
let mut v3 = Vec::new(); // statement 5
//~^ NOTE reference must be valid for the block suffix following statement 5
v3.push(&'x'); // statement 6
//~^ ERROR borrowed value does not live long enough
//~| NOTE ...but borrowed value is only valid for the statement
//~| HELP consider using a `let` binding to increase its lifetime
{
let mut v4 = Vec::new(); // (sub) statement 0
//~^ NOTE reference must be valid for the block suffix following statement 0
v4.push(&'y');
//~^ ERROR borrowed value does not live long enough
//~| NOTE ...but borrowed value is only valid for the statement
//~| HELP consider using a `let` binding to increase its lifetime
} // (statement 7)
let mut v5 = Vec::new(); // statement 8
//~^ NOTE reference must be valid for the block suffix following statement 8
v5.push(&'z');
//~^ ERROR borrowed value does not live long enough
//~| NOTE ...but borrowed value is only valid for the statement
//~| HELP consider using a `let` binding to increase its lifetime
v1.push(&old[0]);
}
fn main() | {
f();
} | identifier_body | |
test_builddict.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.core.register import build_file_aliases as register_core
from pants.backend.core.tasks import builddictionary, reflect
from pants.backend.jvm.register import build_file_aliases as register_jvm
from pants.backend.python.register import build_file_aliases as register_python
from pants_test.base_test import BaseTest
class | (BaseTest):
@property
def alias_groups(self):
return register_core().merge(register_jvm().merge(register_python()))
def setUp(self):
super(ExtractedContentSanityTests, self).setUp()
self._syms = reflect.assemble_buildsyms(build_file_parser=self.build_file_parser)
def test_sub_tocls(self):
python_symbols = builddictionary.python_sub_tocl(self._syms).e
# python_requirements goes through build_file_aliases.curry_context.
# It's in the "Python" sub_tocl, but tenuously
self.assertTrue('python_requirements' in python_symbols)
# Some less-tenuous sanity checks
for sym in ['python_library', 'python_tests']:
self.assertTrue(sym in python_symbols)
jvm_symbols = builddictionary.jvm_sub_tocl(self._syms).e
for sym in ['java_library', 'scala_library']:
self.assertTrue(sym in jvm_symbols)
| ExtractedContentSanityTests | identifier_name |
test_builddict.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.core.register import build_file_aliases as register_core
from pants.backend.core.tasks import builddictionary, reflect
from pants.backend.jvm.register import build_file_aliases as register_jvm
from pants.backend.python.register import build_file_aliases as register_python
from pants_test.base_test import BaseTest
class ExtractedContentSanityTests(BaseTest):
@property
def alias_groups(self):
return register_core().merge(register_jvm().merge(register_python()))
def setUp(self):
|
def test_sub_tocls(self):
python_symbols = builddictionary.python_sub_tocl(self._syms).e
# python_requirements goes through build_file_aliases.curry_context.
# It's in the "Python" sub_tocl, but tenuously
self.assertTrue('python_requirements' in python_symbols)
# Some less-tenuous sanity checks
for sym in ['python_library', 'python_tests']:
self.assertTrue(sym in python_symbols)
jvm_symbols = builddictionary.jvm_sub_tocl(self._syms).e
for sym in ['java_library', 'scala_library']:
self.assertTrue(sym in jvm_symbols)
| super(ExtractedContentSanityTests, self).setUp()
self._syms = reflect.assemble_buildsyms(build_file_parser=self.build_file_parser) | identifier_body |
test_builddict.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.core.register import build_file_aliases as register_core
from pants.backend.core.tasks import builddictionary, reflect
from pants.backend.jvm.register import build_file_aliases as register_jvm
from pants.backend.python.register import build_file_aliases as register_python
from pants_test.base_test import BaseTest
|
class ExtractedContentSanityTests(BaseTest):
@property
def alias_groups(self):
return register_core().merge(register_jvm().merge(register_python()))
def setUp(self):
super(ExtractedContentSanityTests, self).setUp()
self._syms = reflect.assemble_buildsyms(build_file_parser=self.build_file_parser)
def test_sub_tocls(self):
python_symbols = builddictionary.python_sub_tocl(self._syms).e
# python_requirements goes through build_file_aliases.curry_context.
# It's in the "Python" sub_tocl, but tenuously
self.assertTrue('python_requirements' in python_symbols)
# Some less-tenuous sanity checks
for sym in ['python_library', 'python_tests']:
self.assertTrue(sym in python_symbols)
jvm_symbols = builddictionary.jvm_sub_tocl(self._syms).e
for sym in ['java_library', 'scala_library']:
self.assertTrue(sym in jvm_symbols) | random_line_split | |
test_builddict.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.core.register import build_file_aliases as register_core
from pants.backend.core.tasks import builddictionary, reflect
from pants.backend.jvm.register import build_file_aliases as register_jvm
from pants.backend.python.register import build_file_aliases as register_python
from pants_test.base_test import BaseTest
class ExtractedContentSanityTests(BaseTest):
@property
def alias_groups(self):
return register_core().merge(register_jvm().merge(register_python()))
def setUp(self):
super(ExtractedContentSanityTests, self).setUp()
self._syms = reflect.assemble_buildsyms(build_file_parser=self.build_file_parser)
def test_sub_tocls(self):
python_symbols = builddictionary.python_sub_tocl(self._syms).e
# python_requirements goes through build_file_aliases.curry_context.
# It's in the "Python" sub_tocl, but tenuously
self.assertTrue('python_requirements' in python_symbols)
# Some less-tenuous sanity checks
for sym in ['python_library', 'python_tests']:
|
jvm_symbols = builddictionary.jvm_sub_tocl(self._syms).e
for sym in ['java_library', 'scala_library']:
self.assertTrue(sym in jvm_symbols)
| self.assertTrue(sym in python_symbols) | conditional_block |
__init__.py | __all__ = ["sqlite_dump", "sqlite_merge"]
from random import Random
import math
def random_expectations(depth=0, breadth=3, low=1, high=10, random=Random()):
"""
Generate depth x breadth array of random numbers where each row sums to
high, with a minimum of low.
"""
result = []
if depth == 0:
initial = high + 1
for i in range(breadth - 1):
n = random.randint(low, initial - (low * (breadth - i)))
initial -= n
result.append(n)
result.append(initial - low)
random.shuffle(result) | result = [random_expectations(depth - 1, breadth, low, high, random) for x in range(breadth)]
return result
def rescale(new_low, new_high, low, diff, x):
scaled = (new_high-new_low)*(x - low)
scaled /= diff
return scaled + new_low
def weighted_random_choice(choices, weights, random=Random()):
population = [val for val, cnt in zip(choices, weights) for i in range(int(cnt))]
return random.choice(population)
def multinomial(probabilities, draws=1, random=Random()):
"""
Draw from a multinomial distribution
"""
def pick():
draw = random.random()
bracket = 0.
for i in range(len(probabilities)):
bracket += probabilities[i]
if draw < bracket:
return i
return i
result = [0] * len(probabilities)
for i in range(draws):
result[pick()] += 1
return result
def logistic_random(loc, scale, random=Random()):
"""
Return a random number from a specified logistic distribution.
"""
x = random.random()
return loc + scale * math.log(x / (1 - x))
def shuffled(target, random=Random()):
"""
Return a shuffled version of the argument
"""
a = target[:]
random.shuffle(a)
return a
def make_pbs_script(kwargs, hours=60, mins=0, ppn=16, script_name=None):
"""
Generate a PBS run script to be submitted.
"""
from disclosuregame.Util.sqlite_merge import list_matching
from os.path import split
args_dir, name = split(kwargs.kwargs[0])
kwargs_files = list_matching(args_dir, name)
count = len(kwargs_files)
import sys
args = sys.argv[1:]
args = " ".join(args)
args = args.replace("*", "${PBS_ARRAYID}")
args = args.replace(" %s " % kwargs.file_name, " ${PBS_ARRAYID}_%s " % kwargs.file_name)
if kwargs.file_name == "":
args += " -f ${PBS_ARRAYID}"
interpreter = sys.executable
run_script = ["#!/bin/bash -vx", "#PBS -l walltime=%d:%d:00" % (hours, mins), "#PBS -l nodes=1:ppn=%d" % ppn,
"module load python"]
# Doesn't work on multiple nodes, sadly
# Set up the call
run_call = "%s -m disclosuregame.run %s" % (interpreter, args)
run_script.append(run_call)
# Cleanup after all jobs have run
if script_name is not None:
run_script.append("if [$PBS_ARRAYID -eq %d]" % count)
run_script.append("then")
run_script.append("\trm %s" % script_name)
run_script.append("fi")
return '\n'.join(run_script), count
# ${python} Run.py -R 100 -s ${sig} -r ${resp} --pickled-arguments ../experiment_args/sensitivity_${PBS_ARRAYID}.args -f ${PBS_ARRAYID}_sensitivity -i 1000 -d ${dir} -g ${game} | else: | random_line_split |
__init__.py | __all__ = ["sqlite_dump", "sqlite_merge"]
from random import Random
import math
def random_expectations(depth=0, breadth=3, low=1, high=10, random=Random()):
"""
Generate depth x breadth array of random numbers where each row sums to
high, with a minimum of low.
"""
result = []
if depth == 0:
initial = high + 1
for i in range(breadth - 1):
n = random.randint(low, initial - (low * (breadth - i)))
initial -= n
result.append(n)
result.append(initial - low)
random.shuffle(result)
else:
result = [random_expectations(depth - 1, breadth, low, high, random) for x in range(breadth)]
return result
def rescale(new_low, new_high, low, diff, x):
scaled = (new_high-new_low)*(x - low)
scaled /= diff
return scaled + new_low
def weighted_random_choice(choices, weights, random=Random()):
population = [val for val, cnt in zip(choices, weights) for i in range(int(cnt))]
return random.choice(population)
def | (probabilities, draws=1, random=Random()):
"""
Draw from a multinomial distribution
"""
def pick():
draw = random.random()
bracket = 0.
for i in range(len(probabilities)):
bracket += probabilities[i]
if draw < bracket:
return i
return i
result = [0] * len(probabilities)
for i in range(draws):
result[pick()] += 1
return result
def logistic_random(loc, scale, random=Random()):
"""
Return a random number from a specified logistic distribution.
"""
x = random.random()
return loc + scale * math.log(x / (1 - x))
def shuffled(target, random=Random()):
"""
Return a shuffled version of the argument
"""
a = target[:]
random.shuffle(a)
return a
def make_pbs_script(kwargs, hours=60, mins=0, ppn=16, script_name=None):
"""
Generate a PBS run script to be submitted.
"""
from disclosuregame.Util.sqlite_merge import list_matching
from os.path import split
args_dir, name = split(kwargs.kwargs[0])
kwargs_files = list_matching(args_dir, name)
count = len(kwargs_files)
import sys
args = sys.argv[1:]
args = " ".join(args)
args = args.replace("*", "${PBS_ARRAYID}")
args = args.replace(" %s " % kwargs.file_name, " ${PBS_ARRAYID}_%s " % kwargs.file_name)
if kwargs.file_name == "":
args += " -f ${PBS_ARRAYID}"
interpreter = sys.executable
run_script = ["#!/bin/bash -vx", "#PBS -l walltime=%d:%d:00" % (hours, mins), "#PBS -l nodes=1:ppn=%d" % ppn,
"module load python"]
# Doesn't work on multiple nodes, sadly
# Set up the call
run_call = "%s -m disclosuregame.run %s" % (interpreter, args)
run_script.append(run_call)
# Cleanup after all jobs have run
if script_name is not None:
run_script.append("if [$PBS_ARRAYID -eq %d]" % count)
run_script.append("then")
run_script.append("\trm %s" % script_name)
run_script.append("fi")
return '\n'.join(run_script), count
# ${python} Run.py -R 100 -s ${sig} -r ${resp} --pickled-arguments ../experiment_args/sensitivity_${PBS_ARRAYID}.args -f ${PBS_ARRAYID}_sensitivity -i 1000 -d ${dir} -g ${game}
| multinomial | identifier_name |
__init__.py | __all__ = ["sqlite_dump", "sqlite_merge"]
from random import Random
import math
def random_expectations(depth=0, breadth=3, low=1, high=10, random=Random()):
"""
Generate depth x breadth array of random numbers where each row sums to
high, with a minimum of low.
"""
result = []
if depth == 0:
initial = high + 1
for i in range(breadth - 1):
n = random.randint(low, initial - (low * (breadth - i)))
initial -= n
result.append(n)
result.append(initial - low)
random.shuffle(result)
else:
result = [random_expectations(depth - 1, breadth, low, high, random) for x in range(breadth)]
return result
def rescale(new_low, new_high, low, diff, x):
scaled = (new_high-new_low)*(x - low)
scaled /= diff
return scaled + new_low
def weighted_random_choice(choices, weights, random=Random()):
population = [val for val, cnt in zip(choices, weights) for i in range(int(cnt))]
return random.choice(population)
def multinomial(probabilities, draws=1, random=Random()):
"""
Draw from a multinomial distribution
"""
def pick():
draw = random.random()
bracket = 0.
for i in range(len(probabilities)):
bracket += probabilities[i]
if draw < bracket:
return i
return i
result = [0] * len(probabilities)
for i in range(draws):
result[pick()] += 1
return result
def logistic_random(loc, scale, random=Random()):
"""
Return a random number from a specified logistic distribution.
"""
x = random.random()
return loc + scale * math.log(x / (1 - x))
def shuffled(target, random=Random()):
"""
Return a shuffled version of the argument
"""
a = target[:]
random.shuffle(a)
return a
def make_pbs_script(kwargs, hours=60, mins=0, ppn=16, script_name=None):
| """
Generate a PBS run script to be submitted.
"""
from disclosuregame.Util.sqlite_merge import list_matching
from os.path import split
args_dir, name = split(kwargs.kwargs[0])
kwargs_files = list_matching(args_dir, name)
count = len(kwargs_files)
import sys
args = sys.argv[1:]
args = " ".join(args)
args = args.replace("*", "${PBS_ARRAYID}")
args = args.replace(" %s " % kwargs.file_name, " ${PBS_ARRAYID}_%s " % kwargs.file_name)
if kwargs.file_name == "":
args += " -f ${PBS_ARRAYID}"
interpreter = sys.executable
run_script = ["#!/bin/bash -vx", "#PBS -l walltime=%d:%d:00" % (hours, mins), "#PBS -l nodes=1:ppn=%d" % ppn,
"module load python"]
# Doesn't work on multiple nodes, sadly
# Set up the call
run_call = "%s -m disclosuregame.run %s" % (interpreter, args)
run_script.append(run_call)
# Cleanup after all jobs have run
if script_name is not None:
run_script.append("if [$PBS_ARRAYID -eq %d]" % count)
run_script.append("then")
run_script.append("\trm %s" % script_name)
run_script.append("fi")
return '\n'.join(run_script), count
# ${python} Run.py -R 100 -s ${sig} -r ${resp} --pickled-arguments ../experiment_args/sensitivity_${PBS_ARRAYID}.args -f ${PBS_ARRAYID}_sensitivity -i 1000 -d ${dir} -g ${game} | identifier_body | |
__init__.py | __all__ = ["sqlite_dump", "sqlite_merge"]
from random import Random
import math
def random_expectations(depth=0, breadth=3, low=1, high=10, random=Random()):
"""
Generate depth x breadth array of random numbers where each row sums to
high, with a minimum of low.
"""
result = []
if depth == 0:
initial = high + 1
for i in range(breadth - 1):
n = random.randint(low, initial - (low * (breadth - i)))
initial -= n
result.append(n)
result.append(initial - low)
random.shuffle(result)
else:
|
return result
def rescale(new_low, new_high, low, diff, x):
scaled = (new_high-new_low)*(x - low)
scaled /= diff
return scaled + new_low
def weighted_random_choice(choices, weights, random=Random()):
population = [val for val, cnt in zip(choices, weights) for i in range(int(cnt))]
return random.choice(population)
def multinomial(probabilities, draws=1, random=Random()):
"""
Draw from a multinomial distribution
"""
def pick():
draw = random.random()
bracket = 0.
for i in range(len(probabilities)):
bracket += probabilities[i]
if draw < bracket:
return i
return i
result = [0] * len(probabilities)
for i in range(draws):
result[pick()] += 1
return result
def logistic_random(loc, scale, random=Random()):
"""
Return a random number from a specified logistic distribution.
"""
x = random.random()
return loc + scale * math.log(x / (1 - x))
def shuffled(target, random=Random()):
"""
Return a shuffled version of the argument
"""
a = target[:]
random.shuffle(a)
return a
def make_pbs_script(kwargs, hours=60, mins=0, ppn=16, script_name=None):
"""
Generate a PBS run script to be submitted.
"""
from disclosuregame.Util.sqlite_merge import list_matching
from os.path import split
args_dir, name = split(kwargs.kwargs[0])
kwargs_files = list_matching(args_dir, name)
count = len(kwargs_files)
import sys
args = sys.argv[1:]
args = " ".join(args)
args = args.replace("*", "${PBS_ARRAYID}")
args = args.replace(" %s " % kwargs.file_name, " ${PBS_ARRAYID}_%s " % kwargs.file_name)
if kwargs.file_name == "":
args += " -f ${PBS_ARRAYID}"
interpreter = sys.executable
run_script = ["#!/bin/bash -vx", "#PBS -l walltime=%d:%d:00" % (hours, mins), "#PBS -l nodes=1:ppn=%d" % ppn,
"module load python"]
# Doesn't work on multiple nodes, sadly
# Set up the call
run_call = "%s -m disclosuregame.run %s" % (interpreter, args)
run_script.append(run_call)
# Cleanup after all jobs have run
if script_name is not None:
run_script.append("if [$PBS_ARRAYID -eq %d]" % count)
run_script.append("then")
run_script.append("\trm %s" % script_name)
run_script.append("fi")
return '\n'.join(run_script), count
# ${python} Run.py -R 100 -s ${sig} -r ${resp} --pickled-arguments ../experiment_args/sensitivity_${PBS_ARRAYID}.args -f ${PBS_ARRAYID}_sensitivity -i 1000 -d ${dir} -g ${game}
| result = [random_expectations(depth - 1, breadth, low, high, random) for x in range(breadth)] | conditional_block |
auth_backends.py | from __future__ import unicode_literals
import re
from django.db.models import Q
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from account.models import EmailAddress
from account.utils import get_user_lookup_kwargs
email_re = re.compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
# quoted-string, see also http://tools.ietf.org/html/rfc2822#section-3.2.5
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"'
r')@((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)$)' # domain
r'|\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$', re.IGNORECASE) # literal form, ipv4 address (SMTP 4.1.3)
class UsernameAuthenticationBackend(ModelBackend):
def authenticate(self, **credentials):
User = get_user_model()
lookup_kwargs = get_user_lookup_kwargs({
"{username}__iexact": credentials["username"]
})
try:
user = User.objects.get(**lookup_kwargs)
except (User.DoesNotExist, KeyError):
return None
else:
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None
class EmailAuthenticationBackend(ModelBackend):
def authenticate(self, **credentials):
|
class HybridAuthenticationBackend(ModelBackend):
"""User can login via email OR username"""
def authenticate(self, **credentials):
User = get_user_model()
if email_re.search(credentials["username"]):
qs = EmailAddress.objects.filter(Q(primary=True) | Q(verified=True))
try: email_address = qs.get(email__iexact=credentials["username"])
except (EmailAddress.DoesNotExist, KeyError): return None
else: user = email_address.user
else:
lookup_kwargs = get_user_lookup_kwargs({
"{username}__iexact": credentials["username"]
})
try: user = User.objects.get(**lookup_kwargs)
except (User.DoesNotExist, KeyError): return None
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None
| qs = EmailAddress.objects.filter(Q(primary=True) | Q(verified=True))
try:
email_address = qs.get(email__iexact=credentials["username"])
except (EmailAddress.DoesNotExist, KeyError):
return None
else:
user = email_address.user
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None | identifier_body |
auth_backends.py | from __future__ import unicode_literals
import re
from django.db.models import Q
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from account.models import EmailAddress
from account.utils import get_user_lookup_kwargs
email_re = re.compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
# quoted-string, see also http://tools.ietf.org/html/rfc2822#section-3.2.5
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"'
r')@((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)$)' # domain
r'|\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$', re.IGNORECASE) # literal form, ipv4 address (SMTP 4.1.3)
class UsernameAuthenticationBackend(ModelBackend):
def | (self, **credentials):
User = get_user_model()
lookup_kwargs = get_user_lookup_kwargs({
"{username}__iexact": credentials["username"]
})
try:
user = User.objects.get(**lookup_kwargs)
except (User.DoesNotExist, KeyError):
return None
else:
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None
class EmailAuthenticationBackend(ModelBackend):
def authenticate(self, **credentials):
qs = EmailAddress.objects.filter(Q(primary=True) | Q(verified=True))
try:
email_address = qs.get(email__iexact=credentials["username"])
except (EmailAddress.DoesNotExist, KeyError):
return None
else:
user = email_address.user
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None
class HybridAuthenticationBackend(ModelBackend):
"""User can login via email OR username"""
def authenticate(self, **credentials):
User = get_user_model()
if email_re.search(credentials["username"]):
qs = EmailAddress.objects.filter(Q(primary=True) | Q(verified=True))
try: email_address = qs.get(email__iexact=credentials["username"])
except (EmailAddress.DoesNotExist, KeyError): return None
else: user = email_address.user
else:
lookup_kwargs = get_user_lookup_kwargs({
"{username}__iexact": credentials["username"]
})
try: user = User.objects.get(**lookup_kwargs)
except (User.DoesNotExist, KeyError): return None
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None
| authenticate | identifier_name |
auth_backends.py | from __future__ import unicode_literals
import re
from django.db.models import Q
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from account.models import EmailAddress
from account.utils import get_user_lookup_kwargs
email_re = re.compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
# quoted-string, see also http://tools.ietf.org/html/rfc2822#section-3.2.5
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"'
r')@((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)$)' # domain
r'|\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$', re.IGNORECASE) # literal form, ipv4 address (SMTP 4.1.3)
class UsernameAuthenticationBackend(ModelBackend):
def authenticate(self, **credentials):
User = get_user_model()
lookup_kwargs = get_user_lookup_kwargs({
"{username}__iexact": credentials["username"]
})
try:
user = User.objects.get(**lookup_kwargs)
except (User.DoesNotExist, KeyError):
return None
else:
try:
if user.check_password(credentials["password"]):
|
except KeyError:
return None
class EmailAuthenticationBackend(ModelBackend):
def authenticate(self, **credentials):
qs = EmailAddress.objects.filter(Q(primary=True) | Q(verified=True))
try:
email_address = qs.get(email__iexact=credentials["username"])
except (EmailAddress.DoesNotExist, KeyError):
return None
else:
user = email_address.user
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None
class HybridAuthenticationBackend(ModelBackend):
"""User can login via email OR username"""
def authenticate(self, **credentials):
User = get_user_model()
if email_re.search(credentials["username"]):
qs = EmailAddress.objects.filter(Q(primary=True) | Q(verified=True))
try: email_address = qs.get(email__iexact=credentials["username"])
except (EmailAddress.DoesNotExist, KeyError): return None
else: user = email_address.user
else:
lookup_kwargs = get_user_lookup_kwargs({
"{username}__iexact": credentials["username"]
})
try: user = User.objects.get(**lookup_kwargs)
except (User.DoesNotExist, KeyError): return None
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None
| return user | conditional_block |
auth_backends.py | from __future__ import unicode_literals
import re
from django.db.models import Q
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from account.models import EmailAddress
from account.utils import get_user_lookup_kwargs
email_re = re.compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
# quoted-string, see also http://tools.ietf.org/html/rfc2822#section-3.2.5
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"'
r')@((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)$)' # domain
r'|\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$', re.IGNORECASE) # literal form, ipv4 address (SMTP 4.1.3)
class UsernameAuthenticationBackend(ModelBackend):
def authenticate(self, **credentials):
User = get_user_model()
lookup_kwargs = get_user_lookup_kwargs({
"{username}__iexact": credentials["username"]
})
try:
user = User.objects.get(**lookup_kwargs)
except (User.DoesNotExist, KeyError):
return None
else:
try:
if user.check_password(credentials["password"]):
return user |
def authenticate(self, **credentials):
qs = EmailAddress.objects.filter(Q(primary=True) | Q(verified=True))
try:
email_address = qs.get(email__iexact=credentials["username"])
except (EmailAddress.DoesNotExist, KeyError):
return None
else:
user = email_address.user
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None
class HybridAuthenticationBackend(ModelBackend):
"""User can login via email OR username"""
def authenticate(self, **credentials):
User = get_user_model()
if email_re.search(credentials["username"]):
qs = EmailAddress.objects.filter(Q(primary=True) | Q(verified=True))
try: email_address = qs.get(email__iexact=credentials["username"])
except (EmailAddress.DoesNotExist, KeyError): return None
else: user = email_address.user
else:
lookup_kwargs = get_user_lookup_kwargs({
"{username}__iexact": credentials["username"]
})
try: user = User.objects.get(**lookup_kwargs)
except (User.DoesNotExist, KeyError): return None
try:
if user.check_password(credentials["password"]):
return user
except KeyError:
return None | except KeyError:
return None
class EmailAuthenticationBackend(ModelBackend): | random_line_split |
partially-ordered-set_spec.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { PartiallyOrderedSet } from './partially-ordered-set';
describe('PartiallyOrderedSet', () => {
it('can add an item', () => {
const set = new PartiallyOrderedSet<string>();
set.add('hello');
expect([...set]).toEqual(['hello']);
});
it('can remove an item', () => {
const set = new PartiallyOrderedSet<string>();
set.add('hello');
set.add('world');
set.delete('world'); | it('list items in determistic order of dependency', () => {
const set = new PartiallyOrderedSet<string>();
set.add('red');
set.add('yellow', ['red']);
set.add('green', ['red']);
set.add('blue');
set.add('purple', ['red', 'blue']);
expect([...set]).toEqual(['red', 'blue', 'yellow', 'green', 'purple']);
});
}); |
expect([...set]).toEqual(['hello']);
});
| random_line_split |
regress-306738.js | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is JavaScript Engine testing utilities.
*
* The Initial Developer of the Original Code is
* Mozilla Foundation.
* Portions created by the Initial Developer are Copyright (C) 2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s): Liam Davis-Mead
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
var gTestfile = 'regress-306738.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 306738;
var summary = 'uneval() on objects with getter or setter';
var actual = '';
var expect = '';
printBugNumber(BUGNUMBER);
printStatus (summary);
actual = uneval(
{
get foo()
|
});
expect = '({get foo() {return "foo";}})';
compareSource(expect, actual, summary);
| {
return "foo";
} | identifier_body |
regress-306738.js | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is JavaScript Engine testing utilities.
*
* The Initial Developer of the Original Code is
* Mozilla Foundation.
* Portions created by the Initial Developer are Copyright (C) 2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s): Liam Davis-Mead
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
var gTestfile = 'regress-306738.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 306738;
var summary = 'uneval() on objects with getter or setter';
var actual = '';
var expect = '';
printBugNumber(BUGNUMBER);
printStatus (summary);
actual = uneval(
{
get | ()
{
return "foo";
}
});
expect = '({get foo() {return "foo";}})';
compareSource(expect, actual, summary);
| foo | identifier_name |
regress-306738.js | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is JavaScript Engine testing utilities.
*
* The Initial Developer of the Original Code is
* Mozilla Foundation.
* Portions created by the Initial Developer are Copyright (C) 2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s): Liam Davis-Mead
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
var gTestfile = 'regress-306738.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 306738;
var summary = 'uneval() on objects with getter or setter';
var actual = '';
var expect = '';
printBugNumber(BUGNUMBER);
printStatus (summary);
actual = uneval(
{
get foo()
{
return "foo";
}
});
expect = '({get foo() {return "foo";}})';
| compareSource(expect, actual, summary); | random_line_split | |
TestNativeRsqrt.rs | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, | * See the License for the specific language governing permissions and
* limitations under the License.
*/
// Don't edit this file! It is auto-generated by frameworks/rs/api/generate.sh.
#pragma version(1)
#pragma rs java_package_name(android.renderscript.cts)
float __attribute__((kernel)) testNativeRsqrtFloatFloat(float inV) {
return native_rsqrt(inV);
}
float2 __attribute__((kernel)) testNativeRsqrtFloat2Float2(float2 inV) {
return native_rsqrt(inV);
}
float3 __attribute__((kernel)) testNativeRsqrtFloat3Float3(float3 inV) {
return native_rsqrt(inV);
}
float4 __attribute__((kernel)) testNativeRsqrtFloat4Float4(float4 inV) {
return native_rsqrt(inV);
}
half __attribute__((kernel)) testNativeRsqrtHalfHalf(half inV) {
return native_rsqrt(inV);
}
half2 __attribute__((kernel)) testNativeRsqrtHalf2Half2(half2 inV) {
return native_rsqrt(inV);
}
half3 __attribute__((kernel)) testNativeRsqrtHalf3Half3(half3 inV) {
return native_rsqrt(inV);
}
half4 __attribute__((kernel)) testNativeRsqrtHalf4Half4(half4 inV) {
return native_rsqrt(inV);
} | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | random_line_split |
messages.py | NOT_GIT_REPO_MSG = "#{red}Not a git repository (or any of the parent directories)"
HOOK_ALREADY_INSTALLED_MSG = "The pre-commit hook has already been installed."
EXISTING_HOOK_MSG = (
"#{yellow}There is an existing pre-commit hook.\n"
"#{reset_all}Therapist can preserve this legacy hook and run it before the Therapist "
"pre-commit hook."
)
CONFIRM_PRESERVE_LEGACY_HOOK_MSG = "Would you like to preserve this legacy hook?"
COPYING_HOOK_MSG = "Copying `pre-commit` to `pre-commit.legacy`...\t"
DONE_COPYING_HOOK_MSG = "#{green}#{bright}DONE"
CONFIRM_REPLACE_HOOK_MSG = "Do you want to replace this hook?"
INSTALL_ABORTED_MSG = "Installation aborted."
INSTALLING_HOOK_MSG = "Installing pre-commit hook...\t"
DONE_INSTALLING_HOOK_MSG = "#{green}#{bright}DONE"
NO_HOOK_INSTALLED_MSG = "There is no pre-commit hook currently installed."
UNINSTALL_ABORTED_MSG = "Uninstallation aborted." | "#{yellow}The current pre-commit hook is not the Therapist pre-commit hook.\n"
"#{reset_all}Uninstallation aborted."
)
LEGACY_HOOK_EXISTS_MSG = "#{yellow}There is a legacy pre-commit hook present."
CONFIRM_RESTORE_LEGACY_HOOK_MSG = "Would you like to restore the legacy hook?"
COPYING_LEGACY_HOOK_MSG = "Copying `pre-commit.legacy` to `pre-commit`...\t"
DONE_COPYING_LEGACY_HOOK_MSG = "#{green}#{bright}DONE"
REMOVING_LEGACY_HOOK_MSG = "Removing `pre-commit.legacy`...\t"
DONE_REMOVING_LEGACY_HOOK_MSG = "#{green}#{bright}DONE"
UNINSTALLING_HOOK_MSG = "Uninstalling pre-commit hook...\t"
DONE_UNINSTALLING_HOOK_MSG = "#{green}#{bright}DONE"
MISCONFIGURED_MSG = "#{{red}}Misconfigured: {}"
UNSTAGED_CHANGES_MSG = "#{yellow}You have unstaged changes."
NO_THERAPIST_CONFIG_FILE_MSG = "#{red}No Therapist configuration file was found."
UPGRADE_HOOK_MSG = (
"#{red}The installed pre-commit hook is incompatible with the current version of Therapist.\n"
"#{reset_all}Install the latest pre-commit hook by running `therapist install`."
) |
CONFIRM_UNINSTALL_HOOK_MSG = "Are you sure you want to uninstall the current pre-commit hook?"
CURRENT_HOOK_NOT_THERAPIST_MSG = ( | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.