file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
TODO.py | # mesa - toolkit for building dynamic python apps with zero downtime
# basis: package is inspected for all instances of specified abc and each added to internal mesa list
# Casa is a mesa obj is instantiated as holder of dynamic obj list, one for each abc type in specified package
# m = mesa.Casa(hideExceptions=False) parameter instructs whether to generate exception on existance of methods to run against abc method list
# Mesa.run('method name') = for methods executes named method against each concrete class in the package, does a check to ensure method name exists in abc
# Mesa.generate('method name') = a generator for functions that emits the results from calls to the specified function name in each concrete class. also checks
#
# house recipes
# event driven messsage-passing based app framework - each casa contains a specific route or flow
# wsgi based simple mvc web framework using 2bit as templating language. single casa for all pages
# DOTO: decide best way to test, some scenarios require file io but no clicks required - simple unit tests
# DOTO: generate is a generator yielding a dictionary of results
# DOTO: check flickoutr and how to dynamically create classes with parameters
# DOTO: auth - way to supply callback for required input fields collection from ui
# DOTO: base.Casa appears to pass it's own instance as self to called module. Unsure what side effects are? | # DOTO: utility interface to implement by client app to take care of input for each specific data type
# DOTO: accompanying Method utility that once required args are declared once, elegant handling
# ie no passing from interface to host back to interface like it is in unit test right now
# TODO: meta methods that build on the basic iterating methods to abstract away iteration from caller
# TODO: check for abc type conformance
# TODO: at minute convention is that dynamic module contains one class of same name. Change to support all/others
# TODO: mesa test suit scenarios:
# build a casa, add class, rebuild casa
# build casa, call method not in abc
# build casa with concrete class not implementing an abc method | random_line_split | |
issue-888-enum-var-decl-jump.rs | #![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#[allow(non_snake_case, non_camel_case_types, non_upper_case_globals)]
pub mod root {
#[allow(unused_imports)]
use self::super::root;
pub mod Halide {
#[allow(unused_imports)]
use self::super::super::root;
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct Type {
pub _address: u8,
}
extern "C" {
#[link_name = "\u{1}_ZN6Halide4Type1bE"]
pub static mut Type_b: root::a;
}
#[test]
fn bindgen_test_layout_Type() |
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum a {
__bindgen_cannot_repr_c_on_empty_enum = 0,
}
}
| {
assert_eq!(
::std::mem::size_of::<Type>(),
1usize,
concat!("Size of: ", stringify!(Type))
);
assert_eq!(
::std::mem::align_of::<Type>(),
1usize,
concat!("Alignment of ", stringify!(Type))
);
} | identifier_body |
issue-888-enum-var-decl-jump.rs | #![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#[allow(non_snake_case, non_camel_case_types, non_upper_case_globals)]
pub mod root {
#[allow(unused_imports)]
use self::super::root;
pub mod Halide {
#[allow(unused_imports)]
use self::super::super::root;
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct Type {
pub _address: u8,
}
extern "C" {
#[link_name = "\u{1}_ZN6Halide4Type1bE"]
pub static mut Type_b: root::a;
}
#[test]
fn | () {
assert_eq!(
::std::mem::size_of::<Type>(),
1usize,
concat!("Size of: ", stringify!(Type))
);
assert_eq!(
::std::mem::align_of::<Type>(),
1usize,
concat!("Alignment of ", stringify!(Type))
);
}
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum a {
__bindgen_cannot_repr_c_on_empty_enum = 0,
}
}
| bindgen_test_layout_Type | identifier_name |
issue-888-enum-var-decl-jump.rs | #![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#[allow(non_snake_case, non_camel_case_types, non_upper_case_globals)]
pub mod root {
#[allow(unused_imports)]
use self::super::root;
pub mod Halide {
#[allow(unused_imports)]
use self::super::super::root;
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct Type {
pub _address: u8,
}
extern "C" {
#[link_name = "\u{1}_ZN6Halide4Type1bE"]
pub static mut Type_b: root::a; | fn bindgen_test_layout_Type() {
assert_eq!(
::std::mem::size_of::<Type>(),
1usize,
concat!("Size of: ", stringify!(Type))
);
assert_eq!(
::std::mem::align_of::<Type>(),
1usize,
concat!("Alignment of ", stringify!(Type))
);
}
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum a {
__bindgen_cannot_repr_c_on_empty_enum = 0,
}
} | }
#[test] | random_line_split |
SelectDisplayMode.component.js | import React, { PropTypes } from 'react';
import { Nav, NavDropdown, MenuItem } from 'react-bootstrap';
import uuid from 'uuid';
import Icon from '../../../Icon';
function getIcon(selected) {
switch (selected) {
case 'table': return 'talend-table';
case 'large': return 'talend-expanded';
case 'tile': return 'talend-tiles';
default: return 'talend-table';
}
}
function getLabel(selected) {
switch (selected) {
case 'table': return 'Table';
case 'large': return 'Expanded';
case 'tile': return 'Tile';
default: return 'Table';
}
}
const options = ['table', 'large', 'tile'];
function SelectDisplayMode({ id, mode, onChange }) {
const selected = mode || 'table';
const displayIcon = (<Icon name={getIcon(selected)} />);
const onChangeMode = (value, event) => {
onChange(event, value);
};
const getMenuItem = option => (
<MenuItem
id={id && `${id}-${option}`}
key={option}
eventKey={option} | </MenuItem>
);
return (
<Nav>
<NavDropdown
id={id || uuid.v4()}
title={displayIcon}
onSelect={onChangeMode}
>
{options.map(option => getMenuItem(option))}
</NavDropdown>
</Nav>
);
}
SelectDisplayMode.propTypes = {
id: PropTypes.string,
mode: PropTypes.string,
onChange: PropTypes.func.isRequired,
};
export default SelectDisplayMode; | >
<Icon name={getIcon(option)} />
{getLabel(option)} | random_line_split |
SelectDisplayMode.component.js | import React, { PropTypes } from 'react';
import { Nav, NavDropdown, MenuItem } from 'react-bootstrap';
import uuid from 'uuid';
import Icon from '../../../Icon';
function getIcon(selected) {
switch (selected) {
case 'table': return 'talend-table';
case 'large': return 'talend-expanded';
case 'tile': return 'talend-tiles';
default: return 'talend-table';
}
}
function getLabel(selected) {
switch (selected) {
case 'table': return 'Table';
case 'large': return 'Expanded';
case 'tile': return 'Tile';
default: return 'Table';
}
}
const options = ['table', 'large', 'tile'];
function | ({ id, mode, onChange }) {
const selected = mode || 'table';
const displayIcon = (<Icon name={getIcon(selected)} />);
const onChangeMode = (value, event) => {
onChange(event, value);
};
const getMenuItem = option => (
<MenuItem
id={id && `${id}-${option}`}
key={option}
eventKey={option}
>
<Icon name={getIcon(option)} />
{getLabel(option)}
</MenuItem>
);
return (
<Nav>
<NavDropdown
id={id || uuid.v4()}
title={displayIcon}
onSelect={onChangeMode}
>
{options.map(option => getMenuItem(option))}
</NavDropdown>
</Nav>
);
}
SelectDisplayMode.propTypes = {
id: PropTypes.string,
mode: PropTypes.string,
onChange: PropTypes.func.isRequired,
};
export default SelectDisplayMode;
| SelectDisplayMode | identifier_name |
SelectDisplayMode.component.js | import React, { PropTypes } from 'react';
import { Nav, NavDropdown, MenuItem } from 'react-bootstrap';
import uuid from 'uuid';
import Icon from '../../../Icon';
function getIcon(selected) |
function getLabel(selected) {
switch (selected) {
case 'table': return 'Table';
case 'large': return 'Expanded';
case 'tile': return 'Tile';
default: return 'Table';
}
}
const options = ['table', 'large', 'tile'];
function SelectDisplayMode({ id, mode, onChange }) {
const selected = mode || 'table';
const displayIcon = (<Icon name={getIcon(selected)} />);
const onChangeMode = (value, event) => {
onChange(event, value);
};
const getMenuItem = option => (
<MenuItem
id={id && `${id}-${option}`}
key={option}
eventKey={option}
>
<Icon name={getIcon(option)} />
{getLabel(option)}
</MenuItem>
);
return (
<Nav>
<NavDropdown
id={id || uuid.v4()}
title={displayIcon}
onSelect={onChangeMode}
>
{options.map(option => getMenuItem(option))}
</NavDropdown>
</Nav>
);
}
SelectDisplayMode.propTypes = {
id: PropTypes.string,
mode: PropTypes.string,
onChange: PropTypes.func.isRequired,
};
export default SelectDisplayMode;
| {
switch (selected) {
case 'table': return 'talend-table';
case 'large': return 'talend-expanded';
case 'tile': return 'talend-tiles';
default: return 'talend-table';
}
} | identifier_body |
.jsdoc.js | "recurseDepth": 10,
"source": {
"includePattern": ".+\\.js(doc|x)?$",
"excludePattern": "(^|\\/|\\\\)_"
},
"sourceType": "module",
"tags": {
"allowUnknownTags": true,
"dictionaries": ["jsdoc","closure"]
},
"templates": {
"cleverLinks": false,
"monospaceLinks": false,
"default": {
"outputSourceFiles": false,
"includeDate": false,
"useLongnameInNav": false
}
},
"opts": {
"template": "templates/default", // same as -t templates/default
//"template": "node_modules/tui-jsdoc-template",
//"template": "node_modules/docdash",
//"template": "node_modules/minami",
//"template": "node_modules/postman-jsdoc-theme",
//"template": "node_modules/jaguarjs-jsdoc",
"encoding": "utf8", // same as -e utf8
"destination": "./docs/", // same as -d ./out/
"recurse": true, // same as -r
"debug": true, // same as --debug
"readme": "./README.md"
//"package": "./package.json", // same as --package
//"tutorials": "./tutorials/" // same as -u path/to/tutorials
}
}; | 'use strict';
module.exports = {
"plugins": [], | random_line_split | |
pomodoro.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'aen'
import pygame
import sys
from pygame.locals import USEREVENT, QUIT, MOUSEBUTTONDOWN
def pomodoro():
pygame.mixer.pre_init(frequency=44100, size=-16, channels=1, buffer=512)
pygame.init()
# set up the window
font = pygame.font.Font('freesansbold.ttf', 48) # initialize a font
# define a colors
black = (0, 0, 0)
white = (255, 255, 255)
grey = (150, 150, 150)
red = (255, 0, 0)
gree = (0, 255, 0)
dark_green = (0, 100, 0)
blue = (0, 0, 255)
# define a sounds
global pomo_start_sound, pomo_tick_sound, pomo_end_sound
pomo_start_sound = pygame.mixer.Sound('sounds/pomo_start.wav')
pomo_tick_sound = pygame.mixer.Sound('sounds/pomo_tick.wav')
pomo_end_sound = pygame.mixer.Sound('sounds/pomo_ring.wav')
# define a defaults
pomodoro_time = 25
d_width = 400
d_height = 300
image_dir = 'images/'
start_icon_x = 10
start_icon_y = 10
stop_icon_x = d_width - 10 - 64
stop_icon_y = 10
in_pomodoro = False
# create main window
screen = pygame.display.set_mode((d_width, d_height), 0, 32)
pygame.display.set_caption('Pymodoro!')
pomo_start_icon = pygame.image.load(image_dir + 'pomo_start.png')
pomo_stop_icon = pygame.image.load(image_dir + 'pomo_stop.png')
icon = pygame.image.load(image_dir + 'largeicon.png')
pygame.display.set_icon(icon)
# create background
background = pygame.Surface(screen.get_size())
while True: # main loop
for event in pygame.event.get():
# print event
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == MOUSEBUTTONDOWN:
if event.button == 1:
click_x = event.pos[0]
click_y = event.pos[1]
if click_on_start(click_x, click_y, start_icon_x, start_icon_y) and not in_pomodoro:
timeleft = pomodoro_run(pomodoro_time)
in_pomodoro = True
elif click_on_stop(click_x, click_y, stop_icon_x, stop_icon_y) and in_pomodoro:
pomodoro_stop()
in_pomodoro = False
if event.type == USEREVENT + 1:
timeleft -= 1
text = font.render(str(timeleft), 1, dark_green)
if timeleft == 0:
pomodoro_end()
in_pomodoro = False
if event.type == USEREVENT +2:
pomo_tick_sound.play()
# draw section
background.fill(dark_green)
background.blit(icon, ((d_width-256)/2, (d_height-256)/2))
if in_pomodoro:
background.blit(pomo_stop_icon, (stop_icon_x, stop_icon_y))
text = font.render(str(timeleft), 1, dark_green)
background.blit(text, ((d_width-54)/2, (d_height-50)/2))
elif not in_pomodoro:
background.blit(pomo_start_icon, (start_icon_x, start_icon_y))
background = background.convert()
screen.blit(background, (0, 0))
pygame.display.flip()
pygame.display.update()
def click_on_start(click_x, click_y, start_icon_x, start_icon_y):
if (start_icon_x <= click_x <= start_icon_x + 64) \
and (start_icon_y <= click_y <= start_icon_y + 64):
return True
else:
return False
def | (click_x, click_y, stop_icon_x, stop_icon_y):
if (stop_icon_x <= click_x <= stop_icon_x + 64) \
and (stop_icon_y <= click_y <= stop_icon_y + 64):
return True
else:
return False
def pomodoro_run(pomodoro_time):
pomo_start_sound.play()
timeleft = pomodoro_time
pygame.time.set_timer(USEREVENT + 1, 1000 * 60)
pygame.time.set_timer(USEREVENT + 2, 867)
return timeleft
def pomodoro_stop():
pygame.time.set_timer(USEREVENT + 1, 0)
pygame.time.set_timer(USEREVENT + 2, 0)
def pomodoro_end():
pomo_end_sound.play()
pomodoro_stop()
if __name__ == '__main__':
pomodoro() | click_on_stop | identifier_name |
pomodoro.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'aen' |
def pomodoro():
pygame.mixer.pre_init(frequency=44100, size=-16, channels=1, buffer=512)
pygame.init()
# set up the window
font = pygame.font.Font('freesansbold.ttf', 48) # initialize a font
# define a colors
black = (0, 0, 0)
white = (255, 255, 255)
grey = (150, 150, 150)
red = (255, 0, 0)
gree = (0, 255, 0)
dark_green = (0, 100, 0)
blue = (0, 0, 255)
# define a sounds
global pomo_start_sound, pomo_tick_sound, pomo_end_sound
pomo_start_sound = pygame.mixer.Sound('sounds/pomo_start.wav')
pomo_tick_sound = pygame.mixer.Sound('sounds/pomo_tick.wav')
pomo_end_sound = pygame.mixer.Sound('sounds/pomo_ring.wav')
# define a defaults
pomodoro_time = 25
d_width = 400
d_height = 300
image_dir = 'images/'
start_icon_x = 10
start_icon_y = 10
stop_icon_x = d_width - 10 - 64
stop_icon_y = 10
in_pomodoro = False
# create main window
screen = pygame.display.set_mode((d_width, d_height), 0, 32)
pygame.display.set_caption('Pymodoro!')
pomo_start_icon = pygame.image.load(image_dir + 'pomo_start.png')
pomo_stop_icon = pygame.image.load(image_dir + 'pomo_stop.png')
icon = pygame.image.load(image_dir + 'largeicon.png')
pygame.display.set_icon(icon)
# create background
background = pygame.Surface(screen.get_size())
while True: # main loop
for event in pygame.event.get():
# print event
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == MOUSEBUTTONDOWN:
if event.button == 1:
click_x = event.pos[0]
click_y = event.pos[1]
if click_on_start(click_x, click_y, start_icon_x, start_icon_y) and not in_pomodoro:
timeleft = pomodoro_run(pomodoro_time)
in_pomodoro = True
elif click_on_stop(click_x, click_y, stop_icon_x, stop_icon_y) and in_pomodoro:
pomodoro_stop()
in_pomodoro = False
if event.type == USEREVENT + 1:
timeleft -= 1
text = font.render(str(timeleft), 1, dark_green)
if timeleft == 0:
pomodoro_end()
in_pomodoro = False
if event.type == USEREVENT +2:
pomo_tick_sound.play()
# draw section
background.fill(dark_green)
background.blit(icon, ((d_width-256)/2, (d_height-256)/2))
if in_pomodoro:
background.blit(pomo_stop_icon, (stop_icon_x, stop_icon_y))
text = font.render(str(timeleft), 1, dark_green)
background.blit(text, ((d_width-54)/2, (d_height-50)/2))
elif not in_pomodoro:
background.blit(pomo_start_icon, (start_icon_x, start_icon_y))
background = background.convert()
screen.blit(background, (0, 0))
pygame.display.flip()
pygame.display.update()
def click_on_start(click_x, click_y, start_icon_x, start_icon_y):
if (start_icon_x <= click_x <= start_icon_x + 64) \
and (start_icon_y <= click_y <= start_icon_y + 64):
return True
else:
return False
def click_on_stop(click_x, click_y, stop_icon_x, stop_icon_y):
if (stop_icon_x <= click_x <= stop_icon_x + 64) \
and (stop_icon_y <= click_y <= stop_icon_y + 64):
return True
else:
return False
def pomodoro_run(pomodoro_time):
pomo_start_sound.play()
timeleft = pomodoro_time
pygame.time.set_timer(USEREVENT + 1, 1000 * 60)
pygame.time.set_timer(USEREVENT + 2, 867)
return timeleft
def pomodoro_stop():
pygame.time.set_timer(USEREVENT + 1, 0)
pygame.time.set_timer(USEREVENT + 2, 0)
def pomodoro_end():
pomo_end_sound.play()
pomodoro_stop()
if __name__ == '__main__':
pomodoro() |
import pygame
import sys
from pygame.locals import USEREVENT, QUIT, MOUSEBUTTONDOWN
| random_line_split |
pomodoro.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'aen'
import pygame
import sys
from pygame.locals import USEREVENT, QUIT, MOUSEBUTTONDOWN
def pomodoro():
pygame.mixer.pre_init(frequency=44100, size=-16, channels=1, buffer=512)
pygame.init()
# set up the window
font = pygame.font.Font('freesansbold.ttf', 48) # initialize a font
# define a colors
black = (0, 0, 0)
white = (255, 255, 255)
grey = (150, 150, 150)
red = (255, 0, 0)
gree = (0, 255, 0)
dark_green = (0, 100, 0)
blue = (0, 0, 255)
# define a sounds
global pomo_start_sound, pomo_tick_sound, pomo_end_sound
pomo_start_sound = pygame.mixer.Sound('sounds/pomo_start.wav')
pomo_tick_sound = pygame.mixer.Sound('sounds/pomo_tick.wav')
pomo_end_sound = pygame.mixer.Sound('sounds/pomo_ring.wav')
# define a defaults
pomodoro_time = 25
d_width = 400
d_height = 300
image_dir = 'images/'
start_icon_x = 10
start_icon_y = 10
stop_icon_x = d_width - 10 - 64
stop_icon_y = 10
in_pomodoro = False
# create main window
screen = pygame.display.set_mode((d_width, d_height), 0, 32)
pygame.display.set_caption('Pymodoro!')
pomo_start_icon = pygame.image.load(image_dir + 'pomo_start.png')
pomo_stop_icon = pygame.image.load(image_dir + 'pomo_stop.png')
icon = pygame.image.load(image_dir + 'largeicon.png')
pygame.display.set_icon(icon)
# create background
background = pygame.Surface(screen.get_size())
while True: # main loop
for event in pygame.event.get():
# print event
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == MOUSEBUTTONDOWN:
if event.button == 1:
click_x = event.pos[0]
click_y = event.pos[1]
if click_on_start(click_x, click_y, start_icon_x, start_icon_y) and not in_pomodoro:
timeleft = pomodoro_run(pomodoro_time)
in_pomodoro = True
elif click_on_stop(click_x, click_y, stop_icon_x, stop_icon_y) and in_pomodoro:
pomodoro_stop()
in_pomodoro = False
if event.type == USEREVENT + 1:
timeleft -= 1
text = font.render(str(timeleft), 1, dark_green)
if timeleft == 0:
pomodoro_end()
in_pomodoro = False
if event.type == USEREVENT +2:
pomo_tick_sound.play()
# draw section
background.fill(dark_green)
background.blit(icon, ((d_width-256)/2, (d_height-256)/2))
if in_pomodoro:
background.blit(pomo_stop_icon, (stop_icon_x, stop_icon_y))
text = font.render(str(timeleft), 1, dark_green)
background.blit(text, ((d_width-54)/2, (d_height-50)/2))
elif not in_pomodoro:
background.blit(pomo_start_icon, (start_icon_x, start_icon_y))
background = background.convert()
screen.blit(background, (0, 0))
pygame.display.flip()
pygame.display.update()
def click_on_start(click_x, click_y, start_icon_x, start_icon_y):
if (start_icon_x <= click_x <= start_icon_x + 64) \
and (start_icon_y <= click_y <= start_icon_y + 64):
|
else:
return False
def click_on_stop(click_x, click_y, stop_icon_x, stop_icon_y):
if (stop_icon_x <= click_x <= stop_icon_x + 64) \
and (stop_icon_y <= click_y <= stop_icon_y + 64):
return True
else:
return False
def pomodoro_run(pomodoro_time):
pomo_start_sound.play()
timeleft = pomodoro_time
pygame.time.set_timer(USEREVENT + 1, 1000 * 60)
pygame.time.set_timer(USEREVENT + 2, 867)
return timeleft
def pomodoro_stop():
pygame.time.set_timer(USEREVENT + 1, 0)
pygame.time.set_timer(USEREVENT + 2, 0)
def pomodoro_end():
pomo_end_sound.play()
pomodoro_stop()
if __name__ == '__main__':
pomodoro() | return True | conditional_block |
pomodoro.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'aen'
import pygame
import sys
from pygame.locals import USEREVENT, QUIT, MOUSEBUTTONDOWN
def pomodoro():
pygame.mixer.pre_init(frequency=44100, size=-16, channels=1, buffer=512)
pygame.init()
# set up the window
font = pygame.font.Font('freesansbold.ttf', 48) # initialize a font
# define a colors
black = (0, 0, 0)
white = (255, 255, 255)
grey = (150, 150, 150)
red = (255, 0, 0)
gree = (0, 255, 0)
dark_green = (0, 100, 0)
blue = (0, 0, 255)
# define a sounds
global pomo_start_sound, pomo_tick_sound, pomo_end_sound
pomo_start_sound = pygame.mixer.Sound('sounds/pomo_start.wav')
pomo_tick_sound = pygame.mixer.Sound('sounds/pomo_tick.wav')
pomo_end_sound = pygame.mixer.Sound('sounds/pomo_ring.wav')
# define a defaults
pomodoro_time = 25
d_width = 400
d_height = 300
image_dir = 'images/'
start_icon_x = 10
start_icon_y = 10
stop_icon_x = d_width - 10 - 64
stop_icon_y = 10
in_pomodoro = False
# create main window
screen = pygame.display.set_mode((d_width, d_height), 0, 32)
pygame.display.set_caption('Pymodoro!')
pomo_start_icon = pygame.image.load(image_dir + 'pomo_start.png')
pomo_stop_icon = pygame.image.load(image_dir + 'pomo_stop.png')
icon = pygame.image.load(image_dir + 'largeicon.png')
pygame.display.set_icon(icon)
# create background
background = pygame.Surface(screen.get_size())
while True: # main loop
for event in pygame.event.get():
# print event
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == MOUSEBUTTONDOWN:
if event.button == 1:
click_x = event.pos[0]
click_y = event.pos[1]
if click_on_start(click_x, click_y, start_icon_x, start_icon_y) and not in_pomodoro:
timeleft = pomodoro_run(pomodoro_time)
in_pomodoro = True
elif click_on_stop(click_x, click_y, stop_icon_x, stop_icon_y) and in_pomodoro:
pomodoro_stop()
in_pomodoro = False
if event.type == USEREVENT + 1:
timeleft -= 1
text = font.render(str(timeleft), 1, dark_green)
if timeleft == 0:
pomodoro_end()
in_pomodoro = False
if event.type == USEREVENT +2:
pomo_tick_sound.play()
# draw section
background.fill(dark_green)
background.blit(icon, ((d_width-256)/2, (d_height-256)/2))
if in_pomodoro:
background.blit(pomo_stop_icon, (stop_icon_x, stop_icon_y))
text = font.render(str(timeleft), 1, dark_green)
background.blit(text, ((d_width-54)/2, (d_height-50)/2))
elif not in_pomodoro:
background.blit(pomo_start_icon, (start_icon_x, start_icon_y))
background = background.convert()
screen.blit(background, (0, 0))
pygame.display.flip()
pygame.display.update()
def click_on_start(click_x, click_y, start_icon_x, start_icon_y):
if (start_icon_x <= click_x <= start_icon_x + 64) \
and (start_icon_y <= click_y <= start_icon_y + 64):
return True
else:
return False
def click_on_stop(click_x, click_y, stop_icon_x, stop_icon_y):
if (stop_icon_x <= click_x <= stop_icon_x + 64) \
and (stop_icon_y <= click_y <= stop_icon_y + 64):
return True
else:
return False
def pomodoro_run(pomodoro_time):
pomo_start_sound.play()
timeleft = pomodoro_time
pygame.time.set_timer(USEREVENT + 1, 1000 * 60)
pygame.time.set_timer(USEREVENT + 2, 867)
return timeleft
def pomodoro_stop():
pygame.time.set_timer(USEREVENT + 1, 0)
pygame.time.set_timer(USEREVENT + 2, 0)
def pomodoro_end():
|
if __name__ == '__main__':
pomodoro() | pomo_end_sound.play()
pomodoro_stop() | identifier_body |
resource_files.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[cfg(not(target_os = "android"))]
use std::env;
use std::fs::File;
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
lazy_static! {
static ref CMD_RESOURCE_DIR: Arc<Mutex<Option<String>>> = {
Arc::new(Mutex::new(None))
};
}
pub fn set_resources_path(path: Option<String>) {
let mut dir = CMD_RESOURCE_DIR.lock().unwrap();
*dir = path;
}
#[cfg(target_os = "android")]
pub fn resources_dir_path() -> PathBuf {
PathBuf::from("/sdcard/servo/")
}
#[cfg(not(target_os = "android"))]
pub fn resources_dir_path() -> PathBuf {
let mut dir = CMD_RESOURCE_DIR.lock().unwrap();
if let Some(ref path) = *dir |
// FIXME: Find a way to not rely on the executable being
// under `<servo source>[/$target_triple]/target/debug`
// or `<servo source>[/$target_triple]/target/release`.
let mut path = env::current_exe().expect("can't get exe path");
// Follow symlink
path = path.canonicalize().expect("path does not exist");
path.pop();
path.push("resources");
if !path.is_dir() { // resources dir not in same dir as exe?
// exe is probably in target/{debug,release} so we need to go back to topdir
path.pop();
path.pop();
path.pop();
path.push("resources");
if !path.is_dir() {
// exe is probably in target/$target_triple/{debug,release} so go back one more
path.pop();
path.pop();
path.push("resources");
}
}
*dir = Some(path.to_str().unwrap().to_owned());
path
}
pub fn read_resource_file<P: AsRef<Path>>(relative_path: P) -> io::Result<Vec<u8>> {
let mut path = resources_dir_path();
path.push(relative_path);
let mut file = try!(File::open(&path));
let mut data = Vec::new();
try!(file.read_to_end(&mut data));
Ok(data)
}
| {
return PathBuf::from(path);
} | conditional_block |
resource_files.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[cfg(not(target_os = "android"))]
use std::env;
use std::fs::File;
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
lazy_static! {
static ref CMD_RESOURCE_DIR: Arc<Mutex<Option<String>>> = {
Arc::new(Mutex::new(None))
};
}
pub fn set_resources_path(path: Option<String>) {
let mut dir = CMD_RESOURCE_DIR.lock().unwrap();
*dir = path;
}
#[cfg(target_os = "android")]
pub fn resources_dir_path() -> PathBuf {
PathBuf::from("/sdcard/servo/")
}
#[cfg(not(target_os = "android"))]
pub fn resources_dir_path() -> PathBuf {
let mut dir = CMD_RESOURCE_DIR.lock().unwrap();
if let Some(ref path) = *dir {
return PathBuf::from(path);
}
// FIXME: Find a way to not rely on the executable being
// under `<servo source>[/$target_triple]/target/debug`
// or `<servo source>[/$target_triple]/target/release`.
let mut path = env::current_exe().expect("can't get exe path");
// Follow symlink
path = path.canonicalize().expect("path does not exist");
path.pop();
path.push("resources");
if !path.is_dir() { // resources dir not in same dir as exe?
// exe is probably in target/{debug,release} so we need to go back to topdir
path.pop();
path.pop();
path.pop();
path.push("resources");
if !path.is_dir() {
// exe is probably in target/$target_triple/{debug,release} so go back one more
path.pop();
path.pop();
path.push("resources");
}
}
*dir = Some(path.to_str().unwrap().to_owned());
path
}
pub fn read_resource_file<P: AsRef<Path>>(relative_path: P) -> io::Result<Vec<u8>> | {
let mut path = resources_dir_path();
path.push(relative_path);
let mut file = try!(File::open(&path));
let mut data = Vec::new();
try!(file.read_to_end(&mut data));
Ok(data)
} | identifier_body | |
resource_files.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[cfg(not(target_os = "android"))]
use std::env;
use std::fs::File;
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
lazy_static! {
static ref CMD_RESOURCE_DIR: Arc<Mutex<Option<String>>> = {
Arc::new(Mutex::new(None))
};
}
pub fn set_resources_path(path: Option<String>) {
let mut dir = CMD_RESOURCE_DIR.lock().unwrap();
*dir = path;
}
#[cfg(target_os = "android")]
pub fn resources_dir_path() -> PathBuf {
PathBuf::from("/sdcard/servo/")
}
#[cfg(not(target_os = "android"))]
pub fn resources_dir_path() -> PathBuf {
let mut dir = CMD_RESOURCE_DIR.lock().unwrap();
if let Some(ref path) = *dir {
return PathBuf::from(path);
}
// FIXME: Find a way to not rely on the executable being
// under `<servo source>[/$target_triple]/target/debug`
// or `<servo source>[/$target_triple]/target/release`.
let mut path = env::current_exe().expect("can't get exe path");
// Follow symlink
path = path.canonicalize().expect("path does not exist");
path.pop();
path.push("resources");
if !path.is_dir() { // resources dir not in same dir as exe?
// exe is probably in target/{debug,release} so we need to go back to topdir
path.pop();
path.pop();
path.pop();
path.push("resources");
if !path.is_dir() {
// exe is probably in target/$target_triple/{debug,release} so go back one more
path.pop();
path.pop();
path.push("resources");
}
} |
pub fn read_resource_file<P: AsRef<Path>>(relative_path: P) -> io::Result<Vec<u8>> {
let mut path = resources_dir_path();
path.push(relative_path);
let mut file = try!(File::open(&path));
let mut data = Vec::new();
try!(file.read_to_end(&mut data));
Ok(data)
} | *dir = Some(path.to_str().unwrap().to_owned());
path
} | random_line_split |
resource_files.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[cfg(not(target_os = "android"))]
use std::env;
use std::fs::File;
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
lazy_static! {
static ref CMD_RESOURCE_DIR: Arc<Mutex<Option<String>>> = {
Arc::new(Mutex::new(None))
};
}
pub fn set_resources_path(path: Option<String>) {
let mut dir = CMD_RESOURCE_DIR.lock().unwrap();
*dir = path;
}
#[cfg(target_os = "android")]
pub fn resources_dir_path() -> PathBuf {
PathBuf::from("/sdcard/servo/")
}
#[cfg(not(target_os = "android"))]
pub fn | () -> PathBuf {
let mut dir = CMD_RESOURCE_DIR.lock().unwrap();
if let Some(ref path) = *dir {
return PathBuf::from(path);
}
// FIXME: Find a way to not rely on the executable being
// under `<servo source>[/$target_triple]/target/debug`
// or `<servo source>[/$target_triple]/target/release`.
let mut path = env::current_exe().expect("can't get exe path");
// Follow symlink
path = path.canonicalize().expect("path does not exist");
path.pop();
path.push("resources");
if !path.is_dir() { // resources dir not in same dir as exe?
// exe is probably in target/{debug,release} so we need to go back to topdir
path.pop();
path.pop();
path.pop();
path.push("resources");
if !path.is_dir() {
// exe is probably in target/$target_triple/{debug,release} so go back one more
path.pop();
path.pop();
path.push("resources");
}
}
*dir = Some(path.to_str().unwrap().to_owned());
path
}
pub fn read_resource_file<P: AsRef<Path>>(relative_path: P) -> io::Result<Vec<u8>> {
let mut path = resources_dir_path();
path.push(relative_path);
let mut file = try!(File::open(&path));
let mut data = Vec::new();
try!(file.read_to_end(&mut data));
Ok(data)
}
| resources_dir_path | identifier_name |
TextZones.ts | import { Universe } from '@ephox/boss';
import { Fun, Optional } from '@ephox/katamari';
import * as Parent from '../api/general/Parent';
import { ZoneViewports } from '../api/general/ZoneViewports';
import * as Clustering from '../words/Clustering';
import { WordDecision, WordDecisionItem } from '../words/WordDecision';
import { LanguageZones, ZoneDetails } from './LanguageZones';
import * as Zones from './Zones';
import * as ZoneWalker from './ZoneWalker';
type Zones<E> = Zones.Zones<E>;
const rangeOn = <E, D>(universe: Universe<E, D>, first: E, last: E, envLang: string, transform: (universe: Universe<E, D>, item: E) => WordDecisionItem<E>, viewport: ZoneViewports<E>): Optional<ZoneDetails<E>[]> => {
const ancestor = universe.eq(first, last) ? Optional.some(first) : universe.property().parent(first);
return ancestor.map((parent) => {
const defaultLang = LanguageZones.calculate(universe, parent).getOr(envLang);
return ZoneWalker.walk(universe, first, last, defaultLang, transform, viewport);
});
};
const fromBoundedWith = <E, D>(universe: Universe<E, D>, left: E, right: E, envLang: string, transform: (universe: Universe<E, D>, item: E) => WordDecisionItem<E>, viewport: ZoneViewports<E>): Zones<E> => {
const groups: ZoneDetails<E>[] = Parent.subset(universe, left, right).bind((children) => {
if (children.length === 0) {
return Optional.none<ZoneDetails<E>[]>();
}
const first = children[0];
const last = children[children.length - 1];
return rangeOn(universe, first, last, envLang, transform, viewport);
}).getOr([]);
return Zones.fromWalking(universe, groups);
};
const fromBounded = <E, D>(universe: Universe<E, D>, left: E, right: E, envLang: string, viewport: ZoneViewports<E>): Zones<E> => {
return fromBoundedWith(universe, left, right, envLang, WordDecision.detail, viewport);
};
const fromRange = <E, D>(universe: Universe<E, D>, start: E, finish: E, envLang: string, viewport: ZoneViewports<E>): Zones<E> => {
const edges = Clustering.getEdges(universe, start, finish, Fun.never);
const transform = transformEdges(edges.left, edges.right);
return fromBoundedWith(universe, edges.left.item, edges.right.item, envLang, transform, viewport);
};
const transformEdges = <E>(leftEdge: WordDecisionItem<E>, rightEdge: WordDecisionItem<E>) => {
return <D> (universe: Universe<E, D>, element: E): WordDecisionItem<E> => {
if (universe.eq(element, leftEdge.item)) {
return leftEdge;
} else if (universe.eq(element, rightEdge.item)) {
return rightEdge;
} else {
return WordDecision.detail(universe, element);
}
};
};
const fromInline = <E, D>(universe: Universe<E, D>, element: E, envLang: string, viewport: ZoneViewports<E>): Zones<E> => {
// Create a cluster that branches to the edge of words, and then apply the zones. We will move
// past language boundaries, because we might need to be retokenizing words post a language
// change
const bounded = Clustering.byBoundary(universe, element);
const transform = transformEdges(bounded.left, bounded.right);
return bounded.isEmpty ? empty<E>() : fromBoundedWith(universe, bounded.left.item, bounded.right.item, envLang, transform, viewport);
};
| zones: []
};
};
export {
fromRange,
transformEdges,
fromBounded,
fromBoundedWith,
fromInline,
empty
}; | const empty = <E>(): Zones<E> => {
return { | random_line_split |
TextZones.ts | import { Universe } from '@ephox/boss';
import { Fun, Optional } from '@ephox/katamari';
import * as Parent from '../api/general/Parent';
import { ZoneViewports } from '../api/general/ZoneViewports';
import * as Clustering from '../words/Clustering';
import { WordDecision, WordDecisionItem } from '../words/WordDecision';
import { LanguageZones, ZoneDetails } from './LanguageZones';
import * as Zones from './Zones';
import * as ZoneWalker from './ZoneWalker';
type Zones<E> = Zones.Zones<E>;
const rangeOn = <E, D>(universe: Universe<E, D>, first: E, last: E, envLang: string, transform: (universe: Universe<E, D>, item: E) => WordDecisionItem<E>, viewport: ZoneViewports<E>): Optional<ZoneDetails<E>[]> => {
const ancestor = universe.eq(first, last) ? Optional.some(first) : universe.property().parent(first);
return ancestor.map((parent) => {
const defaultLang = LanguageZones.calculate(universe, parent).getOr(envLang);
return ZoneWalker.walk(universe, first, last, defaultLang, transform, viewport);
});
};
const fromBoundedWith = <E, D>(universe: Universe<E, D>, left: E, right: E, envLang: string, transform: (universe: Universe<E, D>, item: E) => WordDecisionItem<E>, viewport: ZoneViewports<E>): Zones<E> => {
const groups: ZoneDetails<E>[] = Parent.subset(universe, left, right).bind((children) => {
if (children.length === 0) {
return Optional.none<ZoneDetails<E>[]>();
}
const first = children[0];
const last = children[children.length - 1];
return rangeOn(universe, first, last, envLang, transform, viewport);
}).getOr([]);
return Zones.fromWalking(universe, groups);
};
const fromBounded = <E, D>(universe: Universe<E, D>, left: E, right: E, envLang: string, viewport: ZoneViewports<E>): Zones<E> => {
return fromBoundedWith(universe, left, right, envLang, WordDecision.detail, viewport);
};
const fromRange = <E, D>(universe: Universe<E, D>, start: E, finish: E, envLang: string, viewport: ZoneViewports<E>): Zones<E> => {
const edges = Clustering.getEdges(universe, start, finish, Fun.never);
const transform = transformEdges(edges.left, edges.right);
return fromBoundedWith(universe, edges.left.item, edges.right.item, envLang, transform, viewport);
};
const transformEdges = <E>(leftEdge: WordDecisionItem<E>, rightEdge: WordDecisionItem<E>) => {
return <D> (universe: Universe<E, D>, element: E): WordDecisionItem<E> => {
if (universe.eq(element, leftEdge.item)) {
return leftEdge;
} else if (universe.eq(element, rightEdge.item)) | else {
return WordDecision.detail(universe, element);
}
};
};
const fromInline = <E, D>(universe: Universe<E, D>, element: E, envLang: string, viewport: ZoneViewports<E>): Zones<E> => {
// Create a cluster that branches to the edge of words, and then apply the zones. We will move
// past language boundaries, because we might need to be retokenizing words post a language
// change
const bounded = Clustering.byBoundary(universe, element);
const transform = transformEdges(bounded.left, bounded.right);
return bounded.isEmpty ? empty<E>() : fromBoundedWith(universe, bounded.left.item, bounded.right.item, envLang, transform, viewport);
};
const empty = <E>(): Zones<E> => {
return {
zones: []
};
};
export {
fromRange,
transformEdges,
fromBounded,
fromBoundedWith,
fromInline,
empty
};
| {
return rightEdge;
} | conditional_block |
create.js |
import isObject from 'lodash-es/isObject'
import isEmpty from 'lodash-es/isEmpty'
import isFunction from 'lodash-es/isFunction'
import isArray from 'lodash-es/isArray'
import reduce from 'lodash-es/reduce'
import cloneDeep from 'lodash-es/cloneDeep'
import Observable from 'zen-observable'
import * as most from 'most'
import libGet from './../fns/lib-get'
import libOn from './../fns/lib-on'
const lib = db => {
return {
get: libGet(db),
on: libOn(db)
}
}
function buildObservable (source, lib, ops) {
ops.forEach(x => {
let op = x[0]
let args = x[1]
args = Array.prototype.slice.call(args)
if (isFunction(args[0])) {
let fn = args[0]
args[0] = function controllerWrapperFn () {
let fnArgs = Array.prototype.slice.call(arguments)
fnArgs.push(lib)
return fn.apply(null, fnArgs)
}
}
source = source[op].apply(source, args)
})
return source
}
function createController (db, controller, name) |
export default createController
| {
let off = () => {}
if (!controller.args || !controller.fn) {
throw new Error(`Controller [${name}] should look like:
--
{
args: Object,
fn: Function | Observable
}
--
example:
{
args: {
foo: '/bam/bar/foo',
baz: '/boo/baz'
},
fn: args => {
args.foo // The value at /bam/bar/foo
args.baz // => /boo/baz
}
}
`)
}
let observable = new Observable(observer => {
let keys = Object.keys(controller.args)
let args = keys.reduce((acc, x) => {
acc[x] = undefined
return acc
}, {})
let unsubs = keys.map(key => {
return db.on(controller.args[key], val => {
args[key] = val
observer.next(cloneDeep(args))
})
})
off = () => {
unsubs.forEach(x => {
x()
})
observer.complete()
}
})
let ops = []
if (isFunction(controller.fn)) {
ops.push(['map', [controller.fn]])
} else if (isObject(controller.fn) && controller.fn['__instance__'] && !isEmpty(controller.fn['__instance__'].op)) {
ops = controller.fn['__instance__'].op
} else {
throw new Error('Controller fn is neither a function nor an observable placeholder')
}
let internalLib = lib(db)
controller.name = name
controller.result = buildObservable(most.from(observable), internalLib, ops)
controller.off = off
} | identifier_body |
create.js | import isObject from 'lodash-es/isObject'
import isEmpty from 'lodash-es/isEmpty'
import isFunction from 'lodash-es/isFunction'
import isArray from 'lodash-es/isArray'
import reduce from 'lodash-es/reduce'
import cloneDeep from 'lodash-es/cloneDeep'
import Observable from 'zen-observable'
import * as most from 'most'
import libGet from './../fns/lib-get'
import libOn from './../fns/lib-on'
const lib = db => {
return {
get: libGet(db),
on: libOn(db)
}
}
function buildObservable (source, lib, ops) {
ops.forEach(x => {
let op = x[0]
let args = x[1]
args = Array.prototype.slice.call(args)
if (isFunction(args[0])) {
let fn = args[0]
args[0] = function controllerWrapperFn () {
let fnArgs = Array.prototype.slice.call(arguments)
fnArgs.push(lib)
return fn.apply(null, fnArgs)
}
}
source = source[op].apply(source, args)
})
return source
}
function createController (db, controller, name) {
let off = () => {}
if (!controller.args || !controller.fn) {
throw new Error(`Controller [${name}] should look like:
--
{
args: Object,
fn: Function | Observable
}
--
example:
{
args: {
foo: '/bam/bar/foo', | }
}
`)
}
let observable = new Observable(observer => {
let keys = Object.keys(controller.args)
let args = keys.reduce((acc, x) => {
acc[x] = undefined
return acc
}, {})
let unsubs = keys.map(key => {
return db.on(controller.args[key], val => {
args[key] = val
observer.next(cloneDeep(args))
})
})
off = () => {
unsubs.forEach(x => {
x()
})
observer.complete()
}
})
let ops = []
if (isFunction(controller.fn)) {
ops.push(['map', [controller.fn]])
} else if (isObject(controller.fn) && controller.fn['__instance__'] && !isEmpty(controller.fn['__instance__'].op)) {
ops = controller.fn['__instance__'].op
} else {
throw new Error('Controller fn is neither a function nor an observable placeholder')
}
let internalLib = lib(db)
controller.name = name
controller.result = buildObservable(most.from(observable), internalLib, ops)
controller.off = off
}
export default createController | baz: '/boo/baz'
},
fn: args => {
args.foo // The value at /bam/bar/foo
args.baz // => /boo/baz | random_line_split |
create.js |
import isObject from 'lodash-es/isObject'
import isEmpty from 'lodash-es/isEmpty'
import isFunction from 'lodash-es/isFunction'
import isArray from 'lodash-es/isArray'
import reduce from 'lodash-es/reduce'
import cloneDeep from 'lodash-es/cloneDeep'
import Observable from 'zen-observable'
import * as most from 'most'
import libGet from './../fns/lib-get'
import libOn from './../fns/lib-on'
const lib = db => {
return {
get: libGet(db),
on: libOn(db)
}
}
function buildObservable (source, lib, ops) {
ops.forEach(x => {
let op = x[0]
let args = x[1]
args = Array.prototype.slice.call(args)
if (isFunction(args[0])) {
let fn = args[0]
args[0] = function controllerWrapperFn () {
let fnArgs = Array.prototype.slice.call(arguments)
fnArgs.push(lib)
return fn.apply(null, fnArgs)
}
}
source = source[op].apply(source, args)
})
return source
}
function createController (db, controller, name) {
let off = () => {}
if (!controller.args || !controller.fn) |
let observable = new Observable(observer => {
let keys = Object.keys(controller.args)
let args = keys.reduce((acc, x) => {
acc[x] = undefined
return acc
}, {})
let unsubs = keys.map(key => {
return db.on(controller.args[key], val => {
args[key] = val
observer.next(cloneDeep(args))
})
})
off = () => {
unsubs.forEach(x => {
x()
})
observer.complete()
}
})
let ops = []
if (isFunction(controller.fn)) {
ops.push(['map', [controller.fn]])
} else if (isObject(controller.fn) && controller.fn['__instance__'] && !isEmpty(controller.fn['__instance__'].op)) {
ops = controller.fn['__instance__'].op
} else {
throw new Error('Controller fn is neither a function nor an observable placeholder')
}
let internalLib = lib(db)
controller.name = name
controller.result = buildObservable(most.from(observable), internalLib, ops)
controller.off = off
}
export default createController
| {
throw new Error(`Controller [${name}] should look like:
--
{
args: Object,
fn: Function | Observable
}
--
example:
{
args: {
foo: '/bam/bar/foo',
baz: '/boo/baz'
},
fn: args => {
args.foo // The value at /bam/bar/foo
args.baz // => /boo/baz
}
}
`)
} | conditional_block |
create.js |
import isObject from 'lodash-es/isObject'
import isEmpty from 'lodash-es/isEmpty'
import isFunction from 'lodash-es/isFunction'
import isArray from 'lodash-es/isArray'
import reduce from 'lodash-es/reduce'
import cloneDeep from 'lodash-es/cloneDeep'
import Observable from 'zen-observable'
import * as most from 'most'
import libGet from './../fns/lib-get'
import libOn from './../fns/lib-on'
const lib = db => {
return {
get: libGet(db),
on: libOn(db)
}
}
function buildObservable (source, lib, ops) {
ops.forEach(x => {
let op = x[0]
let args = x[1]
args = Array.prototype.slice.call(args)
if (isFunction(args[0])) {
let fn = args[0]
args[0] = function controllerWrapperFn () {
let fnArgs = Array.prototype.slice.call(arguments)
fnArgs.push(lib)
return fn.apply(null, fnArgs)
}
}
source = source[op].apply(source, args)
})
return source
}
function | (db, controller, name) {
let off = () => {}
if (!controller.args || !controller.fn) {
throw new Error(`Controller [${name}] should look like:
--
{
args: Object,
fn: Function | Observable
}
--
example:
{
args: {
foo: '/bam/bar/foo',
baz: '/boo/baz'
},
fn: args => {
args.foo // The value at /bam/bar/foo
args.baz // => /boo/baz
}
}
`)
}
let observable = new Observable(observer => {
let keys = Object.keys(controller.args)
let args = keys.reduce((acc, x) => {
acc[x] = undefined
return acc
}, {})
let unsubs = keys.map(key => {
return db.on(controller.args[key], val => {
args[key] = val
observer.next(cloneDeep(args))
})
})
off = () => {
unsubs.forEach(x => {
x()
})
observer.complete()
}
})
let ops = []
if (isFunction(controller.fn)) {
ops.push(['map', [controller.fn]])
} else if (isObject(controller.fn) && controller.fn['__instance__'] && !isEmpty(controller.fn['__instance__'].op)) {
ops = controller.fn['__instance__'].op
} else {
throw new Error('Controller fn is neither a function nor an observable placeholder')
}
let internalLib = lib(db)
controller.name = name
controller.result = buildObservable(most.from(observable), internalLib, ops)
controller.off = off
}
export default createController
| createController | identifier_name |
make-pathway2list.py | #!/usr/bin/env python
import os
import sys
## A name of directory containing 'path:...' file
## You can download them using 'make-wget_pathway.sh' script
dir_name = sys.argv[1]
f_summary = open('%s.summary'%dir_name,'w')
f_genes = open('%s.genes'%dir_name,'w')
f_compounds = open('%s.compounds'%dir_name,'w')
gene_total = []
for filename in os.listdir( dir_name ):
if( not filename.startswith('path:') ):
continue
#sys.stderr.write('Read %s ... '%filename)
path_id = ''
path_name = ''
gene_list = []
comp_list = []
prev_tag = ''
f = open(os.path.join(dir_name,filename),'r')
for line in f:
tmp_tag = line[:11].strip()
if( tmp_tag == 'ENTRY' ):
path_id = line.strip().split()[1]
if( tmp_tag == 'NAME' ):
path_name = line[11:].split(' - ')[0].strip()
if( tmp_tag == 'COMPOUND' ):
comp_list.append( line[11:].strip().split()[0] )
f_compounds.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
elif( tmp_tag == '' and prev_tag == 'COMPOUND' ):
comp_list.append( line[11:].strip().split()[0] )
f_compounds.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
elif( tmp_tag == 'GENE' ):
gene_list.append( line[11:].strip().split()[0] )
f_genes.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
#print line[11:].strip()
elif( tmp_tag == '' and prev_tag == 'GENE' ):
gene_list.append( line[11:].strip().split()[0] )
f_genes.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
#print line[11:].strip()
if( tmp_tag != '' ):
prev_tag = tmp_tag
f.close()
if( len(gene_list) == 0 ):
sys.stderr.write('//SKIP// %s(%d) %s\n'%(path_id, len(gene_list), path_name))
continue
f_summary.write('path:%s\t%s\t%d\t%d\n'%(path_id, path_name, len(gene_list), len(comp_list)))
f_summary.close() | f_genes.close()
f_compounds.close() | random_line_split | |
make-pathway2list.py | #!/usr/bin/env python
import os
import sys
## A name of directory containing 'path:...' file
## You can download them using 'make-wget_pathway.sh' script
dir_name = sys.argv[1]
f_summary = open('%s.summary'%dir_name,'w')
f_genes = open('%s.genes'%dir_name,'w')
f_compounds = open('%s.compounds'%dir_name,'w')
gene_total = []
for filename in os.listdir( dir_name ):
if( not filename.startswith('path:') ):
continue
#sys.stderr.write('Read %s ... '%filename)
path_id = ''
path_name = ''
gene_list = []
comp_list = []
prev_tag = ''
f = open(os.path.join(dir_name,filename),'r')
for line in f:
tmp_tag = line[:11].strip()
if( tmp_tag == 'ENTRY' ):
path_id = line.strip().split()[1]
if( tmp_tag == 'NAME' ):
path_name = line[11:].split(' - ')[0].strip()
if( tmp_tag == 'COMPOUND' ):
comp_list.append( line[11:].strip().split()[0] )
f_compounds.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
elif( tmp_tag == '' and prev_tag == 'COMPOUND' ):
comp_list.append( line[11:].strip().split()[0] )
f_compounds.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
elif( tmp_tag == 'GENE' ):
gene_list.append( line[11:].strip().split()[0] )
f_genes.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
#print line[11:].strip()
elif( tmp_tag == '' and prev_tag == 'GENE' ):
gene_list.append( line[11:].strip().split()[0] )
f_genes.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
#print line[11:].strip()
if( tmp_tag != '' ):
|
f.close()
if( len(gene_list) == 0 ):
sys.stderr.write('//SKIP// %s(%d) %s\n'%(path_id, len(gene_list), path_name))
continue
f_summary.write('path:%s\t%s\t%d\t%d\n'%(path_id, path_name, len(gene_list), len(comp_list)))
f_summary.close()
f_genes.close()
f_compounds.close()
| prev_tag = tmp_tag | conditional_block |
filetransfer.py | # -*- coding: utf-8 -*-
#
# papyon - a python client library for Msn
#
# Copyright (C) 2010 Collabora Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from papyon.msnp2p.constants import ApplicationID, EufGuid
from papyon.msnp2p.errors import FTParseError
from papyon.msnp2p.session import P2PSession
import gobject
import struct
__all__ = ['FileTransferSession']
class | (P2PSession):
def __init__(self, session_manager, peer, guid, message=None):
P2PSession.__init__(self, session_manager, peer, guid,
EufGuid.FILE_TRANSFER, ApplicationID.FILE_TRANSFER, message)
self._filename = ""
self._size = 0
self._has_preview = False
self._preview = None
# data to be send if sending
self._data = None
@property
def filename(self):
return self._filename
@property
def size(self):
return self._size
@property
def has_preview(self):
return self._has_preview
@property
def preview(self):
return self._preview
def invite(self, filename, size, data):
self._filename = filename
self._size = size
self._data = data
context = self._build_context()
self._invite(context)
def accept(self, buffer=None):
if buffer is not None:
self.set_receive_data_buffer(buffer, self._size)
self._accept()
def reject(self):
self._decline(603)
def cancel(self):
self._close()
def send(self, data):
self._data = data
self._send_data("\x00" * 4)
self._send_data(self._data)
def parse_context(self, context):
try:
info = struct.unpack("<5I", context[0:20])
self._size = info[2]
self._has_preview = not bool(info[4])
self._filename = unicode(context[20:570], "utf-16-le").rstrip("\x00")
except:
raise FTParseError(context)
def _build_context(self):
filename = self._filename.encode('utf-16_le')
context = struct.pack("<5I", 574, 2, self._size, 0, int(self._has_preview))
context += struct.pack("550s", filename)
context += "\xFF" * 4
return context
def _on_session_accepted(self):
if self._data:
self.send(self._data)
def _on_bye_received(self, message):
if not self.completed:
self._emit("canceled")
self._dispose()
| FileTransferSession | identifier_name |
filetransfer.py | # -*- coding: utf-8 -*-
#
# papyon - a python client library for Msn
#
# Copyright (C) 2010 Collabora Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from papyon.msnp2p.constants import ApplicationID, EufGuid
from papyon.msnp2p.errors import FTParseError
from papyon.msnp2p.session import P2PSession
import gobject
import struct
__all__ = ['FileTransferSession']
class FileTransferSession(P2PSession):
def __init__(self, session_manager, peer, guid, message=None):
P2PSession.__init__(self, session_manager, peer, guid,
EufGuid.FILE_TRANSFER, ApplicationID.FILE_TRANSFER, message)
self._filename = ""
self._size = 0
self._has_preview = False
self._preview = None
# data to be send if sending
self._data = None
@property
def filename(self):
return self._filename
@property
def size(self):
return self._size
@property
def has_preview(self):
return self._has_preview
@property
def preview(self):
return self._preview
def invite(self, filename, size, data):
self._filename = filename
self._size = size
self._data = data
context = self._build_context()
self._invite(context)
def accept(self, buffer=None):
if buffer is not None:
|
self._accept()
def reject(self):
self._decline(603)
def cancel(self):
self._close()
def send(self, data):
self._data = data
self._send_data("\x00" * 4)
self._send_data(self._data)
def parse_context(self, context):
try:
info = struct.unpack("<5I", context[0:20])
self._size = info[2]
self._has_preview = not bool(info[4])
self._filename = unicode(context[20:570], "utf-16-le").rstrip("\x00")
except:
raise FTParseError(context)
def _build_context(self):
filename = self._filename.encode('utf-16_le')
context = struct.pack("<5I", 574, 2, self._size, 0, int(self._has_preview))
context += struct.pack("550s", filename)
context += "\xFF" * 4
return context
def _on_session_accepted(self):
if self._data:
self.send(self._data)
def _on_bye_received(self, message):
if not self.completed:
self._emit("canceled")
self._dispose()
| self.set_receive_data_buffer(buffer, self._size) | conditional_block |
filetransfer.py | # -*- coding: utf-8 -*-
#
# papyon - a python client library for Msn
#
# Copyright (C) 2010 Collabora Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from papyon.msnp2p.constants import ApplicationID, EufGuid
from papyon.msnp2p.errors import FTParseError
from papyon.msnp2p.session import P2PSession
import gobject
import struct
__all__ = ['FileTransferSession']
class FileTransferSession(P2PSession):
def __init__(self, session_manager, peer, guid, message=None):
P2PSession.__init__(self, session_manager, peer, guid,
EufGuid.FILE_TRANSFER, ApplicationID.FILE_TRANSFER, message)
self._filename = ""
self._size = 0
self._has_preview = False
self._preview = None
# data to be send if sending
self._data = None
@property
def filename(self):
return self._filename
@property
def size(self):
return self._size
@property
def has_preview(self):
return self._has_preview
@property
def preview(self):
return self._preview
def invite(self, filename, size, data):
self._filename = filename
self._size = size
self._data = data
context = self._build_context()
self._invite(context)
def accept(self, buffer=None):
|
def reject(self):
self._decline(603)
def cancel(self):
self._close()
def send(self, data):
self._data = data
self._send_data("\x00" * 4)
self._send_data(self._data)
def parse_context(self, context):
try:
info = struct.unpack("<5I", context[0:20])
self._size = info[2]
self._has_preview = not bool(info[4])
self._filename = unicode(context[20:570], "utf-16-le").rstrip("\x00")
except:
raise FTParseError(context)
def _build_context(self):
filename = self._filename.encode('utf-16_le')
context = struct.pack("<5I", 574, 2, self._size, 0, int(self._has_preview))
context += struct.pack("550s", filename)
context += "\xFF" * 4
return context
def _on_session_accepted(self):
if self._data:
self.send(self._data)
def _on_bye_received(self, message):
if not self.completed:
self._emit("canceled")
self._dispose()
| if buffer is not None:
self.set_receive_data_buffer(buffer, self._size)
self._accept() | identifier_body |
filetransfer.py | # -*- coding: utf-8 -*-
#
# papyon - a python client library for Msn | # the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from papyon.msnp2p.constants import ApplicationID, EufGuid
from papyon.msnp2p.errors import FTParseError
from papyon.msnp2p.session import P2PSession
import gobject
import struct
__all__ = ['FileTransferSession']
class FileTransferSession(P2PSession):
def __init__(self, session_manager, peer, guid, message=None):
P2PSession.__init__(self, session_manager, peer, guid,
EufGuid.FILE_TRANSFER, ApplicationID.FILE_TRANSFER, message)
self._filename = ""
self._size = 0
self._has_preview = False
self._preview = None
# data to be send if sending
self._data = None
@property
def filename(self):
return self._filename
@property
def size(self):
return self._size
@property
def has_preview(self):
return self._has_preview
@property
def preview(self):
return self._preview
def invite(self, filename, size, data):
self._filename = filename
self._size = size
self._data = data
context = self._build_context()
self._invite(context)
def accept(self, buffer=None):
if buffer is not None:
self.set_receive_data_buffer(buffer, self._size)
self._accept()
def reject(self):
self._decline(603)
def cancel(self):
self._close()
def send(self, data):
self._data = data
self._send_data("\x00" * 4)
self._send_data(self._data)
def parse_context(self, context):
try:
info = struct.unpack("<5I", context[0:20])
self._size = info[2]
self._has_preview = not bool(info[4])
self._filename = unicode(context[20:570], "utf-16-le").rstrip("\x00")
except:
raise FTParseError(context)
def _build_context(self):
filename = self._filename.encode('utf-16_le')
context = struct.pack("<5I", 574, 2, self._size, 0, int(self._has_preview))
context += struct.pack("550s", filename)
context += "\xFF" * 4
return context
def _on_session_accepted(self):
if self._data:
self.send(self._data)
def _on_bye_received(self, message):
if not self.completed:
self._emit("canceled")
self._dispose() | #
# Copyright (C) 2010 Collabora Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by | random_line_split |
action.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::error::*;
use anyhow::Result;
use log::{error, info};
use std::path::Path;
use std::process::{Command, Stdio};
use std::time::Instant;
pub struct CloudSyncTrigger;
impl CloudSyncTrigger {
pub fn fire<P: AsRef<Path>>(
sid: &String,
path: P,
retries: u32,
version: Option<u64>,
workspace: String,
) -> Result<()> {
let mut workspace_args = vec!["--raw-workspace-name".to_owned(), workspace];
if let Some(version) = version {
workspace_args.append(&mut vec![
"--workspace-version".to_owned(),
version.to_string(),
]);
}
for i in 0..retries {
let now = Instant::now();
let child = Command::new("hg")
.current_dir(&path)
.env("HGPLAIN", "hint")
.args(vec!["cloud", "sync"])
.arg("--check-autosync-enabled")
.arg("--use-bgssh")
.args(&workspace_args)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?; // do not retry if failed to start
info!(
"{} Fire `hg cloud sync` attempt {}, spawned process id '{}'",
sid,
i,
child.id()
);
let output = child.wait_with_output()?;
info!(
"{} stdout: \n{}",
sid,
String::from_utf8_lossy(&output.stdout).trim()
);
info!(
"{} stderr: \n{}",
sid,
String::from_utf8_lossy(&output.stderr).trim()
);
let end = now.elapsed();
info!(
"{} Cloud Sync time: {} sec {} ms",
sid,
end.as_secs(),
end.subsec_nanos() as u64 / 1_000_000
);
if !output.status.success() {
error!("{} Process exited with: {}", sid, output.status);
if i == retries - 1 {
return Err(ErrorKind::CommitCloudHgCloudSyncError(format!(
"process exited with: {}, retry later",
output.status
))
.into());
}
} else |
}
Ok(())
}
}
| {
info!("{} Cloud Sync was successful", sid);
return Ok(());
} | conditional_block |
action.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::error::*;
use anyhow::Result;
use log::{error, info};
use std::path::Path;
use std::process::{Command, Stdio};
use std::time::Instant;
pub struct CloudSyncTrigger;
impl CloudSyncTrigger {
pub fn fire<P: AsRef<Path>>(
sid: &String,
path: P,
retries: u32,
version: Option<u64>,
workspace: String,
) -> Result<()> |
}
| {
let mut workspace_args = vec!["--raw-workspace-name".to_owned(), workspace];
if let Some(version) = version {
workspace_args.append(&mut vec![
"--workspace-version".to_owned(),
version.to_string(),
]);
}
for i in 0..retries {
let now = Instant::now();
let child = Command::new("hg")
.current_dir(&path)
.env("HGPLAIN", "hint")
.args(vec!["cloud", "sync"])
.arg("--check-autosync-enabled")
.arg("--use-bgssh")
.args(&workspace_args)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?; // do not retry if failed to start
info!(
"{} Fire `hg cloud sync` attempt {}, spawned process id '{}'",
sid,
i,
child.id()
);
let output = child.wait_with_output()?;
info!(
"{} stdout: \n{}",
sid,
String::from_utf8_lossy(&output.stdout).trim()
);
info!(
"{} stderr: \n{}",
sid,
String::from_utf8_lossy(&output.stderr).trim()
);
let end = now.elapsed();
info!(
"{} Cloud Sync time: {} sec {} ms",
sid,
end.as_secs(),
end.subsec_nanos() as u64 / 1_000_000
);
if !output.status.success() {
error!("{} Process exited with: {}", sid, output.status);
if i == retries - 1 {
return Err(ErrorKind::CommitCloudHgCloudSyncError(format!(
"process exited with: {}, retry later",
output.status
))
.into());
}
} else {
info!("{} Cloud Sync was successful", sid);
return Ok(());
}
}
Ok(())
} | identifier_body |
action.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::error::*;
use anyhow::Result;
use log::{error, info}; | use std::path::Path;
use std::process::{Command, Stdio};
use std::time::Instant;
pub struct CloudSyncTrigger;
impl CloudSyncTrigger {
pub fn fire<P: AsRef<Path>>(
sid: &String,
path: P,
retries: u32,
version: Option<u64>,
workspace: String,
) -> Result<()> {
let mut workspace_args = vec!["--raw-workspace-name".to_owned(), workspace];
if let Some(version) = version {
workspace_args.append(&mut vec![
"--workspace-version".to_owned(),
version.to_string(),
]);
}
for i in 0..retries {
let now = Instant::now();
let child = Command::new("hg")
.current_dir(&path)
.env("HGPLAIN", "hint")
.args(vec!["cloud", "sync"])
.arg("--check-autosync-enabled")
.arg("--use-bgssh")
.args(&workspace_args)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?; // do not retry if failed to start
info!(
"{} Fire `hg cloud sync` attempt {}, spawned process id '{}'",
sid,
i,
child.id()
);
let output = child.wait_with_output()?;
info!(
"{} stdout: \n{}",
sid,
String::from_utf8_lossy(&output.stdout).trim()
);
info!(
"{} stderr: \n{}",
sid,
String::from_utf8_lossy(&output.stderr).trim()
);
let end = now.elapsed();
info!(
"{} Cloud Sync time: {} sec {} ms",
sid,
end.as_secs(),
end.subsec_nanos() as u64 / 1_000_000
);
if !output.status.success() {
error!("{} Process exited with: {}", sid, output.status);
if i == retries - 1 {
return Err(ErrorKind::CommitCloudHgCloudSyncError(format!(
"process exited with: {}, retry later",
output.status
))
.into());
}
} else {
info!("{} Cloud Sync was successful", sid);
return Ok(());
}
}
Ok(())
}
} | random_line_split | |
action.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::error::*;
use anyhow::Result;
use log::{error, info};
use std::path::Path;
use std::process::{Command, Stdio};
use std::time::Instant;
pub struct CloudSyncTrigger;
impl CloudSyncTrigger {
pub fn | <P: AsRef<Path>>(
sid: &String,
path: P,
retries: u32,
version: Option<u64>,
workspace: String,
) -> Result<()> {
let mut workspace_args = vec!["--raw-workspace-name".to_owned(), workspace];
if let Some(version) = version {
workspace_args.append(&mut vec![
"--workspace-version".to_owned(),
version.to_string(),
]);
}
for i in 0..retries {
let now = Instant::now();
let child = Command::new("hg")
.current_dir(&path)
.env("HGPLAIN", "hint")
.args(vec!["cloud", "sync"])
.arg("--check-autosync-enabled")
.arg("--use-bgssh")
.args(&workspace_args)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?; // do not retry if failed to start
info!(
"{} Fire `hg cloud sync` attempt {}, spawned process id '{}'",
sid,
i,
child.id()
);
let output = child.wait_with_output()?;
info!(
"{} stdout: \n{}",
sid,
String::from_utf8_lossy(&output.stdout).trim()
);
info!(
"{} stderr: \n{}",
sid,
String::from_utf8_lossy(&output.stderr).trim()
);
let end = now.elapsed();
info!(
"{} Cloud Sync time: {} sec {} ms",
sid,
end.as_secs(),
end.subsec_nanos() as u64 / 1_000_000
);
if !output.status.success() {
error!("{} Process exited with: {}", sid, output.status);
if i == retries - 1 {
return Err(ErrorKind::CommitCloudHgCloudSyncError(format!(
"process exited with: {}, retry later",
output.status
))
.into());
}
} else {
info!("{} Cloud Sync was successful", sid);
return Ok(());
}
}
Ok(())
}
}
| fire | identifier_name |
make_erpnext_demo.py | if __name__=="__main__":
import sys
sys.path.extend([".", "lib", "app"])
import webnotes, os
import utilities.demo.make_demo
def make_demo_app():
webnotes.mute_emails = 1
webnotes.connect()
utilities.demo.make_demo.make(reset=True, simulate=False)
# setup demo user etc so that the site it up faster, while the data loads
make_demo_user()
make_demo_login_page()
make_demo_on_login_script()
utilities.demo.make_demo.make(reset=False, simulate=True)
def make_demo_user():
roles = ["Accounts Manager", "Analytics", "Expense Approver", "Accounts User",
"Leave Approver", "Blogger", "Customer", "Sales Manager", "Employee", "Support Manager",
"HR Manager", "HR User", "Maintenance Manager", "Maintenance User", "Material Manager",
"Material Master Manager", "Material User", "Manufacturing Manager",
"Manufacturing User", "Projects User", "Purchase Manager", "Purchase Master Manager",
"Purchase User", "Quality Manager", "Report Manager", "Sales Master Manager",
"Sales User", "Supplier", "Support Team"]
def add_roles(bean):
for role in roles:
p.doclist.append({
"doctype": "UserRole",
"parentfield": "user_roles",
"role": role
})
# make demo user
if webnotes.conn.exists("Profile", "demo@owrang.yellowen.com"):
webnotes.delete_doc("Profile", "demo@owrang.yellowen.com")
p = webnotes.new_bean("Profile")
p.doc.email = "demo@owrang.yellowen.com"
p.doc.first_name = "Demo"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "Owrang Demo"
p.doc.send_invite_email = 0
p.doc.new_password = "demo"
p.insert()
add_roles(p)
p.save()
# make system manager user
if webnotes.conn.exists("Profile", "admin@owrang.yellowen.com"):
webnotes.delete_doc("Profile", "admin@owrang.yellowen.com")
p = webnotes.new_bean("Profile")
p.doc.email = "admin@owrang.yellowen.com"
p.doc.first_name = "Admin"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "System User"
p.doc.send_invite_email = 0
p.doc.new_password = "admin010123"
p.insert()
roles.append("System Manager")
add_roles(p)
p.save()
# only read for newsletter
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Newsletter'""")
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Profile' and role='All'""")
webnotes.conn.commit()
def make_demo_login_page():
webnotes.conn.set_value("Website Settings", None, "home_page", "")
webnotes.conn.sql("""delete from `tabWeb Page` where name='demo-login'""")
p = webnotes.new_bean("Web Page")
p.doc.title = "Demo Login"
p.doc.published = 1
p.doc.description = "Owrang Demo Login"
with open(os.path.join(os.path.dirname(__file__), "demo-login.html"), "r") as dfile:
p.doc.main_section = dfile.read()
p.doc.insert_code = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.js"), "r") as dfile:
p.doc.javascript = dfile.read()
p.doc.insert_style = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.css"), "r") as dfile:
p.doc.css = dfile.read()
p.insert()
website_settings = webnotes.bean("Website Settings", "Website Settings")
website_settings.doc.home_page = "demo-login"
website_settings.doc.disable_signup = 1
website_settings.save()
webnotes.conn.commit()
def make_demo_on_login_script():
webnotes.conn.sql("""delete from `tabCustom Script` where dt='Control Panel'""")
s = webnotes.new_bean("Custom Script")
s.doc.dt = "Control Panel"
s.doc.script_type = "Server"
with open(os.path.join(os.path.dirname(__file__), "demo_control_panel.py"), "r") as dfile:
s.doc.script = dfile.read()
s.insert()
cp = webnotes.bean("Control Panel")
cp.doc.custom_startup_code = """wn.ui.toolbar.show_banner('You are using Owrang Demo. To start your own Owrang Trial, <a href="https://owrang.yellowen.com/pricing-and-signup" target="_blank">click here</a>')"""
cp.save()
webnotes.conn.commit()
if __name__=="__main__":
| make_demo_app() | conditional_block | |
make_erpnext_demo.py | if __name__=="__main__":
import sys
sys.path.extend([".", "lib", "app"])
import webnotes, os
import utilities.demo.make_demo
def make_demo_app():
webnotes.mute_emails = 1
webnotes.connect()
utilities.demo.make_demo.make(reset=True, simulate=False)
# setup demo user etc so that the site it up faster, while the data loads
make_demo_user()
make_demo_login_page()
make_demo_on_login_script()
utilities.demo.make_demo.make(reset=False, simulate=True)
def make_demo_user():
roles = ["Accounts Manager", "Analytics", "Expense Approver", "Accounts User",
"Leave Approver", "Blogger", "Customer", "Sales Manager", "Employee", "Support Manager",
"HR Manager", "HR User", "Maintenance Manager", "Maintenance User", "Material Manager",
"Material Master Manager", "Material User", "Manufacturing Manager",
"Manufacturing User", "Projects User", "Purchase Manager", "Purchase Master Manager",
"Purchase User", "Quality Manager", "Report Manager", "Sales Master Manager",
"Sales User", "Supplier", "Support Team"]
def add_roles(bean):
for role in roles:
p.doclist.append({
"doctype": "UserRole",
"parentfield": "user_roles",
"role": role
})
# make demo user | p.doc.email = "demo@owrang.yellowen.com"
p.doc.first_name = "Demo"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "Owrang Demo"
p.doc.send_invite_email = 0
p.doc.new_password = "demo"
p.insert()
add_roles(p)
p.save()
# make system manager user
if webnotes.conn.exists("Profile", "admin@owrang.yellowen.com"):
webnotes.delete_doc("Profile", "admin@owrang.yellowen.com")
p = webnotes.new_bean("Profile")
p.doc.email = "admin@owrang.yellowen.com"
p.doc.first_name = "Admin"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "System User"
p.doc.send_invite_email = 0
p.doc.new_password = "admin010123"
p.insert()
roles.append("System Manager")
add_roles(p)
p.save()
# only read for newsletter
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Newsletter'""")
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Profile' and role='All'""")
webnotes.conn.commit()
def make_demo_login_page():
webnotes.conn.set_value("Website Settings", None, "home_page", "")
webnotes.conn.sql("""delete from `tabWeb Page` where name='demo-login'""")
p = webnotes.new_bean("Web Page")
p.doc.title = "Demo Login"
p.doc.published = 1
p.doc.description = "Owrang Demo Login"
with open(os.path.join(os.path.dirname(__file__), "demo-login.html"), "r") as dfile:
p.doc.main_section = dfile.read()
p.doc.insert_code = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.js"), "r") as dfile:
p.doc.javascript = dfile.read()
p.doc.insert_style = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.css"), "r") as dfile:
p.doc.css = dfile.read()
p.insert()
website_settings = webnotes.bean("Website Settings", "Website Settings")
website_settings.doc.home_page = "demo-login"
website_settings.doc.disable_signup = 1
website_settings.save()
webnotes.conn.commit()
def make_demo_on_login_script():
webnotes.conn.sql("""delete from `tabCustom Script` where dt='Control Panel'""")
s = webnotes.new_bean("Custom Script")
s.doc.dt = "Control Panel"
s.doc.script_type = "Server"
with open(os.path.join(os.path.dirname(__file__), "demo_control_panel.py"), "r") as dfile:
s.doc.script = dfile.read()
s.insert()
cp = webnotes.bean("Control Panel")
cp.doc.custom_startup_code = """wn.ui.toolbar.show_banner('You are using Owrang Demo. To start your own Owrang Trial, <a href="https://owrang.yellowen.com/pricing-and-signup" target="_blank">click here</a>')"""
cp.save()
webnotes.conn.commit()
if __name__=="__main__":
make_demo_app() | if webnotes.conn.exists("Profile", "demo@owrang.yellowen.com"):
webnotes.delete_doc("Profile", "demo@owrang.yellowen.com")
p = webnotes.new_bean("Profile") | random_line_split |
make_erpnext_demo.py | if __name__=="__main__":
import sys
sys.path.extend([".", "lib", "app"])
import webnotes, os
import utilities.demo.make_demo
def make_demo_app():
webnotes.mute_emails = 1
webnotes.connect()
utilities.demo.make_demo.make(reset=True, simulate=False)
# setup demo user etc so that the site it up faster, while the data loads
make_demo_user()
make_demo_login_page()
make_demo_on_login_script()
utilities.demo.make_demo.make(reset=False, simulate=True)
def | ():
roles = ["Accounts Manager", "Analytics", "Expense Approver", "Accounts User",
"Leave Approver", "Blogger", "Customer", "Sales Manager", "Employee", "Support Manager",
"HR Manager", "HR User", "Maintenance Manager", "Maintenance User", "Material Manager",
"Material Master Manager", "Material User", "Manufacturing Manager",
"Manufacturing User", "Projects User", "Purchase Manager", "Purchase Master Manager",
"Purchase User", "Quality Manager", "Report Manager", "Sales Master Manager",
"Sales User", "Supplier", "Support Team"]
def add_roles(bean):
for role in roles:
p.doclist.append({
"doctype": "UserRole",
"parentfield": "user_roles",
"role": role
})
# make demo user
if webnotes.conn.exists("Profile", "demo@owrang.yellowen.com"):
webnotes.delete_doc("Profile", "demo@owrang.yellowen.com")
p = webnotes.new_bean("Profile")
p.doc.email = "demo@owrang.yellowen.com"
p.doc.first_name = "Demo"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "Owrang Demo"
p.doc.send_invite_email = 0
p.doc.new_password = "demo"
p.insert()
add_roles(p)
p.save()
# make system manager user
if webnotes.conn.exists("Profile", "admin@owrang.yellowen.com"):
webnotes.delete_doc("Profile", "admin@owrang.yellowen.com")
p = webnotes.new_bean("Profile")
p.doc.email = "admin@owrang.yellowen.com"
p.doc.first_name = "Admin"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "System User"
p.doc.send_invite_email = 0
p.doc.new_password = "admin010123"
p.insert()
roles.append("System Manager")
add_roles(p)
p.save()
# only read for newsletter
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Newsletter'""")
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Profile' and role='All'""")
webnotes.conn.commit()
def make_demo_login_page():
webnotes.conn.set_value("Website Settings", None, "home_page", "")
webnotes.conn.sql("""delete from `tabWeb Page` where name='demo-login'""")
p = webnotes.new_bean("Web Page")
p.doc.title = "Demo Login"
p.doc.published = 1
p.doc.description = "Owrang Demo Login"
with open(os.path.join(os.path.dirname(__file__), "demo-login.html"), "r") as dfile:
p.doc.main_section = dfile.read()
p.doc.insert_code = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.js"), "r") as dfile:
p.doc.javascript = dfile.read()
p.doc.insert_style = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.css"), "r") as dfile:
p.doc.css = dfile.read()
p.insert()
website_settings = webnotes.bean("Website Settings", "Website Settings")
website_settings.doc.home_page = "demo-login"
website_settings.doc.disable_signup = 1
website_settings.save()
webnotes.conn.commit()
def make_demo_on_login_script():
webnotes.conn.sql("""delete from `tabCustom Script` where dt='Control Panel'""")
s = webnotes.new_bean("Custom Script")
s.doc.dt = "Control Panel"
s.doc.script_type = "Server"
with open(os.path.join(os.path.dirname(__file__), "demo_control_panel.py"), "r") as dfile:
s.doc.script = dfile.read()
s.insert()
cp = webnotes.bean("Control Panel")
cp.doc.custom_startup_code = """wn.ui.toolbar.show_banner('You are using Owrang Demo. To start your own Owrang Trial, <a href="https://owrang.yellowen.com/pricing-and-signup" target="_blank">click here</a>')"""
cp.save()
webnotes.conn.commit()
if __name__=="__main__":
make_demo_app() | make_demo_user | identifier_name |
make_erpnext_demo.py | if __name__=="__main__":
import sys
sys.path.extend([".", "lib", "app"])
import webnotes, os
import utilities.demo.make_demo
def make_demo_app():
webnotes.mute_emails = 1
webnotes.connect()
utilities.demo.make_demo.make(reset=True, simulate=False)
# setup demo user etc so that the site it up faster, while the data loads
make_demo_user()
make_demo_login_page()
make_demo_on_login_script()
utilities.demo.make_demo.make(reset=False, simulate=True)
def make_demo_user():
roles = ["Accounts Manager", "Analytics", "Expense Approver", "Accounts User",
"Leave Approver", "Blogger", "Customer", "Sales Manager", "Employee", "Support Manager",
"HR Manager", "HR User", "Maintenance Manager", "Maintenance User", "Material Manager",
"Material Master Manager", "Material User", "Manufacturing Manager",
"Manufacturing User", "Projects User", "Purchase Manager", "Purchase Master Manager",
"Purchase User", "Quality Manager", "Report Manager", "Sales Master Manager",
"Sales User", "Supplier", "Support Team"]
def add_roles(bean):
|
# make demo user
if webnotes.conn.exists("Profile", "demo@owrang.yellowen.com"):
webnotes.delete_doc("Profile", "demo@owrang.yellowen.com")
p = webnotes.new_bean("Profile")
p.doc.email = "demo@owrang.yellowen.com"
p.doc.first_name = "Demo"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "Owrang Demo"
p.doc.send_invite_email = 0
p.doc.new_password = "demo"
p.insert()
add_roles(p)
p.save()
# make system manager user
if webnotes.conn.exists("Profile", "admin@owrang.yellowen.com"):
webnotes.delete_doc("Profile", "admin@owrang.yellowen.com")
p = webnotes.new_bean("Profile")
p.doc.email = "admin@owrang.yellowen.com"
p.doc.first_name = "Admin"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "System User"
p.doc.send_invite_email = 0
p.doc.new_password = "admin010123"
p.insert()
roles.append("System Manager")
add_roles(p)
p.save()
# only read for newsletter
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Newsletter'""")
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Profile' and role='All'""")
webnotes.conn.commit()
def make_demo_login_page():
webnotes.conn.set_value("Website Settings", None, "home_page", "")
webnotes.conn.sql("""delete from `tabWeb Page` where name='demo-login'""")
p = webnotes.new_bean("Web Page")
p.doc.title = "Demo Login"
p.doc.published = 1
p.doc.description = "Owrang Demo Login"
with open(os.path.join(os.path.dirname(__file__), "demo-login.html"), "r") as dfile:
p.doc.main_section = dfile.read()
p.doc.insert_code = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.js"), "r") as dfile:
p.doc.javascript = dfile.read()
p.doc.insert_style = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.css"), "r") as dfile:
p.doc.css = dfile.read()
p.insert()
website_settings = webnotes.bean("Website Settings", "Website Settings")
website_settings.doc.home_page = "demo-login"
website_settings.doc.disable_signup = 1
website_settings.save()
webnotes.conn.commit()
def make_demo_on_login_script():
webnotes.conn.sql("""delete from `tabCustom Script` where dt='Control Panel'""")
s = webnotes.new_bean("Custom Script")
s.doc.dt = "Control Panel"
s.doc.script_type = "Server"
with open(os.path.join(os.path.dirname(__file__), "demo_control_panel.py"), "r") as dfile:
s.doc.script = dfile.read()
s.insert()
cp = webnotes.bean("Control Panel")
cp.doc.custom_startup_code = """wn.ui.toolbar.show_banner('You are using Owrang Demo. To start your own Owrang Trial, <a href="https://owrang.yellowen.com/pricing-and-signup" target="_blank">click here</a>')"""
cp.save()
webnotes.conn.commit()
if __name__=="__main__":
make_demo_app() | for role in roles:
p.doclist.append({
"doctype": "UserRole",
"parentfield": "user_roles",
"role": role
}) | identifier_body |
findOverlapGene.py | import subprocess
import os.path
import re
import argparse
import sys
from pybedtools import BedTool
DEBUG = False
parser = argparse.ArgumentParser(description="find overlap gene.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
if not DEBUG:
parser.add_argument('-i', '--input', action='store', nargs='?', help='Input locus file (bed format)', required=True) | input_file=args.input
gene_file = args.gene_sorted_bed
output_file=args.output
else:
input_file= "/scratch/cqs/shengq1/vickers/20170720_AGO_human_CLIP/macs2/result/GSM1020022/GSM1020022_peaks.narrowPeak.bed"
gene_file = "/scratch/cqs/shengq1/references/smallrna/v3/hg19_miRBase21_GtRNAdb2_gencode19_ncbi.sorted.bed"
output_file="/scratch/cqs/shengq1/vickers/20170720_AGO_human_CLIP/macs2/result/GSM1020022/GSM1020022_peaks.narrowPeak.overlap.tsv"
closet = [nearest for nearest in BedTool(input_file).closest(gene_file, d=True)]
with open(output_file, 'w') as w:
for nearest in closet:
overlap = nearest.fields[12]
if overlap == u'0':
w.write(str(nearest)) | parser.add_argument('-g', '--gene_sorted_bed', action='store', nargs='?', help='Gene locus file (sorted bed format)', required=True)
parser.add_argument('-o', '--output', action='store', nargs='?', help='Output overlap file', required=True)
args = parser.parse_args() | random_line_split |
findOverlapGene.py | import subprocess
import os.path
import re
import argparse
import sys
from pybedtools import BedTool
DEBUG = False
parser = argparse.ArgumentParser(description="find overlap gene.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
if not DEBUG:
parser.add_argument('-i', '--input', action='store', nargs='?', help='Input locus file (bed format)', required=True)
parser.add_argument('-g', '--gene_sorted_bed', action='store', nargs='?', help='Gene locus file (sorted bed format)', required=True)
parser.add_argument('-o', '--output', action='store', nargs='?', help='Output overlap file', required=True)
args = parser.parse_args()
input_file=args.input
gene_file = args.gene_sorted_bed
output_file=args.output
else:
|
closet = [nearest for nearest in BedTool(input_file).closest(gene_file, d=True)]
with open(output_file, 'w') as w:
for nearest in closet:
overlap = nearest.fields[12]
if overlap == u'0':
w.write(str(nearest))
| input_file= "/scratch/cqs/shengq1/vickers/20170720_AGO_human_CLIP/macs2/result/GSM1020022/GSM1020022_peaks.narrowPeak.bed"
gene_file = "/scratch/cqs/shengq1/references/smallrna/v3/hg19_miRBase21_GtRNAdb2_gencode19_ncbi.sorted.bed"
output_file="/scratch/cqs/shengq1/vickers/20170720_AGO_human_CLIP/macs2/result/GSM1020022/GSM1020022_peaks.narrowPeak.overlap.tsv" | conditional_block |
gogo_octogist.py | '''
Use the Github API to get the most recent Gist for a list of users
Created on 5 Nov 2019
@author: si
'''
from datetime import datetime
import sys
import requests
class OctoGist:
def __init__(self):
self.base_url = 'https://api.github.com'
self.items_per_page = 100
self.gist_path = (f'{self.base_url}/users/'
'{username}'
f'/gists?per_page={self.items_per_page}'
)
# support 1.1 keep alive
self.requests_session = requests.Session()
#self.get_headers = {'Content-type': 'application/json'}
self.get_headers = {'Accept': 'application/vnd.github.v3+json'}
def go(self, usernames):
"""
:param: usernames (str) comma separated list of user names
"""
# sort order doesn't exist on the per user gist endpoint. Only on /search/
# so find latest entry by iterating through all docs.
target_field = 'created_at' # or could be 'updated_at'
target_users = usernames.split(',')
latest_gist = {}
for username in target_users:
for gist_doc in self.gist_documents(username):
if username not in latest_gist \
or gist_doc[target_field] > latest_gist[username][target_field]:
latest_gist[username] = gist_doc
# overall sort for all users
one_gist_per_user = [(username, gist) for username, gist in latest_gist.items()]
one_gist_per_user.sort(key=lambda g: g[1][target_field], reverse=True)
for username, gist in one_gist_per_user:
# description is optional
gist_desc = f"said something about {gist['description']}" \
if gist['description'] else "wrote a gist"
self.log(f"{username} @ {gist[target_field]} {gist_desc}")
for username in target_users:
if username not in latest_gist:
self.log(f"{username} hasn't ever written a public gist")
def gist_documents(self, username, max_docs=None):
"""
Generator yielding (dict) as returned from github API
:param: username (str)
:param: max_docs (int or None) None for no limit
"""
r = self.requests_session.get(self.gist_path.format(username=username))
if r.status_code != 200:
self.log(f"Couldn't get gists for {username}", "ERROR")
return
docs_fetched = 0
for d in r.json():
docs_fetched += 1
yield d
if docs_fetched >= self.items_per_page:
# this will only print once
# TODO pagination
msg = (f"TODO pagination not enabled so gists by user:{username} might have be "
f"skipped as they have written more than {self.items_per_page} gists." | self.log(msg, "WARNING")
if max_docs is not None and docs_fetched > max_docs:
return
def log(self, msg, level="INFO"):
"""
Dependency injection ready logger.
:param: msg (str)
:param: level (str) , DEBUG, INFO, WARNING, ERROR, CRITICAL
"""
if level in ['ERROR', 'CRITICAL']:
outfunc = sys.stderr.write
else:
outfunc = print
# TODO stderr for level = "ERROR"
log_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
level_just = level.ljust(10)
msg = f"{log_time} {level_just}{msg}"
outfunc(msg)
if __name__ == '__main__':
if len(sys.argv) != 2:
msg = "usage: python gogo_octogist.py <comma separated github usernames>\n"
sys.stderr.write(msg)
sys.exit(1)
o = OctoGist()
o.go(sys.argv[1]) | ) | random_line_split |
gogo_octogist.py | '''
Use the Github API to get the most recent Gist for a list of users
Created on 5 Nov 2019
@author: si
'''
from datetime import datetime
import sys
import requests
class OctoGist:
def __init__(self):
self.base_url = 'https://api.github.com'
self.items_per_page = 100
self.gist_path = (f'{self.base_url}/users/'
'{username}'
f'/gists?per_page={self.items_per_page}'
)
# support 1.1 keep alive
self.requests_session = requests.Session()
#self.get_headers = {'Content-type': 'application/json'}
self.get_headers = {'Accept': 'application/vnd.github.v3+json'}
def go(self, usernames):
"""
:param: usernames (str) comma separated list of user names
"""
# sort order doesn't exist on the per user gist endpoint. Only on /search/
# so find latest entry by iterating through all docs.
target_field = 'created_at' # or could be 'updated_at'
target_users = usernames.split(',')
latest_gist = {}
for username in target_users:
for gist_doc in self.gist_documents(username):
|
# overall sort for all users
one_gist_per_user = [(username, gist) for username, gist in latest_gist.items()]
one_gist_per_user.sort(key=lambda g: g[1][target_field], reverse=True)
for username, gist in one_gist_per_user:
# description is optional
gist_desc = f"said something about {gist['description']}" \
if gist['description'] else "wrote a gist"
self.log(f"{username} @ {gist[target_field]} {gist_desc}")
for username in target_users:
if username not in latest_gist:
self.log(f"{username} hasn't ever written a public gist")
def gist_documents(self, username, max_docs=None):
"""
Generator yielding (dict) as returned from github API
:param: username (str)
:param: max_docs (int or None) None for no limit
"""
r = self.requests_session.get(self.gist_path.format(username=username))
if r.status_code != 200:
self.log(f"Couldn't get gists for {username}", "ERROR")
return
docs_fetched = 0
for d in r.json():
docs_fetched += 1
yield d
if docs_fetched >= self.items_per_page:
# this will only print once
# TODO pagination
msg = (f"TODO pagination not enabled so gists by user:{username} might have be "
f"skipped as they have written more than {self.items_per_page} gists."
)
self.log(msg, "WARNING")
if max_docs is not None and docs_fetched > max_docs:
return
def log(self, msg, level="INFO"):
"""
Dependency injection ready logger.
:param: msg (str)
:param: level (str) , DEBUG, INFO, WARNING, ERROR, CRITICAL
"""
if level in ['ERROR', 'CRITICAL']:
outfunc = sys.stderr.write
else:
outfunc = print
# TODO stderr for level = "ERROR"
log_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
level_just = level.ljust(10)
msg = f"{log_time} {level_just}{msg}"
outfunc(msg)
if __name__ == '__main__':
if len(sys.argv) != 2:
msg = "usage: python gogo_octogist.py <comma separated github usernames>\n"
sys.stderr.write(msg)
sys.exit(1)
o = OctoGist()
o.go(sys.argv[1])
| if username not in latest_gist \
or gist_doc[target_field] > latest_gist[username][target_field]:
latest_gist[username] = gist_doc | conditional_block |
gogo_octogist.py | '''
Use the Github API to get the most recent Gist for a list of users
Created on 5 Nov 2019
@author: si
'''
from datetime import datetime
import sys
import requests
class OctoGist:
def __init__(self):
|
def go(self, usernames):
"""
:param: usernames (str) comma separated list of user names
"""
# sort order doesn't exist on the per user gist endpoint. Only on /search/
# so find latest entry by iterating through all docs.
target_field = 'created_at' # or could be 'updated_at'
target_users = usernames.split(',')
latest_gist = {}
for username in target_users:
for gist_doc in self.gist_documents(username):
if username not in latest_gist \
or gist_doc[target_field] > latest_gist[username][target_field]:
latest_gist[username] = gist_doc
# overall sort for all users
one_gist_per_user = [(username, gist) for username, gist in latest_gist.items()]
one_gist_per_user.sort(key=lambda g: g[1][target_field], reverse=True)
for username, gist in one_gist_per_user:
# description is optional
gist_desc = f"said something about {gist['description']}" \
if gist['description'] else "wrote a gist"
self.log(f"{username} @ {gist[target_field]} {gist_desc}")
for username in target_users:
if username not in latest_gist:
self.log(f"{username} hasn't ever written a public gist")
def gist_documents(self, username, max_docs=None):
"""
Generator yielding (dict) as returned from github API
:param: username (str)
:param: max_docs (int or None) None for no limit
"""
r = self.requests_session.get(self.gist_path.format(username=username))
if r.status_code != 200:
self.log(f"Couldn't get gists for {username}", "ERROR")
return
docs_fetched = 0
for d in r.json():
docs_fetched += 1
yield d
if docs_fetched >= self.items_per_page:
# this will only print once
# TODO pagination
msg = (f"TODO pagination not enabled so gists by user:{username} might have be "
f"skipped as they have written more than {self.items_per_page} gists."
)
self.log(msg, "WARNING")
if max_docs is not None and docs_fetched > max_docs:
return
def log(self, msg, level="INFO"):
"""
Dependency injection ready logger.
:param: msg (str)
:param: level (str) , DEBUG, INFO, WARNING, ERROR, CRITICAL
"""
if level in ['ERROR', 'CRITICAL']:
outfunc = sys.stderr.write
else:
outfunc = print
# TODO stderr for level = "ERROR"
log_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
level_just = level.ljust(10)
msg = f"{log_time} {level_just}{msg}"
outfunc(msg)
if __name__ == '__main__':
if len(sys.argv) != 2:
msg = "usage: python gogo_octogist.py <comma separated github usernames>\n"
sys.stderr.write(msg)
sys.exit(1)
o = OctoGist()
o.go(sys.argv[1])
| self.base_url = 'https://api.github.com'
self.items_per_page = 100
self.gist_path = (f'{self.base_url}/users/'
'{username}'
f'/gists?per_page={self.items_per_page}'
)
# support 1.1 keep alive
self.requests_session = requests.Session()
#self.get_headers = {'Content-type': 'application/json'}
self.get_headers = {'Accept': 'application/vnd.github.v3+json'} | identifier_body |
gogo_octogist.py | '''
Use the Github API to get the most recent Gist for a list of users
Created on 5 Nov 2019
@author: si
'''
from datetime import datetime
import sys
import requests
class | :
def __init__(self):
self.base_url = 'https://api.github.com'
self.items_per_page = 100
self.gist_path = (f'{self.base_url}/users/'
'{username}'
f'/gists?per_page={self.items_per_page}'
)
# support 1.1 keep alive
self.requests_session = requests.Session()
#self.get_headers = {'Content-type': 'application/json'}
self.get_headers = {'Accept': 'application/vnd.github.v3+json'}
def go(self, usernames):
"""
:param: usernames (str) comma separated list of user names
"""
# sort order doesn't exist on the per user gist endpoint. Only on /search/
# so find latest entry by iterating through all docs.
target_field = 'created_at' # or could be 'updated_at'
target_users = usernames.split(',')
latest_gist = {}
for username in target_users:
for gist_doc in self.gist_documents(username):
if username not in latest_gist \
or gist_doc[target_field] > latest_gist[username][target_field]:
latest_gist[username] = gist_doc
# overall sort for all users
one_gist_per_user = [(username, gist) for username, gist in latest_gist.items()]
one_gist_per_user.sort(key=lambda g: g[1][target_field], reverse=True)
for username, gist in one_gist_per_user:
# description is optional
gist_desc = f"said something about {gist['description']}" \
if gist['description'] else "wrote a gist"
self.log(f"{username} @ {gist[target_field]} {gist_desc}")
for username in target_users:
if username not in latest_gist:
self.log(f"{username} hasn't ever written a public gist")
def gist_documents(self, username, max_docs=None):
"""
Generator yielding (dict) as returned from github API
:param: username (str)
:param: max_docs (int or None) None for no limit
"""
r = self.requests_session.get(self.gist_path.format(username=username))
if r.status_code != 200:
self.log(f"Couldn't get gists for {username}", "ERROR")
return
docs_fetched = 0
for d in r.json():
docs_fetched += 1
yield d
if docs_fetched >= self.items_per_page:
# this will only print once
# TODO pagination
msg = (f"TODO pagination not enabled so gists by user:{username} might have be "
f"skipped as they have written more than {self.items_per_page} gists."
)
self.log(msg, "WARNING")
if max_docs is not None and docs_fetched > max_docs:
return
def log(self, msg, level="INFO"):
"""
Dependency injection ready logger.
:param: msg (str)
:param: level (str) , DEBUG, INFO, WARNING, ERROR, CRITICAL
"""
if level in ['ERROR', 'CRITICAL']:
outfunc = sys.stderr.write
else:
outfunc = print
# TODO stderr for level = "ERROR"
log_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
level_just = level.ljust(10)
msg = f"{log_time} {level_just}{msg}"
outfunc(msg)
if __name__ == '__main__':
if len(sys.argv) != 2:
msg = "usage: python gogo_octogist.py <comma separated github usernames>\n"
sys.stderr.write(msg)
sys.exit(1)
o = OctoGist()
o.go(sys.argv[1])
| OctoGist | identifier_name |
chromosome_surgery.py | #!/usr/bin/env python3
import sys
from Bio import SeqIO
from argparse import ArgumentParser, RawDescriptionHelpFormatter
usage = "Chromosome surgery: Splice something into and/or out of a chromosome."
# Main Parsers
parser = ArgumentParser(description=usage, formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-f", "--fasta", type=str, required=True, help="Input FASTA.")
parser.add_argument("-o", "--output", type=str, required=False, help="Output FASTA.")
parser.add_argument("-c", "--cid", type=str, required=True, help="Chromosome ID to edit.")
parser.add_argument("-i", "--insert", type=str, required=False, help="FASTA of sequence to insert.")
parser.add_argument("-I", "--incision", type=int, required=False, help="1-based nucleotide after which to insert the insert.")
parser.add_argument("-e", "--excision_start", type=int, required=False, help="1-based nucleotide that is the first to delete (0).")
parser.add_argument("-E", "--excision_end", type=int, required=False, help="1-based nucleotide that is the last to delete (0).")
args = parser.parse_args()
# Harmless defaults
splice_in = ''
incision = 0
excision_start = 0
excision_end = 0
no_insert = (not args.incision) or (not args.insert)
no_excision = (not args.excision_start) or (not args.excision_end)
if no_insert and no_excision:
sys.stderr.write("Incomplete edit arguments!\n")
exit(0)
# Get insert
if not no_insert:
incision = args.incision
with open(args.insert, 'r') as splicein:
record = list(SeqIO.parse(splicein, 'fasta'))[0]
splice_in = record.seq
# No need to shift the incision coordinate.
# The 1-based right-closed index after which to cut is the same location as the 0-based right-open substring end before the cut.
if not no_excision:
excision_start = args.excision_start
excision_end = args.excision_end
# Pythonize start coordinate from 1-based left-closed to 0-based left-closed.
excision_start -= 1
# No need to change the end coordinate. The 1-based right-closed index is the same location as the 0-based right-open substring end.
if (not no_insert) and not (no_excision):
# Do excision after the incision.
# Adjust coordinates.
if args.excision_start > args.incision and args.excision_end > args.incision:
excision_start = args.excision_start + len(splice_in)
excision_end = args.excision_end + len(splice_in)
| pass # The incision will be applied first, no need to adjust it. The excision is unaffected by the incision anyway.
else:
sys.stderr.write('Error: Cannot apply the specified coordinates. Excision end must be after excision start, and the incision cannot be inside the excision.')
# Parse and apply edit
with open(args.fasta, 'r') as genome:
if args.output:
out = open(args.output, 'w')
else:
out =sys.stdout
for record in SeqIO.parse(genome, 'fasta'):
# Only edit the relevant entry
if (record.id == args.cid):
# Splice-in
record.seq = record.seq[:incision] + splice_in + record.seq[incision:]
# Splice-out
record.seq = record.seq[:excision_start] + record.seq[excision_end:]
# Output all the entries
SeqIO.write(record, out, 'fasta')
print("Done") | elif args.excision_start < incision and args.excision_end < incision: | random_line_split |
chromosome_surgery.py | #!/usr/bin/env python3
import sys
from Bio import SeqIO
from argparse import ArgumentParser, RawDescriptionHelpFormatter
usage = "Chromosome surgery: Splice something into and/or out of a chromosome."
# Main Parsers
parser = ArgumentParser(description=usage, formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-f", "--fasta", type=str, required=True, help="Input FASTA.")
parser.add_argument("-o", "--output", type=str, required=False, help="Output FASTA.")
parser.add_argument("-c", "--cid", type=str, required=True, help="Chromosome ID to edit.")
parser.add_argument("-i", "--insert", type=str, required=False, help="FASTA of sequence to insert.")
parser.add_argument("-I", "--incision", type=int, required=False, help="1-based nucleotide after which to insert the insert.")
parser.add_argument("-e", "--excision_start", type=int, required=False, help="1-based nucleotide that is the first to delete (0).")
parser.add_argument("-E", "--excision_end", type=int, required=False, help="1-based nucleotide that is the last to delete (0).")
args = parser.parse_args()
# Harmless defaults
splice_in = ''
incision = 0
excision_start = 0
excision_end = 0
no_insert = (not args.incision) or (not args.insert)
no_excision = (not args.excision_start) or (not args.excision_end)
if no_insert and no_excision:
sys.stderr.write("Incomplete edit arguments!\n")
exit(0)
# Get insert
if not no_insert:
incision = args.incision
with open(args.insert, 'r') as splicein:
record = list(SeqIO.parse(splicein, 'fasta'))[0]
splice_in = record.seq
# No need to shift the incision coordinate.
# The 1-based right-closed index after which to cut is the same location as the 0-based right-open substring end before the cut.
if not no_excision:
|
if (not no_insert) and not (no_excision):
# Do excision after the incision.
# Adjust coordinates.
if args.excision_start > args.incision and args.excision_end > args.incision:
excision_start = args.excision_start + len(splice_in)
excision_end = args.excision_end + len(splice_in)
elif args.excision_start < incision and args.excision_end < incision:
pass # The incision will be applied first, no need to adjust it. The excision is unaffected by the incision anyway.
else:
sys.stderr.write('Error: Cannot apply the specified coordinates. Excision end must be after excision start, and the incision cannot be inside the excision.')
# Parse and apply edit
with open(args.fasta, 'r') as genome:
if args.output:
out = open(args.output, 'w')
else:
out =sys.stdout
for record in SeqIO.parse(genome, 'fasta'):
# Only edit the relevant entry
if (record.id == args.cid):
# Splice-in
record.seq = record.seq[:incision] + splice_in + record.seq[incision:]
# Splice-out
record.seq = record.seq[:excision_start] + record.seq[excision_end:]
# Output all the entries
SeqIO.write(record, out, 'fasta')
print("Done")
| excision_start = args.excision_start
excision_end = args.excision_end
# Pythonize start coordinate from 1-based left-closed to 0-based left-closed.
excision_start -= 1
# No need to change the end coordinate. The 1-based right-closed index is the same location as the 0-based right-open substring end. | conditional_block |
guts.rs | // Copyright 2019 The CryptoCorrosion Contributors
// Copyright 2020 Developers of the Rand project.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The ChaCha random number generator.
use ppv_lite86::{dispatch, dispatch_light128};
pub use ppv_lite86::Machine;
use ppv_lite86::{vec128_storage, ArithOps, BitOps32, LaneWords4, MultiLane, StoreBytes, Vec4};
pub(crate) const BLOCK: usize = 64;
pub(crate) const BLOCK64: u64 = BLOCK as u64;
const LOG2_BUFBLOCKS: u64 = 2;
const BUFBLOCKS: u64 = 1 << LOG2_BUFBLOCKS;
pub(crate) const BUFSZ64: u64 = BLOCK64 * BUFBLOCKS;
pub(crate) const BUFSZ: usize = BUFSZ64 as usize;
#[derive(Clone, PartialEq, Eq)]
pub struct ChaCha {
pub(crate) b: vec128_storage,
pub(crate) c: vec128_storage,
pub(crate) d: vec128_storage,
}
#[derive(Clone)]
pub struct State<V> {
pub(crate) a: V,
pub(crate) b: V,
pub(crate) c: V,
pub(crate) d: V,
}
#[inline(always)]
pub(crate) fn round<V: ArithOps + BitOps32>(mut x: State<V>) -> State<V> {
x.a += x.b;
x.d = (x.d ^ x.a).rotate_each_word_right16();
x.c += x.d;
x.b = (x.b ^ x.c).rotate_each_word_right20();
x.a += x.b;
x.d = (x.d ^ x.a).rotate_each_word_right24();
x.c += x.d;
x.b = (x.b ^ x.c).rotate_each_word_right25();
x
}
#[inline(always)]
pub(crate) fn diagonalize<V: LaneWords4>(mut x: State<V>) -> State<V> {
x.b = x.b.shuffle_lane_words3012();
x.c = x.c.shuffle_lane_words2301();
x.d = x.d.shuffle_lane_words1230();
x
}
#[inline(always)]
pub(crate) fn undiagonalize<V: LaneWords4>(mut x: State<V>) -> State<V> {
x.b = x.b.shuffle_lane_words1230();
x.c = x.c.shuffle_lane_words2301();
x.d = x.d.shuffle_lane_words3012();
x
}
impl ChaCha {
#[inline(always)]
pub fn new(key: &[u8; 32], nonce: &[u8]) -> Self |
#[inline(always)]
fn pos64<M: Machine>(&self, m: M) -> u64 {
let d: M::u32x4 = m.unpack(self.d);
((d.extract(1) as u64) << 32) | d.extract(0) as u64
}
/// Produce 4 blocks of output, advancing the state
#[inline(always)]
pub fn refill4(&mut self, drounds: u32, out: &mut [u8; BUFSZ]) {
refill_wide(self, drounds, out)
}
#[inline(always)]
pub fn set_stream_param(&mut self, param: u32, value: u64) {
set_stream_param(self, param, value)
}
#[inline(always)]
pub fn get_stream_param(&self, param: u32) -> u64 {
get_stream_param(self, param)
}
/// Return whether rhs is equal in all parameters except current 64-bit position.
#[inline]
pub fn stream64_eq(&self, rhs: &Self) -> bool {
let self_d: [u32; 4] = self.d.into();
let rhs_d: [u32; 4] = rhs.d.into();
self.b == rhs.b && self.c == rhs.c && self_d[3] == rhs_d[3] && self_d[2] == rhs_d[2]
}
}
#[allow(clippy::many_single_char_names)]
#[inline(always)]
fn refill_wide_impl<Mach: Machine>(
m: Mach, state: &mut ChaCha, drounds: u32, out: &mut [u8; BUFSZ],
) {
let k = m.vec([0x6170_7865, 0x3320_646e, 0x7962_2d32, 0x6b20_6574]);
let mut pos = state.pos64(m);
let d0: Mach::u32x4 = m.unpack(state.d);
pos = pos.wrapping_add(1);
let d1 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d2 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d3 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
let b = m.unpack(state.b);
let c = m.unpack(state.c);
let mut x = State {
a: Mach::u32x4x4::from_lanes([k, k, k, k]),
b: Mach::u32x4x4::from_lanes([b, b, b, b]),
c: Mach::u32x4x4::from_lanes([c, c, c, c]),
d: m.unpack(Mach::u32x4x4::from_lanes([d0, d1, d2, d3]).into()),
};
for _ in 0..drounds {
x = round(x);
x = undiagonalize(round(diagonalize(x)));
}
let mut pos = state.pos64(m);
let d0: Mach::u32x4 = m.unpack(state.d);
pos = pos.wrapping_add(1);
let d1 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d2 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d3 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d4 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
let (a, b, c, d) = (
x.a.to_lanes(),
x.b.to_lanes(),
x.c.to_lanes(),
x.d.to_lanes(),
);
let sb = m.unpack(state.b);
let sc = m.unpack(state.c);
let sd = [m.unpack(state.d), d1, d2, d3];
state.d = d4.into();
let mut words = out.chunks_exact_mut(16);
for ((((&a, &b), &c), &d), &sd) in a.iter().zip(&b).zip(&c).zip(&d).zip(&sd) {
(a + k).write_le(words.next().unwrap());
(b + sb).write_le(words.next().unwrap());
(c + sc).write_le(words.next().unwrap());
(d + sd).write_le(words.next().unwrap());
}
}
dispatch!(m, Mach, {
fn refill_wide(state: &mut ChaCha, drounds: u32, out: &mut [u8; BUFSZ]) {
refill_wide_impl(m, state, drounds, out);
}
});
// Single-block, rounds-only; shared by try_apply_keystream for tails shorter than BUFSZ
// and XChaCha's setup step.
dispatch!(m, Mach, {
fn refill_narrow_rounds(state: &mut ChaCha, drounds: u32) -> State<vec128_storage> {
let k: Mach::u32x4 = m.vec([0x6170_7865, 0x3320_646e, 0x7962_2d32, 0x6b20_6574]);
let mut x = State {
a: k,
b: m.unpack(state.b),
c: m.unpack(state.c),
d: m.unpack(state.d),
};
for _ in 0..drounds {
x = round(x);
x = undiagonalize(round(diagonalize(x)));
}
State {
a: x.a.into(),
b: x.b.into(),
c: x.c.into(),
d: x.d.into(),
}
}
});
dispatch_light128!(m, Mach, {
fn set_stream_param(state: &mut ChaCha, param: u32, value: u64) {
let d: Mach::u32x4 = m.unpack(state.d);
state.d = d
.insert((value >> 32) as u32, (param << 1) | 1)
.insert(value as u32, param << 1)
.into();
}
});
dispatch_light128!(m, Mach, {
fn get_stream_param(state: &ChaCha, param: u32) -> u64 {
let d: Mach::u32x4 = m.unpack(state.d);
((d.extract((param << 1) | 1) as u64) << 32) | d.extract(param << 1) as u64
}
});
fn read_u32le(xs: &[u8]) -> u32 {
assert_eq!(xs.len(), 4);
u32::from(xs[0]) | (u32::from(xs[1]) << 8) | (u32::from(xs[2]) << 16) | (u32::from(xs[3]) << 24)
}
dispatch_light128!(m, Mach, {
fn init_chacha(key: &[u8; 32], nonce: &[u8]) -> ChaCha {
let ctr_nonce = [
0,
if nonce.len() == 12 {
read_u32le(&nonce[0..4])
} else {
0
},
read_u32le(&nonce[nonce.len() - 8..nonce.len() - 4]),
read_u32le(&nonce[nonce.len() - 4..]),
];
let key0: Mach::u32x4 = m.read_le(&key[..16]);
let key1: Mach::u32x4 = m.read_le(&key[16..]);
ChaCha {
b: key0.into(),
c: key1.into(),
d: ctr_nonce.into(),
}
}
});
dispatch_light128!(m, Mach, {
fn init_chacha_x(key: &[u8; 32], nonce: &[u8; 24], rounds: u32) -> ChaCha {
let key0: Mach::u32x4 = m.read_le(&key[..16]);
let key1: Mach::u32x4 = m.read_le(&key[16..]);
let nonce0: Mach::u32x4 = m.read_le(&nonce[..16]);
let mut state = ChaCha {
b: key0.into(),
c: key1.into(),
d: nonce0.into(),
};
let x = refill_narrow_rounds(&mut state, rounds);
let ctr_nonce1 = [0, 0, read_u32le(&nonce[16..20]), read_u32le(&nonce[20..24])];
state.b = x.a;
state.c = x.d;
state.d = ctr_nonce1.into();
state
}
});
| {
init_chacha(key, nonce)
} | identifier_body |
guts.rs | // Copyright 2019 The CryptoCorrosion Contributors
// Copyright 2020 Developers of the Rand project.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The ChaCha random number generator.
use ppv_lite86::{dispatch, dispatch_light128};
pub use ppv_lite86::Machine;
use ppv_lite86::{vec128_storage, ArithOps, BitOps32, LaneWords4, MultiLane, StoreBytes, Vec4};
pub(crate) const BLOCK: usize = 64;
pub(crate) const BLOCK64: u64 = BLOCK as u64;
const LOG2_BUFBLOCKS: u64 = 2;
const BUFBLOCKS: u64 = 1 << LOG2_BUFBLOCKS;
pub(crate) const BUFSZ64: u64 = BLOCK64 * BUFBLOCKS;
pub(crate) const BUFSZ: usize = BUFSZ64 as usize;
#[derive(Clone, PartialEq, Eq)]
pub struct ChaCha {
pub(crate) b: vec128_storage,
pub(crate) c: vec128_storage,
pub(crate) d: vec128_storage,
}
#[derive(Clone)]
pub struct State<V> {
pub(crate) a: V,
pub(crate) b: V,
pub(crate) c: V,
pub(crate) d: V,
}
#[inline(always)]
pub(crate) fn round<V: ArithOps + BitOps32>(mut x: State<V>) -> State<V> {
x.a += x.b;
x.d = (x.d ^ x.a).rotate_each_word_right16();
x.c += x.d;
x.b = (x.b ^ x.c).rotate_each_word_right20();
x.a += x.b;
x.d = (x.d ^ x.a).rotate_each_word_right24();
x.c += x.d;
x.b = (x.b ^ x.c).rotate_each_word_right25();
x
}
#[inline(always)]
pub(crate) fn diagonalize<V: LaneWords4>(mut x: State<V>) -> State<V> {
x.b = x.b.shuffle_lane_words3012();
x.c = x.c.shuffle_lane_words2301();
x.d = x.d.shuffle_lane_words1230();
x
}
#[inline(always)]
pub(crate) fn undiagonalize<V: LaneWords4>(mut x: State<V>) -> State<V> {
x.b = x.b.shuffle_lane_words1230();
x.c = x.c.shuffle_lane_words2301();
x.d = x.d.shuffle_lane_words3012();
x
}
impl ChaCha {
#[inline(always)]
pub fn new(key: &[u8; 32], nonce: &[u8]) -> Self {
init_chacha(key, nonce)
}
#[inline(always)]
fn pos64<M: Machine>(&self, m: M) -> u64 {
let d: M::u32x4 = m.unpack(self.d);
((d.extract(1) as u64) << 32) | d.extract(0) as u64
}
/// Produce 4 blocks of output, advancing the state
#[inline(always)]
pub fn refill4(&mut self, drounds: u32, out: &mut [u8; BUFSZ]) {
refill_wide(self, drounds, out)
}
#[inline(always)]
pub fn set_stream_param(&mut self, param: u32, value: u64) {
set_stream_param(self, param, value)
}
#[inline(always)]
pub fn get_stream_param(&self, param: u32) -> u64 {
get_stream_param(self, param)
}
/// Return whether rhs is equal in all parameters except current 64-bit position.
#[inline]
pub fn stream64_eq(&self, rhs: &Self) -> bool {
let self_d: [u32; 4] = self.d.into();
let rhs_d: [u32; 4] = rhs.d.into();
self.b == rhs.b && self.c == rhs.c && self_d[3] == rhs_d[3] && self_d[2] == rhs_d[2]
}
}
#[allow(clippy::many_single_char_names)]
#[inline(always)]
fn refill_wide_impl<Mach: Machine>(
m: Mach, state: &mut ChaCha, drounds: u32, out: &mut [u8; BUFSZ],
) {
let k = m.vec([0x6170_7865, 0x3320_646e, 0x7962_2d32, 0x6b20_6574]);
let mut pos = state.pos64(m);
let d0: Mach::u32x4 = m.unpack(state.d);
pos = pos.wrapping_add(1);
let d1 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d2 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d3 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
let b = m.unpack(state.b);
let c = m.unpack(state.c);
let mut x = State {
a: Mach::u32x4x4::from_lanes([k, k, k, k]),
b: Mach::u32x4x4::from_lanes([b, b, b, b]),
c: Mach::u32x4x4::from_lanes([c, c, c, c]),
d: m.unpack(Mach::u32x4x4::from_lanes([d0, d1, d2, d3]).into()),
};
for _ in 0..drounds {
x = round(x);
x = undiagonalize(round(diagonalize(x)));
}
let mut pos = state.pos64(m);
let d0: Mach::u32x4 = m.unpack(state.d); | let d3 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d4 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
let (a, b, c, d) = (
x.a.to_lanes(),
x.b.to_lanes(),
x.c.to_lanes(),
x.d.to_lanes(),
);
let sb = m.unpack(state.b);
let sc = m.unpack(state.c);
let sd = [m.unpack(state.d), d1, d2, d3];
state.d = d4.into();
let mut words = out.chunks_exact_mut(16);
for ((((&a, &b), &c), &d), &sd) in a.iter().zip(&b).zip(&c).zip(&d).zip(&sd) {
(a + k).write_le(words.next().unwrap());
(b + sb).write_le(words.next().unwrap());
(c + sc).write_le(words.next().unwrap());
(d + sd).write_le(words.next().unwrap());
}
}
dispatch!(m, Mach, {
fn refill_wide(state: &mut ChaCha, drounds: u32, out: &mut [u8; BUFSZ]) {
refill_wide_impl(m, state, drounds, out);
}
});
// Single-block, rounds-only; shared by try_apply_keystream for tails shorter than BUFSZ
// and XChaCha's setup step.
dispatch!(m, Mach, {
fn refill_narrow_rounds(state: &mut ChaCha, drounds: u32) -> State<vec128_storage> {
let k: Mach::u32x4 = m.vec([0x6170_7865, 0x3320_646e, 0x7962_2d32, 0x6b20_6574]);
let mut x = State {
a: k,
b: m.unpack(state.b),
c: m.unpack(state.c),
d: m.unpack(state.d),
};
for _ in 0..drounds {
x = round(x);
x = undiagonalize(round(diagonalize(x)));
}
State {
a: x.a.into(),
b: x.b.into(),
c: x.c.into(),
d: x.d.into(),
}
}
});
dispatch_light128!(m, Mach, {
fn set_stream_param(state: &mut ChaCha, param: u32, value: u64) {
let d: Mach::u32x4 = m.unpack(state.d);
state.d = d
.insert((value >> 32) as u32, (param << 1) | 1)
.insert(value as u32, param << 1)
.into();
}
});
dispatch_light128!(m, Mach, {
fn get_stream_param(state: &ChaCha, param: u32) -> u64 {
let d: Mach::u32x4 = m.unpack(state.d);
((d.extract((param << 1) | 1) as u64) << 32) | d.extract(param << 1) as u64
}
});
fn read_u32le(xs: &[u8]) -> u32 {
assert_eq!(xs.len(), 4);
u32::from(xs[0]) | (u32::from(xs[1]) << 8) | (u32::from(xs[2]) << 16) | (u32::from(xs[3]) << 24)
}
dispatch_light128!(m, Mach, {
fn init_chacha(key: &[u8; 32], nonce: &[u8]) -> ChaCha {
let ctr_nonce = [
0,
if nonce.len() == 12 {
read_u32le(&nonce[0..4])
} else {
0
},
read_u32le(&nonce[nonce.len() - 8..nonce.len() - 4]),
read_u32le(&nonce[nonce.len() - 4..]),
];
let key0: Mach::u32x4 = m.read_le(&key[..16]);
let key1: Mach::u32x4 = m.read_le(&key[16..]);
ChaCha {
b: key0.into(),
c: key1.into(),
d: ctr_nonce.into(),
}
}
});
dispatch_light128!(m, Mach, {
fn init_chacha_x(key: &[u8; 32], nonce: &[u8; 24], rounds: u32) -> ChaCha {
let key0: Mach::u32x4 = m.read_le(&key[..16]);
let key1: Mach::u32x4 = m.read_le(&key[16..]);
let nonce0: Mach::u32x4 = m.read_le(&nonce[..16]);
let mut state = ChaCha {
b: key0.into(),
c: key1.into(),
d: nonce0.into(),
};
let x = refill_narrow_rounds(&mut state, rounds);
let ctr_nonce1 = [0, 0, read_u32le(&nonce[16..20]), read_u32le(&nonce[20..24])];
state.b = x.a;
state.c = x.d;
state.d = ctr_nonce1.into();
state
}
}); | pos = pos.wrapping_add(1);
let d1 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d2 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1); | random_line_split |
guts.rs | // Copyright 2019 The CryptoCorrosion Contributors
// Copyright 2020 Developers of the Rand project.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The ChaCha random number generator.
use ppv_lite86::{dispatch, dispatch_light128};
pub use ppv_lite86::Machine;
use ppv_lite86::{vec128_storage, ArithOps, BitOps32, LaneWords4, MultiLane, StoreBytes, Vec4};
pub(crate) const BLOCK: usize = 64;
pub(crate) const BLOCK64: u64 = BLOCK as u64;
const LOG2_BUFBLOCKS: u64 = 2;
const BUFBLOCKS: u64 = 1 << LOG2_BUFBLOCKS;
pub(crate) const BUFSZ64: u64 = BLOCK64 * BUFBLOCKS;
pub(crate) const BUFSZ: usize = BUFSZ64 as usize;
#[derive(Clone, PartialEq, Eq)]
pub struct ChaCha {
pub(crate) b: vec128_storage,
pub(crate) c: vec128_storage,
pub(crate) d: vec128_storage,
}
#[derive(Clone)]
pub struct State<V> {
pub(crate) a: V,
pub(crate) b: V,
pub(crate) c: V,
pub(crate) d: V,
}
#[inline(always)]
pub(crate) fn round<V: ArithOps + BitOps32>(mut x: State<V>) -> State<V> {
x.a += x.b;
x.d = (x.d ^ x.a).rotate_each_word_right16();
x.c += x.d;
x.b = (x.b ^ x.c).rotate_each_word_right20();
x.a += x.b;
x.d = (x.d ^ x.a).rotate_each_word_right24();
x.c += x.d;
x.b = (x.b ^ x.c).rotate_each_word_right25();
x
}
#[inline(always)]
pub(crate) fn diagonalize<V: LaneWords4>(mut x: State<V>) -> State<V> {
x.b = x.b.shuffle_lane_words3012();
x.c = x.c.shuffle_lane_words2301();
x.d = x.d.shuffle_lane_words1230();
x
}
#[inline(always)]
pub(crate) fn undiagonalize<V: LaneWords4>(mut x: State<V>) -> State<V> {
x.b = x.b.shuffle_lane_words1230();
x.c = x.c.shuffle_lane_words2301();
x.d = x.d.shuffle_lane_words3012();
x
}
impl ChaCha {
#[inline(always)]
pub fn new(key: &[u8; 32], nonce: &[u8]) -> Self {
init_chacha(key, nonce)
}
#[inline(always)]
fn pos64<M: Machine>(&self, m: M) -> u64 {
let d: M::u32x4 = m.unpack(self.d);
((d.extract(1) as u64) << 32) | d.extract(0) as u64
}
/// Produce 4 blocks of output, advancing the state
#[inline(always)]
pub fn | (&mut self, drounds: u32, out: &mut [u8; BUFSZ]) {
refill_wide(self, drounds, out)
}
#[inline(always)]
pub fn set_stream_param(&mut self, param: u32, value: u64) {
set_stream_param(self, param, value)
}
#[inline(always)]
pub fn get_stream_param(&self, param: u32) -> u64 {
get_stream_param(self, param)
}
/// Return whether rhs is equal in all parameters except current 64-bit position.
#[inline]
pub fn stream64_eq(&self, rhs: &Self) -> bool {
let self_d: [u32; 4] = self.d.into();
let rhs_d: [u32; 4] = rhs.d.into();
self.b == rhs.b && self.c == rhs.c && self_d[3] == rhs_d[3] && self_d[2] == rhs_d[2]
}
}
#[allow(clippy::many_single_char_names)]
#[inline(always)]
fn refill_wide_impl<Mach: Machine>(
m: Mach, state: &mut ChaCha, drounds: u32, out: &mut [u8; BUFSZ],
) {
let k = m.vec([0x6170_7865, 0x3320_646e, 0x7962_2d32, 0x6b20_6574]);
let mut pos = state.pos64(m);
let d0: Mach::u32x4 = m.unpack(state.d);
pos = pos.wrapping_add(1);
let d1 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d2 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d3 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
let b = m.unpack(state.b);
let c = m.unpack(state.c);
let mut x = State {
a: Mach::u32x4x4::from_lanes([k, k, k, k]),
b: Mach::u32x4x4::from_lanes([b, b, b, b]),
c: Mach::u32x4x4::from_lanes([c, c, c, c]),
d: m.unpack(Mach::u32x4x4::from_lanes([d0, d1, d2, d3]).into()),
};
for _ in 0..drounds {
x = round(x);
x = undiagonalize(round(diagonalize(x)));
}
let mut pos = state.pos64(m);
let d0: Mach::u32x4 = m.unpack(state.d);
pos = pos.wrapping_add(1);
let d1 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d2 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d3 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
pos = pos.wrapping_add(1);
let d4 = d0.insert((pos >> 32) as u32, 1).insert(pos as u32, 0);
let (a, b, c, d) = (
x.a.to_lanes(),
x.b.to_lanes(),
x.c.to_lanes(),
x.d.to_lanes(),
);
let sb = m.unpack(state.b);
let sc = m.unpack(state.c);
let sd = [m.unpack(state.d), d1, d2, d3];
state.d = d4.into();
let mut words = out.chunks_exact_mut(16);
for ((((&a, &b), &c), &d), &sd) in a.iter().zip(&b).zip(&c).zip(&d).zip(&sd) {
(a + k).write_le(words.next().unwrap());
(b + sb).write_le(words.next().unwrap());
(c + sc).write_le(words.next().unwrap());
(d + sd).write_le(words.next().unwrap());
}
}
dispatch!(m, Mach, {
fn refill_wide(state: &mut ChaCha, drounds: u32, out: &mut [u8; BUFSZ]) {
refill_wide_impl(m, state, drounds, out);
}
});
// Single-block, rounds-only; shared by try_apply_keystream for tails shorter than BUFSZ
// and XChaCha's setup step.
dispatch!(m, Mach, {
fn refill_narrow_rounds(state: &mut ChaCha, drounds: u32) -> State<vec128_storage> {
let k: Mach::u32x4 = m.vec([0x6170_7865, 0x3320_646e, 0x7962_2d32, 0x6b20_6574]);
let mut x = State {
a: k,
b: m.unpack(state.b),
c: m.unpack(state.c),
d: m.unpack(state.d),
};
for _ in 0..drounds {
x = round(x);
x = undiagonalize(round(diagonalize(x)));
}
State {
a: x.a.into(),
b: x.b.into(),
c: x.c.into(),
d: x.d.into(),
}
}
});
dispatch_light128!(m, Mach, {
fn set_stream_param(state: &mut ChaCha, param: u32, value: u64) {
let d: Mach::u32x4 = m.unpack(state.d);
state.d = d
.insert((value >> 32) as u32, (param << 1) | 1)
.insert(value as u32, param << 1)
.into();
}
});
dispatch_light128!(m, Mach, {
fn get_stream_param(state: &ChaCha, param: u32) -> u64 {
let d: Mach::u32x4 = m.unpack(state.d);
((d.extract((param << 1) | 1) as u64) << 32) | d.extract(param << 1) as u64
}
});
fn read_u32le(xs: &[u8]) -> u32 {
assert_eq!(xs.len(), 4);
u32::from(xs[0]) | (u32::from(xs[1]) << 8) | (u32::from(xs[2]) << 16) | (u32::from(xs[3]) << 24)
}
dispatch_light128!(m, Mach, {
fn init_chacha(key: &[u8; 32], nonce: &[u8]) -> ChaCha {
let ctr_nonce = [
0,
if nonce.len() == 12 {
read_u32le(&nonce[0..4])
} else {
0
},
read_u32le(&nonce[nonce.len() - 8..nonce.len() - 4]),
read_u32le(&nonce[nonce.len() - 4..]),
];
let key0: Mach::u32x4 = m.read_le(&key[..16]);
let key1: Mach::u32x4 = m.read_le(&key[16..]);
ChaCha {
b: key0.into(),
c: key1.into(),
d: ctr_nonce.into(),
}
}
});
dispatch_light128!(m, Mach, {
fn init_chacha_x(key: &[u8; 32], nonce: &[u8; 24], rounds: u32) -> ChaCha {
let key0: Mach::u32x4 = m.read_le(&key[..16]);
let key1: Mach::u32x4 = m.read_le(&key[16..]);
let nonce0: Mach::u32x4 = m.read_le(&nonce[..16]);
let mut state = ChaCha {
b: key0.into(),
c: key1.into(),
d: nonce0.into(),
};
let x = refill_narrow_rounds(&mut state, rounds);
let ctr_nonce1 = [0, 0, read_u32le(&nonce[16..20]), read_u32le(&nonce[20..24])];
state.b = x.a;
state.c = x.d;
state.d = ctr_nonce1.into();
state
}
});
| refill4 | identifier_name |
active_app_02_objc_bridge.py | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2015 deanishe@deanishe.net
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2015-11-23
#
"""Get app info with AppKit via objc bridge."""
from __future__ import print_function, unicode_literals, absolute_import
import time
import unicodedata
from AppKit import NSWorkspace
def decode(s):
"""Decode bytestring to Unicode."""
if isinstance(s, str):
s = unicode(s, 'utf-8')
elif not isinstance(s, unicode):
raise TypeError("str or unicode required, not {}".format(type(s)))
return unicodedata.normalize('NFC', s)
def | ():
"""Return (name, bundle_id and path) of frontmost application.
Raise a `RuntimeError` if frontmost application cannot be
determined.
"""
for app in NSWorkspace.sharedWorkspace().runningApplications():
if app.isActive():
app_name = app.localizedName()
bundle_id = app.bundleIdentifier()
app_path = app.bundleURL().fileSystemRepresentation()
return (app_name, bundle_id, app_path)
else:
raise RuntimeError("Couldn't get frontmost application.")
if __name__ == '__main__':
s = time.time()
get_frontmost_app()
d = time.time() - s
| get_frontmost_app | identifier_name |
active_app_02_objc_bridge.py | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2015 deanishe@deanishe.net
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2015-11-23
#
"""Get app info with AppKit via objc bridge."""
from __future__ import print_function, unicode_literals, absolute_import
import time
import unicodedata
from AppKit import NSWorkspace
def decode(s):
|
def get_frontmost_app():
"""Return (name, bundle_id and path) of frontmost application.
Raise a `RuntimeError` if frontmost application cannot be
determined.
"""
for app in NSWorkspace.sharedWorkspace().runningApplications():
if app.isActive():
app_name = app.localizedName()
bundle_id = app.bundleIdentifier()
app_path = app.bundleURL().fileSystemRepresentation()
return (app_name, bundle_id, app_path)
else:
raise RuntimeError("Couldn't get frontmost application.")
if __name__ == '__main__':
s = time.time()
get_frontmost_app()
d = time.time() - s
| """Decode bytestring to Unicode."""
if isinstance(s, str):
s = unicode(s, 'utf-8')
elif not isinstance(s, unicode):
raise TypeError("str or unicode required, not {}".format(type(s)))
return unicodedata.normalize('NFC', s) | identifier_body |
active_app_02_objc_bridge.py | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2015 deanishe@deanishe.net
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2015-11-23
#
"""Get app info with AppKit via objc bridge."""
from __future__ import print_function, unicode_literals, absolute_import
import time
import unicodedata
from AppKit import NSWorkspace
def decode(s):
"""Decode bytestring to Unicode."""
if isinstance(s, str):
s = unicode(s, 'utf-8')
elif not isinstance(s, unicode):
raise TypeError("str or unicode required, not {}".format(type(s)))
return unicodedata.normalize('NFC', s) |
def get_frontmost_app():
"""Return (name, bundle_id and path) of frontmost application.
Raise a `RuntimeError` if frontmost application cannot be
determined.
"""
for app in NSWorkspace.sharedWorkspace().runningApplications():
if app.isActive():
app_name = app.localizedName()
bundle_id = app.bundleIdentifier()
app_path = app.bundleURL().fileSystemRepresentation()
return (app_name, bundle_id, app_path)
else:
raise RuntimeError("Couldn't get frontmost application.")
if __name__ == '__main__':
s = time.time()
get_frontmost_app()
d = time.time() - s | random_line_split | |
active_app_02_objc_bridge.py | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2015 deanishe@deanishe.net
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2015-11-23
#
"""Get app info with AppKit via objc bridge."""
from __future__ import print_function, unicode_literals, absolute_import
import time
import unicodedata
from AppKit import NSWorkspace
def decode(s):
"""Decode bytestring to Unicode."""
if isinstance(s, str):
|
elif not isinstance(s, unicode):
raise TypeError("str or unicode required, not {}".format(type(s)))
return unicodedata.normalize('NFC', s)
def get_frontmost_app():
"""Return (name, bundle_id and path) of frontmost application.
Raise a `RuntimeError` if frontmost application cannot be
determined.
"""
for app in NSWorkspace.sharedWorkspace().runningApplications():
if app.isActive():
app_name = app.localizedName()
bundle_id = app.bundleIdentifier()
app_path = app.bundleURL().fileSystemRepresentation()
return (app_name, bundle_id, app_path)
else:
raise RuntimeError("Couldn't get frontmost application.")
if __name__ == '__main__':
s = time.time()
get_frontmost_app()
d = time.time() - s
| s = unicode(s, 'utf-8') | conditional_block |
rt.rs | // This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! General — Library initialization and miscellaneous functions
use std::ptr;
use glib::translate::{FromGlibPtr, ToGlibPtr};
use gdk::ffi;
pub fn init() {
unsafe { ffi::gdk_init(ptr::null_mut(), ptr::null_mut()) }
}
/*pub fn init_check(argc: *mut c_int, argv: *mut *mut *mut c_char) -> bool {
unsafe { ::glib::to_bool(ffi::gdk_init_check(argc, argv)) }
}
pub fn parse_args(argc: *mut c_int, argv: *mut *mut *mut c_char) {
unsafe { ffi::gdk_parse_args(argc, argv) }
}*/
pub fn get_display_arg_name() -> Option<String> {
unsafe {
FromGlibPtr::borrow(
ffi::gdk_get_display_arg_name())
}
}
pub fn notify_startup_complete() {
unsafe { ffi::gdk_notify_startup_complete() }
}
pub fn notify_startup_complete_with_id(startup_id: &str) {
unsafe {
ffi::gdk_notify_startup_complete_with_id(startup_id.borrow_to_glib().0);
}
}
#[cfg(feature = "GTK_3_10")]
pub fn set_allowed_backends(backends: &str) {
unsafe {
ffi::gdk_set_allowed_backends(backends.borrow_to_glib().0)
}
}
pub fn get_program_class() -> Option<String> {
unsafe {
FromGlibPtr::borrow(
ffi::gdk_get_program_class())
}
}
pub fn set_program_class(program_class: &str) {
unsafe {
ffi::gdk_set_program_class(program_class.borrow_to_glib().0)
}
}
pub fn flush() {
unsafe { ffi::gdk_flush() }
}
pub fn screen_width() -> i32 {
unsafe { ffi::gdk_screen_width() }
}
pub fn screen_height() -> i32 {
unsafe { ffi::gdk_screen_height() }
}
pub fn screen_width_mm() -> i32 {
unsafe { ffi::gdk_screen_width_mm() }
}
pub fn screen_height_mm() -> i32 {
unsafe { ffi::gdk_screen_height_mm() }
}
pub fn beep() {
| pub fn error_trap_push() {
unsafe { ffi::gdk_error_trap_push() }
}
pub fn error_trap_pop() {
unsafe { ffi::gdk_error_trap_pop() }
}
pub fn error_trap_pop_ignored() {
unsafe { ffi::gdk_error_trap_pop_ignored() }
} | unsafe { ffi::gdk_flush() }
}
| identifier_body |
rt.rs | // This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! General — Library initialization and miscellaneous functions
use std::ptr;
use glib::translate::{FromGlibPtr, ToGlibPtr};
use gdk::ffi;
pub fn init() {
unsafe { ffi::gdk_init(ptr::null_mut(), ptr::null_mut()) }
}
/*pub fn init_check(argc: *mut c_int, argv: *mut *mut *mut c_char) -> bool {
unsafe { ::glib::to_bool(ffi::gdk_init_check(argc, argv)) }
}
pub fn parse_args(argc: *mut c_int, argv: *mut *mut *mut c_char) {
unsafe { ffi::gdk_parse_args(argc, argv) }
}*/
| unsafe {
FromGlibPtr::borrow(
ffi::gdk_get_display_arg_name())
}
}
pub fn notify_startup_complete() {
unsafe { ffi::gdk_notify_startup_complete() }
}
pub fn notify_startup_complete_with_id(startup_id: &str) {
unsafe {
ffi::gdk_notify_startup_complete_with_id(startup_id.borrow_to_glib().0);
}
}
#[cfg(feature = "GTK_3_10")]
pub fn set_allowed_backends(backends: &str) {
unsafe {
ffi::gdk_set_allowed_backends(backends.borrow_to_glib().0)
}
}
pub fn get_program_class() -> Option<String> {
unsafe {
FromGlibPtr::borrow(
ffi::gdk_get_program_class())
}
}
pub fn set_program_class(program_class: &str) {
unsafe {
ffi::gdk_set_program_class(program_class.borrow_to_glib().0)
}
}
pub fn flush() {
unsafe { ffi::gdk_flush() }
}
pub fn screen_width() -> i32 {
unsafe { ffi::gdk_screen_width() }
}
pub fn screen_height() -> i32 {
unsafe { ffi::gdk_screen_height() }
}
pub fn screen_width_mm() -> i32 {
unsafe { ffi::gdk_screen_width_mm() }
}
pub fn screen_height_mm() -> i32 {
unsafe { ffi::gdk_screen_height_mm() }
}
pub fn beep() {
unsafe { ffi::gdk_flush() }
}
pub fn error_trap_push() {
unsafe { ffi::gdk_error_trap_push() }
}
pub fn error_trap_pop() {
unsafe { ffi::gdk_error_trap_pop() }
}
pub fn error_trap_pop_ignored() {
unsafe { ffi::gdk_error_trap_pop_ignored() }
} | pub fn get_display_arg_name() -> Option<String> { | random_line_split |
rt.rs | // This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! General — Library initialization and miscellaneous functions
use std::ptr;
use glib::translate::{FromGlibPtr, ToGlibPtr};
use gdk::ffi;
pub fn init() {
unsafe { ffi::gdk_init(ptr::null_mut(), ptr::null_mut()) }
}
/*pub fn init_check(argc: *mut c_int, argv: *mut *mut *mut c_char) -> bool {
unsafe { ::glib::to_bool(ffi::gdk_init_check(argc, argv)) }
}
pub fn parse_args(argc: *mut c_int, argv: *mut *mut *mut c_char) {
unsafe { ffi::gdk_parse_args(argc, argv) }
}*/
pub fn get_display_arg_name() -> Option<String> {
unsafe {
FromGlibPtr::borrow(
ffi::gdk_get_display_arg_name())
}
}
pub fn notify_startup_complete() {
unsafe { ffi::gdk_notify_startup_complete() }
}
pub fn notify_startup_complete_with_id(startup_id: &str) {
unsafe {
ffi::gdk_notify_startup_complete_with_id(startup_id.borrow_to_glib().0);
}
}
#[cfg(feature = "GTK_3_10")]
pub fn set_allowed_backends(backends: &str) {
unsafe {
ffi::gdk_set_allowed_backends(backends.borrow_to_glib().0)
}
}
pub fn get_program_class() -> Option<String> {
unsafe {
FromGlibPtr::borrow(
ffi::gdk_get_program_class())
}
}
pub fn set_program_class(program_class: &str) {
unsafe {
ffi::gdk_set_program_class(program_class.borrow_to_glib().0)
}
}
pub fn flush() {
unsafe { ffi::gdk_flush() }
}
pub fn screen_width() -> i32 {
unsafe { ffi::gdk_screen_width() }
}
pub fn screen_height() -> i32 {
unsafe { ffi::gdk_screen_height() }
}
pub fn screen_width_mm() -> i32 {
unsafe { ffi::gdk_screen_width_mm() }
}
pub fn screen_height_mm() -> i32 {
unsafe { ffi::gdk_screen_height_mm() }
}
pub fn beep() {
unsafe { ffi::gdk_flush() }
}
pub fn error_trap_push() {
unsafe { ffi::gdk_error_trap_push() }
}
pub fn er | {
unsafe { ffi::gdk_error_trap_pop() }
}
pub fn error_trap_pop_ignored() {
unsafe { ffi::gdk_error_trap_pop_ignored() }
} | ror_trap_pop() | identifier_name |
example_all_O2.py | # -*- coding: utf-8 -*-
# This example compares the available inverse Abel transform methods
# currently - direct, hansenlaw, and basex
# processing the O2- photoelectron velocity-map image
#
# Note it transforms only the Q0 (top-right) quadrant
# using the fundamental transform code
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import abel
import collections
import matplotlib.pylab as plt
from time import time
import bz2
# inverse Abel transform methods -----------------------------
# dictionary of method: function()
transforms = {
"basex": abel.basex.basex_transform,
"linbasex": abel.linbasex.linbasex_transform,
"direct": abel.direct.direct_transform,
"hansenlaw": abel.hansenlaw.hansenlaw_transform,
"onion_bordas": abel.onion_bordas.onion_bordas_transform,
"onion_dasch": abel.dasch.onion_peeling_transform,
"three_point": abel.dasch.three_point_transform,
"two_point" : abel.dasch.two_point_transform,
}
# sort dictionary
transforms = collections.OrderedDict(sorted(transforms.items()))
ntrans = np.size(transforms.keys()) # number of transforms
# Image: O2- VMI 1024x1024 pixel ------------------
imagefile = bz2.BZ2File('data/O2-ANU1024.txt.bz2')
IM = np.loadtxt(imagefile)
# recenter the image to mid-pixel (odd image width)
IModd = abel.tools.center.center_image(IM, center="slice", odd_size=True)
h, w = IModd.shape
print("centered image 'data/O2-ANU2048.txt' shape = {:d}x{:d}".format(h, w))
# split image into quadrants
Q = abel.tools.symmetry.get_image_quadrants(IModd, reorient=True)
Q0 = Q[0]
Q0fresh = Q0.copy() # keep clean copy
print ("quadrant shape {}".format(Q0.shape))
# Intensity mask used for intensity normalization
# quadrant image region of bright pixels
mask = np.zeros(Q0.shape, dtype=bool)
mask[500:512, 358:365] = True
# process Q0 quadrant using each method --------------------
iabelQ = [] # keep inverse Abel transformed image
sp = [] # speed distributions
meth = [] # methods
for q, method in enumerate(transforms.keys()):
Q0 = Q0fresh.copy() # top-right quadrant of O2- image
print ("\n------- {:s} inverse ...".format(method))
t0 = time()
# inverse Abel transform using 'method'
IAQ0 = transforms[method](Q0, direction="inverse", dr=0.1,
basis_dir='bases')
print (" {:.1f} sec".format(time()-t0))
# polar projection and speed profile
radial, speed = abel.tools.vmi.angular_integration(IAQ0, origin=(0, 0),
dr=0.1)
# normalize image intensity and speed distribution
IAQ0 /= IAQ0[mask].max()
speed /= speed[radial > 50].max()
# keep data for plots
iabelQ.append(IAQ0)
sp.append((radial, speed))
meth.append(method)
# reassemble image, each quadrant a different method
# plot inverse Abel transformed image slices, and respective speed distributions
ax0 = plt.subplot2grid((1, 2), (0, 0))
ax1 = plt.subplot2grid((1, 2), (0, 1))
def ann_plt (quad, subquad, txt):
# -ve because numpy coords from top
|
# for < 4 images pad using a blank quadrant
r, c = Q0.shape
Q = np.zeros((4, r, c))
indx = np.triu_indices(iabelQ[0].shape[0])
iq = 0
for q in range(4):
Q[q] = iabelQ[iq].copy()
ann_plt(q, 0, meth[iq])
ax1.plot(*(sp[iq]), label=meth[iq], alpha=0.5)
iq += 1
if iq < len(transforms):
Q[q][indx] = np.triu(iabelQ[iq])[indx]
ann_plt(q, 1, meth[iq])
ax1.plot(*(sp[iq]), label=meth[iq], alpha=0.5)
iq += 1
# reassemble image from transformed (part-)quadrants
im = abel.tools.symmetry.put_image_quadrants((Q[0], Q[1], Q[2], Q[3]),
original_image_shape=IModd.shape)
ax0.axis('off')
ax0.set_title("inverse Abel transforms")
ax0.imshow(im, vmin=0, vmax=0.8)
ax1.set_title("speed distribution")
ax1.axis(ymin=-0.05, ymax=1.1, xmin=50, xmax=450)
ax1.legend(loc=0, labelspacing=0.1, fontsize=10, frameon=False)
plt.tight_layout()
# save a copy of the plot
plt.savefig('plot_example_all_O2.png', dpi=100)
plt.show()
| annot_angle = -(30+30*subquad+quad*90)*np.pi/180
annot_coord = (h/2+(h*0.8)*np.cos(annot_angle)/2,
w/2+(w*0.8)*np.sin(annot_angle)/2)
ax0.annotate(txt, annot_coord, color="yellow", horizontalalignment='left') | identifier_body |
example_all_O2.py | # -*- coding: utf-8 -*-
# This example compares the available inverse Abel transform methods
# currently - direct, hansenlaw, and basex
# processing the O2- photoelectron velocity-map image
#
# Note it transforms only the Q0 (top-right) quadrant
# using the fundamental transform code
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import abel
import collections
import matplotlib.pylab as plt
from time import time
import bz2
# inverse Abel transform methods -----------------------------
# dictionary of method: function()
transforms = {
"basex": abel.basex.basex_transform,
"linbasex": abel.linbasex.linbasex_transform,
"direct": abel.direct.direct_transform,
"hansenlaw": abel.hansenlaw.hansenlaw_transform,
"onion_bordas": abel.onion_bordas.onion_bordas_transform,
"onion_dasch": abel.dasch.onion_peeling_transform,
"three_point": abel.dasch.three_point_transform,
"two_point" : abel.dasch.two_point_transform,
}
# sort dictionary
transforms = collections.OrderedDict(sorted(transforms.items()))
ntrans = np.size(transforms.keys()) # number of transforms
# Image: O2- VMI 1024x1024 pixel ------------------
imagefile = bz2.BZ2File('data/O2-ANU1024.txt.bz2')
IM = np.loadtxt(imagefile)
# recenter the image to mid-pixel (odd image width)
IModd = abel.tools.center.center_image(IM, center="slice", odd_size=True)
h, w = IModd.shape
print("centered image 'data/O2-ANU2048.txt' shape = {:d}x{:d}".format(h, w))
# split image into quadrants
Q = abel.tools.symmetry.get_image_quadrants(IModd, reorient=True)
Q0 = Q[0]
Q0fresh = Q0.copy() # keep clean copy
print ("quadrant shape {}".format(Q0.shape))
# Intensity mask used for intensity normalization
# quadrant image region of bright pixels
mask = np.zeros(Q0.shape, dtype=bool)
mask[500:512, 358:365] = True
# process Q0 quadrant using each method --------------------
iabelQ = [] # keep inverse Abel transformed image
sp = [] # speed distributions
meth = [] # methods
for q, method in enumerate(transforms.keys()):
Q0 = Q0fresh.copy() # top-right quadrant of O2- image
print ("\n------- {:s} inverse ...".format(method))
t0 = time()
# inverse Abel transform using 'method'
IAQ0 = transforms[method](Q0, direction="inverse", dr=0.1,
basis_dir='bases')
print (" {:.1f} sec".format(time()-t0))
# polar projection and speed profile
radial, speed = abel.tools.vmi.angular_integration(IAQ0, origin=(0, 0),
dr=0.1)
# normalize image intensity and speed distribution
IAQ0 /= IAQ0[mask].max()
speed /= speed[radial > 50].max()
# keep data for plots
iabelQ.append(IAQ0)
sp.append((radial, speed))
meth.append(method)
# reassemble image, each quadrant a different method
# plot inverse Abel transformed image slices, and respective speed distributions
ax0 = plt.subplot2grid((1, 2), (0, 0))
ax1 = plt.subplot2grid((1, 2), (0, 1))
def ann_plt (quad, subquad, txt):
# -ve because numpy coords from top
annot_angle = -(30+30*subquad+quad*90)*np.pi/180
annot_coord = (h/2+(h*0.8)*np.cos(annot_angle)/2,
w/2+(w*0.8)*np.sin(annot_angle)/2)
ax0.annotate(txt, annot_coord, color="yellow", horizontalalignment='left')
# for < 4 images pad using a blank quadrant
r, c = Q0.shape
Q = np.zeros((4, r, c))
indx = np.triu_indices(iabelQ[0].shape[0])
iq = 0
for q in range(4):
|
# reassemble image from transformed (part-)quadrants
im = abel.tools.symmetry.put_image_quadrants((Q[0], Q[1], Q[2], Q[3]),
original_image_shape=IModd.shape)
ax0.axis('off')
ax0.set_title("inverse Abel transforms")
ax0.imshow(im, vmin=0, vmax=0.8)
ax1.set_title("speed distribution")
ax1.axis(ymin=-0.05, ymax=1.1, xmin=50, xmax=450)
ax1.legend(loc=0, labelspacing=0.1, fontsize=10, frameon=False)
plt.tight_layout()
# save a copy of the plot
plt.savefig('plot_example_all_O2.png', dpi=100)
plt.show()
| Q[q] = iabelQ[iq].copy()
ann_plt(q, 0, meth[iq])
ax1.plot(*(sp[iq]), label=meth[iq], alpha=0.5)
iq += 1
if iq < len(transforms):
Q[q][indx] = np.triu(iabelQ[iq])[indx]
ann_plt(q, 1, meth[iq])
ax1.plot(*(sp[iq]), label=meth[iq], alpha=0.5)
iq += 1 | conditional_block |
example_all_O2.py | # -*- coding: utf-8 -*-
# This example compares the available inverse Abel transform methods
# currently - direct, hansenlaw, and basex
# processing the O2- photoelectron velocity-map image
#
# Note it transforms only the Q0 (top-right) quadrant
# using the fundamental transform code
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import abel
import collections
import matplotlib.pylab as plt
from time import time
import bz2
# inverse Abel transform methods -----------------------------
# dictionary of method: function()
transforms = {
"basex": abel.basex.basex_transform,
"linbasex": abel.linbasex.linbasex_transform,
"direct": abel.direct.direct_transform,
"hansenlaw": abel.hansenlaw.hansenlaw_transform,
"onion_bordas": abel.onion_bordas.onion_bordas_transform,
"onion_dasch": abel.dasch.onion_peeling_transform,
"three_point": abel.dasch.three_point_transform,
"two_point" : abel.dasch.two_point_transform,
}
# sort dictionary
transforms = collections.OrderedDict(sorted(transforms.items()))
ntrans = np.size(transforms.keys()) # number of transforms
# Image: O2- VMI 1024x1024 pixel ------------------
imagefile = bz2.BZ2File('data/O2-ANU1024.txt.bz2')
IM = np.loadtxt(imagefile)
# recenter the image to mid-pixel (odd image width) |
h, w = IModd.shape
print("centered image 'data/O2-ANU2048.txt' shape = {:d}x{:d}".format(h, w))
# split image into quadrants
Q = abel.tools.symmetry.get_image_quadrants(IModd, reorient=True)
Q0 = Q[0]
Q0fresh = Q0.copy() # keep clean copy
print ("quadrant shape {}".format(Q0.shape))
# Intensity mask used for intensity normalization
# quadrant image region of bright pixels
mask = np.zeros(Q0.shape, dtype=bool)
mask[500:512, 358:365] = True
# process Q0 quadrant using each method --------------------
iabelQ = [] # keep inverse Abel transformed image
sp = [] # speed distributions
meth = [] # methods
for q, method in enumerate(transforms.keys()):
Q0 = Q0fresh.copy() # top-right quadrant of O2- image
print ("\n------- {:s} inverse ...".format(method))
t0 = time()
# inverse Abel transform using 'method'
IAQ0 = transforms[method](Q0, direction="inverse", dr=0.1,
basis_dir='bases')
print (" {:.1f} sec".format(time()-t0))
# polar projection and speed profile
radial, speed = abel.tools.vmi.angular_integration(IAQ0, origin=(0, 0),
dr=0.1)
# normalize image intensity and speed distribution
IAQ0 /= IAQ0[mask].max()
speed /= speed[radial > 50].max()
# keep data for plots
iabelQ.append(IAQ0)
sp.append((radial, speed))
meth.append(method)
# reassemble image, each quadrant a different method
# plot inverse Abel transformed image slices, and respective speed distributions
ax0 = plt.subplot2grid((1, 2), (0, 0))
ax1 = plt.subplot2grid((1, 2), (0, 1))
def ann_plt (quad, subquad, txt):
# -ve because numpy coords from top
annot_angle = -(30+30*subquad+quad*90)*np.pi/180
annot_coord = (h/2+(h*0.8)*np.cos(annot_angle)/2,
w/2+(w*0.8)*np.sin(annot_angle)/2)
ax0.annotate(txt, annot_coord, color="yellow", horizontalalignment='left')
# for < 4 images pad using a blank quadrant
r, c = Q0.shape
Q = np.zeros((4, r, c))
indx = np.triu_indices(iabelQ[0].shape[0])
iq = 0
for q in range(4):
Q[q] = iabelQ[iq].copy()
ann_plt(q, 0, meth[iq])
ax1.plot(*(sp[iq]), label=meth[iq], alpha=0.5)
iq += 1
if iq < len(transforms):
Q[q][indx] = np.triu(iabelQ[iq])[indx]
ann_plt(q, 1, meth[iq])
ax1.plot(*(sp[iq]), label=meth[iq], alpha=0.5)
iq += 1
# reassemble image from transformed (part-)quadrants
im = abel.tools.symmetry.put_image_quadrants((Q[0], Q[1], Q[2], Q[3]),
original_image_shape=IModd.shape)
ax0.axis('off')
ax0.set_title("inverse Abel transforms")
ax0.imshow(im, vmin=0, vmax=0.8)
ax1.set_title("speed distribution")
ax1.axis(ymin=-0.05, ymax=1.1, xmin=50, xmax=450)
ax1.legend(loc=0, labelspacing=0.1, fontsize=10, frameon=False)
plt.tight_layout()
# save a copy of the plot
plt.savefig('plot_example_all_O2.png', dpi=100)
plt.show() | IModd = abel.tools.center.center_image(IM, center="slice", odd_size=True) | random_line_split |
example_all_O2.py | # -*- coding: utf-8 -*-
# This example compares the available inverse Abel transform methods
# currently - direct, hansenlaw, and basex
# processing the O2- photoelectron velocity-map image
#
# Note it transforms only the Q0 (top-right) quadrant
# using the fundamental transform code
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import abel
import collections
import matplotlib.pylab as plt
from time import time
import bz2
# inverse Abel transform methods -----------------------------
# dictionary of method: function()
transforms = {
"basex": abel.basex.basex_transform,
"linbasex": abel.linbasex.linbasex_transform,
"direct": abel.direct.direct_transform,
"hansenlaw": abel.hansenlaw.hansenlaw_transform,
"onion_bordas": abel.onion_bordas.onion_bordas_transform,
"onion_dasch": abel.dasch.onion_peeling_transform,
"three_point": abel.dasch.three_point_transform,
"two_point" : abel.dasch.two_point_transform,
}
# sort dictionary
transforms = collections.OrderedDict(sorted(transforms.items()))
ntrans = np.size(transforms.keys()) # number of transforms
# Image: O2- VMI 1024x1024 pixel ------------------
imagefile = bz2.BZ2File('data/O2-ANU1024.txt.bz2')
IM = np.loadtxt(imagefile)
# recenter the image to mid-pixel (odd image width)
IModd = abel.tools.center.center_image(IM, center="slice", odd_size=True)
h, w = IModd.shape
print("centered image 'data/O2-ANU2048.txt' shape = {:d}x{:d}".format(h, w))
# split image into quadrants
Q = abel.tools.symmetry.get_image_quadrants(IModd, reorient=True)
Q0 = Q[0]
Q0fresh = Q0.copy() # keep clean copy
print ("quadrant shape {}".format(Q0.shape))
# Intensity mask used for intensity normalization
# quadrant image region of bright pixels
mask = np.zeros(Q0.shape, dtype=bool)
mask[500:512, 358:365] = True
# process Q0 quadrant using each method --------------------
iabelQ = [] # keep inverse Abel transformed image
sp = [] # speed distributions
meth = [] # methods
for q, method in enumerate(transforms.keys()):
Q0 = Q0fresh.copy() # top-right quadrant of O2- image
print ("\n------- {:s} inverse ...".format(method))
t0 = time()
# inverse Abel transform using 'method'
IAQ0 = transforms[method](Q0, direction="inverse", dr=0.1,
basis_dir='bases')
print (" {:.1f} sec".format(time()-t0))
# polar projection and speed profile
radial, speed = abel.tools.vmi.angular_integration(IAQ0, origin=(0, 0),
dr=0.1)
# normalize image intensity and speed distribution
IAQ0 /= IAQ0[mask].max()
speed /= speed[radial > 50].max()
# keep data for plots
iabelQ.append(IAQ0)
sp.append((radial, speed))
meth.append(method)
# reassemble image, each quadrant a different method
# plot inverse Abel transformed image slices, and respective speed distributions
ax0 = plt.subplot2grid((1, 2), (0, 0))
ax1 = plt.subplot2grid((1, 2), (0, 1))
def | (quad, subquad, txt):
# -ve because numpy coords from top
annot_angle = -(30+30*subquad+quad*90)*np.pi/180
annot_coord = (h/2+(h*0.8)*np.cos(annot_angle)/2,
w/2+(w*0.8)*np.sin(annot_angle)/2)
ax0.annotate(txt, annot_coord, color="yellow", horizontalalignment='left')
# for < 4 images pad using a blank quadrant
r, c = Q0.shape
Q = np.zeros((4, r, c))
indx = np.triu_indices(iabelQ[0].shape[0])
iq = 0
for q in range(4):
Q[q] = iabelQ[iq].copy()
ann_plt(q, 0, meth[iq])
ax1.plot(*(sp[iq]), label=meth[iq], alpha=0.5)
iq += 1
if iq < len(transforms):
Q[q][indx] = np.triu(iabelQ[iq])[indx]
ann_plt(q, 1, meth[iq])
ax1.plot(*(sp[iq]), label=meth[iq], alpha=0.5)
iq += 1
# reassemble image from transformed (part-)quadrants
im = abel.tools.symmetry.put_image_quadrants((Q[0], Q[1], Q[2], Q[3]),
original_image_shape=IModd.shape)
ax0.axis('off')
ax0.set_title("inverse Abel transforms")
ax0.imshow(im, vmin=0, vmax=0.8)
ax1.set_title("speed distribution")
ax1.axis(ymin=-0.05, ymax=1.1, xmin=50, xmax=450)
ax1.legend(loc=0, labelspacing=0.1, fontsize=10, frameon=False)
plt.tight_layout()
# save a copy of the plot
plt.savefig('plot_example_all_O2.png', dpi=100)
plt.show()
| ann_plt | identifier_name |
main.js | /*jslint node:true, vars:true, bitwise:true, unparam:true */
/*jshint unused:true */
/*global */
var ledController = require("./ledController.js");
ledController.clear();
ledController.smile();
var motorController = require("./motorController.js")
motorController.go();
motorController.stop();
//motorController.demo();
var headControler = require("./headcontroller.js");
headControler.lookMiddle();
var theThingsAPI = require('thethingsio-api');
var motionKEY = 'motion';
//create Client
var client = theThingsAPI.createClient();
var lastAction = "@@@";
function readMotions()
{
//read latest write
var req1 = client.thingReadLatest(motionKEY);
//event fired when the response arrives
req1.on('response',function(res){
if (res.statusCode == 200 && res.payload !== undefined)
|
});
req1.end();
}
readMotions();
motorController.stop();
var loopCounter = 0;
setInterval(function() {
readMotions();
}, 200);
| {
var payload = JSON.parse(res.payload);
var newAction = payload.data[0].value;
if (newAction !== lastAction)
{
lastAction = newAction;
motorController.doAction(newAction);
}
} | conditional_block |
main.js | /*jslint node:true, vars:true, bitwise:true, unparam:true */
/*jshint unused:true */
/*global */
var ledController = require("./ledController.js");
ledController.clear();
ledController.smile();
var motorController = require("./motorController.js")
motorController.go();
motorController.stop();
//motorController.demo();
var headControler = require("./headcontroller.js");
headControler.lookMiddle();
var theThingsAPI = require('thethingsio-api');
var motionKEY = 'motion';
//create Client
var client = theThingsAPI.createClient();
var lastAction = "@@@";
function | ()
{
//read latest write
var req1 = client.thingReadLatest(motionKEY);
//event fired when the response arrives
req1.on('response',function(res){
if (res.statusCode == 200 && res.payload !== undefined)
{
var payload = JSON.parse(res.payload);
var newAction = payload.data[0].value;
if (newAction !== lastAction)
{
lastAction = newAction;
motorController.doAction(newAction);
}
}
});
req1.end();
}
readMotions();
motorController.stop();
var loopCounter = 0;
setInterval(function() {
readMotions();
}, 200);
| readMotions | identifier_name |
main.js | /*jslint node:true, vars:true, bitwise:true, unparam:true */
/*jshint unused:true */
/*global */
var ledController = require("./ledController.js");
ledController.clear();
ledController.smile();
var motorController = require("./motorController.js")
motorController.go();
motorController.stop();
//motorController.demo();
var headControler = require("./headcontroller.js");
headControler.lookMiddle();
var theThingsAPI = require('thethingsio-api');
var motionKEY = 'motion';
//create Client
var client = theThingsAPI.createClient();
var lastAction = "@@@";
function readMotions()
|
readMotions();
motorController.stop();
var loopCounter = 0;
setInterval(function() {
readMotions();
}, 200);
| {
//read latest write
var req1 = client.thingReadLatest(motionKEY);
//event fired when the response arrives
req1.on('response',function(res){
if (res.statusCode == 200 && res.payload !== undefined)
{
var payload = JSON.parse(res.payload);
var newAction = payload.data[0].value;
if (newAction !== lastAction)
{
lastAction = newAction;
motorController.doAction(newAction);
}
}
});
req1.end();
} | identifier_body |
main.js | /*jslint node:true, vars:true, bitwise:true, unparam:true */
/*jshint unused:true */
/*global */
var ledController = require("./ledController.js");
ledController.clear();
ledController.smile();
var motorController = require("./motorController.js")
motorController.go();
motorController.stop();
//motorController.demo();
var headControler = require("./headcontroller.js");
headControler.lookMiddle();
var theThingsAPI = require('thethingsio-api');
var motionKEY = 'motion';
//create Client
var client = theThingsAPI.createClient();
var lastAction = "@@@";
| //read latest write
var req1 = client.thingReadLatest(motionKEY);
//event fired when the response arrives
req1.on('response',function(res){
if (res.statusCode == 200 && res.payload !== undefined)
{
var payload = JSON.parse(res.payload);
var newAction = payload.data[0].value;
if (newAction !== lastAction)
{
lastAction = newAction;
motorController.doAction(newAction);
}
}
});
req1.end();
}
readMotions();
motorController.stop();
var loopCounter = 0;
setInterval(function() {
readMotions();
}, 200); | function readMotions()
{ | random_line_split |
sale_order.py | # -*- coding: utf-8 -*-
from openerp import models, api
class sale_order_line(models.Model):
_inherit = "sale.order.line"
@api.one | invoice_line_ids = [((0, 0, {
'product_id': self.product_id.id,
'analytic_account_id': self.order_id.project_id.id,
'name': self.name,
'quantity': self.product_uom_qty,
'uom_id': self.product_uom.id,
'price_unit': self.price_unit,
'price_subtotal': self.price_subtotal
}))]
analytic_values = {'recurring_invoices': True, 'recurring_invoice_line_ids': invoice_line_ids}
if not self.order_id.project_id.partner_id:
analytic_values['partner_id'] = self.order_id.partner_id.id
self.order_id.project_id.write(analytic_values)
return super(sale_order_line, self).button_confirm() | def button_confirm(self):
if self.product_id.recurring_invoice and self.order_id.project_id: | random_line_split |
sale_order.py | # -*- coding: utf-8 -*-
from openerp import models, api
class sale_order_line(models.Model):
| _inherit = "sale.order.line"
@api.one
def button_confirm(self):
if self.product_id.recurring_invoice and self.order_id.project_id:
invoice_line_ids = [((0, 0, {
'product_id': self.product_id.id,
'analytic_account_id': self.order_id.project_id.id,
'name': self.name,
'quantity': self.product_uom_qty,
'uom_id': self.product_uom.id,
'price_unit': self.price_unit,
'price_subtotal': self.price_subtotal
}))]
analytic_values = {'recurring_invoices': True, 'recurring_invoice_line_ids': invoice_line_ids}
if not self.order_id.project_id.partner_id:
analytic_values['partner_id'] = self.order_id.partner_id.id
self.order_id.project_id.write(analytic_values)
return super(sale_order_line, self).button_confirm() | identifier_body | |
sale_order.py | # -*- coding: utf-8 -*-
from openerp import models, api
class sale_order_line(models.Model):
_inherit = "sale.order.line"
@api.one
def | (self):
if self.product_id.recurring_invoice and self.order_id.project_id:
invoice_line_ids = [((0, 0, {
'product_id': self.product_id.id,
'analytic_account_id': self.order_id.project_id.id,
'name': self.name,
'quantity': self.product_uom_qty,
'uom_id': self.product_uom.id,
'price_unit': self.price_unit,
'price_subtotal': self.price_subtotal
}))]
analytic_values = {'recurring_invoices': True, 'recurring_invoice_line_ids': invoice_line_ids}
if not self.order_id.project_id.partner_id:
analytic_values['partner_id'] = self.order_id.partner_id.id
self.order_id.project_id.write(analytic_values)
return super(sale_order_line, self).button_confirm()
| button_confirm | identifier_name |
sale_order.py | # -*- coding: utf-8 -*-
from openerp import models, api
class sale_order_line(models.Model):
_inherit = "sale.order.line"
@api.one
def button_confirm(self):
if self.product_id.recurring_invoice and self.order_id.project_id:
|
return super(sale_order_line, self).button_confirm()
| invoice_line_ids = [((0, 0, {
'product_id': self.product_id.id,
'analytic_account_id': self.order_id.project_id.id,
'name': self.name,
'quantity': self.product_uom_qty,
'uom_id': self.product_uom.id,
'price_unit': self.price_unit,
'price_subtotal': self.price_subtotal
}))]
analytic_values = {'recurring_invoices': True, 'recurring_invoice_line_ids': invoice_line_ids}
if not self.order_id.project_id.partner_id:
analytic_values['partner_id'] = self.order_id.partner_id.id
self.order_id.project_id.write(analytic_values) | conditional_block |
scroll-dispatcher.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ElementRef, Injectable, NgZone, Optional, SkipSelf} from '@angular/core';
import {Platform} from '@angular/cdk/platform';
import {Subject} from 'rxjs/Subject';
import {Subscription} from 'rxjs/Subscription';
import {Observable} from 'rxjs/Observable';
import {fromEvent} from 'rxjs/observable/fromEvent';
import {of as observableOf} from 'rxjs/observable/of';
import {auditTime} from 'rxjs/operator/auditTime';
import {Scrollable} from './scrollable';
/** Time in ms to throttle the scrolling events by default. */
export const DEFAULT_SCROLL_TIME = 20;
/**
* Service contained all registered Scrollable references and emits an event when any one of the
* Scrollable references emit a scrolled event.
*/
@Injectable()
export class ScrollDispatcher {
constructor(private _ngZone: NgZone, private _platform: Platform) { }
/** Subject for notifying that a registered scrollable reference element has been scrolled. */
private _scrolled: Subject<void> = new Subject<void>();
/** Keeps track of the global `scroll` and `resize` subscriptions. */
_globalSubscription: Subscription | null = null;
/** Keeps track of the amount of subscriptions to `scrolled`. Used for cleaning up afterwards. */
private _scrolledCount = 0;
/**
* Map of all the scrollable references that are registered with the service and their
* scroll event subscriptions.
*/
scrollableReferences: Map<Scrollable, Subscription> = new Map();
/**
* Registers a Scrollable with the service and listens for its scrolled events. When the
* scrollable is scrolled, the service emits the event in its scrolled observable.
* @param scrollable Scrollable instance to be registered.
*/
register(scrollable: Scrollable): void {
const scrollSubscription = scrollable.elementScrolled().subscribe(() => this._scrolled.next());
this.scrollableReferences.set(scrollable, scrollSubscription);
}
/**
* Deregisters a Scrollable reference and unsubscribes from its scroll event observable.
* @param scrollable Scrollable instance to be deregistered.
*/
deregister(scrollable: Scrollable): void {
const scrollableReference = this.scrollableReferences.get(scrollable);
if (scrollableReference) {
scrollableReference.unsubscribe();
this.scrollableReferences.delete(scrollable);
}
}
/**
* Returns an observable that emits an event whenever any of the registered Scrollable
* references (or window, document, or body) fire a scrolled event. Can provide a time in ms
* to override the default "throttle" time.
*/
scrolled(auditTimeInMs: number = DEFAULT_SCROLL_TIME): Observable<void> {
return this._platform.isBrowser ? Observable.create(observer => {
if (!this._globalSubscription) {
this._addGlobalListener();
}
// In the case of a 0ms delay, use an observable without auditTime
// since it does add a perceptible delay in processing overhead.
const subscription = auditTimeInMs > 0 ?
auditTime.call(this._scrolled, auditTimeInMs).subscribe(observer) :
this._scrolled.subscribe(observer);
this._scrolledCount++;
return () => {
subscription.unsubscribe();
this._scrolledCount--;
if (this._globalSubscription && !this.scrollableReferences.size && !this._scrolledCount) {
this._globalSubscription.unsubscribe();
this._globalSubscription = null;
}
};
}) : observableOf<void>();
}
/** Returns all registered Scrollables that contain the provided element. */
getScrollContainers(elementRef: ElementRef): Scrollable[] {
const scrollingContainers: Scrollable[] = [];
this.scrollableReferences.forEach((_subscription: Subscription, scrollable: Scrollable) => {
if (this.scrollableContainsElement(scrollable, elementRef)) {
scrollingContainers.push(scrollable);
}
});
return scrollingContainers;
}
/** Returns true if the element is contained within the provided Scrollable. */
scrollableContainsElement(scrollable: Scrollable, elementRef: ElementRef): boolean {
let element = elementRef.nativeElement;
let scrollableElement = scrollable.getElementRef().nativeElement;
// Traverse through the element parents until we reach null, checking if any of the elements
// are the scrollable's element.
do {
if (element == scrollableElement) { return true; }
} while (element = element.parentElement);
return false;
}
/** Sets up the global scroll and resize listeners. */
private _addGlobalListener() {
this._globalSubscription = this._ngZone.runOutsideAngular(() => {
return fromEvent(window.document, 'scroll').subscribe(() => this._scrolled.next());
});
}
}
/** @docs-private */
export function | (
parentDispatcher: ScrollDispatcher, ngZone: NgZone, platform: Platform) {
return parentDispatcher || new ScrollDispatcher(ngZone, platform);
}
/** @docs-private */
export const SCROLL_DISPATCHER_PROVIDER = {
// If there is already a ScrollDispatcher available, use that. Otherwise, provide a new one.
provide: ScrollDispatcher,
deps: [[new Optional(), new SkipSelf(), ScrollDispatcher], NgZone, Platform],
useFactory: SCROLL_DISPATCHER_PROVIDER_FACTORY
};
| SCROLL_DISPATCHER_PROVIDER_FACTORY | identifier_name |
scroll-dispatcher.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ElementRef, Injectable, NgZone, Optional, SkipSelf} from '@angular/core';
import {Platform} from '@angular/cdk/platform';
import {Subject} from 'rxjs/Subject';
import {Subscription} from 'rxjs/Subscription';
import {Observable} from 'rxjs/Observable';
import {fromEvent} from 'rxjs/observable/fromEvent';
import {of as observableOf} from 'rxjs/observable/of';
import {auditTime} from 'rxjs/operator/auditTime';
import {Scrollable} from './scrollable';
/** Time in ms to throttle the scrolling events by default. */
export const DEFAULT_SCROLL_TIME = 20;
/**
* Service contained all registered Scrollable references and emits an event when any one of the
* Scrollable references emit a scrolled event.
*/
@Injectable()
export class ScrollDispatcher {
constructor(private _ngZone: NgZone, private _platform: Platform) { }
/** Subject for notifying that a registered scrollable reference element has been scrolled. */
private _scrolled: Subject<void> = new Subject<void>();
/** Keeps track of the global `scroll` and `resize` subscriptions. */
_globalSubscription: Subscription | null = null;
/** Keeps track of the amount of subscriptions to `scrolled`. Used for cleaning up afterwards. */
private _scrolledCount = 0;
/**
* Map of all the scrollable references that are registered with the service and their
* scroll event subscriptions.
*/
scrollableReferences: Map<Scrollable, Subscription> = new Map();
/**
* Registers a Scrollable with the service and listens for its scrolled events. When the
* scrollable is scrolled, the service emits the event in its scrolled observable.
* @param scrollable Scrollable instance to be registered.
*/
register(scrollable: Scrollable): void {
const scrollSubscription = scrollable.elementScrolled().subscribe(() => this._scrolled.next());
this.scrollableReferences.set(scrollable, scrollSubscription);
}
/**
* Deregisters a Scrollable reference and unsubscribes from its scroll event observable.
* @param scrollable Scrollable instance to be deregistered.
*/
deregister(scrollable: Scrollable): void |
/**
* Returns an observable that emits an event whenever any of the registered Scrollable
* references (or window, document, or body) fire a scrolled event. Can provide a time in ms
* to override the default "throttle" time.
*/
scrolled(auditTimeInMs: number = DEFAULT_SCROLL_TIME): Observable<void> {
return this._platform.isBrowser ? Observable.create(observer => {
if (!this._globalSubscription) {
this._addGlobalListener();
}
// In the case of a 0ms delay, use an observable without auditTime
// since it does add a perceptible delay in processing overhead.
const subscription = auditTimeInMs > 0 ?
auditTime.call(this._scrolled, auditTimeInMs).subscribe(observer) :
this._scrolled.subscribe(observer);
this._scrolledCount++;
return () => {
subscription.unsubscribe();
this._scrolledCount--;
if (this._globalSubscription && !this.scrollableReferences.size && !this._scrolledCount) {
this._globalSubscription.unsubscribe();
this._globalSubscription = null;
}
};
}) : observableOf<void>();
}
/** Returns all registered Scrollables that contain the provided element. */
getScrollContainers(elementRef: ElementRef): Scrollable[] {
const scrollingContainers: Scrollable[] = [];
this.scrollableReferences.forEach((_subscription: Subscription, scrollable: Scrollable) => {
if (this.scrollableContainsElement(scrollable, elementRef)) {
scrollingContainers.push(scrollable);
}
});
return scrollingContainers;
}
/** Returns true if the element is contained within the provided Scrollable. */
scrollableContainsElement(scrollable: Scrollable, elementRef: ElementRef): boolean {
let element = elementRef.nativeElement;
let scrollableElement = scrollable.getElementRef().nativeElement;
// Traverse through the element parents until we reach null, checking if any of the elements
// are the scrollable's element.
do {
if (element == scrollableElement) { return true; }
} while (element = element.parentElement);
return false;
}
/** Sets up the global scroll and resize listeners. */
private _addGlobalListener() {
this._globalSubscription = this._ngZone.runOutsideAngular(() => {
return fromEvent(window.document, 'scroll').subscribe(() => this._scrolled.next());
});
}
}
/** @docs-private */
export function SCROLL_DISPATCHER_PROVIDER_FACTORY(
parentDispatcher: ScrollDispatcher, ngZone: NgZone, platform: Platform) {
return parentDispatcher || new ScrollDispatcher(ngZone, platform);
}
/** @docs-private */
export const SCROLL_DISPATCHER_PROVIDER = {
// If there is already a ScrollDispatcher available, use that. Otherwise, provide a new one.
provide: ScrollDispatcher,
deps: [[new Optional(), new SkipSelf(), ScrollDispatcher], NgZone, Platform],
useFactory: SCROLL_DISPATCHER_PROVIDER_FACTORY
};
| {
const scrollableReference = this.scrollableReferences.get(scrollable);
if (scrollableReference) {
scrollableReference.unsubscribe();
this.scrollableReferences.delete(scrollable);
}
} | identifier_body |
scroll-dispatcher.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ElementRef, Injectable, NgZone, Optional, SkipSelf} from '@angular/core';
import {Platform} from '@angular/cdk/platform';
import {Subject} from 'rxjs/Subject';
import {Subscription} from 'rxjs/Subscription';
import {Observable} from 'rxjs/Observable';
import {fromEvent} from 'rxjs/observable/fromEvent';
import {of as observableOf} from 'rxjs/observable/of';
import {auditTime} from 'rxjs/operator/auditTime';
import {Scrollable} from './scrollable';
/** Time in ms to throttle the scrolling events by default. */
export const DEFAULT_SCROLL_TIME = 20;
/**
* Service contained all registered Scrollable references and emits an event when any one of the
* Scrollable references emit a scrolled event.
*/
@Injectable()
export class ScrollDispatcher {
constructor(private _ngZone: NgZone, private _platform: Platform) { }
/** Subject for notifying that a registered scrollable reference element has been scrolled. */
private _scrolled: Subject<void> = new Subject<void>();
/** Keeps track of the global `scroll` and `resize` subscriptions. */
_globalSubscription: Subscription | null = null;
/** Keeps track of the amount of subscriptions to `scrolled`. Used for cleaning up afterwards. */
private _scrolledCount = 0;
/**
* Map of all the scrollable references that are registered with the service and their
* scroll event subscriptions.
*/
scrollableReferences: Map<Scrollable, Subscription> = new Map();
/**
* Registers a Scrollable with the service and listens for its scrolled events. When the
* scrollable is scrolled, the service emits the event in its scrolled observable.
* @param scrollable Scrollable instance to be registered.
*/
register(scrollable: Scrollable): void {
const scrollSubscription = scrollable.elementScrolled().subscribe(() => this._scrolled.next());
this.scrollableReferences.set(scrollable, scrollSubscription);
}
/**
* Deregisters a Scrollable reference and unsubscribes from its scroll event observable.
* @param scrollable Scrollable instance to be deregistered.
*/
deregister(scrollable: Scrollable): void {
const scrollableReference = this.scrollableReferences.get(scrollable);
if (scrollableReference) {
scrollableReference.unsubscribe();
this.scrollableReferences.delete(scrollable);
}
}
/**
* Returns an observable that emits an event whenever any of the registered Scrollable
* references (or window, document, or body) fire a scrolled event. Can provide a time in ms
* to override the default "throttle" time.
*/
scrolled(auditTimeInMs: number = DEFAULT_SCROLL_TIME): Observable<void> {
return this._platform.isBrowser ? Observable.create(observer => {
if (!this._globalSubscription) {
this._addGlobalListener();
}
// In the case of a 0ms delay, use an observable without auditTime
// since it does add a perceptible delay in processing overhead.
const subscription = auditTimeInMs > 0 ?
auditTime.call(this._scrolled, auditTimeInMs).subscribe(observer) :
this._scrolled.subscribe(observer);
this._scrolledCount++;
return () => {
subscription.unsubscribe();
this._scrolledCount--;
if (this._globalSubscription && !this.scrollableReferences.size && !this._scrolledCount) {
this._globalSubscription.unsubscribe();
this._globalSubscription = null;
}
};
}) : observableOf<void>();
}
/** Returns all registered Scrollables that contain the provided element. */
getScrollContainers(elementRef: ElementRef): Scrollable[] {
const scrollingContainers: Scrollable[] = [];
this.scrollableReferences.forEach((_subscription: Subscription, scrollable: Scrollable) => {
if (this.scrollableContainsElement(scrollable, elementRef)) {
scrollingContainers.push(scrollable);
}
});
return scrollingContainers;
}
/** Returns true if the element is contained within the provided Scrollable. */
scrollableContainsElement(scrollable: Scrollable, elementRef: ElementRef): boolean {
let element = elementRef.nativeElement;
let scrollableElement = scrollable.getElementRef().nativeElement;
// Traverse through the element parents until we reach null, checking if any of the elements
// are the scrollable's element.
do {
if (element == scrollableElement) |
} while (element = element.parentElement);
return false;
}
/** Sets up the global scroll and resize listeners. */
private _addGlobalListener() {
this._globalSubscription = this._ngZone.runOutsideAngular(() => {
return fromEvent(window.document, 'scroll').subscribe(() => this._scrolled.next());
});
}
}
/** @docs-private */
export function SCROLL_DISPATCHER_PROVIDER_FACTORY(
parentDispatcher: ScrollDispatcher, ngZone: NgZone, platform: Platform) {
return parentDispatcher || new ScrollDispatcher(ngZone, platform);
}
/** @docs-private */
export const SCROLL_DISPATCHER_PROVIDER = {
// If there is already a ScrollDispatcher available, use that. Otherwise, provide a new one.
provide: ScrollDispatcher,
deps: [[new Optional(), new SkipSelf(), ScrollDispatcher], NgZone, Platform],
useFactory: SCROLL_DISPATCHER_PROVIDER_FACTORY
};
| { return true; } | conditional_block |
scroll-dispatcher.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ElementRef, Injectable, NgZone, Optional, SkipSelf} from '@angular/core';
import {Platform} from '@angular/cdk/platform';
import {Subject} from 'rxjs/Subject';
import {Subscription} from 'rxjs/Subscription';
import {Observable} from 'rxjs/Observable';
import {fromEvent} from 'rxjs/observable/fromEvent';
import {of as observableOf} from 'rxjs/observable/of';
import {auditTime} from 'rxjs/operator/auditTime';
import {Scrollable} from './scrollable';
/** Time in ms to throttle the scrolling events by default. */
export const DEFAULT_SCROLL_TIME = 20; | @Injectable()
export class ScrollDispatcher {
constructor(private _ngZone: NgZone, private _platform: Platform) { }
/** Subject for notifying that a registered scrollable reference element has been scrolled. */
private _scrolled: Subject<void> = new Subject<void>();
/** Keeps track of the global `scroll` and `resize` subscriptions. */
_globalSubscription: Subscription | null = null;
/** Keeps track of the amount of subscriptions to `scrolled`. Used for cleaning up afterwards. */
private _scrolledCount = 0;
/**
* Map of all the scrollable references that are registered with the service and their
* scroll event subscriptions.
*/
scrollableReferences: Map<Scrollable, Subscription> = new Map();
/**
* Registers a Scrollable with the service and listens for its scrolled events. When the
* scrollable is scrolled, the service emits the event in its scrolled observable.
* @param scrollable Scrollable instance to be registered.
*/
register(scrollable: Scrollable): void {
const scrollSubscription = scrollable.elementScrolled().subscribe(() => this._scrolled.next());
this.scrollableReferences.set(scrollable, scrollSubscription);
}
/**
* Deregisters a Scrollable reference and unsubscribes from its scroll event observable.
* @param scrollable Scrollable instance to be deregistered.
*/
deregister(scrollable: Scrollable): void {
const scrollableReference = this.scrollableReferences.get(scrollable);
if (scrollableReference) {
scrollableReference.unsubscribe();
this.scrollableReferences.delete(scrollable);
}
}
/**
* Returns an observable that emits an event whenever any of the registered Scrollable
* references (or window, document, or body) fire a scrolled event. Can provide a time in ms
* to override the default "throttle" time.
*/
scrolled(auditTimeInMs: number = DEFAULT_SCROLL_TIME): Observable<void> {
return this._platform.isBrowser ? Observable.create(observer => {
if (!this._globalSubscription) {
this._addGlobalListener();
}
// In the case of a 0ms delay, use an observable without auditTime
// since it does add a perceptible delay in processing overhead.
const subscription = auditTimeInMs > 0 ?
auditTime.call(this._scrolled, auditTimeInMs).subscribe(observer) :
this._scrolled.subscribe(observer);
this._scrolledCount++;
return () => {
subscription.unsubscribe();
this._scrolledCount--;
if (this._globalSubscription && !this.scrollableReferences.size && !this._scrolledCount) {
this._globalSubscription.unsubscribe();
this._globalSubscription = null;
}
};
}) : observableOf<void>();
}
/** Returns all registered Scrollables that contain the provided element. */
getScrollContainers(elementRef: ElementRef): Scrollable[] {
const scrollingContainers: Scrollable[] = [];
this.scrollableReferences.forEach((_subscription: Subscription, scrollable: Scrollable) => {
if (this.scrollableContainsElement(scrollable, elementRef)) {
scrollingContainers.push(scrollable);
}
});
return scrollingContainers;
}
/** Returns true if the element is contained within the provided Scrollable. */
scrollableContainsElement(scrollable: Scrollable, elementRef: ElementRef): boolean {
let element = elementRef.nativeElement;
let scrollableElement = scrollable.getElementRef().nativeElement;
// Traverse through the element parents until we reach null, checking if any of the elements
// are the scrollable's element.
do {
if (element == scrollableElement) { return true; }
} while (element = element.parentElement);
return false;
}
/** Sets up the global scroll and resize listeners. */
private _addGlobalListener() {
this._globalSubscription = this._ngZone.runOutsideAngular(() => {
return fromEvent(window.document, 'scroll').subscribe(() => this._scrolled.next());
});
}
}
/** @docs-private */
export function SCROLL_DISPATCHER_PROVIDER_FACTORY(
parentDispatcher: ScrollDispatcher, ngZone: NgZone, platform: Platform) {
return parentDispatcher || new ScrollDispatcher(ngZone, platform);
}
/** @docs-private */
export const SCROLL_DISPATCHER_PROVIDER = {
// If there is already a ScrollDispatcher available, use that. Otherwise, provide a new one.
provide: ScrollDispatcher,
deps: [[new Optional(), new SkipSelf(), ScrollDispatcher], NgZone, Platform],
useFactory: SCROLL_DISPATCHER_PROVIDER_FACTORY
}; |
/**
* Service contained all registered Scrollable references and emits an event when any one of the
* Scrollable references emit a scrolled event.
*/ | random_line_split |
operands.rs | // This example shows how to get operands details.
|
use capstone_rust::capstone as cs;
fn main() {
// Buffer of code.
let code = vec![0x01, 0xc0, 0x33, 0x19, 0x66, 0x83, 0xeb, 0x0a, 0xe8, 0x0c, 0x00, 0x00,
0x00, 0x21, 0x5c, 0xca, 0xfd];
let dec = cs::Capstone::new(cs::cs_arch::CS_ARCH_X86, cs::cs_mode::CS_MODE_32).unwrap();
// Enable detail mode. This is needed if you want to get instruction details.
dec.option(cs::cs_opt_type::CS_OPT_DETAIL, cs::cs_opt_value::CS_OPT_ON).unwrap();
let buf = dec.disasm(code.as_slice(), 0x100, 0).unwrap();
for instr in buf.iter() {
println!("0x{:x}:\t{}\t{}", instr.address, instr.mnemonic, instr.op_str);
let details = instr.detail.unwrap();
// Get the arch-specific part of details.
if let cs::DetailsArch::X86(arch) = details.arch {
for i in 0..arch.op_count {
// Get the current operand.
let op: cs::cs_x86_op = arch.operands[i as usize];
match op.type_ {
cs::x86_op_type::X86_OP_REG => {
let reg: &cs::x86_reg = op.reg();
println!(" Register operand: {}", dec.reg_name(reg.as_int()).unwrap());
// note: reg can be printed also with the `{:?}` formatter.
},
cs::x86_op_type::X86_OP_IMM => {
let imm: i64 = op.imm();
println!(" Immediate operand: 0x{:x}", imm);
},
cs::x86_op_type::X86_OP_FP => {
let fp: f64 = op.fp();
println!(" Floating-point operand: {}", fp);
},
cs::x86_op_type::X86_OP_MEM => {
let mem: &cs::x86_op_mem = op.mem();
println!(" Memory operand:");
println!(" segment: {}", mem.segment);
println!(" base: {}", mem.base);
println!(" index: {}", mem.index);
println!(" scale: {}", mem.scale);
println!(" disp: {}", mem.disp);
},
cs::x86_op_type::X86_OP_INVALID => {
println!(" Invalid operand");
},
};
}
}
}
} | extern crate capstone_rust; | random_line_split |
operands.rs | // This example shows how to get operands details.
extern crate capstone_rust;
use capstone_rust::capstone as cs;
fn | () {
// Buffer of code.
let code = vec![0x01, 0xc0, 0x33, 0x19, 0x66, 0x83, 0xeb, 0x0a, 0xe8, 0x0c, 0x00, 0x00,
0x00, 0x21, 0x5c, 0xca, 0xfd];
let dec = cs::Capstone::new(cs::cs_arch::CS_ARCH_X86, cs::cs_mode::CS_MODE_32).unwrap();
// Enable detail mode. This is needed if you want to get instruction details.
dec.option(cs::cs_opt_type::CS_OPT_DETAIL, cs::cs_opt_value::CS_OPT_ON).unwrap();
let buf = dec.disasm(code.as_slice(), 0x100, 0).unwrap();
for instr in buf.iter() {
println!("0x{:x}:\t{}\t{}", instr.address, instr.mnemonic, instr.op_str);
let details = instr.detail.unwrap();
// Get the arch-specific part of details.
if let cs::DetailsArch::X86(arch) = details.arch {
for i in 0..arch.op_count {
// Get the current operand.
let op: cs::cs_x86_op = arch.operands[i as usize];
match op.type_ {
cs::x86_op_type::X86_OP_REG => {
let reg: &cs::x86_reg = op.reg();
println!(" Register operand: {}", dec.reg_name(reg.as_int()).unwrap());
// note: reg can be printed also with the `{:?}` formatter.
},
cs::x86_op_type::X86_OP_IMM => {
let imm: i64 = op.imm();
println!(" Immediate operand: 0x{:x}", imm);
},
cs::x86_op_type::X86_OP_FP => {
let fp: f64 = op.fp();
println!(" Floating-point operand: {}", fp);
},
cs::x86_op_type::X86_OP_MEM => {
let mem: &cs::x86_op_mem = op.mem();
println!(" Memory operand:");
println!(" segment: {}", mem.segment);
println!(" base: {}", mem.base);
println!(" index: {}", mem.index);
println!(" scale: {}", mem.scale);
println!(" disp: {}", mem.disp);
},
cs::x86_op_type::X86_OP_INVALID => {
println!(" Invalid operand");
},
};
}
}
}
}
| main | identifier_name |
operands.rs | // This example shows how to get operands details.
extern crate capstone_rust;
use capstone_rust::capstone as cs;
fn main() | {
// Buffer of code.
let code = vec![0x01, 0xc0, 0x33, 0x19, 0x66, 0x83, 0xeb, 0x0a, 0xe8, 0x0c, 0x00, 0x00,
0x00, 0x21, 0x5c, 0xca, 0xfd];
let dec = cs::Capstone::new(cs::cs_arch::CS_ARCH_X86, cs::cs_mode::CS_MODE_32).unwrap();
// Enable detail mode. This is needed if you want to get instruction details.
dec.option(cs::cs_opt_type::CS_OPT_DETAIL, cs::cs_opt_value::CS_OPT_ON).unwrap();
let buf = dec.disasm(code.as_slice(), 0x100, 0).unwrap();
for instr in buf.iter() {
println!("0x{:x}:\t{}\t{}", instr.address, instr.mnemonic, instr.op_str);
let details = instr.detail.unwrap();
// Get the arch-specific part of details.
if let cs::DetailsArch::X86(arch) = details.arch {
for i in 0..arch.op_count {
// Get the current operand.
let op: cs::cs_x86_op = arch.operands[i as usize];
match op.type_ {
cs::x86_op_type::X86_OP_REG => {
let reg: &cs::x86_reg = op.reg();
println!(" Register operand: {}", dec.reg_name(reg.as_int()).unwrap());
// note: reg can be printed also with the `{:?}` formatter.
},
cs::x86_op_type::X86_OP_IMM => {
let imm: i64 = op.imm();
println!(" Immediate operand: 0x{:x}", imm);
},
cs::x86_op_type::X86_OP_FP => {
let fp: f64 = op.fp();
println!(" Floating-point operand: {}", fp);
},
cs::x86_op_type::X86_OP_MEM => {
let mem: &cs::x86_op_mem = op.mem();
println!(" Memory operand:");
println!(" segment: {}", mem.segment);
println!(" base: {}", mem.base);
println!(" index: {}", mem.index);
println!(" scale: {}", mem.scale);
println!(" disp: {}", mem.disp);
},
cs::x86_op_type::X86_OP_INVALID => {
println!(" Invalid operand");
},
};
}
}
}
} | identifier_body | |
main.js | // your answer would go here
$(function (){
var container = $('#rating-container');
$('#max-value').val('');
var update_circles =function (){
for (var i = 0; i < container.attr('max-value'); i++){
container.append('<div class="rating-circle"></div>');
}
}
$('#save-rating').click(function(){
$.post('http://jquery-edx.azurewebsites.net/api/Rating',
{
value: $('.rating-chosen').length,
maxValue: container.attr('max-value')
},
function(data) {
$('#output').text(data.message);
}
);
})
$('#update-max-value').click(function(){
$('.rating-circle').remove();
input_num = parseInt($('#max-value').val());
if (Number.isInteger(input_num) && input_num > 0 && input_num < 100){
container.attr('max-value', input_num);
}
else{
alert('Please input number from 1 to 99');
}
update_circles();
});
container.delegate('.rating-circle', 'mouseover', function(){
$('.rating-chosen').addClass('rating-chosen-removed');
$('.rating-chosen').removeClass('rating-chosen');
$(this).prevAll().andSelf().addClass('rating-hover');
});
container.delegate('.rating-circle', 'mouseout', function(){
$('.rating-chosen-removed').addClass('rating-chosen');
$('.rating-chosen').removeClass('rating-chosen-removed');
$(this).prevAll().andSelf().removeClass('rating-hover');
}); |
container.delegate('.rating-circle', 'click', function(){
$(this).prevAll().andSelf().addClass('rating-chosen');
$(this).nextAll().removeClass('rating-chosen-removed rating-chosen');
});
update_circles();
}); | random_line_split | |
main.js | // your answer would go here
$(function (){
var container = $('#rating-container');
$('#max-value').val('');
var update_circles =function (){
for (var i = 0; i < container.attr('max-value'); i++){
container.append('<div class="rating-circle"></div>');
}
}
$('#save-rating').click(function(){
$.post('http://jquery-edx.azurewebsites.net/api/Rating',
{
value: $('.rating-chosen').length,
maxValue: container.attr('max-value')
},
function(data) {
$('#output').text(data.message);
}
);
})
$('#update-max-value').click(function(){
$('.rating-circle').remove();
input_num = parseInt($('#max-value').val());
if (Number.isInteger(input_num) && input_num > 0 && input_num < 100){
container.attr('max-value', input_num);
}
else |
update_circles();
});
container.delegate('.rating-circle', 'mouseover', function(){
$('.rating-chosen').addClass('rating-chosen-removed');
$('.rating-chosen').removeClass('rating-chosen');
$(this).prevAll().andSelf().addClass('rating-hover');
});
container.delegate('.rating-circle', 'mouseout', function(){
$('.rating-chosen-removed').addClass('rating-chosen');
$('.rating-chosen').removeClass('rating-chosen-removed');
$(this).prevAll().andSelf().removeClass('rating-hover');
});
container.delegate('.rating-circle', 'click', function(){
$(this).prevAll().andSelf().addClass('rating-chosen');
$(this).nextAll().removeClass('rating-chosen-removed rating-chosen');
});
update_circles();
});
| {
alert('Please input number from 1 to 99');
} | conditional_block |
macros.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Standard library macros
//!
//! This modules contains a set of macros which are exported from the standard
//! library. Each macro is available for use when linking against the standard
//! library.
/// The entry point for panic of Rust threads.
///
/// This macro is used to inject panic into a Rust thread, causing the thread to
/// unwind and panic entirely. Each thread's panic can be reaped as the
/// `Box<Any>` type, and the single-argument form of the `panic!` macro will be
/// the value which is transmitted.
///
/// The multi-argument form of this macro panics with a string and has the
/// `format!` syntax for building a string.
///
/// # Examples
///
/// ```should_panic
/// # #![allow(unreachable_code)]
/// panic!();
/// panic!("this is a terrible mistake!");
/// panic!(4); // panic with the value of 4 to be collected elsewhere
/// panic!("this is a {} {message}", "fancy", message = "message");
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable]
macro_rules! panic {
() => ({
panic!("explicit panic")
});
($msg:expr) => ({
$crate::rt::begin_unwind($msg, {
// static requires less code at runtime, more constant data
static _FILE_LINE: (&'static str, u32) = (file!(), line!());
&_FILE_LINE
})
});
($fmt:expr, $($arg:tt)+) => ({
$crate::rt::begin_unwind_fmt(format_args!($fmt, $($arg)+), {
// The leading _'s are to avoid dead code warnings if this is
// used inside a dead function. Just `#[allow(dead_code)]` is
// insufficient, since the user may have
// `#[forbid(dead_code)]` and which cannot be overridden.
static _FILE_LINE: (&'static str, u32) = (file!(), line!());
&_FILE_LINE
})
});
}
/// Macro for printing to the standard output.
///
/// Equivalent to the `println!` macro except that a newline is not printed at
/// the end of the message.
///
/// Note that stdout is frequently line-buffered by default so it may be
/// necessary to use `io::stdout().flush()` to ensure the output is emitted
/// immediately.
///
/// # Panics
///
/// Panics if writing to `io::stdout()` fails.
///
/// # Examples
///
/// ```
/// use std::io::{self, Write};
///
/// print!("this ");
/// print!("will ");
/// print!("be ");
/// print!("on ");
/// print!("the ");
/// print!("same ");
/// print!("line ");
///
/// io::stdout().flush().unwrap();
///
/// print!("this string has a newline, why not choose println! instead?\n");
///
/// io::stdout().flush().unwrap();
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable]
macro_rules! print {
($($arg:tt)*) => ($crate::io::_print(format_args!($($arg)*)));
}
/// Macro for printing to the standard output, with a newline.
///
/// Use the `format!` syntax to write data to the standard output.
/// See `std::fmt` for more information.
///
/// # Panics
///
/// Panics if writing to `io::stdout()` fails.
///
/// # Examples
///
/// ```
/// println!("hello there!");
/// println!("format {} arguments", "some");
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
macro_rules! println {
($fmt:expr) => (print!(concat!($fmt, "\n")));
($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n"), $($arg)*));
}
/// Helper macro for unwrapping `Result` values while returning early with an
/// error if the value of the expression is `Err`. Can only be used in
/// functions that return `Result` because of the early return of `Err` that
/// it provides.
///
/// # Examples
///
/// ```
/// use std::io; | /// let mut file = try!(File::create("my_best_friends.txt"));
/// try!(file.write_all(b"This is a list of my best friends."));
/// println!("I wrote to the file");
/// Ok(())
/// }
/// // This is equivalent to:
/// fn write_to_file_using_match() -> Result<(), io::Error> {
/// let mut file = try!(File::create("my_best_friends.txt"));
/// match file.write_all(b"This is a list of my best friends.") {
/// Ok(_) => (),
/// Err(e) => return Err(e),
/// }
/// println!("I wrote to the file");
/// Ok(())
/// }
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
macro_rules! try {
($expr:expr) => (match $expr {
$crate::result::Result::Ok(val) => val,
$crate::result::Result::Err(err) => {
return $crate::result::Result::Err($crate::convert::From::from(err))
}
})
}
/// A macro to select an event from a number of receivers.
///
/// This macro is used to wait for the first event to occur on a number of
/// receivers. It places no restrictions on the types of receivers given to
/// this macro, this can be viewed as a heterogeneous select.
///
/// # Examples
///
/// ```
/// #![feature(mpsc_select)]
///
/// use std::thread;
/// use std::sync::mpsc;
///
/// // two placeholder functions for now
/// fn long_running_thread() {}
/// fn calculate_the_answer() -> u32 { 42 }
///
/// let (tx1, rx1) = mpsc::channel();
/// let (tx2, rx2) = mpsc::channel();
///
/// thread::spawn(move|| { long_running_thread(); tx1.send(()).unwrap(); });
/// thread::spawn(move|| { tx2.send(calculate_the_answer()).unwrap(); });
///
/// select! {
/// _ = rx1.recv() => println!("the long running thread finished first"),
/// answer = rx2.recv() => {
/// println!("the answer was: {}", answer.unwrap());
/// }
/// }
/// # drop(rx1.recv());
/// # drop(rx2.recv());
/// ```
///
/// For more information about select, see the `std::sync::mpsc::Select` structure.
#[macro_export]
#[unstable(feature = "mpsc_select", issue = "27800")]
macro_rules! select {
(
$($name:pat = $rx:ident.$meth:ident() => $code:expr),+
) => ({
use $crate::sync::mpsc::Select;
let sel = Select::new();
$( let mut $rx = sel.handle(&$rx); )+
unsafe {
$( $rx.add(); )+
}
let ret = sel.wait();
$( if ret == $rx.id() { let $name = $rx.$meth(); $code } else )+
{ unreachable!() }
})
}
// When testing the standard library, we link to the liblog crate to get the
// logging macros. In doing so, the liblog crate was linked against the real
// version of libstd, and uses a different std::fmt module than the test crate
// uses. To get around this difference, we redefine the log!() macro here to be
// just a dumb version of what it should be.
#[cfg(test)]
macro_rules! log {
($lvl:expr, $($args:tt)*) => (
if log_enabled!($lvl) { println!($($args)*) }
)
}
#[cfg(test)]
macro_rules! assert_approx_eq {
($a:expr, $b:expr) => ({
let (a, b) = (&$a, &$b);
assert!((*a - *b).abs() < 1.0e-6,
"{} is not approximately equal to {}", *a, *b);
})
}
/// Built-in macros to the compiler itself.
///
/// These macros do not have any corresponding definition with a `macro_rules!`
/// macro, but are documented here. Their implementations can be found hardcoded
/// into libsyntax itself.
#[cfg(dox)]
pub mod builtin {
/// The core macro for formatted string creation & output.
///
/// This macro produces a value of type `fmt::Arguments`. This value can be
/// passed to the functions in `std::fmt` for performing useful functions.
/// All other formatting macros (`format!`, `write!`, `println!`, etc) are
/// proxied through this one.
///
/// For more information, see the documentation in `std::fmt`.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// let s = fmt::format(format_args!("hello {}", "world"));
/// assert_eq!(s, format!("hello {}", "world"));
///
/// ```
#[macro_export]
macro_rules! format_args { ($fmt:expr, $($args:tt)*) => ({
/* compiler built-in */
}) }
/// Inspect an environment variable at compile time.
///
/// This macro will expand to the value of the named environment variable at
/// compile time, yielding an expression of type `&'static str`.
///
/// If the environment variable is not defined, then a compilation error
/// will be emitted. To not emit a compile error, use the `option_env!`
/// macro instead.
///
/// # Examples
///
/// ```
/// let path: &'static str = env!("PATH");
/// println!("the $PATH variable at the time of compiling was: {}", path);
/// ```
#[macro_export]
macro_rules! env { ($name:expr) => ({ /* compiler built-in */ }) }
/// Optionally inspect an environment variable at compile time.
///
/// If the named environment variable is present at compile time, this will
/// expand into an expression of type `Option<&'static str>` whose value is
/// `Some` of the value of the environment variable. If the environment
/// variable is not present, then this will expand to `None`.
///
/// A compile time error is never emitted when using this macro regardless
/// of whether the environment variable is present or not.
///
/// # Examples
///
/// ```
/// let key: Option<&'static str> = option_env!("SECRET_KEY");
/// println!("the secret key might be: {:?}", key);
/// ```
#[macro_export]
macro_rules! option_env { ($name:expr) => ({ /* compiler built-in */ }) }
/// Concatenate identifiers into one identifier.
///
/// This macro takes any number of comma-separated identifiers, and
/// concatenates them all into one, yielding an expression which is a new
/// identifier. Note that hygiene makes it such that this macro cannot
/// capture local variables, and macros are only allowed in item,
/// statement or expression position, meaning this macro may be difficult to
/// use in some situations.
///
/// # Examples
///
/// ```
/// #![feature(concat_idents)]
///
/// # fn main() {
/// fn foobar() -> u32 { 23 }
///
/// let f = concat_idents!(foo, bar);
/// println!("{}", f());
/// # }
/// ```
#[macro_export]
macro_rules! concat_idents {
($($e:ident),*) => ({ /* compiler built-in */ })
}
/// Concatenates literals into a static string slice.
///
/// This macro takes any number of comma-separated literals, yielding an
/// expression of type `&'static str` which represents all of the literals
/// concatenated left-to-right.
///
/// Integer and floating point literals are stringified in order to be
/// concatenated.
///
/// # Examples
///
/// ```
/// let s = concat!("test", 10, 'b', true);
/// assert_eq!(s, "test10btrue");
/// ```
#[macro_export]
macro_rules! concat { ($($e:expr),*) => ({ /* compiler built-in */ }) }
/// A macro which expands to the line number on which it was invoked.
///
/// The expanded expression has type `u32`, and the returned line is not
/// the invocation of the `line!()` macro itself, but rather the first macro
/// invocation leading up to the invocation of the `line!()` macro.
///
/// # Examples
///
/// ```
/// let current_line = line!();
/// println!("defined on line: {}", current_line);
/// ```
#[macro_export]
macro_rules! line { () => ({ /* compiler built-in */ }) }
/// A macro which expands to the column number on which it was invoked.
///
/// The expanded expression has type `u32`, and the returned column is not
/// the invocation of the `column!()` macro itself, but rather the first macro
/// invocation leading up to the invocation of the `column!()` macro.
///
/// # Examples
///
/// ```
/// let current_col = column!();
/// println!("defined on column: {}", current_col);
/// ```
#[macro_export]
macro_rules! column { () => ({ /* compiler built-in */ }) }
/// A macro which expands to the file name from which it was invoked.
///
/// The expanded expression has type `&'static str`, and the returned file
/// is not the invocation of the `file!()` macro itself, but rather the
/// first macro invocation leading up to the invocation of the `file!()`
/// macro.
///
/// # Examples
///
/// ```
/// let this_file = file!();
/// println!("defined in file: {}", this_file);
/// ```
#[macro_export]
macro_rules! file { () => ({ /* compiler built-in */ }) }
/// A macro which stringifies its argument.
///
/// This macro will yield an expression of type `&'static str` which is the
/// stringification of all the tokens passed to the macro. No restrictions
/// are placed on the syntax of the macro invocation itself.
///
/// # Examples
///
/// ```
/// let one_plus_one = stringify!(1 + 1);
/// assert_eq!(one_plus_one, "1 + 1");
/// ```
#[macro_export]
macro_rules! stringify { ($t:tt) => ({ /* compiler built-in */ }) }
/// Includes a utf8-encoded file as a string.
///
/// This macro will yield an expression of type `&'static str` which is the
/// contents of the filename specified. The file is located relative to the
/// current file (similarly to how modules are found),
///
/// # Examples
///
/// ```rust,ignore
/// let secret_key = include_str!("secret-key.ascii");
/// ```
#[macro_export]
macro_rules! include_str { ($file:expr) => ({ /* compiler built-in */ }) }
/// Includes a file as a reference to a byte array.
///
/// This macro will yield an expression of type `&'static [u8; N]` which is
/// the contents of the filename specified. The file is located relative to
/// the current file (similarly to how modules are found),
///
/// # Examples
///
/// ```rust,ignore
/// let secret_key = include_bytes!("secret-key.bin");
/// ```
#[macro_export]
macro_rules! include_bytes { ($file:expr) => ({ /* compiler built-in */ }) }
/// Expands to a string that represents the current module path.
///
/// The current module path can be thought of as the hierarchy of modules
/// leading back up to the crate root. The first component of the path
/// returned is the name of the crate currently being compiled.
///
/// # Examples
///
/// ```
/// mod test {
/// pub fn foo() {
/// assert!(module_path!().ends_with("test"));
/// }
/// }
///
/// test::foo();
/// ```
#[macro_export]
macro_rules! module_path { () => ({ /* compiler built-in */ }) }
/// Boolean evaluation of configuration flags.
///
/// In addition to the `#[cfg]` attribute, this macro is provided to allow
/// boolean expression evaluation of configuration flags. This frequently
/// leads to less duplicated code.
///
/// The syntax given to this macro is the same syntax as the `cfg`
/// attribute.
///
/// # Examples
///
/// ```
/// let my_directory = if cfg!(windows) {
/// "windows-specific-directory"
/// } else {
/// "unix-directory"
/// };
/// ```
#[macro_export]
macro_rules! cfg { ($cfg:tt) => ({ /* compiler built-in */ }) }
/// Parse the current given file as an expression.
///
/// This is generally a bad idea, because it's going to behave unhygienically.
///
/// # Examples
///
/// ```ignore
/// fn foo() {
/// include!("/path/to/a/file")
/// }
/// ```
#[macro_export]
macro_rules! include { ($cfg:tt) => ({ /* compiler built-in */ }) }
} | /// use std::fs::File;
/// use std::io::prelude::*;
///
/// fn write_to_file_using_try() -> Result<(), io::Error> { | random_line_split |
FamilyCounter.py | from __future__ import print_function
import pandas as pd
from sklearn.base import TransformerMixin
class FamilyCounter(TransformerMixin):
def __init__(self, use=True):
self.use = use
def transform(self, features_raw, **transform_params):
if self.use:
features = features_raw.copy(deep=True)
family = features_raw[['SibSp', 'Parch']]\
.apply(lambda x: x[0] + x[1], axis=1)
features.drop('SibSp', axis=1, inplace=True)
features.drop('Parch', axis=1, inplace=True)
return pd.concat([features,
pd.DataFrame({'Family': family})], axis=1)
return features_raw
def fit(self, X, y=None, **fit_params):
return self
def get_params(self, *args, **kwargs):
return { 'use': self.use }
def | (self, **params):
if 'use' in params:
self.use = params.get('use') | set_params | identifier_name |
FamilyCounter.py | from __future__ import print_function
import pandas as pd
from sklearn.base import TransformerMixin
class FamilyCounter(TransformerMixin):
def __init__(self, use=True):
|
def transform(self, features_raw, **transform_params):
if self.use:
features = features_raw.copy(deep=True)
family = features_raw[['SibSp', 'Parch']]\
.apply(lambda x: x[0] + x[1], axis=1)
features.drop('SibSp', axis=1, inplace=True)
features.drop('Parch', axis=1, inplace=True)
return pd.concat([features,
pd.DataFrame({'Family': family})], axis=1)
return features_raw
def fit(self, X, y=None, **fit_params):
return self
def get_params(self, *args, **kwargs):
return { 'use': self.use }
def set_params(self, **params):
if 'use' in params:
self.use = params.get('use') | self.use = use | identifier_body |
FamilyCounter.py | from __future__ import print_function
import pandas as pd
from sklearn.base import TransformerMixin
class FamilyCounter(TransformerMixin):
def __init__(self, use=True):
self.use = use
def transform(self, features_raw, **transform_params):
if self.use:
features = features_raw.copy(deep=True)
family = features_raw[['SibSp', 'Parch']]\
.apply(lambda x: x[0] + x[1], axis=1)
features.drop('SibSp', axis=1, inplace=True)
features.drop('Parch', axis=1, inplace=True)
return pd.concat([features,
pd.DataFrame({'Family': family})], axis=1)
return features_raw
def fit(self, X, y=None, **fit_params):
return self
def get_params(self, *args, **kwargs):
return { 'use': self.use }
def set_params(self, **params):
if 'use' in params:
| self.use = params.get('use') | conditional_block | |
FamilyCounter.py | from __future__ import print_function
import pandas as pd
from sklearn.base import TransformerMixin
class FamilyCounter(TransformerMixin):
def __init__(self, use=True): | if self.use:
features = features_raw.copy(deep=True)
family = features_raw[['SibSp', 'Parch']]\
.apply(lambda x: x[0] + x[1], axis=1)
features.drop('SibSp', axis=1, inplace=True)
features.drop('Parch', axis=1, inplace=True)
return pd.concat([features,
pd.DataFrame({'Family': family})], axis=1)
return features_raw
def fit(self, X, y=None, **fit_params):
return self
def get_params(self, *args, **kwargs):
return { 'use': self.use }
def set_params(self, **params):
if 'use' in params:
self.use = params.get('use') | self.use = use
def transform(self, features_raw, **transform_params): | random_line_split |
main.js | (function($) {
$(document).ready( function() {
//prettyPhoto
$("a[rel^='prettyPhoto']").prettyPhoto({changepicturecallback: onPictureChanged,});
// TABS
(function() {
$('.b-tabs').on('click', 'li', function() {
var title = $(this),
tab = title.parent().siblings().children().eq(title.index());
if (title.parent().parent().hasClass('a-slide')) {
var curTab = tab.siblings('.active');
curTab.addClass('cur-tab').siblings().removeClass('cur-tab');
}
title.addClass('active').siblings().removeClass('active');
tab.addClass('active').siblings().removeClass('active');
});
}());
// MESSAGES
(function() {
$(document).on('click', '.message-close', function() {
$(this).parent()
.animate({'opacity':'0'}, 220, function() {
$(this).hide(200);
});
})
}());
// SPOILER
$('.spoiler-title').on('click', function() {
$(this)
.toggleClass('active')
.next().slideToggle(250);
});
$('.b-accordion .spoiler-title').on('click', function() {
$(this).parent().siblings()
.children('.spoiler-title').removeClass('active')
.next('.spoiler-content').slideUp(250);
});
// PROGRESS BAR
$('.b-progress-bar').each(function() {
var cap = parseInt($(this).attr('data-capacity'), 10),
val = parseInt($(this).attr('data-value'), 10),
len = 100 * (val / cap) + '%';
$(this).find('.progress-line').css('width', len);
});
// TEAM
$('.member-photo')
.on('mouseenter', function() {
$(this).children('.b-social').stop().fadeIn(200);
})
.on('mouseleave', function() {
$(this).children('.b-social').stop().fadeOut(200);
});
$('.b-member.m-compact')
.on('mouseenter', function() {
$(this).children('.member-meta').stop().fadeIn(200);
})
.on('mouseleave', function() {
$(this).children('.member-meta').stop().fadeOut(200);
});
// PORTFOLIO
$('.work-preview a').on('click', function() {
$(this).parent().trigger('click');
});
// CAROUSEL
$.fn.carousel = function(op) {
var op, ui = {};
op = $.extend({
speed: 500,
autoChange: false,
interval: 5000
}, op);
ui.carousel = this;
ui.items = ui.carousel.find('.carousel-item');
ui.itemsLen = ui.items.length;
// CREATE CONTROLS
ui.ctrl = $('<div />', {'class': 'carousel-control'});
ui.prev = $('<div />', {'class': 'carousel-prev'});
ui.next = $('<div />', {'class': 'carousel-next'});
ui.pagList = $('<ul />', {'class': 'carousel-pagination'});
ui.pagItem = $('<li></li>');
for (var i = 0; i < ui.itemsLen; i++) {
ui.pagItem.clone().appendTo(ui.pagList);
}
ui.prev.appendTo(ui.ctrl);
ui.next.appendTo(ui.ctrl);
ui.pagList.appendTo(ui.ctrl);
ui.ctrl.appendTo(ui.carousel);
ui.carousel.find('.carousel-pagination li').eq(0).addClass('active');
ui.carousel.find('.carousel-item').each(function() {
$(this).hide();
});
ui.carousel.find('.carousel-item').eq(0).show().addClass('active');
// CHANGE ITEM
var changeImage = function(direction, context) {
var current = ui.carousel.find('.carousel-item.active');
if (direction == 'index') {
if(current.index() === context.index())
return false;
context.addClass('active').siblings().removeClass('active');
ui.items.eq(context.index()).addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
if (direction == 'prev') {
if (current.index() == 0) {
ui.carousel.find('.carousel-item:last').addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
else {
current.prev().addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
}
if (direction == undefined) {
if (current.index() == ui.itemsLen - 1) {
ui.carousel.find('.carousel-item:first').addClass('current').fadeIn(300, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
else {
current.next().addClass('current').fadeIn(300, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
}
ui.carousel.find('.carousel-pagination li').eq( ui.carousel.find('.carousel-item.current').index() ).addClass('active').siblings().removeClass('active');
};
ui.carousel
.on('click', 'li', function() {
changeImage('index', $(this));
})
.on('click', '.carousel-prev', function() {
changeImage('prev');
})
.on('click', '.carousel-next', function() {
changeImage();
});
// AUTO CHANGE
if (op.autoChange) {
var changeInterval = setInterval(changeImage, op.interval);
ui.carousel
.on('mouseenter', function() {
clearInterval(changeInterval);
})
.on('mouseleave', function() {
changeInterval = setInterval(changeImage, op.interval);
});
}
return this;
};
$('.b-carousel').each(function() {
$(this).carousel({
autoChange: true
});
});
// BUTTON UP
var btnUp = $('<div/>', {'class':'btn-up'});
btnUp.appendTo('body');
$(document)
.on('click', '.btn-up', function() {
$('html, body').animate({
scrollTop: 0
}, 700);
});
$(window)
.on('scroll', function() {
if ($(this).scrollTop() > 200)
$('.btn-up').addClass('active');
else
$('.btn-up').removeClass('active');
});
});
})(jQuery);
(function ($) {
$(document).ready(function() {
var swap_val = [];
$(".webform-client-form .form-text, .webform-client-form .form-textarea").each(function(i){
swap_val[i] = $(this).val();
$(this).focus(function(){
if ($(this).val() == swap_val[i]) {
$(this).val("");
}
}).blur(function(){
if ($.trim($(this).val()) == "") |
});
});
});
})(jQuery);
// EXPERIENCIAS
(function ($) {
$(document).ready(function() {
$('.experiencia-link').on('click', function() {
moveButtons(".pp_details");
});
});
})(jQuery);
function moveButtons(dest) {
//alert('{^-^}');
jQuery('.pp_full_res > iframe').contents().find(".sharethis-buttons").appendTo(dest);
jQuery('.pp_full_res > iframe').contents().find(".sharethis-buttons").hide();
//$('.pp_social').hide();
}
function onPictureChanged() {
moveButtons(".pp_details");
var href = "http://pinterest.com/pin/create/button/?url="+ encodeURIComponent(location.href.replace(location.hash,"")) +"&media="+jQuery("#fullResImage").attr("src");
jQuery(".pp_social").append("<div class='pinterest' ><a href='"+ href +"' class='pin-it-button' count-layout='horizontal' target='_blank'><img border='0' src='http://assets.pinterest.com/images/PinExt.png' title='Pin It' /></a></div>");
} | {
jQuery(this).val(swap_val[i]);
} | conditional_block |
main.js | (function($) {
$(document).ready( function() {
//prettyPhoto
$("a[rel^='prettyPhoto']").prettyPhoto({changepicturecallback: onPictureChanged,});
// TABS
(function() {
$('.b-tabs').on('click', 'li', function() {
var title = $(this),
tab = title.parent().siblings().children().eq(title.index());
if (title.parent().parent().hasClass('a-slide')) {
var curTab = tab.siblings('.active');
curTab.addClass('cur-tab').siblings().removeClass('cur-tab');
}
title.addClass('active').siblings().removeClass('active');
tab.addClass('active').siblings().removeClass('active');
});
}());
// MESSAGES
(function() {
$(document).on('click', '.message-close', function() {
$(this).parent()
.animate({'opacity':'0'}, 220, function() {
$(this).hide(200);
});
})
}());
// SPOILER
$('.spoiler-title').on('click', function() {
$(this)
.toggleClass('active')
.next().slideToggle(250);
});
$('.b-accordion .spoiler-title').on('click', function() {
$(this).parent().siblings()
.children('.spoiler-title').removeClass('active')
.next('.spoiler-content').slideUp(250);
});
// PROGRESS BAR
$('.b-progress-bar').each(function() {
var cap = parseInt($(this).attr('data-capacity'), 10),
val = parseInt($(this).attr('data-value'), 10),
len = 100 * (val / cap) + '%';
$(this).find('.progress-line').css('width', len);
});
// TEAM
$('.member-photo')
.on('mouseenter', function() {
$(this).children('.b-social').stop().fadeIn(200);
})
.on('mouseleave', function() {
$(this).children('.b-social').stop().fadeOut(200);
});
$('.b-member.m-compact')
.on('mouseenter', function() {
$(this).children('.member-meta').stop().fadeIn(200);
})
.on('mouseleave', function() {
$(this).children('.member-meta').stop().fadeOut(200);
});
// PORTFOLIO
$('.work-preview a').on('click', function() {
$(this).parent().trigger('click');
});
// CAROUSEL
$.fn.carousel = function(op) {
var op, ui = {};
op = $.extend({
speed: 500,
autoChange: false,
interval: 5000
}, op);
ui.carousel = this;
ui.items = ui.carousel.find('.carousel-item');
ui.itemsLen = ui.items.length;
// CREATE CONTROLS
ui.ctrl = $('<div />', {'class': 'carousel-control'});
ui.prev = $('<div />', {'class': 'carousel-prev'});
ui.next = $('<div />', {'class': 'carousel-next'});
ui.pagList = $('<ul />', {'class': 'carousel-pagination'});
ui.pagItem = $('<li></li>');
for (var i = 0; i < ui.itemsLen; i++) {
ui.pagItem.clone().appendTo(ui.pagList);
}
ui.prev.appendTo(ui.ctrl);
ui.next.appendTo(ui.ctrl);
ui.pagList.appendTo(ui.ctrl);
ui.ctrl.appendTo(ui.carousel);
ui.carousel.find('.carousel-pagination li').eq(0).addClass('active');
ui.carousel.find('.carousel-item').each(function() {
$(this).hide();
});
ui.carousel.find('.carousel-item').eq(0).show().addClass('active');
// CHANGE ITEM
var changeImage = function(direction, context) {
var current = ui.carousel.find('.carousel-item.active');
if (direction == 'index') {
if(current.index() === context.index())
return false;
context.addClass('active').siblings().removeClass('active');
ui.items.eq(context.index()).addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
if (direction == 'prev') {
if (current.index() == 0) {
ui.carousel.find('.carousel-item:last').addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
else {
current.prev().addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
}
if (direction == undefined) {
if (current.index() == ui.itemsLen - 1) {
ui.carousel.find('.carousel-item:first').addClass('current').fadeIn(300, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
else {
current.next().addClass('current').fadeIn(300, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
}
ui.carousel.find('.carousel-pagination li').eq( ui.carousel.find('.carousel-item.current').index() ).addClass('active').siblings().removeClass('active');
};
ui.carousel
.on('click', 'li', function() {
changeImage('index', $(this));
})
.on('click', '.carousel-prev', function() {
changeImage('prev');
})
.on('click', '.carousel-next', function() {
changeImage();
});
// AUTO CHANGE
if (op.autoChange) {
var changeInterval = setInterval(changeImage, op.interval);
ui.carousel
.on('mouseenter', function() {
clearInterval(changeInterval);
})
.on('mouseleave', function() {
changeInterval = setInterval(changeImage, op.interval);
});
}
return this;
};
$('.b-carousel').each(function() {
$(this).carousel({
autoChange: true
}); | // BUTTON UP
var btnUp = $('<div/>', {'class':'btn-up'});
btnUp.appendTo('body');
$(document)
.on('click', '.btn-up', function() {
$('html, body').animate({
scrollTop: 0
}, 700);
});
$(window)
.on('scroll', function() {
if ($(this).scrollTop() > 200)
$('.btn-up').addClass('active');
else
$('.btn-up').removeClass('active');
});
});
})(jQuery);
(function ($) {
$(document).ready(function() {
var swap_val = [];
$(".webform-client-form .form-text, .webform-client-form .form-textarea").each(function(i){
swap_val[i] = $(this).val();
$(this).focus(function(){
if ($(this).val() == swap_val[i]) {
$(this).val("");
}
}).blur(function(){
if ($.trim($(this).val()) == "") {
jQuery(this).val(swap_val[i]);
}
});
});
});
})(jQuery);
// EXPERIENCIAS
(function ($) {
$(document).ready(function() {
$('.experiencia-link').on('click', function() {
moveButtons(".pp_details");
});
});
})(jQuery);
function moveButtons(dest) {
//alert('{^-^}');
jQuery('.pp_full_res > iframe').contents().find(".sharethis-buttons").appendTo(dest);
jQuery('.pp_full_res > iframe').contents().find(".sharethis-buttons").hide();
//$('.pp_social').hide();
}
function onPictureChanged() {
moveButtons(".pp_details");
var href = "http://pinterest.com/pin/create/button/?url="+ encodeURIComponent(location.href.replace(location.hash,"")) +"&media="+jQuery("#fullResImage").attr("src");
jQuery(".pp_social").append("<div class='pinterest' ><a href='"+ href +"' class='pin-it-button' count-layout='horizontal' target='_blank'><img border='0' src='http://assets.pinterest.com/images/PinExt.png' title='Pin It' /></a></div>");
} | });
| random_line_split |
main.js | (function($) {
$(document).ready( function() {
//prettyPhoto
$("a[rel^='prettyPhoto']").prettyPhoto({changepicturecallback: onPictureChanged,});
// TABS
(function() {
$('.b-tabs').on('click', 'li', function() {
var title = $(this),
tab = title.parent().siblings().children().eq(title.index());
if (title.parent().parent().hasClass('a-slide')) {
var curTab = tab.siblings('.active');
curTab.addClass('cur-tab').siblings().removeClass('cur-tab');
}
title.addClass('active').siblings().removeClass('active');
tab.addClass('active').siblings().removeClass('active');
});
}());
// MESSAGES
(function() {
$(document).on('click', '.message-close', function() {
$(this).parent()
.animate({'opacity':'0'}, 220, function() {
$(this).hide(200);
});
})
}());
// SPOILER
$('.spoiler-title').on('click', function() {
$(this)
.toggleClass('active')
.next().slideToggle(250);
});
$('.b-accordion .spoiler-title').on('click', function() {
$(this).parent().siblings()
.children('.spoiler-title').removeClass('active')
.next('.spoiler-content').slideUp(250);
});
// PROGRESS BAR
$('.b-progress-bar').each(function() {
var cap = parseInt($(this).attr('data-capacity'), 10),
val = parseInt($(this).attr('data-value'), 10),
len = 100 * (val / cap) + '%';
$(this).find('.progress-line').css('width', len);
});
// TEAM
$('.member-photo')
.on('mouseenter', function() {
$(this).children('.b-social').stop().fadeIn(200);
})
.on('mouseleave', function() {
$(this).children('.b-social').stop().fadeOut(200);
});
$('.b-member.m-compact')
.on('mouseenter', function() {
$(this).children('.member-meta').stop().fadeIn(200);
})
.on('mouseleave', function() {
$(this).children('.member-meta').stop().fadeOut(200);
});
// PORTFOLIO
$('.work-preview a').on('click', function() {
$(this).parent().trigger('click');
});
// CAROUSEL
$.fn.carousel = function(op) {
var op, ui = {};
op = $.extend({
speed: 500,
autoChange: false,
interval: 5000
}, op);
ui.carousel = this;
ui.items = ui.carousel.find('.carousel-item');
ui.itemsLen = ui.items.length;
// CREATE CONTROLS
ui.ctrl = $('<div />', {'class': 'carousel-control'});
ui.prev = $('<div />', {'class': 'carousel-prev'});
ui.next = $('<div />', {'class': 'carousel-next'});
ui.pagList = $('<ul />', {'class': 'carousel-pagination'});
ui.pagItem = $('<li></li>');
for (var i = 0; i < ui.itemsLen; i++) {
ui.pagItem.clone().appendTo(ui.pagList);
}
ui.prev.appendTo(ui.ctrl);
ui.next.appendTo(ui.ctrl);
ui.pagList.appendTo(ui.ctrl);
ui.ctrl.appendTo(ui.carousel);
ui.carousel.find('.carousel-pagination li').eq(0).addClass('active');
ui.carousel.find('.carousel-item').each(function() {
$(this).hide();
});
ui.carousel.find('.carousel-item').eq(0).show().addClass('active');
// CHANGE ITEM
var changeImage = function(direction, context) {
var current = ui.carousel.find('.carousel-item.active');
if (direction == 'index') {
if(current.index() === context.index())
return false;
context.addClass('active').siblings().removeClass('active');
ui.items.eq(context.index()).addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
if (direction == 'prev') {
if (current.index() == 0) {
ui.carousel.find('.carousel-item:last').addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
else {
current.prev().addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
}
if (direction == undefined) {
if (current.index() == ui.itemsLen - 1) {
ui.carousel.find('.carousel-item:first').addClass('current').fadeIn(300, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
else {
current.next().addClass('current').fadeIn(300, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
}
ui.carousel.find('.carousel-pagination li').eq( ui.carousel.find('.carousel-item.current').index() ).addClass('active').siblings().removeClass('active');
};
ui.carousel
.on('click', 'li', function() {
changeImage('index', $(this));
})
.on('click', '.carousel-prev', function() {
changeImage('prev');
})
.on('click', '.carousel-next', function() {
changeImage();
});
// AUTO CHANGE
if (op.autoChange) {
var changeInterval = setInterval(changeImage, op.interval);
ui.carousel
.on('mouseenter', function() {
clearInterval(changeInterval);
})
.on('mouseleave', function() {
changeInterval = setInterval(changeImage, op.interval);
});
}
return this;
};
$('.b-carousel').each(function() {
$(this).carousel({
autoChange: true
});
});
// BUTTON UP
var btnUp = $('<div/>', {'class':'btn-up'});
btnUp.appendTo('body');
$(document)
.on('click', '.btn-up', function() {
$('html, body').animate({
scrollTop: 0
}, 700);
});
$(window)
.on('scroll', function() {
if ($(this).scrollTop() > 200)
$('.btn-up').addClass('active');
else
$('.btn-up').removeClass('active');
});
});
})(jQuery);
(function ($) {
$(document).ready(function() {
var swap_val = [];
$(".webform-client-form .form-text, .webform-client-form .form-textarea").each(function(i){
swap_val[i] = $(this).val();
$(this).focus(function(){
if ($(this).val() == swap_val[i]) {
$(this).val("");
}
}).blur(function(){
if ($.trim($(this).val()) == "") {
jQuery(this).val(swap_val[i]);
}
});
});
});
})(jQuery);
// EXPERIENCIAS
(function ($) {
$(document).ready(function() {
$('.experiencia-link').on('click', function() {
moveButtons(".pp_details");
});
});
})(jQuery);
function | (dest) {
//alert('{^-^}');
jQuery('.pp_full_res > iframe').contents().find(".sharethis-buttons").appendTo(dest);
jQuery('.pp_full_res > iframe').contents().find(".sharethis-buttons").hide();
//$('.pp_social').hide();
}
function onPictureChanged() {
moveButtons(".pp_details");
var href = "http://pinterest.com/pin/create/button/?url="+ encodeURIComponent(location.href.replace(location.hash,"")) +"&media="+jQuery("#fullResImage").attr("src");
jQuery(".pp_social").append("<div class='pinterest' ><a href='"+ href +"' class='pin-it-button' count-layout='horizontal' target='_blank'><img border='0' src='http://assets.pinterest.com/images/PinExt.png' title='Pin It' /></a></div>");
} | moveButtons | identifier_name |
main.js | (function($) {
$(document).ready( function() {
//prettyPhoto
$("a[rel^='prettyPhoto']").prettyPhoto({changepicturecallback: onPictureChanged,});
// TABS
(function() {
$('.b-tabs').on('click', 'li', function() {
var title = $(this),
tab = title.parent().siblings().children().eq(title.index());
if (title.parent().parent().hasClass('a-slide')) {
var curTab = tab.siblings('.active');
curTab.addClass('cur-tab').siblings().removeClass('cur-tab');
}
title.addClass('active').siblings().removeClass('active');
tab.addClass('active').siblings().removeClass('active');
});
}());
// MESSAGES
(function() {
$(document).on('click', '.message-close', function() {
$(this).parent()
.animate({'opacity':'0'}, 220, function() {
$(this).hide(200);
});
})
}());
// SPOILER
$('.spoiler-title').on('click', function() {
$(this)
.toggleClass('active')
.next().slideToggle(250);
});
$('.b-accordion .spoiler-title').on('click', function() {
$(this).parent().siblings()
.children('.spoiler-title').removeClass('active')
.next('.spoiler-content').slideUp(250);
});
// PROGRESS BAR
$('.b-progress-bar').each(function() {
var cap = parseInt($(this).attr('data-capacity'), 10),
val = parseInt($(this).attr('data-value'), 10),
len = 100 * (val / cap) + '%';
$(this).find('.progress-line').css('width', len);
});
// TEAM
$('.member-photo')
.on('mouseenter', function() {
$(this).children('.b-social').stop().fadeIn(200);
})
.on('mouseleave', function() {
$(this).children('.b-social').stop().fadeOut(200);
});
$('.b-member.m-compact')
.on('mouseenter', function() {
$(this).children('.member-meta').stop().fadeIn(200);
})
.on('mouseleave', function() {
$(this).children('.member-meta').stop().fadeOut(200);
});
// PORTFOLIO
$('.work-preview a').on('click', function() {
$(this).parent().trigger('click');
});
// CAROUSEL
$.fn.carousel = function(op) {
var op, ui = {};
op = $.extend({
speed: 500,
autoChange: false,
interval: 5000
}, op);
ui.carousel = this;
ui.items = ui.carousel.find('.carousel-item');
ui.itemsLen = ui.items.length;
// CREATE CONTROLS
ui.ctrl = $('<div />', {'class': 'carousel-control'});
ui.prev = $('<div />', {'class': 'carousel-prev'});
ui.next = $('<div />', {'class': 'carousel-next'});
ui.pagList = $('<ul />', {'class': 'carousel-pagination'});
ui.pagItem = $('<li></li>');
for (var i = 0; i < ui.itemsLen; i++) {
ui.pagItem.clone().appendTo(ui.pagList);
}
ui.prev.appendTo(ui.ctrl);
ui.next.appendTo(ui.ctrl);
ui.pagList.appendTo(ui.ctrl);
ui.ctrl.appendTo(ui.carousel);
ui.carousel.find('.carousel-pagination li').eq(0).addClass('active');
ui.carousel.find('.carousel-item').each(function() {
$(this).hide();
});
ui.carousel.find('.carousel-item').eq(0).show().addClass('active');
// CHANGE ITEM
var changeImage = function(direction, context) {
var current = ui.carousel.find('.carousel-item.active');
if (direction == 'index') {
if(current.index() === context.index())
return false;
context.addClass('active').siblings().removeClass('active');
ui.items.eq(context.index()).addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
if (direction == 'prev') {
if (current.index() == 0) {
ui.carousel.find('.carousel-item:last').addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
else {
current.prev().addClass('current').fadeIn(op.speed, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
}
if (direction == undefined) {
if (current.index() == ui.itemsLen - 1) {
ui.carousel.find('.carousel-item:first').addClass('current').fadeIn(300, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
else {
current.next().addClass('current').fadeIn(300, function() {
current.removeClass('active').hide();
$(this).addClass('active').removeClass('current');
});
}
}
ui.carousel.find('.carousel-pagination li').eq( ui.carousel.find('.carousel-item.current').index() ).addClass('active').siblings().removeClass('active');
};
ui.carousel
.on('click', 'li', function() {
changeImage('index', $(this));
})
.on('click', '.carousel-prev', function() {
changeImage('prev');
})
.on('click', '.carousel-next', function() {
changeImage();
});
// AUTO CHANGE
if (op.autoChange) {
var changeInterval = setInterval(changeImage, op.interval);
ui.carousel
.on('mouseenter', function() {
clearInterval(changeInterval);
})
.on('mouseleave', function() {
changeInterval = setInterval(changeImage, op.interval);
});
}
return this;
};
$('.b-carousel').each(function() {
$(this).carousel({
autoChange: true
});
});
// BUTTON UP
var btnUp = $('<div/>', {'class':'btn-up'});
btnUp.appendTo('body');
$(document)
.on('click', '.btn-up', function() {
$('html, body').animate({
scrollTop: 0
}, 700);
});
$(window)
.on('scroll', function() {
if ($(this).scrollTop() > 200)
$('.btn-up').addClass('active');
else
$('.btn-up').removeClass('active');
});
});
})(jQuery);
(function ($) {
$(document).ready(function() {
var swap_val = [];
$(".webform-client-form .form-text, .webform-client-form .form-textarea").each(function(i){
swap_val[i] = $(this).val();
$(this).focus(function(){
if ($(this).val() == swap_val[i]) {
$(this).val("");
}
}).blur(function(){
if ($.trim($(this).val()) == "") {
jQuery(this).val(swap_val[i]);
}
});
});
});
})(jQuery);
// EXPERIENCIAS
(function ($) {
$(document).ready(function() {
$('.experiencia-link').on('click', function() {
moveButtons(".pp_details");
});
});
})(jQuery);
function moveButtons(dest) {
//alert('{^-^}');
jQuery('.pp_full_res > iframe').contents().find(".sharethis-buttons").appendTo(dest);
jQuery('.pp_full_res > iframe').contents().find(".sharethis-buttons").hide();
//$('.pp_social').hide();
}
function onPictureChanged() | {
moveButtons(".pp_details");
var href = "http://pinterest.com/pin/create/button/?url="+ encodeURIComponent(location.href.replace(location.hash,"")) +"&media="+jQuery("#fullResImage").attr("src");
jQuery(".pp_social").append("<div class='pinterest' ><a href='"+ href +"' class='pin-it-button' count-layout='horizontal' target='_blank'><img border='0' src='http://assets.pinterest.com/images/PinExt.png' title='Pin It' /></a></div>");
} | identifier_body | |
redirect.py | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.utils.http import urlencode
from django.contrib.auth import authenticate
from django.http import (
HttpResponse, HttpResponseBadRequest, HttpResponseForbidden)
from django.core.exceptions import ValidationError
from django.views.decorators.http import require_http_methods
from urlparse import urlunsplit, urlsplit, parse_qsl
from astakos.im.util import restrict_next
from astakos.im.user_utils import login as auth_login, logout
from astakos.im.views.decorators import cookie_fix
import astakos.im.messages as astakos_messages
from astakos.im.settings import REDIRECT_ALLOWED_SCHEMES
import logging
logger = logging.getLogger(__name__)
@require_http_methods(["GET"])
@cookie_fix
def login(request):
"""
If there is no ``next`` request parameter redirects to astakos index page
displaying an error message.
If the request user is authenticated and has signed the approval terms,
redirects to `next` request parameter. If not, redirects to approval terms
in order to return back here after agreeing with the terms.
Otherwise, redirects to login in order to return back here after successful
login.
"""
next = request.GET.get('next')
if not next:
return HttpResponseBadRequest('Missing next parameter')
if not restrict_next(next, allowed_schemes=REDIRECT_ALLOWED_SCHEMES):
|
force = request.GET.get('force', None)
response = HttpResponse()
if force == '' and request.user.is_authenticated():
logout(request)
if request.user.is_authenticated():
# if user has not signed the approval terms
# redirect to approval terms with next the request path
if not request.user.signed_terms:
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('latest_terms')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
renew = request.GET.get('renew', None)
if renew == '':
request.user.renew_token(
flush_sessions=True,
current_key=request.session.session_key
)
try:
request.user.save()
except ValidationError, e:
return HttpResponseBadRequest(e)
# authenticate before login
user = authenticate(
username=request.user.username,
auth_token=request.user.auth_token
)
auth_login(request, user)
logger.info('Token reset for %s' % user.username)
parts = list(urlsplit(next))
parts[3] = urlencode({
'uuid': request.user.uuid,
'token': request.user.auth_token
})
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
else:
# redirect to login with next the request path
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
# delete force parameter
if 'force' in params:
del params['force']
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('login')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
| return HttpResponseForbidden(_(
astakos_messages.NOT_ALLOWED_NEXT_PARAM)) | conditional_block |
redirect.py | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.utils.http import urlencode
from django.contrib.auth import authenticate
from django.http import (
HttpResponse, HttpResponseBadRequest, HttpResponseForbidden)
from django.core.exceptions import ValidationError
from django.views.decorators.http import require_http_methods
from urlparse import urlunsplit, urlsplit, parse_qsl
from astakos.im.util import restrict_next
from astakos.im.user_utils import login as auth_login, logout
from astakos.im.views.decorators import cookie_fix
import astakos.im.messages as astakos_messages
from astakos.im.settings import REDIRECT_ALLOWED_SCHEMES
import logging
logger = logging.getLogger(__name__)
@require_http_methods(["GET"])
@cookie_fix
def login(request):
"""
If there is no ``next`` request parameter redirects to astakos index page
displaying an error message.
If the request user is authenticated and has signed the approval terms,
redirects to `next` request parameter. If not, redirects to approval terms
in order to return back here after agreeing with the terms.
Otherwise, redirects to login in order to return back here after successful
login.
"""
next = request.GET.get('next')
if not next:
return HttpResponseBadRequest('Missing next parameter')
if not restrict_next(next, allowed_schemes=REDIRECT_ALLOWED_SCHEMES):
return HttpResponseForbidden(_(
astakos_messages.NOT_ALLOWED_NEXT_PARAM))
force = request.GET.get('force', None)
response = HttpResponse()
if force == '' and request.user.is_authenticated():
logout(request) | if not request.user.signed_terms:
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('latest_terms')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
renew = request.GET.get('renew', None)
if renew == '':
request.user.renew_token(
flush_sessions=True,
current_key=request.session.session_key
)
try:
request.user.save()
except ValidationError, e:
return HttpResponseBadRequest(e)
# authenticate before login
user = authenticate(
username=request.user.username,
auth_token=request.user.auth_token
)
auth_login(request, user)
logger.info('Token reset for %s' % user.username)
parts = list(urlsplit(next))
parts[3] = urlencode({
'uuid': request.user.uuid,
'token': request.user.auth_token
})
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
else:
# redirect to login with next the request path
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
# delete force parameter
if 'force' in params:
del params['force']
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('login')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response |
if request.user.is_authenticated():
# if user has not signed the approval terms
# redirect to approval terms with next the request path | random_line_split |
redirect.py | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.utils.http import urlencode
from django.contrib.auth import authenticate
from django.http import (
HttpResponse, HttpResponseBadRequest, HttpResponseForbidden)
from django.core.exceptions import ValidationError
from django.views.decorators.http import require_http_methods
from urlparse import urlunsplit, urlsplit, parse_qsl
from astakos.im.util import restrict_next
from astakos.im.user_utils import login as auth_login, logout
from astakos.im.views.decorators import cookie_fix
import astakos.im.messages as astakos_messages
from astakos.im.settings import REDIRECT_ALLOWED_SCHEMES
import logging
logger = logging.getLogger(__name__)
@require_http_methods(["GET"])
@cookie_fix
def | (request):
"""
If there is no ``next`` request parameter redirects to astakos index page
displaying an error message.
If the request user is authenticated and has signed the approval terms,
redirects to `next` request parameter. If not, redirects to approval terms
in order to return back here after agreeing with the terms.
Otherwise, redirects to login in order to return back here after successful
login.
"""
next = request.GET.get('next')
if not next:
return HttpResponseBadRequest('Missing next parameter')
if not restrict_next(next, allowed_schemes=REDIRECT_ALLOWED_SCHEMES):
return HttpResponseForbidden(_(
astakos_messages.NOT_ALLOWED_NEXT_PARAM))
force = request.GET.get('force', None)
response = HttpResponse()
if force == '' and request.user.is_authenticated():
logout(request)
if request.user.is_authenticated():
# if user has not signed the approval terms
# redirect to approval terms with next the request path
if not request.user.signed_terms:
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('latest_terms')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
renew = request.GET.get('renew', None)
if renew == '':
request.user.renew_token(
flush_sessions=True,
current_key=request.session.session_key
)
try:
request.user.save()
except ValidationError, e:
return HttpResponseBadRequest(e)
# authenticate before login
user = authenticate(
username=request.user.username,
auth_token=request.user.auth_token
)
auth_login(request, user)
logger.info('Token reset for %s' % user.username)
parts = list(urlsplit(next))
parts[3] = urlencode({
'uuid': request.user.uuid,
'token': request.user.auth_token
})
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
else:
# redirect to login with next the request path
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
# delete force parameter
if 'force' in params:
del params['force']
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('login')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
| login | identifier_name |
redirect.py | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.utils.http import urlencode
from django.contrib.auth import authenticate
from django.http import (
HttpResponse, HttpResponseBadRequest, HttpResponseForbidden)
from django.core.exceptions import ValidationError
from django.views.decorators.http import require_http_methods
from urlparse import urlunsplit, urlsplit, parse_qsl
from astakos.im.util import restrict_next
from astakos.im.user_utils import login as auth_login, logout
from astakos.im.views.decorators import cookie_fix
import astakos.im.messages as astakos_messages
from astakos.im.settings import REDIRECT_ALLOWED_SCHEMES
import logging
logger = logging.getLogger(__name__)
@require_http_methods(["GET"])
@cookie_fix
def login(request):
| """
If there is no ``next`` request parameter redirects to astakos index page
displaying an error message.
If the request user is authenticated and has signed the approval terms,
redirects to `next` request parameter. If not, redirects to approval terms
in order to return back here after agreeing with the terms.
Otherwise, redirects to login in order to return back here after successful
login.
"""
next = request.GET.get('next')
if not next:
return HttpResponseBadRequest('Missing next parameter')
if not restrict_next(next, allowed_schemes=REDIRECT_ALLOWED_SCHEMES):
return HttpResponseForbidden(_(
astakos_messages.NOT_ALLOWED_NEXT_PARAM))
force = request.GET.get('force', None)
response = HttpResponse()
if force == '' and request.user.is_authenticated():
logout(request)
if request.user.is_authenticated():
# if user has not signed the approval terms
# redirect to approval terms with next the request path
if not request.user.signed_terms:
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('latest_terms')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
renew = request.GET.get('renew', None)
if renew == '':
request.user.renew_token(
flush_sessions=True,
current_key=request.session.session_key
)
try:
request.user.save()
except ValidationError, e:
return HttpResponseBadRequest(e)
# authenticate before login
user = authenticate(
username=request.user.username,
auth_token=request.user.auth_token
)
auth_login(request, user)
logger.info('Token reset for %s' % user.username)
parts = list(urlsplit(next))
parts[3] = urlencode({
'uuid': request.user.uuid,
'token': request.user.auth_token
})
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
else:
# redirect to login with next the request path
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
# delete force parameter
if 'force' in params:
del params['force']
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('login')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response | identifier_body | |
edit_message_reply_markup.rs | use crate::requests::*;
use crate::types::*;
/// Use this method to edit only the reply markup of messages sent by the bot.
#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize)]
#[must_use = "requests do nothing unless sent"]
pub struct EditMessageReplyMarkup {
chat_id: ChatRef,
message_id: MessageId,
#[serde(skip_serializing_if = "Option::is_none")]
reply_markup: Option<ReplyMarkup>,
}
impl Request for EditMessageReplyMarkup {
type Type = JsonRequestType<Self>;
type Response = JsonIdResponse<Message>;
fn serialize(&self) -> Result<HttpRequest, Error> |
}
impl EditMessageReplyMarkup {
pub fn new<C, M, R>(chat: C, message_id: M, reply_markup: Option<R>) -> Self
where
C: ToChatRef,
M: ToMessageId,
R: Into<ReplyMarkup>,
{
EditMessageReplyMarkup {
chat_id: chat.to_chat_ref(),
message_id: message_id.to_message_id(),
reply_markup: reply_markup.map(|r| r.into()),
}
}
}
/// Edit reply markup of messages sent by the bot.
pub trait CanEditMessageReplyMarkup {
fn edit_reply_markup<R>(&self, reply_markup: Option<R>) -> EditMessageReplyMarkup
where
R: Into<ReplyMarkup>;
}
impl<M> CanEditMessageReplyMarkup for M
where
M: ToMessageId + ToSourceChat,
{
fn edit_reply_markup<R>(&self, reply_markup: Option<R>) -> EditMessageReplyMarkup
where
R: Into<ReplyMarkup>,
{
EditMessageReplyMarkup::new(self.to_source_chat(), self.to_message_id(), reply_markup)
}
}
| {
Self::Type::serialize(RequestUrl::method("editMessageReplyMarkup"), self)
} | identifier_body |
edit_message_reply_markup.rs | use crate::requests::*;
use crate::types::*;
/// Use this method to edit only the reply markup of messages sent by the bot.
#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize)]
#[must_use = "requests do nothing unless sent"]
pub struct | {
chat_id: ChatRef,
message_id: MessageId,
#[serde(skip_serializing_if = "Option::is_none")]
reply_markup: Option<ReplyMarkup>,
}
impl Request for EditMessageReplyMarkup {
type Type = JsonRequestType<Self>;
type Response = JsonIdResponse<Message>;
fn serialize(&self) -> Result<HttpRequest, Error> {
Self::Type::serialize(RequestUrl::method("editMessageReplyMarkup"), self)
}
}
impl EditMessageReplyMarkup {
pub fn new<C, M, R>(chat: C, message_id: M, reply_markup: Option<R>) -> Self
where
C: ToChatRef,
M: ToMessageId,
R: Into<ReplyMarkup>,
{
EditMessageReplyMarkup {
chat_id: chat.to_chat_ref(),
message_id: message_id.to_message_id(),
reply_markup: reply_markup.map(|r| r.into()),
}
}
}
/// Edit reply markup of messages sent by the bot.
pub trait CanEditMessageReplyMarkup {
fn edit_reply_markup<R>(&self, reply_markup: Option<R>) -> EditMessageReplyMarkup
where
R: Into<ReplyMarkup>;
}
impl<M> CanEditMessageReplyMarkup for M
where
M: ToMessageId + ToSourceChat,
{
fn edit_reply_markup<R>(&self, reply_markup: Option<R>) -> EditMessageReplyMarkup
where
R: Into<ReplyMarkup>,
{
EditMessageReplyMarkup::new(self.to_source_chat(), self.to_message_id(), reply_markup)
}
}
| EditMessageReplyMarkup | identifier_name |
edit_message_reply_markup.rs | use crate::requests::*;
use crate::types::*;
/// Use this method to edit only the reply markup of messages sent by the bot.
#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize)]
#[must_use = "requests do nothing unless sent"]
pub struct EditMessageReplyMarkup {
chat_id: ChatRef,
message_id: MessageId,
#[serde(skip_serializing_if = "Option::is_none")]
reply_markup: Option<ReplyMarkup>,
}
impl Request for EditMessageReplyMarkup {
type Type = JsonRequestType<Self>;
type Response = JsonIdResponse<Message>;
fn serialize(&self) -> Result<HttpRequest, Error> {
Self::Type::serialize(RequestUrl::method("editMessageReplyMarkup"), self)
}
}
impl EditMessageReplyMarkup {
pub fn new<C, M, R>(chat: C, message_id: M, reply_markup: Option<R>) -> Self
where
C: ToChatRef,
M: ToMessageId,
R: Into<ReplyMarkup>,
{
EditMessageReplyMarkup {
chat_id: chat.to_chat_ref(),
message_id: message_id.to_message_id(),
reply_markup: reply_markup.map(|r| r.into()),
}
}
}
/// Edit reply markup of messages sent by the bot.
pub trait CanEditMessageReplyMarkup {
fn edit_reply_markup<R>(&self, reply_markup: Option<R>) -> EditMessageReplyMarkup
where
R: Into<ReplyMarkup>;
}
impl<M> CanEditMessageReplyMarkup for M
where
M: ToMessageId + ToSourceChat,
{
fn edit_reply_markup<R>(&self, reply_markup: Option<R>) -> EditMessageReplyMarkup
where
R: Into<ReplyMarkup>, | {
EditMessageReplyMarkup::new(self.to_source_chat(), self.to_message_id(), reply_markup)
}
} | random_line_split | |
index3.js | import {TextFileLoader} from './painter/TextFileLoader'
import {InstancedGrid} from './painter/InstancedGrid'
import {QCurve} from './painter/QCurve'
import {QCurveObj} from './painter/QCurve'
import {StrokePath} from './painter/StrokePath'
import {Particle} from './painter/Particle'
import {VectorField} from './painter/VectorField'
import Preloader from "./preloader/Preloader"
import {MathUtils} from "./util/MathUtils"
import * as THREE from 'three'
import * as Stats from 'stats-js'
/**
* Created by David on 14/12/2016.
*/
//if ( !Detector.webgl ) Detector.addGetWebGLMessage();
///////////////////////
var container, stats;
var controls;
var camera, scene, renderer;
//var orientations;
//var offsets;
//var lengths;
//var speeds;
var fragshader;
var vertshader;
TextFileLoader.Instance().loadFiles(["shaders/fragShader.glsl","shaders/vertShader.glsl"], filesLoaded);
var textureFilenames = [];
for(var i=1;i<=41;++i)
{
textureFilenames.push( "colourful" + "/" + ((i < 10) ?"0":"") + i + ".jpg");
}
var textureIX = 0;
function filesLoaded(files)
{
fragshader = files[0];
vertshader = files[1];
makeMeshObj();
}
// emit particles from a bound, bottom of the bound is the horizon line?
// if camera is cetnered, then it should be centered in x pos
// if camera is not centered,
var ExportMode = {
"png": "png",
"jpg": "jpg"
};
var exportMode = ExportMode.png;
//var renderScale = 5.4;
var renderScale,bw,bh,bottomy;
var Mode = {
"skyline": "skyline",
"maps": "maps"
};
var mode = Mode.maps;
//renderScale = 7.2;
renderScale = 1.0;
if(mode == Mode.skyline)
{
bw = 1000;
bh = bw*(3/4);
bottomy = bh *0.6;
}
else{
// "maps"
// renderScale = 7.2;
bh = 1000;
bw = bh*(3/4);
bottomy = bh * 1.0;
}
var w = bw * renderScale;
var h = bh * renderScale;
var noiseOffsetX, noiseOffsetY ;
function init() {
randomiseField();
container = document.getElementById( 'container' );
//var w = window.innerWidth;
//var h = window.innerHeight;
// w = 6000;
// h = 6000;
// todo uncenter the camera
//camera = new THREE.OrthographicCamera( w / - 2, w / 2, h / 2, h / - 2, - 500, 500 );
camera = new THREE.OrthographicCamera( 0, w , h , 0, - 500, 500 );
// camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 1, 10000 );
//camera.position.z = 20;
//camera.position.z = 50;
renderer = new THREE.WebGLRenderer({ antialias: true, preserveDrawingBuffer: true });
scene = new THREE.Scene();
// mouse orbit control
/*
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.enableDamping = true;
controls.dampingFactor = 0.25;
controls.enableZoom = false;*/
/*
controls = new THREE.TrackballControls(camera);
controls.rotateSpeed = 10.0;
controls.zoomSpeed = 10.2;
controls.panSpeed = 0.8;
controls.noZoom = false;
controls.noPan = false;
controls.staticMoving = true;
controls.dynamicDampingFactor = 0.3;
*/
if ( renderer.extensions.get( 'ANGLE_instanced_arrays' ) === false ) {
document.getElementById( "notSupported" ).style.display = "";
return;
}
renderer.setClearColor( 0xFFFFFF );
renderer.autoClear = false;
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( w,h);
var div = document.getElementById("canvasContainer");
div.appendChild(renderer.domElement );
// document.body.appendChild( renderer.domElement );
renderer.clear();
/*
stats = new Stats();
stats.domElement.style.position = 'absolute';
stats.domElement.style.top = '0px';
document.body.appendChild( stats.domElement );
*/
window.addEventListener( 'resize', onWindowResize, false );
renderer.domElement.addEventListener( 'mousemove', onMouseMove, true );
renderer.domElement.addEventListener( 'mousedown', onMouseDown, true );
renderer.domElement.addEventListener( 'mouseup', onMouseUp, true );
createGui();
reset();
}
var ismousedown =false;
var mousex = 0;
var mousey = 0;
document.addEventListener('keydown',onDocumentKeyDown,false);
function onDocumentKeyDown(event) {
console.log(event);
if(event.key == 's') {
//saveAsImage();
// savePixels();
saveCanvas();
}
if(event.key == " ")
{
// next teuxtre
textureIX = (textureIX + 1) % textureFilenames.length;
reset();
}
if(event.key == "r")
{
randomiseField();
// refresh noise field
reset();
}
}
var gui;
var originalPoints = [];
for(var i = 0; i< 4;++i)
{
originalPoints.push({"x":0, "y":0});
}
var points = originalPoints.slice(0);
var rectModel = {
xOffset: 0.5, //
yOffset: 0.5,
xScale: 1,
yScale: 1,
imageFilename: "image"
};
updatePoints(); // inital points should be a normalised rect
console.log(points);
//////////////////////////////////////////////////////////////////////////////////////////////
// colour map sampling options
var particleOptions = {
directionForward: true
}
function createGui()
{
gui = new dat.GUI();
// My sample abject
var obj = {
flipX: function() {flipX();},
flipY: function() {flipY();},
rotate: function(){rotate();},
resetPoints: function(){resetPoints();}
};
// Number field with slider
gui.add(rectModel, "xOffset").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "yOffset").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "xScale").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "yScale").min(0).max(1).step(0.01).onChange(function(val) {
//console.log("changed " + val);
updatePoints();
}
).listen();
// Checkbox field
gui.add(obj, "flipX");
gui.add(obj, "flipY");
gui.add(obj, "rotate");
gui.add(obj, "resetPoints");
gui.add(rectModel, "imageFilename").listen();
gui.add(particleOptions, "directionForward").listen();
}
function flipX()
{
console.log("flip x");
var temp = points.slice(0);
points[0] = temp[1];
points[1] = temp[0];
points[2] = temp[3];
points[3] = temp[2];
console.log(points);
}
function flipY()
{
console.log("flipY");
var temp = points.slice(0);
points[0] = temp[3];
points[1] = temp[2];
points[2] = temp[1];
points[3] = temp[0];
console.log(points);
}
function rotate()
{
var temp = points.slice(0);
points[0] = temp[1];
points[1] = temp[2];
points[2] = temp[3];
points[3] = temp[0];
console.log(points);
}
function resetPoints()
{
rectModel.xOffset = 0.5;
rectModel.yOffset = 0.5;
rectModel.xScale = 1;
rectModel.yScale = 1;
updatePoints();
points = originalPoints.slice(0);
console.log(points);
}
function updatePoints()
{
var w = rectModel.xScale;
var h = rectModel.yScale;
var x = (rectModel.xOffset - 0.5)*(1-w) - 0.5*w +0.5;
var y = (rectModel.yOffset - 0.5)*(1-h) - 0.5*h + 0.5;
originalPoints[0].x = x;
originalPoints[0].y = y;
originalPoints[1].x = x + w;
originalPoints[1].y = y;
originalPoints[2].x = x + w;
originalPoints[2].y = y + h;
originalPoints[3].x = x ;
originalPoints[3].y = y + h;
console.log(originalPoints);
}
function getPoint(x,y)
{
var p0 = points[0];
var p1 = points[1];
var p2 = points[2];
var p3 = points[3];
var x0 = p0.x + x*(p1.x - p0.x);
var y0 = p0.y + x*(p1.y - p0.y);
var x1 = p2.x + x*(p2.x - p3.x);
var y1 = p2.y + x*(p2.y - p3.y);
var tx = x0 + y*(x1- x0);
var ty = y0 + y*(y1- y0);
return {"y":ty,"x":tx};
}
//////////////////////////////////////////////////////////////////////////////////////////////
function randomiseField()
{
noiseOffsetX = MathUtils.GetRandomFloat(0,100);
noiseOffsetY = MathUtils.GetRandomFloat(0,100);
}
function reset()
{
console.log("reset");
imageDataLoaded = false;
// clear
renderer.clear();
// choose a texture and load it
var loader = new THREE.TextureLoader();
loader.setPath('textures/');
var imageURL = textureFilenames[textureIX];
// var imageURL = 'grad.png';
console.log("imageURL "+ imageURL);
rectModel.imageFilename = imageURL; // show filename for debugin
var _this = this;
var texture = loader.load(imageURL,
function ( texture ) {
// do something with the texture on complete
// console.log("texture", texture);
imagedata = getImageData(texture.image );
// console.log("imagedata", imagedata);
imageDataLoaded = true;
//test();
}
);
}
/////////////////////////////////////////////////////////////////////////////////////////////
function saveCanvas()
{
if(exportMode == ExportMode.png) {
renderer.domElement.toBlob(function(blob) {
saveAs(blob, "output" + MathUtils.GenerateUUID() + ".png");
});
}
else {
renderer.domElement.toBlob(function (blob) {
saveAs(blob, "output" + MathUtils.GenerateUUID() + ".jpg");
}, "image/jpeg");
}
}
////////////////////////////////////////////////
function getImageData( image ) {
var canvas = document.createElement( 'canvas' );
canvas.width = image.width;
canvas.height = image.height;
var context = canvas.getContext( '2d' );
context.drawImage( image, 0, 0 );
return context.getImageData( 0, 0, image.width, image.height );
}
function getPixel( imagedata, nx, ny ) {
var x = Math.floor( nx *(imagedata.width - 1));
var y = Math.floor( ny *(imagedata.height-1));
var position = ( x + imagedata.width * y ) * 4, data = imagedata.data;
return { r: data[ position ] /255.0, g: data[ position + 1 ]/255.0, b: data[ position + 2 ]/255.0, a: data[ position + 3 ]/255.0 };
}
var imagedata = null;
var imageDataLoaded = false;
//var preloader = new Preloader();
//load();
function load() {
//preloader.load(() => {
// this.scene.add(this.cube);
//this.render();
// var imgTexture = THREE.ImageUtils.loadTexture( "environment/floor.jpg" );
var loader = new THREE.TextureLoader();
loader.setPath('textures/');
var imageURL = '01.jpg';
// var imageURL = 'grad.png';
var _this = this;
var texture = loader.load(imageURL,
function (texture) {
// do something with the texture on complete
console.log("texture", texture);
imagedata = getImageData(texture.image);
console.log("imagedata", imagedata);
imageDataLoaded = true;
//test();
}
);
}
var field = new VectorField();
var p0s;
var p1s;
var p2s;
var q0s;
var q1s;
var q2s;
var colours0;
var colours1;
var startRs;
var endRs;
var nInstances;
var basepath;
var pathobj;
var ready = false;
var bufferix;
var grid;
function makeMeshObj()
{
//
basepath = new QCurve();
basepath.p1.x = 200;
basepath.p1.y = 0;
basepath.p2.x = 200;
basepath.p2.y = 100;
pathobj = new QCurveObj(basepath, 10);
// pathobj.addToScene(scene);
// geometry
nInstances = 200000; // max number of instances that can be render in one go
bufferix = 0;
grid = new InstancedGrid();
grid.print();
var nx = 2; // keep this as 2
var nz = 5; // resolution
var zLen = 25;
//grid.createTube(nx,nz,1,1,zLen);
//grid.createRectTube(7,5,100,40);
grid.createFlatGrid(nx,nz,1,1);
grid.createIndices(nx,nz);
grid.createUVGrid(nx,nz);
// per instance data
// offsets = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( false );
p0s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
p1s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
p2s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q0s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q1s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q2s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
colours0 = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 4 ), 4, 1 ).setDynamic( true);
colours1 = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 4 ), 4, 1 ).setDynamic( true);
// remove this
// startRs = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 1 ), 1, 1 ).setDynamic( true);
// endRs = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 1 ), 1, 1 ).setDynamic( true);
//grid.geometry.addAttribute( 'offset', offsets ); // per mesh translation
grid.geometry.addAttribute( 'p0', p0s);
grid.geometry.addAttribute( 'p1', p1s);
grid.geometry.addAttribute( 'p2', p2s);
grid.geometry.addAttribute( 'q0', q0s);
grid.geometry.addAttribute( 'q1', q1s);
grid.geometry.addAttribute( 'q2', q2s);
grid.geometry.addAttribute( 'colour0', colours0);
grid.geometry.addAttribute( 'colour1', colours1);
// grid.geometry.addAttribute( 'startR', startRs);
// grid.geometry.addAttribute( 'endR', endRs);
var material = new THREE.RawShaderMaterial( {
uniforms: {
//map: { type: "t", value: texture }
},
vertexShader: vertshader,
//fragmentShader: FragShader,
fragmentShader: fragshader,
//side: THREE.DoubleSide,
transparent: true,
// wireframe: true
} );
var mesh = new THREE.Mesh( grid.geometry, material );
mesh.frustumCulled = false;
//var zoom = 0.5;
// mesh.position.y = meshPositionY;
mesh.scale.set(renderScale,renderScale);
scene.add( mesh );
//add a test horizon line
//addTestLine();
ready = true;
// drawParticle();
}
function addTestLine()
{
var material = new THREE.LineBasicMaterial({
color: 0xff0000
});
var geometry = new THREE.Geometry();
geometry.vertices.push(
new THREE.Vector3( -1000*renderScale, 0, 0 ),
new THREE.Vector3( 1000*renderScale, 0, 0 )
);
var line = new THREE.Line( geometry, material );
scene.add( line );
}
Math.clamp = function(number, min, max) {
return Math.max(min, Math.min(number, max));
}
function onMouseMove(event){
mousex = (event.clientX);
mousey = (event.clientY);
console.log(mousex,mousey);
//mouseY = (event.clientY - window.innerHeight/2) / window.innerHeight/2;
}
function onMouseUp(event){
ismousedown = false;
console.log("onMouseUp");
}
function onMouseDown(event){
ismousedown = true;
console.log("onMouseDown");
nsteps = 20 + Math.random()*160;
}
var nsteps = 20;
function drawParticleUpdate()
{
if(ismousedown)
{
var n = 50;
var nx = mousex/w + Math.random()*0.02 ;
var ny = mousey/h + Math.random()*0.02 ;
console.log(mousex/w, mousex/h);
var direction = particleOptions.directionForward ? 1: -1;// (Math.random() < 0.5)? -1 : 1;
var thickness = 0.5 + Math.random()*1.5;
var alpha = 0.3 + 0.7*Math.random();
for (var i = 0; i < n; ++i) {
drawParticle(nx,ny, direction, nsteps, thickness, alpha);
}
}
//drawRandomParticles(400);
}
function drawRandomParticles(n)
{
for (var i = 0; i < n; ++i) {
// particles are nomralised [0,1] -> remap to [-h,h]
var nx = Math.random()*0.99 ;
var ny = Math.random()*0.99 ;
var direction = (Math.random() < 0.5)? -1 : 1;
var thickness = 0.5 + Math.random()*1.5;
var nsteps = 30 + Math.random()*100;
var alpha = 0.3 + 0.7*Math.random();
drawParticle(nx,ny, direction, nsteps, thickness, alpha);
}
}
// draw particle at nx,ny
function drawParticle(nx,ny, direction, nsteps, thickness, alpha)
{
// todo use canvas coordinates
// convert to the emission bound
var canvasx = nx*bw; // stretch the width
var canvasy = bh - ny*( bottomy); // do
//get slight random position
var randomColPositionAmount= 0.01;
var colx = Math.clamp( MathUtils.GetRandomFloat(nx- randomColPositionAmount,nx + randomColPositionAmount) ,0,0.999);
var coly = Math.clamp( MathUtils.GetRandomFloat(ny- randomColPositionAmount,ny + randomColPositionAmount) ,0,0.999);
var transformedPoint = getPoint(colx,coly);
colx = transformedPoint.x;
coly = transformedPoint.y;
var col = getPixel(imagedata, colx,coly);
//var x =-1000+ nx*2000;
//var y =-450+ ny*950;
var particle;
// set a random seed
var seed = MathUtils.GetRandomIntBetween(0,100000);
// draw the shading (alpha black)
var brightness = 0.5;
MathUtils.SetSeed(seed); // rset seed
particle = new Particle(field);
var thicknessShade = Math.min( thickness + 4, thickness *1.2);
particle.init( canvasx,canvasy, thicknessShade, direction);
particle.noiseOffsetX = noiseOffsetX;
particle.noiseOffsetY = noiseOffsetY;
particle.strokePath.colour = new THREE.Vector3(col.r*brightness,col.g*brightness,col.b*brightness);
particle.strokePath.alpha = alpha*0.2;
for(var i =0; i< nsteps;++i)
{
particle.update(thicknessShade);
}
bufferix = particle.strokePath.constructPath(p0s,p1s,p2s,q0s,q1s,q2s,colours0,colours1,bufferix);
// draw the colour
MathUtils.SetSeed(seed); // rset seed
particle = new Particle(field);
particle.init(canvasx,canvasy, thickness, direction);
particle.noiseOffsetX = noiseOffsetX;
particle.noiseOffsetY = noiseOffsetY;
particle.strokePath.colour = new THREE.Vector3(col.r,col.g,col.b);
particle.strokePath.alpha =alpha;
for(var i =0; i< nsteps;++i)
{
particle.update(thickness);
}
bufferix = particle.strokePath.constructPath(p0s,p1s,p2s,q0s,q1s,q2s,colours0,colours1,bufferix);
/*
// test a couple of curves
var i = 0;
p0s.setXY(i, 0,0);
p1s.setXY(i, 102,0);
p2s.setXY(i, 202,25);
q0s.setXY(i, 0,0 + 50);
q1s.setXY(i, 102,0 + 50);
q2s.setXY(i, 202,25);
*/
}
function | ( event ) {
/*
camera.left = window.innerWidth / - 2;
camera.right = window.innerWidth / 2;
camera.top = window.innerHeight / 2;
camera.bottom = window.innerHeight / - 2;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
*/
}
//
function animate() {
requestAnimationFrame( animate );
if(ready && imageDataLoaded) {
bufferix = 0;
// console.log("imageDataLoaded", imageDataLoaded);
drawParticleUpdate();
grid.setDrawCount(bufferix);
//console.log(bufferix);
// update
p0s.needsUpdate = true;
p1s.needsUpdate = true;
p2s.needsUpdate = true;
q0s.needsUpdate = true;
q1s.needsUpdate = true;
q2s.needsUpdate = true;
colours0.needsUpdate =true;
colours1.needsUpdate =true;
render();
}
// stats.update();
//controls.update(); // required if controls.enableDamping = true, or if controls.autoRotate = true
}
var lastTime = 0;
var moveQ = ( new THREE.Quaternion( .5, .5, .5, 0.0 ) ).normalize();
var tmpQ = new THREE.Quaternion();
var currentQ = new THREE.Quaternion();
function render() {
var time = performance.now();
if(ready) {
//var object = scene.children[0];
var x;
var age;
var introDuration = 0.2;
var outroDuration = 0.2;
var r;
// endRs.needsUpdate = true;
// startRs.needsUpdate = true;
}
//renderer.autoClear = false;
renderer.render( scene, camera );
// pathobj.update();
lastTime = time;
}
init();
animate(); | onWindowResize | identifier_name |
index3.js | import {TextFileLoader} from './painter/TextFileLoader'
import {InstancedGrid} from './painter/InstancedGrid'
import {QCurve} from './painter/QCurve'
import {QCurveObj} from './painter/QCurve'
import {StrokePath} from './painter/StrokePath'
import {Particle} from './painter/Particle'
import {VectorField} from './painter/VectorField'
import Preloader from "./preloader/Preloader"
import {MathUtils} from "./util/MathUtils"
import * as THREE from 'three'
import * as Stats from 'stats-js'
/**
* Created by David on 14/12/2016.
*/
//if ( !Detector.webgl ) Detector.addGetWebGLMessage();
///////////////////////
var container, stats;
var controls;
var camera, scene, renderer;
//var orientations;
//var offsets;
//var lengths;
//var speeds;
var fragshader;
var vertshader;
TextFileLoader.Instance().loadFiles(["shaders/fragShader.glsl","shaders/vertShader.glsl"], filesLoaded);
var textureFilenames = [];
for(var i=1;i<=41;++i)
{
textureFilenames.push( "colourful" + "/" + ((i < 10) ?"0":"") + i + ".jpg");
}
var textureIX = 0;
function filesLoaded(files)
{
fragshader = files[0];
vertshader = files[1];
makeMeshObj();
}
// emit particles from a bound, bottom of the bound is the horizon line?
// if camera is cetnered, then it should be centered in x pos
// if camera is not centered,
var ExportMode = {
"png": "png",
"jpg": "jpg"
};
var exportMode = ExportMode.png;
//var renderScale = 5.4;
var renderScale,bw,bh,bottomy;
var Mode = {
"skyline": "skyline",
"maps": "maps"
};
var mode = Mode.maps;
//renderScale = 7.2;
renderScale = 1.0;
if(mode == Mode.skyline)
{
bw = 1000;
bh = bw*(3/4);
bottomy = bh *0.6;
}
else{
// "maps"
// renderScale = 7.2;
bh = 1000;
bw = bh*(3/4);
bottomy = bh * 1.0;
}
var w = bw * renderScale;
var h = bh * renderScale;
var noiseOffsetX, noiseOffsetY ;
function init() {
randomiseField();
container = document.getElementById( 'container' );
//var w = window.innerWidth;
//var h = window.innerHeight;
// w = 6000;
// h = 6000;
// todo uncenter the camera
//camera = new THREE.OrthographicCamera( w / - 2, w / 2, h / 2, h / - 2, - 500, 500 );
camera = new THREE.OrthographicCamera( 0, w , h , 0, - 500, 500 );
// camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 1, 10000 );
//camera.position.z = 20;
//camera.position.z = 50;
renderer = new THREE.WebGLRenderer({ antialias: true, preserveDrawingBuffer: true });
scene = new THREE.Scene();
// mouse orbit control
/*
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.enableDamping = true;
controls.dampingFactor = 0.25;
controls.enableZoom = false;*/
/*
controls = new THREE.TrackballControls(camera);
controls.rotateSpeed = 10.0;
controls.zoomSpeed = 10.2;
controls.panSpeed = 0.8;
controls.noZoom = false;
controls.noPan = false;
controls.staticMoving = true;
controls.dynamicDampingFactor = 0.3;
*/
if ( renderer.extensions.get( 'ANGLE_instanced_arrays' ) === false ) {
document.getElementById( "notSupported" ).style.display = "";
return;
}
renderer.setClearColor( 0xFFFFFF );
renderer.autoClear = false;
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( w,h);
var div = document.getElementById("canvasContainer");
div.appendChild(renderer.domElement );
// document.body.appendChild( renderer.domElement );
renderer.clear();
/*
stats = new Stats();
stats.domElement.style.position = 'absolute';
stats.domElement.style.top = '0px';
document.body.appendChild( stats.domElement );
*/
window.addEventListener( 'resize', onWindowResize, false );
renderer.domElement.addEventListener( 'mousemove', onMouseMove, true );
renderer.domElement.addEventListener( 'mousedown', onMouseDown, true );
renderer.domElement.addEventListener( 'mouseup', onMouseUp, true );
createGui();
reset();
}
var ismousedown =false;
var mousex = 0;
var mousey = 0;
document.addEventListener('keydown',onDocumentKeyDown,false);
function onDocumentKeyDown(event) {
console.log(event);
if(event.key == 's') {
//saveAsImage();
// savePixels();
saveCanvas();
}
if(event.key == " ")
{
// next teuxtre
textureIX = (textureIX + 1) % textureFilenames.length;
reset();
}
if(event.key == "r")
{
randomiseField();
// refresh noise field
reset();
}
}
var gui;
var originalPoints = [];
for(var i = 0; i< 4;++i)
{
originalPoints.push({"x":0, "y":0});
}
var points = originalPoints.slice(0);
var rectModel = {
xOffset: 0.5, //
yOffset: 0.5,
xScale: 1,
yScale: 1,
imageFilename: "image"
};
updatePoints(); // inital points should be a normalised rect
console.log(points);
//////////////////////////////////////////////////////////////////////////////////////////////
// colour map sampling options
var particleOptions = {
directionForward: true
}
function createGui()
{
gui = new dat.GUI();
// My sample abject
var obj = {
flipX: function() {flipX();},
flipY: function() {flipY();},
rotate: function(){rotate();},
resetPoints: function(){resetPoints();}
};
// Number field with slider
gui.add(rectModel, "xOffset").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "yOffset").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "xScale").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "yScale").min(0).max(1).step(0.01).onChange(function(val) {
//console.log("changed " + val);
updatePoints();
}
).listen();
// Checkbox field
gui.add(obj, "flipX");
gui.add(obj, "flipY");
gui.add(obj, "rotate");
gui.add(obj, "resetPoints");
gui.add(rectModel, "imageFilename").listen();
gui.add(particleOptions, "directionForward").listen();
}
function flipX()
{
console.log("flip x");
var temp = points.slice(0);
points[0] = temp[1];
points[1] = temp[0];
points[2] = temp[3];
points[3] = temp[2];
console.log(points);
}
function flipY()
{
console.log("flipY");
var temp = points.slice(0);
points[0] = temp[3];
points[1] = temp[2];
points[2] = temp[1];
points[3] = temp[0];
console.log(points);
}
function rotate()
{
var temp = points.slice(0);
points[0] = temp[1];
points[1] = temp[2];
points[2] = temp[3];
points[3] = temp[0];
console.log(points);
}
function resetPoints()
{
rectModel.xOffset = 0.5;
rectModel.yOffset = 0.5;
rectModel.xScale = 1;
rectModel.yScale = 1;
updatePoints();
points = originalPoints.slice(0);
console.log(points);
}
function updatePoints()
{
var w = rectModel.xScale;
var h = rectModel.yScale;
var x = (rectModel.xOffset - 0.5)*(1-w) - 0.5*w +0.5;
var y = (rectModel.yOffset - 0.5)*(1-h) - 0.5*h + 0.5;
originalPoints[0].x = x;
originalPoints[0].y = y;
originalPoints[1].x = x + w;
originalPoints[1].y = y;
originalPoints[2].x = x + w;
originalPoints[2].y = y + h;
originalPoints[3].x = x ;
originalPoints[3].y = y + h;
console.log(originalPoints);
}
function getPoint(x,y)
{
var p0 = points[0];
var p1 = points[1];
var p2 = points[2];
var p3 = points[3];
var x0 = p0.x + x*(p1.x - p0.x);
var y0 = p0.y + x*(p1.y - p0.y);
var x1 = p2.x + x*(p2.x - p3.x);
var y1 = p2.y + x*(p2.y - p3.y);
var tx = x0 + y*(x1- x0);
var ty = y0 + y*(y1- y0);
return {"y":ty,"x":tx};
}
//////////////////////////////////////////////////////////////////////////////////////////////
function randomiseField()
{
noiseOffsetX = MathUtils.GetRandomFloat(0,100);
noiseOffsetY = MathUtils.GetRandomFloat(0,100);
}
function reset()
{
console.log("reset");
imageDataLoaded = false;
// clear
renderer.clear();
// choose a texture and load it
var loader = new THREE.TextureLoader();
loader.setPath('textures/');
var imageURL = textureFilenames[textureIX];
// var imageURL = 'grad.png';
console.log("imageURL "+ imageURL);
rectModel.imageFilename = imageURL; // show filename for debugin
var _this = this;
var texture = loader.load(imageURL,
function ( texture ) {
// do something with the texture on complete
// console.log("texture", texture);
imagedata = getImageData(texture.image );
// console.log("imagedata", imagedata);
imageDataLoaded = true;
//test();
}
);
}
/////////////////////////////////////////////////////////////////////////////////////////////
function saveCanvas()
{
if(exportMode == ExportMode.png) {
renderer.domElement.toBlob(function(blob) {
saveAs(blob, "output" + MathUtils.GenerateUUID() + ".png");
});
}
else {
renderer.domElement.toBlob(function (blob) {
saveAs(blob, "output" + MathUtils.GenerateUUID() + ".jpg");
}, "image/jpeg");
}
}
////////////////////////////////////////////////
function getImageData( image ) {
var canvas = document.createElement( 'canvas' );
canvas.width = image.width;
canvas.height = image.height;
var context = canvas.getContext( '2d' );
context.drawImage( image, 0, 0 );
return context.getImageData( 0, 0, image.width, image.height );
}
function getPixel( imagedata, nx, ny ) {
var x = Math.floor( nx *(imagedata.width - 1));
var y = Math.floor( ny *(imagedata.height-1));
var position = ( x + imagedata.width * y ) * 4, data = imagedata.data;
return { r: data[ position ] /255.0, g: data[ position + 1 ]/255.0, b: data[ position + 2 ]/255.0, a: data[ position + 3 ]/255.0 };
}
var imagedata = null;
var imageDataLoaded = false;
//var preloader = new Preloader();
//load();
function load() {
//preloader.load(() => {
// this.scene.add(this.cube);
//this.render();
// var imgTexture = THREE.ImageUtils.loadTexture( "environment/floor.jpg" );
var loader = new THREE.TextureLoader();
loader.setPath('textures/');
var imageURL = '01.jpg';
// var imageURL = 'grad.png';
var _this = this;
var texture = loader.load(imageURL,
function (texture) {
// do something with the texture on complete
console.log("texture", texture);
imagedata = getImageData(texture.image);
console.log("imagedata", imagedata);
imageDataLoaded = true;
//test();
}
);
}
var field = new VectorField();
var p0s;
var p1s;
var p2s;
var q0s;
var q1s;
var q2s;
var colours0;
var colours1;
var startRs;
var endRs;
var nInstances;
var basepath;
var pathobj;
var ready = false;
var bufferix;
var grid;
function makeMeshObj()
{
//
basepath = new QCurve();
basepath.p1.x = 200;
basepath.p1.y = 0;
basepath.p2.x = 200;
basepath.p2.y = 100;
pathobj = new QCurveObj(basepath, 10);
// pathobj.addToScene(scene);
// geometry
nInstances = 200000; // max number of instances that can be render in one go
bufferix = 0;
grid = new InstancedGrid();
grid.print();
var nx = 2; // keep this as 2
var nz = 5; // resolution
var zLen = 25;
//grid.createTube(nx,nz,1,1,zLen);
//grid.createRectTube(7,5,100,40);
grid.createFlatGrid(nx,nz,1,1);
grid.createIndices(nx,nz);
grid.createUVGrid(nx,nz);
// per instance data
// offsets = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( false );
p0s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
p1s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
p2s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q0s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q1s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q2s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
colours0 = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 4 ), 4, 1 ).setDynamic( true);
colours1 = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 4 ), 4, 1 ).setDynamic( true);
// remove this
// startRs = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 1 ), 1, 1 ).setDynamic( true);
// endRs = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 1 ), 1, 1 ).setDynamic( true);
//grid.geometry.addAttribute( 'offset', offsets ); // per mesh translation
grid.geometry.addAttribute( 'p0', p0s);
grid.geometry.addAttribute( 'p1', p1s);
grid.geometry.addAttribute( 'p2', p2s);
grid.geometry.addAttribute( 'q0', q0s);
grid.geometry.addAttribute( 'q1', q1s);
grid.geometry.addAttribute( 'q2', q2s);
grid.geometry.addAttribute( 'colour0', colours0);
grid.geometry.addAttribute( 'colour1', colours1);
// grid.geometry.addAttribute( 'startR', startRs);
// grid.geometry.addAttribute( 'endR', endRs);
var material = new THREE.RawShaderMaterial( {
uniforms: {
//map: { type: "t", value: texture }
},
vertexShader: vertshader,
//fragmentShader: FragShader,
fragmentShader: fragshader,
//side: THREE.DoubleSide,
transparent: true,
// wireframe: true
} );
var mesh = new THREE.Mesh( grid.geometry, material );
mesh.frustumCulled = false;
//var zoom = 0.5;
// mesh.position.y = meshPositionY;
mesh.scale.set(renderScale,renderScale);
scene.add( mesh );
//add a test horizon line
//addTestLine();
ready = true;
// drawParticle();
}
function addTestLine()
{
var material = new THREE.LineBasicMaterial({
color: 0xff0000
});
var geometry = new THREE.Geometry();
geometry.vertices.push(
new THREE.Vector3( -1000*renderScale, 0, 0 ),
new THREE.Vector3( 1000*renderScale, 0, 0 )
);
var line = new THREE.Line( geometry, material );
scene.add( line );
}
Math.clamp = function(number, min, max) {
return Math.max(min, Math.min(number, max));
}
function onMouseMove(event){
mousex = (event.clientX);
mousey = (event.clientY);
console.log(mousex,mousey);
//mouseY = (event.clientY - window.innerHeight/2) / window.innerHeight/2;
}
function onMouseUp(event){
ismousedown = false;
console.log("onMouseUp");
}
function onMouseDown(event){
ismousedown = true;
console.log("onMouseDown");
nsteps = 20 + Math.random()*160;
}
var nsteps = 20;
function drawParticleUpdate()
{
if(ismousedown)
{
var n = 50;
var nx = mousex/w + Math.random()*0.02 ;
var ny = mousey/h + Math.random()*0.02 ;
console.log(mousex/w, mousex/h);
var direction = particleOptions.directionForward ? 1: -1;// (Math.random() < 0.5)? -1 : 1;
var thickness = 0.5 + Math.random()*1.5;
var alpha = 0.3 + 0.7*Math.random();
for (var i = 0; i < n; ++i) {
drawParticle(nx,ny, direction, nsteps, thickness, alpha);
}
}
//drawRandomParticles(400);
}
function drawRandomParticles(n)
|
// draw particle at nx,ny
function drawParticle(nx,ny, direction, nsteps, thickness, alpha)
{
// todo use canvas coordinates
// convert to the emission bound
var canvasx = nx*bw; // stretch the width
var canvasy = bh - ny*( bottomy); // do
//get slight random position
var randomColPositionAmount= 0.01;
var colx = Math.clamp( MathUtils.GetRandomFloat(nx- randomColPositionAmount,nx + randomColPositionAmount) ,0,0.999);
var coly = Math.clamp( MathUtils.GetRandomFloat(ny- randomColPositionAmount,ny + randomColPositionAmount) ,0,0.999);
var transformedPoint = getPoint(colx,coly);
colx = transformedPoint.x;
coly = transformedPoint.y;
var col = getPixel(imagedata, colx,coly);
//var x =-1000+ nx*2000;
//var y =-450+ ny*950;
var particle;
// set a random seed
var seed = MathUtils.GetRandomIntBetween(0,100000);
// draw the shading (alpha black)
var brightness = 0.5;
MathUtils.SetSeed(seed); // rset seed
particle = new Particle(field);
var thicknessShade = Math.min( thickness + 4, thickness *1.2);
particle.init( canvasx,canvasy, thicknessShade, direction);
particle.noiseOffsetX = noiseOffsetX;
particle.noiseOffsetY = noiseOffsetY;
particle.strokePath.colour = new THREE.Vector3(col.r*brightness,col.g*brightness,col.b*brightness);
particle.strokePath.alpha = alpha*0.2;
for(var i =0; i< nsteps;++i)
{
particle.update(thicknessShade);
}
bufferix = particle.strokePath.constructPath(p0s,p1s,p2s,q0s,q1s,q2s,colours0,colours1,bufferix);
// draw the colour
MathUtils.SetSeed(seed); // rset seed
particle = new Particle(field);
particle.init(canvasx,canvasy, thickness, direction);
particle.noiseOffsetX = noiseOffsetX;
particle.noiseOffsetY = noiseOffsetY;
particle.strokePath.colour = new THREE.Vector3(col.r,col.g,col.b);
particle.strokePath.alpha =alpha;
for(var i =0; i< nsteps;++i)
{
particle.update(thickness);
}
bufferix = particle.strokePath.constructPath(p0s,p1s,p2s,q0s,q1s,q2s,colours0,colours1,bufferix);
/*
// test a couple of curves
var i = 0;
p0s.setXY(i, 0,0);
p1s.setXY(i, 102,0);
p2s.setXY(i, 202,25);
q0s.setXY(i, 0,0 + 50);
q1s.setXY(i, 102,0 + 50);
q2s.setXY(i, 202,25);
*/
}
function onWindowResize( event ) {
/*
camera.left = window.innerWidth / - 2;
camera.right = window.innerWidth / 2;
camera.top = window.innerHeight / 2;
camera.bottom = window.innerHeight / - 2;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
*/
}
//
function animate() {
requestAnimationFrame( animate );
if(ready && imageDataLoaded) {
bufferix = 0;
// console.log("imageDataLoaded", imageDataLoaded);
drawParticleUpdate();
grid.setDrawCount(bufferix);
//console.log(bufferix);
// update
p0s.needsUpdate = true;
p1s.needsUpdate = true;
p2s.needsUpdate = true;
q0s.needsUpdate = true;
q1s.needsUpdate = true;
q2s.needsUpdate = true;
colours0.needsUpdate =true;
colours1.needsUpdate =true;
render();
}
// stats.update();
//controls.update(); // required if controls.enableDamping = true, or if controls.autoRotate = true
}
var lastTime = 0;
var moveQ = ( new THREE.Quaternion( .5, .5, .5, 0.0 ) ).normalize();
var tmpQ = new THREE.Quaternion();
var currentQ = new THREE.Quaternion();
function render() {
var time = performance.now();
if(ready) {
//var object = scene.children[0];
var x;
var age;
var introDuration = 0.2;
var outroDuration = 0.2;
var r;
// endRs.needsUpdate = true;
// startRs.needsUpdate = true;
}
//renderer.autoClear = false;
renderer.render( scene, camera );
// pathobj.update();
lastTime = time;
}
init();
animate(); | {
for (var i = 0; i < n; ++i) {
// particles are nomralised [0,1] -> remap to [-h,h]
var nx = Math.random()*0.99 ;
var ny = Math.random()*0.99 ;
var direction = (Math.random() < 0.5)? -1 : 1;
var thickness = 0.5 + Math.random()*1.5;
var nsteps = 30 + Math.random()*100;
var alpha = 0.3 + 0.7*Math.random();
drawParticle(nx,ny, direction, nsteps, thickness, alpha);
}
} | identifier_body |
index3.js | import {TextFileLoader} from './painter/TextFileLoader'
import {InstancedGrid} from './painter/InstancedGrid'
import {QCurve} from './painter/QCurve'
import {QCurveObj} from './painter/QCurve'
import {StrokePath} from './painter/StrokePath'
import {Particle} from './painter/Particle'
import {VectorField} from './painter/VectorField'
import Preloader from "./preloader/Preloader"
import {MathUtils} from "./util/MathUtils"
import * as THREE from 'three'
import * as Stats from 'stats-js'
/**
* Created by David on 14/12/2016.
*/
//if ( !Detector.webgl ) Detector.addGetWebGLMessage();
///////////////////////
var container, stats;
var controls;
var camera, scene, renderer;
//var orientations;
//var offsets;
//var lengths;
//var speeds;
var fragshader;
var vertshader;
TextFileLoader.Instance().loadFiles(["shaders/fragShader.glsl","shaders/vertShader.glsl"], filesLoaded);
var textureFilenames = [];
for(var i=1;i<=41;++i)
{
textureFilenames.push( "colourful" + "/" + ((i < 10) ?"0":"") + i + ".jpg");
}
var textureIX = 0;
function filesLoaded(files)
{
fragshader = files[0];
vertshader = files[1];
makeMeshObj();
}
// emit particles from a bound, bottom of the bound is the horizon line?
// if camera is cetnered, then it should be centered in x pos
// if camera is not centered,
var ExportMode = {
"png": "png",
"jpg": "jpg"
};
var exportMode = ExportMode.png;
//var renderScale = 5.4;
var renderScale,bw,bh,bottomy;
var Mode = {
"skyline": "skyline",
"maps": "maps"
};
var mode = Mode.maps;
//renderScale = 7.2;
renderScale = 1.0;
if(mode == Mode.skyline)
{
bw = 1000;
bh = bw*(3/4);
bottomy = bh *0.6;
}
else{
// "maps"
// renderScale = 7.2;
bh = 1000;
bw = bh*(3/4);
bottomy = bh * 1.0;
}
var w = bw * renderScale;
var h = bh * renderScale;
var noiseOffsetX, noiseOffsetY ;
function init() {
randomiseField();
container = document.getElementById( 'container' );
//var w = window.innerWidth;
//var h = window.innerHeight;
// w = 6000;
// h = 6000;
// todo uncenter the camera
//camera = new THREE.OrthographicCamera( w / - 2, w / 2, h / 2, h / - 2, - 500, 500 );
camera = new THREE.OrthographicCamera( 0, w , h , 0, - 500, 500 );
// camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 1, 10000 );
//camera.position.z = 20;
//camera.position.z = 50;
renderer = new THREE.WebGLRenderer({ antialias: true, preserveDrawingBuffer: true });
scene = new THREE.Scene();
// mouse orbit control
/*
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.enableDamping = true;
controls.dampingFactor = 0.25;
controls.enableZoom = false;*/
/*
controls = new THREE.TrackballControls(camera);
controls.rotateSpeed = 10.0;
controls.zoomSpeed = 10.2;
controls.panSpeed = 0.8;
controls.noZoom = false;
controls.noPan = false;
controls.staticMoving = true;
controls.dynamicDampingFactor = 0.3;
*/
if ( renderer.extensions.get( 'ANGLE_instanced_arrays' ) === false ) {
document.getElementById( "notSupported" ).style.display = "";
return;
}
renderer.setClearColor( 0xFFFFFF );
renderer.autoClear = false;
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( w,h);
var div = document.getElementById("canvasContainer");
div.appendChild(renderer.domElement );
// document.body.appendChild( renderer.domElement );
renderer.clear();
/*
stats = new Stats();
stats.domElement.style.position = 'absolute';
stats.domElement.style.top = '0px';
document.body.appendChild( stats.domElement );
*/
window.addEventListener( 'resize', onWindowResize, false );
renderer.domElement.addEventListener( 'mousemove', onMouseMove, true );
renderer.domElement.addEventListener( 'mousedown', onMouseDown, true );
renderer.domElement.addEventListener( 'mouseup', onMouseUp, true );
createGui();
reset();
}
var ismousedown =false;
var mousex = 0;
var mousey = 0;
document.addEventListener('keydown',onDocumentKeyDown,false);
function onDocumentKeyDown(event) {
console.log(event);
if(event.key == 's') {
//saveAsImage();
// savePixels();
saveCanvas();
}
if(event.key == " ")
{
// next teuxtre
textureIX = (textureIX + 1) % textureFilenames.length;
reset();
}
if(event.key == "r")
{
randomiseField();
// refresh noise field
reset();
}
}
var gui;
var originalPoints = [];
for(var i = 0; i< 4;++i)
{
originalPoints.push({"x":0, "y":0});
}
var points = originalPoints.slice(0);
var rectModel = {
xOffset: 0.5, //
yOffset: 0.5,
xScale: 1,
yScale: 1,
imageFilename: "image"
};
updatePoints(); // inital points should be a normalised rect
console.log(points);
//////////////////////////////////////////////////////////////////////////////////////////////
// colour map sampling options
var particleOptions = {
directionForward: true
}
function createGui()
{
gui = new dat.GUI();
// My sample abject
var obj = {
flipX: function() {flipX();},
flipY: function() {flipY();},
rotate: function(){rotate();},
resetPoints: function(){resetPoints();}
};
// Number field with slider
gui.add(rectModel, "xOffset").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "yOffset").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "xScale").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "yScale").min(0).max(1).step(0.01).onChange(function(val) {
//console.log("changed " + val);
updatePoints();
}
).listen();
// Checkbox field
gui.add(obj, "flipX");
gui.add(obj, "flipY");
gui.add(obj, "rotate");
gui.add(obj, "resetPoints");
gui.add(rectModel, "imageFilename").listen();
gui.add(particleOptions, "directionForward").listen();
}
function flipX()
{
console.log("flip x");
var temp = points.slice(0);
points[0] = temp[1];
points[1] = temp[0];
points[2] = temp[3];
points[3] = temp[2];
console.log(points);
}
function flipY()
{
console.log("flipY");
var temp = points.slice(0);
points[0] = temp[3];
points[1] = temp[2];
points[2] = temp[1];
points[3] = temp[0];
console.log(points);
}
function rotate()
{
var temp = points.slice(0);
points[0] = temp[1];
points[1] = temp[2];
points[2] = temp[3];
points[3] = temp[0];
console.log(points);
}
function resetPoints()
{
rectModel.xOffset = 0.5;
rectModel.yOffset = 0.5;
rectModel.xScale = 1;
rectModel.yScale = 1;
updatePoints();
points = originalPoints.slice(0);
console.log(points);
}
function updatePoints()
{
var w = rectModel.xScale;
var h = rectModel.yScale;
var x = (rectModel.xOffset - 0.5)*(1-w) - 0.5*w +0.5;
var y = (rectModel.yOffset - 0.5)*(1-h) - 0.5*h + 0.5;
originalPoints[0].x = x;
originalPoints[0].y = y;
originalPoints[1].x = x + w;
originalPoints[1].y = y;
originalPoints[2].x = x + w;
originalPoints[2].y = y + h;
originalPoints[3].x = x ;
originalPoints[3].y = y + h;
console.log(originalPoints);
}
function getPoint(x,y)
{
var p0 = points[0];
var p1 = points[1];
var p2 = points[2];
var p3 = points[3];
var x0 = p0.x + x*(p1.x - p0.x);
var y0 = p0.y + x*(p1.y - p0.y);
var x1 = p2.x + x*(p2.x - p3.x);
var y1 = p2.y + x*(p2.y - p3.y);
var tx = x0 + y*(x1- x0);
var ty = y0 + y*(y1- y0);
return {"y":ty,"x":tx};
}
//////////////////////////////////////////////////////////////////////////////////////////////
function randomiseField()
{
noiseOffsetX = MathUtils.GetRandomFloat(0,100);
noiseOffsetY = MathUtils.GetRandomFloat(0,100);
}
function reset()
{
console.log("reset");
imageDataLoaded = false;
// clear
renderer.clear();
// choose a texture and load it
var loader = new THREE.TextureLoader();
loader.setPath('textures/');
var imageURL = textureFilenames[textureIX];
// var imageURL = 'grad.png';
console.log("imageURL "+ imageURL);
rectModel.imageFilename = imageURL; // show filename for debugin
var _this = this;
var texture = loader.load(imageURL,
function ( texture ) {
// do something with the texture on complete
// console.log("texture", texture);
imagedata = getImageData(texture.image );
// console.log("imagedata", imagedata);
imageDataLoaded = true;
//test();
}
);
}
/////////////////////////////////////////////////////////////////////////////////////////////
function saveCanvas()
{
if(exportMode == ExportMode.png) {
renderer.domElement.toBlob(function(blob) {
saveAs(blob, "output" + MathUtils.GenerateUUID() + ".png");
});
}
else {
renderer.domElement.toBlob(function (blob) {
saveAs(blob, "output" + MathUtils.GenerateUUID() + ".jpg");
}, "image/jpeg");
}
}
////////////////////////////////////////////////
function getImageData( image ) {
var canvas = document.createElement( 'canvas' );
canvas.width = image.width;
canvas.height = image.height;
var context = canvas.getContext( '2d' );
context.drawImage( image, 0, 0 );
return context.getImageData( 0, 0, image.width, image.height );
}
function getPixel( imagedata, nx, ny ) {
var x = Math.floor( nx *(imagedata.width - 1));
var y = Math.floor( ny *(imagedata.height-1));
var position = ( x + imagedata.width * y ) * 4, data = imagedata.data;
return { r: data[ position ] /255.0, g: data[ position + 1 ]/255.0, b: data[ position + 2 ]/255.0, a: data[ position + 3 ]/255.0 };
}
var imagedata = null;
var imageDataLoaded = false;
//var preloader = new Preloader();
//load();
function load() {
//preloader.load(() => {
// this.scene.add(this.cube);
//this.render();
// var imgTexture = THREE.ImageUtils.loadTexture( "environment/floor.jpg" );
var loader = new THREE.TextureLoader();
loader.setPath('textures/');
var imageURL = '01.jpg';
// var imageURL = 'grad.png';
var _this = this;
var texture = loader.load(imageURL,
function (texture) {
// do something with the texture on complete
console.log("texture", texture);
imagedata = getImageData(texture.image);
console.log("imagedata", imagedata);
imageDataLoaded = true;
//test();
}
);
}
var field = new VectorField();
var p0s;
var p1s;
var p2s;
var q0s;
var q1s;
var q2s;
var colours0;
var colours1;
var startRs;
var endRs;
var nInstances;
var basepath;
var pathobj;
var ready = false;
var bufferix;
var grid;
function makeMeshObj()
{
//
basepath = new QCurve();
basepath.p1.x = 200;
basepath.p1.y = 0;
basepath.p2.x = 200;
basepath.p2.y = 100;
pathobj = new QCurveObj(basepath, 10);
// pathobj.addToScene(scene);
// geometry
nInstances = 200000; // max number of instances that can be render in one go
bufferix = 0;
grid = new InstancedGrid();
grid.print();
var nx = 2; // keep this as 2
var nz = 5; // resolution
var zLen = 25;
//grid.createTube(nx,nz,1,1,zLen);
//grid.createRectTube(7,5,100,40);
grid.createFlatGrid(nx,nz,1,1);
grid.createIndices(nx,nz);
grid.createUVGrid(nx,nz);
// per instance data
// offsets = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( false );
p0s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
p1s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
p2s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q0s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q1s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q2s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
colours0 = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 4 ), 4, 1 ).setDynamic( true);
colours1 = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 4 ), 4, 1 ).setDynamic( true);
// remove this
// startRs = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 1 ), 1, 1 ).setDynamic( true);
// endRs = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 1 ), 1, 1 ).setDynamic( true);
//grid.geometry.addAttribute( 'offset', offsets ); // per mesh translation
grid.geometry.addAttribute( 'p0', p0s);
grid.geometry.addAttribute( 'p1', p1s);
grid.geometry.addAttribute( 'p2', p2s);
grid.geometry.addAttribute( 'q0', q0s);
grid.geometry.addAttribute( 'q1', q1s);
grid.geometry.addAttribute( 'q2', q2s);
grid.geometry.addAttribute( 'colour0', colours0);
grid.geometry.addAttribute( 'colour1', colours1);
// grid.geometry.addAttribute( 'startR', startRs);
// grid.geometry.addAttribute( 'endR', endRs);
var material = new THREE.RawShaderMaterial( {
uniforms: {
//map: { type: "t", value: texture }
},
vertexShader: vertshader,
//fragmentShader: FragShader,
fragmentShader: fragshader,
//side: THREE.DoubleSide,
transparent: true,
// wireframe: true
} );
var mesh = new THREE.Mesh( grid.geometry, material );
mesh.frustumCulled = false;
//var zoom = 0.5;
// mesh.position.y = meshPositionY;
mesh.scale.set(renderScale,renderScale);
scene.add( mesh );
//add a test horizon line
//addTestLine();
ready = true;
// drawParticle();
}
function addTestLine()
{
var material = new THREE.LineBasicMaterial({
color: 0xff0000
});
var geometry = new THREE.Geometry();
geometry.vertices.push(
new THREE.Vector3( -1000*renderScale, 0, 0 ),
new THREE.Vector3( 1000*renderScale, 0, 0 )
);
var line = new THREE.Line( geometry, material );
scene.add( line );
}
Math.clamp = function(number, min, max) {
return Math.max(min, Math.min(number, max));
}
function onMouseMove(event){
mousex = (event.clientX);
mousey = (event.clientY);
console.log(mousex,mousey);
//mouseY = (event.clientY - window.innerHeight/2) / window.innerHeight/2;
}
function onMouseUp(event){
ismousedown = false;
console.log("onMouseUp");
}
function onMouseDown(event){
ismousedown = true;
console.log("onMouseDown");
nsteps = 20 + Math.random()*160;
}
var nsteps = 20;
| {
if(ismousedown)
{
var n = 50;
var nx = mousex/w + Math.random()*0.02 ;
var ny = mousey/h + Math.random()*0.02 ;
console.log(mousex/w, mousex/h);
var direction = particleOptions.directionForward ? 1: -1;// (Math.random() < 0.5)? -1 : 1;
var thickness = 0.5 + Math.random()*1.5;
var alpha = 0.3 + 0.7*Math.random();
for (var i = 0; i < n; ++i) {
drawParticle(nx,ny, direction, nsteps, thickness, alpha);
}
}
//drawRandomParticles(400);
}
function drawRandomParticles(n)
{
for (var i = 0; i < n; ++i) {
// particles are nomralised [0,1] -> remap to [-h,h]
var nx = Math.random()*0.99 ;
var ny = Math.random()*0.99 ;
var direction = (Math.random() < 0.5)? -1 : 1;
var thickness = 0.5 + Math.random()*1.5;
var nsteps = 30 + Math.random()*100;
var alpha = 0.3 + 0.7*Math.random();
drawParticle(nx,ny, direction, nsteps, thickness, alpha);
}
}
// draw particle at nx,ny
function drawParticle(nx,ny, direction, nsteps, thickness, alpha)
{
// todo use canvas coordinates
// convert to the emission bound
var canvasx = nx*bw; // stretch the width
var canvasy = bh - ny*( bottomy); // do
//get slight random position
var randomColPositionAmount= 0.01;
var colx = Math.clamp( MathUtils.GetRandomFloat(nx- randomColPositionAmount,nx + randomColPositionAmount) ,0,0.999);
var coly = Math.clamp( MathUtils.GetRandomFloat(ny- randomColPositionAmount,ny + randomColPositionAmount) ,0,0.999);
var transformedPoint = getPoint(colx,coly);
colx = transformedPoint.x;
coly = transformedPoint.y;
var col = getPixel(imagedata, colx,coly);
//var x =-1000+ nx*2000;
//var y =-450+ ny*950;
var particle;
// set a random seed
var seed = MathUtils.GetRandomIntBetween(0,100000);
// draw the shading (alpha black)
var brightness = 0.5;
MathUtils.SetSeed(seed); // rset seed
particle = new Particle(field);
var thicknessShade = Math.min( thickness + 4, thickness *1.2);
particle.init( canvasx,canvasy, thicknessShade, direction);
particle.noiseOffsetX = noiseOffsetX;
particle.noiseOffsetY = noiseOffsetY;
particle.strokePath.colour = new THREE.Vector3(col.r*brightness,col.g*brightness,col.b*brightness);
particle.strokePath.alpha = alpha*0.2;
for(var i =0; i< nsteps;++i)
{
particle.update(thicknessShade);
}
bufferix = particle.strokePath.constructPath(p0s,p1s,p2s,q0s,q1s,q2s,colours0,colours1,bufferix);
// draw the colour
MathUtils.SetSeed(seed); // rset seed
particle = new Particle(field);
particle.init(canvasx,canvasy, thickness, direction);
particle.noiseOffsetX = noiseOffsetX;
particle.noiseOffsetY = noiseOffsetY;
particle.strokePath.colour = new THREE.Vector3(col.r,col.g,col.b);
particle.strokePath.alpha =alpha;
for(var i =0; i< nsteps;++i)
{
particle.update(thickness);
}
bufferix = particle.strokePath.constructPath(p0s,p1s,p2s,q0s,q1s,q2s,colours0,colours1,bufferix);
/*
// test a couple of curves
var i = 0;
p0s.setXY(i, 0,0);
p1s.setXY(i, 102,0);
p2s.setXY(i, 202,25);
q0s.setXY(i, 0,0 + 50);
q1s.setXY(i, 102,0 + 50);
q2s.setXY(i, 202,25);
*/
}
function onWindowResize( event ) {
/*
camera.left = window.innerWidth / - 2;
camera.right = window.innerWidth / 2;
camera.top = window.innerHeight / 2;
camera.bottom = window.innerHeight / - 2;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
*/
}
//
function animate() {
requestAnimationFrame( animate );
if(ready && imageDataLoaded) {
bufferix = 0;
// console.log("imageDataLoaded", imageDataLoaded);
drawParticleUpdate();
grid.setDrawCount(bufferix);
//console.log(bufferix);
// update
p0s.needsUpdate = true;
p1s.needsUpdate = true;
p2s.needsUpdate = true;
q0s.needsUpdate = true;
q1s.needsUpdate = true;
q2s.needsUpdate = true;
colours0.needsUpdate =true;
colours1.needsUpdate =true;
render();
}
// stats.update();
//controls.update(); // required if controls.enableDamping = true, or if controls.autoRotate = true
}
var lastTime = 0;
var moveQ = ( new THREE.Quaternion( .5, .5, .5, 0.0 ) ).normalize();
var tmpQ = new THREE.Quaternion();
var currentQ = new THREE.Quaternion();
function render() {
var time = performance.now();
if(ready) {
//var object = scene.children[0];
var x;
var age;
var introDuration = 0.2;
var outroDuration = 0.2;
var r;
// endRs.needsUpdate = true;
// startRs.needsUpdate = true;
}
//renderer.autoClear = false;
renderer.render( scene, camera );
// pathobj.update();
lastTime = time;
}
init();
animate(); | function drawParticleUpdate() | random_line_split |
index3.js | import {TextFileLoader} from './painter/TextFileLoader'
import {InstancedGrid} from './painter/InstancedGrid'
import {QCurve} from './painter/QCurve'
import {QCurveObj} from './painter/QCurve'
import {StrokePath} from './painter/StrokePath'
import {Particle} from './painter/Particle'
import {VectorField} from './painter/VectorField'
import Preloader from "./preloader/Preloader"
import {MathUtils} from "./util/MathUtils"
import * as THREE from 'three'
import * as Stats from 'stats-js'
/**
* Created by David on 14/12/2016.
*/
//if ( !Detector.webgl ) Detector.addGetWebGLMessage();
///////////////////////
var container, stats;
var controls;
var camera, scene, renderer;
//var orientations;
//var offsets;
//var lengths;
//var speeds;
var fragshader;
var vertshader;
TextFileLoader.Instance().loadFiles(["shaders/fragShader.glsl","shaders/vertShader.glsl"], filesLoaded);
var textureFilenames = [];
for(var i=1;i<=41;++i)
{
textureFilenames.push( "colourful" + "/" + ((i < 10) ?"0":"") + i + ".jpg");
}
var textureIX = 0;
function filesLoaded(files)
{
fragshader = files[0];
vertshader = files[1];
makeMeshObj();
}
// emit particles from a bound, bottom of the bound is the horizon line?
// if camera is cetnered, then it should be centered in x pos
// if camera is not centered,
var ExportMode = {
"png": "png",
"jpg": "jpg"
};
var exportMode = ExportMode.png;
//var renderScale = 5.4;
var renderScale,bw,bh,bottomy;
var Mode = {
"skyline": "skyline",
"maps": "maps"
};
var mode = Mode.maps;
//renderScale = 7.2;
renderScale = 1.0;
if(mode == Mode.skyline)
{
bw = 1000;
bh = bw*(3/4);
bottomy = bh *0.6;
}
else{
// "maps"
// renderScale = 7.2;
bh = 1000;
bw = bh*(3/4);
bottomy = bh * 1.0;
}
var w = bw * renderScale;
var h = bh * renderScale;
var noiseOffsetX, noiseOffsetY ;
function init() {
randomiseField();
container = document.getElementById( 'container' );
//var w = window.innerWidth;
//var h = window.innerHeight;
// w = 6000;
// h = 6000;
// todo uncenter the camera
//camera = new THREE.OrthographicCamera( w / - 2, w / 2, h / 2, h / - 2, - 500, 500 );
camera = new THREE.OrthographicCamera( 0, w , h , 0, - 500, 500 );
// camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 1, 10000 );
//camera.position.z = 20;
//camera.position.z = 50;
renderer = new THREE.WebGLRenderer({ antialias: true, preserveDrawingBuffer: true });
scene = new THREE.Scene();
// mouse orbit control
/*
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.enableDamping = true;
controls.dampingFactor = 0.25;
controls.enableZoom = false;*/
/*
controls = new THREE.TrackballControls(camera);
controls.rotateSpeed = 10.0;
controls.zoomSpeed = 10.2;
controls.panSpeed = 0.8;
controls.noZoom = false;
controls.noPan = false;
controls.staticMoving = true;
controls.dynamicDampingFactor = 0.3;
*/
if ( renderer.extensions.get( 'ANGLE_instanced_arrays' ) === false ) {
document.getElementById( "notSupported" ).style.display = "";
return;
}
renderer.setClearColor( 0xFFFFFF );
renderer.autoClear = false;
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( w,h);
var div = document.getElementById("canvasContainer");
div.appendChild(renderer.domElement );
// document.body.appendChild( renderer.domElement );
renderer.clear();
/*
stats = new Stats();
stats.domElement.style.position = 'absolute';
stats.domElement.style.top = '0px';
document.body.appendChild( stats.domElement );
*/
window.addEventListener( 'resize', onWindowResize, false );
renderer.domElement.addEventListener( 'mousemove', onMouseMove, true );
renderer.domElement.addEventListener( 'mousedown', onMouseDown, true );
renderer.domElement.addEventListener( 'mouseup', onMouseUp, true );
createGui();
reset();
}
var ismousedown =false;
var mousex = 0;
var mousey = 0;
document.addEventListener('keydown',onDocumentKeyDown,false);
function onDocumentKeyDown(event) {
console.log(event);
if(event.key == 's') {
//saveAsImage();
// savePixels();
saveCanvas();
}
if(event.key == " ")
{
// next teuxtre
textureIX = (textureIX + 1) % textureFilenames.length;
reset();
}
if(event.key == "r")
{
randomiseField();
// refresh noise field
reset();
}
}
var gui;
var originalPoints = [];
for(var i = 0; i< 4;++i)
{
originalPoints.push({"x":0, "y":0});
}
var points = originalPoints.slice(0);
var rectModel = {
xOffset: 0.5, //
yOffset: 0.5,
xScale: 1,
yScale: 1,
imageFilename: "image"
};
updatePoints(); // inital points should be a normalised rect
console.log(points);
//////////////////////////////////////////////////////////////////////////////////////////////
// colour map sampling options
var particleOptions = {
directionForward: true
}
function createGui()
{
gui = new dat.GUI();
// My sample abject
var obj = {
flipX: function() {flipX();},
flipY: function() {flipY();},
rotate: function(){rotate();},
resetPoints: function(){resetPoints();}
};
// Number field with slider
gui.add(rectModel, "xOffset").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "yOffset").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "xScale").min(0).max(1).step(0.01).onChange(function(val) {
// console.log("changed " + val);
updatePoints();
}
).listen();
gui.add(rectModel, "yScale").min(0).max(1).step(0.01).onChange(function(val) {
//console.log("changed " + val);
updatePoints();
}
).listen();
// Checkbox field
gui.add(obj, "flipX");
gui.add(obj, "flipY");
gui.add(obj, "rotate");
gui.add(obj, "resetPoints");
gui.add(rectModel, "imageFilename").listen();
gui.add(particleOptions, "directionForward").listen();
}
function flipX()
{
console.log("flip x");
var temp = points.slice(0);
points[0] = temp[1];
points[1] = temp[0];
points[2] = temp[3];
points[3] = temp[2];
console.log(points);
}
function flipY()
{
console.log("flipY");
var temp = points.slice(0);
points[0] = temp[3];
points[1] = temp[2];
points[2] = temp[1];
points[3] = temp[0];
console.log(points);
}
function rotate()
{
var temp = points.slice(0);
points[0] = temp[1];
points[1] = temp[2];
points[2] = temp[3];
points[3] = temp[0];
console.log(points);
}
function resetPoints()
{
rectModel.xOffset = 0.5;
rectModel.yOffset = 0.5;
rectModel.xScale = 1;
rectModel.yScale = 1;
updatePoints();
points = originalPoints.slice(0);
console.log(points);
}
function updatePoints()
{
var w = rectModel.xScale;
var h = rectModel.yScale;
var x = (rectModel.xOffset - 0.5)*(1-w) - 0.5*w +0.5;
var y = (rectModel.yOffset - 0.5)*(1-h) - 0.5*h + 0.5;
originalPoints[0].x = x;
originalPoints[0].y = y;
originalPoints[1].x = x + w;
originalPoints[1].y = y;
originalPoints[2].x = x + w;
originalPoints[2].y = y + h;
originalPoints[3].x = x ;
originalPoints[3].y = y + h;
console.log(originalPoints);
}
function getPoint(x,y)
{
var p0 = points[0];
var p1 = points[1];
var p2 = points[2];
var p3 = points[3];
var x0 = p0.x + x*(p1.x - p0.x);
var y0 = p0.y + x*(p1.y - p0.y);
var x1 = p2.x + x*(p2.x - p3.x);
var y1 = p2.y + x*(p2.y - p3.y);
var tx = x0 + y*(x1- x0);
var ty = y0 + y*(y1- y0);
return {"y":ty,"x":tx};
}
//////////////////////////////////////////////////////////////////////////////////////////////
function randomiseField()
{
noiseOffsetX = MathUtils.GetRandomFloat(0,100);
noiseOffsetY = MathUtils.GetRandomFloat(0,100);
}
function reset()
{
console.log("reset");
imageDataLoaded = false;
// clear
renderer.clear();
// choose a texture and load it
var loader = new THREE.TextureLoader();
loader.setPath('textures/');
var imageURL = textureFilenames[textureIX];
// var imageURL = 'grad.png';
console.log("imageURL "+ imageURL);
rectModel.imageFilename = imageURL; // show filename for debugin
var _this = this;
var texture = loader.load(imageURL,
function ( texture ) {
// do something with the texture on complete
// console.log("texture", texture);
imagedata = getImageData(texture.image );
// console.log("imagedata", imagedata);
imageDataLoaded = true;
//test();
}
);
}
/////////////////////////////////////////////////////////////////////////////////////////////
function saveCanvas()
{
if(exportMode == ExportMode.png) {
renderer.domElement.toBlob(function(blob) {
saveAs(blob, "output" + MathUtils.GenerateUUID() + ".png");
});
}
else {
renderer.domElement.toBlob(function (blob) {
saveAs(blob, "output" + MathUtils.GenerateUUID() + ".jpg");
}, "image/jpeg");
}
}
////////////////////////////////////////////////
function getImageData( image ) {
var canvas = document.createElement( 'canvas' );
canvas.width = image.width;
canvas.height = image.height;
var context = canvas.getContext( '2d' );
context.drawImage( image, 0, 0 );
return context.getImageData( 0, 0, image.width, image.height );
}
function getPixel( imagedata, nx, ny ) {
var x = Math.floor( nx *(imagedata.width - 1));
var y = Math.floor( ny *(imagedata.height-1));
var position = ( x + imagedata.width * y ) * 4, data = imagedata.data;
return { r: data[ position ] /255.0, g: data[ position + 1 ]/255.0, b: data[ position + 2 ]/255.0, a: data[ position + 3 ]/255.0 };
}
var imagedata = null;
var imageDataLoaded = false;
//var preloader = new Preloader();
//load();
function load() {
//preloader.load(() => {
// this.scene.add(this.cube);
//this.render();
// var imgTexture = THREE.ImageUtils.loadTexture( "environment/floor.jpg" );
var loader = new THREE.TextureLoader();
loader.setPath('textures/');
var imageURL = '01.jpg';
// var imageURL = 'grad.png';
var _this = this;
var texture = loader.load(imageURL,
function (texture) {
// do something with the texture on complete
console.log("texture", texture);
imagedata = getImageData(texture.image);
console.log("imagedata", imagedata);
imageDataLoaded = true;
//test();
}
);
}
var field = new VectorField();
var p0s;
var p1s;
var p2s;
var q0s;
var q1s;
var q2s;
var colours0;
var colours1;
var startRs;
var endRs;
var nInstances;
var basepath;
var pathobj;
var ready = false;
var bufferix;
var grid;
function makeMeshObj()
{
//
basepath = new QCurve();
basepath.p1.x = 200;
basepath.p1.y = 0;
basepath.p2.x = 200;
basepath.p2.y = 100;
pathobj = new QCurveObj(basepath, 10);
// pathobj.addToScene(scene);
// geometry
nInstances = 200000; // max number of instances that can be render in one go
bufferix = 0;
grid = new InstancedGrid();
grid.print();
var nx = 2; // keep this as 2
var nz = 5; // resolution
var zLen = 25;
//grid.createTube(nx,nz,1,1,zLen);
//grid.createRectTube(7,5,100,40);
grid.createFlatGrid(nx,nz,1,1);
grid.createIndices(nx,nz);
grid.createUVGrid(nx,nz);
// per instance data
// offsets = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( false );
p0s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
p1s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
p2s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q0s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q1s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
q2s = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 3 ), 3, 1 ).setDynamic( true);
colours0 = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 4 ), 4, 1 ).setDynamic( true);
colours1 = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 4 ), 4, 1 ).setDynamic( true);
// remove this
// startRs = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 1 ), 1, 1 ).setDynamic( true);
// endRs = new THREE.InstancedBufferAttribute( new Float32Array( nInstances * 1 ), 1, 1 ).setDynamic( true);
//grid.geometry.addAttribute( 'offset', offsets ); // per mesh translation
grid.geometry.addAttribute( 'p0', p0s);
grid.geometry.addAttribute( 'p1', p1s);
grid.geometry.addAttribute( 'p2', p2s);
grid.geometry.addAttribute( 'q0', q0s);
grid.geometry.addAttribute( 'q1', q1s);
grid.geometry.addAttribute( 'q2', q2s);
grid.geometry.addAttribute( 'colour0', colours0);
grid.geometry.addAttribute( 'colour1', colours1);
// grid.geometry.addAttribute( 'startR', startRs);
// grid.geometry.addAttribute( 'endR', endRs);
var material = new THREE.RawShaderMaterial( {
uniforms: {
//map: { type: "t", value: texture }
},
vertexShader: vertshader,
//fragmentShader: FragShader,
fragmentShader: fragshader,
//side: THREE.DoubleSide,
transparent: true,
// wireframe: true
} );
var mesh = new THREE.Mesh( grid.geometry, material );
mesh.frustumCulled = false;
//var zoom = 0.5;
// mesh.position.y = meshPositionY;
mesh.scale.set(renderScale,renderScale);
scene.add( mesh );
//add a test horizon line
//addTestLine();
ready = true;
// drawParticle();
}
function addTestLine()
{
var material = new THREE.LineBasicMaterial({
color: 0xff0000
});
var geometry = new THREE.Geometry();
geometry.vertices.push(
new THREE.Vector3( -1000*renderScale, 0, 0 ),
new THREE.Vector3( 1000*renderScale, 0, 0 )
);
var line = new THREE.Line( geometry, material );
scene.add( line );
}
Math.clamp = function(number, min, max) {
return Math.max(min, Math.min(number, max));
}
function onMouseMove(event){
mousex = (event.clientX);
mousey = (event.clientY);
console.log(mousex,mousey);
//mouseY = (event.clientY - window.innerHeight/2) / window.innerHeight/2;
}
function onMouseUp(event){
ismousedown = false;
console.log("onMouseUp");
}
function onMouseDown(event){
ismousedown = true;
console.log("onMouseDown");
nsteps = 20 + Math.random()*160;
}
var nsteps = 20;
function drawParticleUpdate()
{
if(ismousedown)
{
var n = 50;
var nx = mousex/w + Math.random()*0.02 ;
var ny = mousey/h + Math.random()*0.02 ;
console.log(mousex/w, mousex/h);
var direction = particleOptions.directionForward ? 1: -1;// (Math.random() < 0.5)? -1 : 1;
var thickness = 0.5 + Math.random()*1.5;
var alpha = 0.3 + 0.7*Math.random();
for (var i = 0; i < n; ++i) {
drawParticle(nx,ny, direction, nsteps, thickness, alpha);
}
}
//drawRandomParticles(400);
}
function drawRandomParticles(n)
{
for (var i = 0; i < n; ++i) {
// particles are nomralised [0,1] -> remap to [-h,h]
var nx = Math.random()*0.99 ;
var ny = Math.random()*0.99 ;
var direction = (Math.random() < 0.5)? -1 : 1;
var thickness = 0.5 + Math.random()*1.5;
var nsteps = 30 + Math.random()*100;
var alpha = 0.3 + 0.7*Math.random();
drawParticle(nx,ny, direction, nsteps, thickness, alpha);
}
}
// draw particle at nx,ny
function drawParticle(nx,ny, direction, nsteps, thickness, alpha)
{
// todo use canvas coordinates
// convert to the emission bound
var canvasx = nx*bw; // stretch the width
var canvasy = bh - ny*( bottomy); // do
//get slight random position
var randomColPositionAmount= 0.01;
var colx = Math.clamp( MathUtils.GetRandomFloat(nx- randomColPositionAmount,nx + randomColPositionAmount) ,0,0.999);
var coly = Math.clamp( MathUtils.GetRandomFloat(ny- randomColPositionAmount,ny + randomColPositionAmount) ,0,0.999);
var transformedPoint = getPoint(colx,coly);
colx = transformedPoint.x;
coly = transformedPoint.y;
var col = getPixel(imagedata, colx,coly);
//var x =-1000+ nx*2000;
//var y =-450+ ny*950;
var particle;
// set a random seed
var seed = MathUtils.GetRandomIntBetween(0,100000);
// draw the shading (alpha black)
var brightness = 0.5;
MathUtils.SetSeed(seed); // rset seed
particle = new Particle(field);
var thicknessShade = Math.min( thickness + 4, thickness *1.2);
particle.init( canvasx,canvasy, thicknessShade, direction);
particle.noiseOffsetX = noiseOffsetX;
particle.noiseOffsetY = noiseOffsetY;
particle.strokePath.colour = new THREE.Vector3(col.r*brightness,col.g*brightness,col.b*brightness);
particle.strokePath.alpha = alpha*0.2;
for(var i =0; i< nsteps;++i)
{
particle.update(thicknessShade);
}
bufferix = particle.strokePath.constructPath(p0s,p1s,p2s,q0s,q1s,q2s,colours0,colours1,bufferix);
// draw the colour
MathUtils.SetSeed(seed); // rset seed
particle = new Particle(field);
particle.init(canvasx,canvasy, thickness, direction);
particle.noiseOffsetX = noiseOffsetX;
particle.noiseOffsetY = noiseOffsetY;
particle.strokePath.colour = new THREE.Vector3(col.r,col.g,col.b);
particle.strokePath.alpha =alpha;
for(var i =0; i< nsteps;++i)
|
bufferix = particle.strokePath.constructPath(p0s,p1s,p2s,q0s,q1s,q2s,colours0,colours1,bufferix);
/*
// test a couple of curves
var i = 0;
p0s.setXY(i, 0,0);
p1s.setXY(i, 102,0);
p2s.setXY(i, 202,25);
q0s.setXY(i, 0,0 + 50);
q1s.setXY(i, 102,0 + 50);
q2s.setXY(i, 202,25);
*/
}
function onWindowResize( event ) {
/*
camera.left = window.innerWidth / - 2;
camera.right = window.innerWidth / 2;
camera.top = window.innerHeight / 2;
camera.bottom = window.innerHeight / - 2;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
*/
}
//
function animate() {
requestAnimationFrame( animate );
if(ready && imageDataLoaded) {
bufferix = 0;
// console.log("imageDataLoaded", imageDataLoaded);
drawParticleUpdate();
grid.setDrawCount(bufferix);
//console.log(bufferix);
// update
p0s.needsUpdate = true;
p1s.needsUpdate = true;
p2s.needsUpdate = true;
q0s.needsUpdate = true;
q1s.needsUpdate = true;
q2s.needsUpdate = true;
colours0.needsUpdate =true;
colours1.needsUpdate =true;
render();
}
// stats.update();
//controls.update(); // required if controls.enableDamping = true, or if controls.autoRotate = true
}
var lastTime = 0;
var moveQ = ( new THREE.Quaternion( .5, .5, .5, 0.0 ) ).normalize();
var tmpQ = new THREE.Quaternion();
var currentQ = new THREE.Quaternion();
function render() {
var time = performance.now();
if(ready) {
//var object = scene.children[0];
var x;
var age;
var introDuration = 0.2;
var outroDuration = 0.2;
var r;
// endRs.needsUpdate = true;
// startRs.needsUpdate = true;
}
//renderer.autoClear = false;
renderer.render( scene, camera );
// pathobj.update();
lastTime = time;
}
init();
animate(); | {
particle.update(thickness);
} | conditional_block |
push_error.rs | // Copyright 2017 ThetaSinner
//
// This file is part of Osmium.
// Osmium is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Osmium is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Osmium. If not, see <http://www.gnu.org/licenses/>.
/// Error enumeration for relaying errors which occur when the application tries to
/// push a promise.
pub enum | {
/// This error occurs when an attempt is made to create a new push promise but
/// the allowed limit for concurrent promises has already been reached.
TooManyActiveStreams
}
| PushError | identifier_name |
push_error.rs | // Copyright 2017 ThetaSinner
//
// This file is part of Osmium.
// Osmium is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Osmium is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Osmium. If not, see <http://www.gnu.org/licenses/>.
/// Error enumeration for relaying errors which occur when the application tries to
/// push a promise.
pub enum PushError {
/// This error occurs when an attempt is made to create a new push promise but
/// the allowed limit for concurrent promises has already been reached.
TooManyActiveStreams | } | random_line_split | |
activitybarPart.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import 'vs/css!./media/activityBarPart';
import nls = require('vs/nls');
import {Promise} from 'vs/base/common/winjs.base';
import {Builder, $} from 'vs/base/browser/builder';
import {Action, IAction} from 'vs/base/common/actions';
import errors = require('vs/base/common/errors');
import events = require('vs/base/common/events');
import {ActionsOrientation, ActionBar, IActionItem} from 'vs/base/browser/ui/actionbar/actionbar';
import {Scope, IActionBarRegistry, Extensions as ActionBarExtensions, prepareActions} from 'vs/workbench/browser/actionBarRegistry';
import {CONTEXT, ToolBar} from 'vs/base/browser/ui/toolbar/toolbar';
import {Registry} from 'vs/platform/platform';
import {CompositeEvent, EventType} from 'vs/workbench/common/events';
import {ViewletDescriptor, ViewletRegistry, Extensions as ViewletExtensions} from 'vs/workbench/browser/viewlet';
import {Part} from 'vs/workbench/browser/part';
import {ActivityAction, ActivityActionItem} from 'vs/workbench/browser/parts/activitybar/activityAction';
import {IViewletService} from 'vs/workbench/services/viewlet/common/viewletService';
import {IActivityService, IBadge} from 'vs/workbench/services/activity/common/activityService';
import {IPartService} from 'vs/workbench/services/part/common/partService';
import {IContextMenuService} from 'vs/platform/contextview/browser/contextView';
import {IEventService} from 'vs/platform/event/common/event';
import {IInstantiationService} from 'vs/platform/instantiation/common/instantiation';
import {IMessageService, Severity} from 'vs/platform/message/common/message';
import {ITelemetryService} from 'vs/platform/telemetry/common/telemetry';
import {IKeybindingService} from 'vs/platform/keybinding/common/keybindingService';
export class ActivitybarPart extends Part implements IActivityService {
public serviceId = IActivityService;
private viewletSwitcherBar: ActionBar;
private globalToolBar: ToolBar;
private activityActionItems: { [actionId: string]: IActionItem; };
private viewletIdToActions: { [viewletId: string]: ActivityAction; };
private instantiationService: IInstantiationService;
constructor(
private viewletService: IViewletService,
private messageService: IMessageService,
private telemetryService: ITelemetryService,
private eventService: IEventService,
private contextMenuService: IContextMenuService,
private keybindingService: IKeybindingService,
id: string
) {
super(id);
this.activityActionItems = {};
this.viewletIdToActions = {};
this.registerListeners();
}
public setInstantiationService(service: IInstantiationService): void |
private registerListeners(): void {
// Activate viewlet action on opening of a viewlet
this.toUnbind.push(this.eventService.addListener(EventType.COMPOSITE_OPENING, (e: CompositeEvent) => this.onCompositeOpening(e)));
// Deactivate viewlet action on close
this.toUnbind.push(this.eventService.addListener(EventType.COMPOSITE_CLOSED, (e: CompositeEvent) => this.onCompositeClosed(e)));
}
private onCompositeOpening(e: CompositeEvent): void {
if (this.viewletIdToActions[e.compositeId]) {
this.viewletIdToActions[e.compositeId].activate();
// There can only be one active viewlet action
for (let key in this.viewletIdToActions) {
if (this.viewletIdToActions.hasOwnProperty(key) && key !== e.compositeId) {
this.viewletIdToActions[key].deactivate();
}
}
}
}
private onCompositeClosed(e: CompositeEvent): void {
if (this.viewletIdToActions[e.compositeId]) {
this.viewletIdToActions[e.compositeId].deactivate();
}
}
public showActivity(viewletId: string, badge: IBadge, clazz?: string): void {
let action = this.viewletIdToActions[viewletId];
if (action) {
action.setBadge(badge);
if (clazz) {
action.class = clazz;
}
}
}
public clearActivity(viewletId: string): void {
this.showActivity(viewletId, null);
}
public createContentArea(parent: Builder): Builder {
let $el = $(parent);
let $result = $('.content').appendTo($el);
// Top Actionbar with action items for each viewlet action
this.createViewletSwitcher($result.clone());
// Bottom Toolbar with action items for global actions
// this.createGlobalToolBarArea($result.clone()); // not used currently
return $result;
}
private createViewletSwitcher(div: Builder): void {
// Viewlet switcher is on top
this.viewletSwitcherBar = new ActionBar(div, {
actionItemProvider: (action: Action) => this.activityActionItems[action.id],
orientation: ActionsOrientation.VERTICAL,
ariaLabel: nls.localize('activityBarAriaLabel', "Active View Switcher")
});
this.viewletSwitcherBar.getContainer().addClass('position-top');
// Build Viewlet Actions in correct order
let activeViewlet = this.viewletService.getActiveViewlet();
let registry = (<ViewletRegistry>Registry.as(ViewletExtensions.Viewlets));
let viewletActions: Action[] = registry.getViewlets()
.sort((v1: ViewletDescriptor, v2: ViewletDescriptor) => v1.order - v2.order)
.map((viewlet: ViewletDescriptor) => {
let action = this.instantiationService.createInstance(ViewletActivityAction, viewlet.id + '.activity-bar-action', viewlet);
let keybinding: string = null;
let keys = this.keybindingService.lookupKeybindings(viewlet.id).map(k => this.keybindingService.getLabelFor(k));
if (keys && keys.length) {
keybinding = keys[0];
}
this.activityActionItems[action.id] = new ActivityActionItem(action, viewlet.name, keybinding);
this.viewletIdToActions[viewlet.id] = action;
// Mark active viewlet action as active
if (activeViewlet && activeViewlet.getId() === viewlet.id) {
action.activate();
}
return action;
}
);
// Add to viewlet switcher
this.viewletSwitcherBar.push(viewletActions, { label: true, icon: true });
}
private createGlobalToolBarArea(div: Builder): void {
// Global action bar is on the bottom
this.globalToolBar = new ToolBar(div.getHTMLElement(), this.contextMenuService, {
actionItemProvider: (action: Action) => this.activityActionItems[action.id],
orientation: ActionsOrientation.VERTICAL
});
this.globalToolBar.getContainer().addClass('global');
this.globalToolBar.actionRunner.addListener(events.EventType.RUN, (e: any) => {
// Check for Error
if (e.error && !errors.isPromiseCanceledError(e.error)) {
this.messageService.show(Severity.Error, e.error);
}
// Log in telemetry
if (this.telemetryService) {
this.telemetryService.publicLog('workbenchActionExecuted', { id: e.action.id, from: 'activityBar' });
}
});
// Build Global Actions in correct order
let primaryActions = this.getGlobalActions(true);
let secondaryActions = this.getGlobalActions(false);
if (primaryActions.length + secondaryActions.length > 0) {
this.globalToolBar.getContainer().addClass('position-bottom');
}
// Add to global action bar
this.globalToolBar.setActions(prepareActions(primaryActions), prepareActions(secondaryActions))();
}
private getGlobalActions(primary: boolean): IAction[] {
let actionBarRegistry = <IActionBarRegistry>Registry.as(ActionBarExtensions.Actionbar);
// Collect actions from actionbar contributor
let actions: IAction[];
if (primary) {
actions = actionBarRegistry.getActionBarActionsForContext(Scope.GLOBAL, CONTEXT);
} else {
actions = actionBarRegistry.getSecondaryActionBarActionsForContext(Scope.GLOBAL, CONTEXT);
}
return actions.map((action: Action) => {
if (primary) {
let keybinding: string = null;
let keys = this.keybindingService.lookupKeybindings(action.id).map(k => this.keybindingService.getLabelFor(k));
if (keys && keys.length) {
keybinding = keys[0];
}
let actionItem = actionBarRegistry.getActionItemForContext(Scope.GLOBAL, CONTEXT, action);
if (!actionItem) {
actionItem = new ActivityActionItem(action, action.label, keybinding);
}
if (actionItem instanceof ActivityActionItem) {
(<ActivityActionItem> actionItem).keybinding = keybinding;
}
this.activityActionItems[action.id] = actionItem;
}
return action;
});
}
public dispose(): void {
if (this.viewletSwitcherBar) {
this.viewletSwitcherBar.dispose();
this.viewletSwitcherBar = null;
}
if (this.globalToolBar) {
this.globalToolBar.dispose();
this.globalToolBar = null;
}
super.dispose();
}
}
class ViewletActivityAction extends ActivityAction {
private static preventDoubleClickDelay = 300;
private static lastRun: number = 0;
private viewlet: ViewletDescriptor;
constructor(
id: string, viewlet: ViewletDescriptor,
@IViewletService private viewletService: IViewletService,
@IPartService private partService: IPartService
) {
super(id, viewlet.name, viewlet.cssClass);
this.viewlet = viewlet;
}
public run(): Promise {
// cheap trick to prevent accident trigger on a doubleclick (to help nervous people)
let now = new Date().getTime();
if (now - ViewletActivityAction.lastRun < ViewletActivityAction.preventDoubleClickDelay) {
return Promise.as(true);
}
ViewletActivityAction.lastRun = now;
let sideBarHidden = this.partService.isSideBarHidden();
let activeViewlet = this.viewletService.getActiveViewlet();
// Hide sidebar if selected viewlet already visible
if (!sideBarHidden && activeViewlet && activeViewlet.getId() === this.viewlet.id) {
this.partService.setSideBarHidden(true);
}
// Open viewlet and focus it
else {
this.viewletService.openViewlet(this.viewlet.id, true).done(null, errors.onUnexpectedError);
this.activate();
}
return Promise.as(true);
}
} | {
this.instantiationService = service;
} | identifier_body |
activitybarPart.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import 'vs/css!./media/activityBarPart';
import nls = require('vs/nls');
import {Promise} from 'vs/base/common/winjs.base';
import {Builder, $} from 'vs/base/browser/builder';
import {Action, IAction} from 'vs/base/common/actions';
import errors = require('vs/base/common/errors');
import events = require('vs/base/common/events');
import {ActionsOrientation, ActionBar, IActionItem} from 'vs/base/browser/ui/actionbar/actionbar';
import {Scope, IActionBarRegistry, Extensions as ActionBarExtensions, prepareActions} from 'vs/workbench/browser/actionBarRegistry';
import {CONTEXT, ToolBar} from 'vs/base/browser/ui/toolbar/toolbar';
import {Registry} from 'vs/platform/platform';
import {CompositeEvent, EventType} from 'vs/workbench/common/events';
import {ViewletDescriptor, ViewletRegistry, Extensions as ViewletExtensions} from 'vs/workbench/browser/viewlet';
import {Part} from 'vs/workbench/browser/part';
import {ActivityAction, ActivityActionItem} from 'vs/workbench/browser/parts/activitybar/activityAction';
import {IViewletService} from 'vs/workbench/services/viewlet/common/viewletService';
import {IActivityService, IBadge} from 'vs/workbench/services/activity/common/activityService';
import {IPartService} from 'vs/workbench/services/part/common/partService';
import {IContextMenuService} from 'vs/platform/contextview/browser/contextView';
import {IEventService} from 'vs/platform/event/common/event';
import {IInstantiationService} from 'vs/platform/instantiation/common/instantiation';
import {IMessageService, Severity} from 'vs/platform/message/common/message';
import {ITelemetryService} from 'vs/platform/telemetry/common/telemetry';
import {IKeybindingService} from 'vs/platform/keybinding/common/keybindingService';
export class ActivitybarPart extends Part implements IActivityService {
public serviceId = IActivityService;
private viewletSwitcherBar: ActionBar;
private globalToolBar: ToolBar;
private activityActionItems: { [actionId: string]: IActionItem; };
private viewletIdToActions: { [viewletId: string]: ActivityAction; };
private instantiationService: IInstantiationService;
constructor(
private viewletService: IViewletService,
private messageService: IMessageService,
private telemetryService: ITelemetryService,
private eventService: IEventService,
private contextMenuService: IContextMenuService,
private keybindingService: IKeybindingService,
id: string
) {
super(id);
this.activityActionItems = {};
this.viewletIdToActions = {};
this.registerListeners();
}
public setInstantiationService(service: IInstantiationService): void {
this.instantiationService = service;
}
private registerListeners(): void {
// Activate viewlet action on opening of a viewlet
this.toUnbind.push(this.eventService.addListener(EventType.COMPOSITE_OPENING, (e: CompositeEvent) => this.onCompositeOpening(e)));
// Deactivate viewlet action on close
this.toUnbind.push(this.eventService.addListener(EventType.COMPOSITE_CLOSED, (e: CompositeEvent) => this.onCompositeClosed(e)));
}
private onCompositeOpening(e: CompositeEvent): void {
if (this.viewletIdToActions[e.compositeId]) {
this.viewletIdToActions[e.compositeId].activate();
// There can only be one active viewlet action
for (let key in this.viewletIdToActions) {
if (this.viewletIdToActions.hasOwnProperty(key) && key !== e.compositeId) {
this.viewletIdToActions[key].deactivate();
}
}
}
}
private onCompositeClosed(e: CompositeEvent): void {
if (this.viewletIdToActions[e.compositeId]) {
this.viewletIdToActions[e.compositeId].deactivate();
}
}
public showActivity(viewletId: string, badge: IBadge, clazz?: string): void {
let action = this.viewletIdToActions[viewletId];
if (action) {
action.setBadge(badge);
if (clazz) {
action.class = clazz;
}
}
}
public clearActivity(viewletId: string): void {
this.showActivity(viewletId, null);
}
public createContentArea(parent: Builder): Builder {
let $el = $(parent);
let $result = $('.content').appendTo($el);
// Top Actionbar with action items for each viewlet action
this.createViewletSwitcher($result.clone());
// Bottom Toolbar with action items for global actions
// this.createGlobalToolBarArea($result.clone()); // not used currently
return $result;
}
private createViewletSwitcher(div: Builder): void {
// Viewlet switcher is on top
this.viewletSwitcherBar = new ActionBar(div, {
actionItemProvider: (action: Action) => this.activityActionItems[action.id],
orientation: ActionsOrientation.VERTICAL,
ariaLabel: nls.localize('activityBarAriaLabel', "Active View Switcher")
});
this.viewletSwitcherBar.getContainer().addClass('position-top');
// Build Viewlet Actions in correct order
let activeViewlet = this.viewletService.getActiveViewlet();
let registry = (<ViewletRegistry>Registry.as(ViewletExtensions.Viewlets));
let viewletActions: Action[] = registry.getViewlets()
.sort((v1: ViewletDescriptor, v2: ViewletDescriptor) => v1.order - v2.order)
.map((viewlet: ViewletDescriptor) => {
let action = this.instantiationService.createInstance(ViewletActivityAction, viewlet.id + '.activity-bar-action', viewlet);
let keybinding: string = null;
let keys = this.keybindingService.lookupKeybindings(viewlet.id).map(k => this.keybindingService.getLabelFor(k));
if (keys && keys.length) {
keybinding = keys[0];
}
this.activityActionItems[action.id] = new ActivityActionItem(action, viewlet.name, keybinding);
this.viewletIdToActions[viewlet.id] = action;
// Mark active viewlet action as active
if (activeViewlet && activeViewlet.getId() === viewlet.id) {
action.activate();
}
return action;
}
);
// Add to viewlet switcher
this.viewletSwitcherBar.push(viewletActions, { label: true, icon: true });
}
private createGlobalToolBarArea(div: Builder): void {
// Global action bar is on the bottom
this.globalToolBar = new ToolBar(div.getHTMLElement(), this.contextMenuService, {
actionItemProvider: (action: Action) => this.activityActionItems[action.id],
orientation: ActionsOrientation.VERTICAL
});
this.globalToolBar.getContainer().addClass('global');
this.globalToolBar.actionRunner.addListener(events.EventType.RUN, (e: any) => {
// Check for Error
if (e.error && !errors.isPromiseCanceledError(e.error)) {
this.messageService.show(Severity.Error, e.error);
}
// Log in telemetry
if (this.telemetryService) {
this.telemetryService.publicLog('workbenchActionExecuted', { id: e.action.id, from: 'activityBar' });
}
});
// Build Global Actions in correct order
let primaryActions = this.getGlobalActions(true);
let secondaryActions = this.getGlobalActions(false);
if (primaryActions.length + secondaryActions.length > 0) {
this.globalToolBar.getContainer().addClass('position-bottom');
}
// Add to global action bar
this.globalToolBar.setActions(prepareActions(primaryActions), prepareActions(secondaryActions))();
}
private getGlobalActions(primary: boolean): IAction[] {
let actionBarRegistry = <IActionBarRegistry>Registry.as(ActionBarExtensions.Actionbar);
// Collect actions from actionbar contributor
let actions: IAction[];
if (primary) {
actions = actionBarRegistry.getActionBarActionsForContext(Scope.GLOBAL, CONTEXT);
} else {
actions = actionBarRegistry.getSecondaryActionBarActionsForContext(Scope.GLOBAL, CONTEXT);
}
return actions.map((action: Action) => {
if (primary) {
let keybinding: string = null;
let keys = this.keybindingService.lookupKeybindings(action.id).map(k => this.keybindingService.getLabelFor(k));
if (keys && keys.length) |
let actionItem = actionBarRegistry.getActionItemForContext(Scope.GLOBAL, CONTEXT, action);
if (!actionItem) {
actionItem = new ActivityActionItem(action, action.label, keybinding);
}
if (actionItem instanceof ActivityActionItem) {
(<ActivityActionItem> actionItem).keybinding = keybinding;
}
this.activityActionItems[action.id] = actionItem;
}
return action;
});
}
public dispose(): void {
if (this.viewletSwitcherBar) {
this.viewletSwitcherBar.dispose();
this.viewletSwitcherBar = null;
}
if (this.globalToolBar) {
this.globalToolBar.dispose();
this.globalToolBar = null;
}
super.dispose();
}
}
class ViewletActivityAction extends ActivityAction {
private static preventDoubleClickDelay = 300;
private static lastRun: number = 0;
private viewlet: ViewletDescriptor;
constructor(
id: string, viewlet: ViewletDescriptor,
@IViewletService private viewletService: IViewletService,
@IPartService private partService: IPartService
) {
super(id, viewlet.name, viewlet.cssClass);
this.viewlet = viewlet;
}
public run(): Promise {
// cheap trick to prevent accident trigger on a doubleclick (to help nervous people)
let now = new Date().getTime();
if (now - ViewletActivityAction.lastRun < ViewletActivityAction.preventDoubleClickDelay) {
return Promise.as(true);
}
ViewletActivityAction.lastRun = now;
let sideBarHidden = this.partService.isSideBarHidden();
let activeViewlet = this.viewletService.getActiveViewlet();
// Hide sidebar if selected viewlet already visible
if (!sideBarHidden && activeViewlet && activeViewlet.getId() === this.viewlet.id) {
this.partService.setSideBarHidden(true);
}
// Open viewlet and focus it
else {
this.viewletService.openViewlet(this.viewlet.id, true).done(null, errors.onUnexpectedError);
this.activate();
}
return Promise.as(true);
}
} | {
keybinding = keys[0];
} | conditional_block |
activitybarPart.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import 'vs/css!./media/activityBarPart';
import nls = require('vs/nls');
import {Promise} from 'vs/base/common/winjs.base';
import {Builder, $} from 'vs/base/browser/builder';
import {Action, IAction} from 'vs/base/common/actions';
import errors = require('vs/base/common/errors');
import events = require('vs/base/common/events');
import {ActionsOrientation, ActionBar, IActionItem} from 'vs/base/browser/ui/actionbar/actionbar';
import {Scope, IActionBarRegistry, Extensions as ActionBarExtensions, prepareActions} from 'vs/workbench/browser/actionBarRegistry';
import {CONTEXT, ToolBar} from 'vs/base/browser/ui/toolbar/toolbar';
import {Registry} from 'vs/platform/platform';
import {CompositeEvent, EventType} from 'vs/workbench/common/events';
import {ViewletDescriptor, ViewletRegistry, Extensions as ViewletExtensions} from 'vs/workbench/browser/viewlet';
import {Part} from 'vs/workbench/browser/part';
import {ActivityAction, ActivityActionItem} from 'vs/workbench/browser/parts/activitybar/activityAction';
import {IViewletService} from 'vs/workbench/services/viewlet/common/viewletService';
import {IActivityService, IBadge} from 'vs/workbench/services/activity/common/activityService';
import {IPartService} from 'vs/workbench/services/part/common/partService';
import {IContextMenuService} from 'vs/platform/contextview/browser/contextView';
import {IEventService} from 'vs/platform/event/common/event';
import {IInstantiationService} from 'vs/platform/instantiation/common/instantiation';
import {IMessageService, Severity} from 'vs/platform/message/common/message';
import {ITelemetryService} from 'vs/platform/telemetry/common/telemetry';
import {IKeybindingService} from 'vs/platform/keybinding/common/keybindingService';
export class ActivitybarPart extends Part implements IActivityService {
public serviceId = IActivityService;
private viewletSwitcherBar: ActionBar;
private globalToolBar: ToolBar;
private activityActionItems: { [actionId: string]: IActionItem; };
private viewletIdToActions: { [viewletId: string]: ActivityAction; };
private instantiationService: IInstantiationService;
constructor(
private viewletService: IViewletService,
private messageService: IMessageService,
private telemetryService: ITelemetryService,
private eventService: IEventService,
private contextMenuService: IContextMenuService,
private keybindingService: IKeybindingService,
id: string
) {
super(id);
this.activityActionItems = {};
this.viewletIdToActions = {};
this.registerListeners();
}
public setInstantiationService(service: IInstantiationService): void {
this.instantiationService = service;
}
private registerListeners(): void {
// Activate viewlet action on opening of a viewlet
this.toUnbind.push(this.eventService.addListener(EventType.COMPOSITE_OPENING, (e: CompositeEvent) => this.onCompositeOpening(e)));
// Deactivate viewlet action on close
this.toUnbind.push(this.eventService.addListener(EventType.COMPOSITE_CLOSED, (e: CompositeEvent) => this.onCompositeClosed(e)));
}
private onCompositeOpening(e: CompositeEvent): void {
if (this.viewletIdToActions[e.compositeId]) {
this.viewletIdToActions[e.compositeId].activate();
// There can only be one active viewlet action
for (let key in this.viewletIdToActions) {
if (this.viewletIdToActions.hasOwnProperty(key) && key !== e.compositeId) {
this.viewletIdToActions[key].deactivate();
}
}
}
}
private onCompositeClosed(e: CompositeEvent): void {
if (this.viewletIdToActions[e.compositeId]) {
this.viewletIdToActions[e.compositeId].deactivate();
}
}
public showActivity(viewletId: string, badge: IBadge, clazz?: string): void {
let action = this.viewletIdToActions[viewletId];
if (action) {
action.setBadge(badge);
if (clazz) {
action.class = clazz;
}
}
}
public | (viewletId: string): void {
this.showActivity(viewletId, null);
}
// Creates the part's content element inside `parent` and populates it with
// the viewlet switcher. Returns the content container.
public createContentArea(parent: Builder): Builder {
	const container = $(parent);
	const content = $('.content').appendTo(container);
	// Top Actionbar with action items for each viewlet action
	this.createViewletSwitcher(content.clone());
	// Bottom Toolbar with action items for global actions
	// this.createGlobalToolBarArea(content.clone()); // not used currently
	return content;
}
private createViewletSwitcher(div: Builder): void {
// Viewlet switcher is on top
this.viewletSwitcherBar = new ActionBar(div, {
actionItemProvider: (action: Action) => this.activityActionItems[action.id],
orientation: ActionsOrientation.VERTICAL,
ariaLabel: nls.localize('activityBarAriaLabel', "Active View Switcher")
});
this.viewletSwitcherBar.getContainer().addClass('position-top');
// Build Viewlet Actions in correct order
let activeViewlet = this.viewletService.getActiveViewlet();
let registry = (<ViewletRegistry>Registry.as(ViewletExtensions.Viewlets));
let viewletActions: Action[] = registry.getViewlets()
.sort((v1: ViewletDescriptor, v2: ViewletDescriptor) => v1.order - v2.order)
.map((viewlet: ViewletDescriptor) => {
let action = this.instantiationService.createInstance(ViewletActivityAction, viewlet.id + '.activity-bar-action', viewlet);
let keybinding: string = null;
let keys = this.keybindingService.lookupKeybindings(viewlet.id).map(k => this.keybindingService.getLabelFor(k));
if (keys && keys.length) {
keybinding = keys[0];
}
this.activityActionItems[action.id] = new ActivityActionItem(action, viewlet.name, keybinding);
this.viewletIdToActions[viewlet.id] = action;
// Mark active viewlet action as active
if (activeViewlet && activeViewlet.getId() === viewlet.id) {
action.activate();
}
return action;
}
);
// Add to viewlet switcher
this.viewletSwitcherBar.push(viewletActions, { label: true, icon: true });
}
// Builds the global tool bar at the bottom of the activity bar.
// NOTE(review): currently dead code — its only call site is commented out
// in createContentArea ("not used currently").
private createGlobalToolBarArea(div: Builder): void {
// Global action bar is on the bottom
this.globalToolBar = new ToolBar(div.getHTMLElement(), this.contextMenuService, {
actionItemProvider: (action: Action) => this.activityActionItems[action.id],
orientation: ActionsOrientation.VERTICAL
});
this.globalToolBar.getContainer().addClass('global');
// Surface action errors to the user and log executions to telemetry.
this.globalToolBar.actionRunner.addListener(events.EventType.RUN, (e: any) => {
// Check for Error
if (e.error && !errors.isPromiseCanceledError(e.error)) {
this.messageService.show(Severity.Error, e.error);
}
// Log in telemetry
if (this.telemetryService) {
this.telemetryService.publicLog('workbenchActionExecuted', { id: e.action.id, from: 'activityBar' });
}
});
// Build Global Actions in correct order
let primaryActions = this.getGlobalActions(true);
let secondaryActions = this.getGlobalActions(false);
// Only anchor the bar at the bottom when there is something to show.
if (primaryActions.length + secondaryActions.length > 0) {
this.globalToolBar.getContainer().addClass('position-bottom');
}
// Add to global action bar
this.globalToolBar.setActions(prepareActions(primaryActions), prepareActions(secondaryActions))();
}
// Collects primary or secondary global actions from the action bar registry.
// For primary actions it also registers an action item (with keybinding
// label) in activityActionItems as a side effect.
private getGlobalActions(primary: boolean): IAction[] {
	const actionBarRegistry = <IActionBarRegistry>Registry.as(ActionBarExtensions.Actionbar);
	// Collect actions from actionbar contributor
	const actions: IAction[] = primary
		? actionBarRegistry.getActionBarActionsForContext(Scope.GLOBAL, CONTEXT)
		: actionBarRegistry.getSecondaryActionBarActionsForContext(Scope.GLOBAL, CONTEXT);
	return actions.map((action: Action) => {
		if (!primary) {
			return action;
		}
		const keyLabels = this.keybindingService.lookupKeybindings(action.id).map(k => this.keybindingService.getLabelFor(k));
		const keybinding: string = (keyLabels && keyLabels.length) ? keyLabels[0] : null;
		// Prefer a contributed action item; fall back to a plain activity item.
		const actionItem = actionBarRegistry.getActionItemForContext(Scope.GLOBAL, CONTEXT, action)
			|| new ActivityActionItem(action, action.label, keybinding);
		if (actionItem instanceof ActivityActionItem) {
			(<ActivityActionItem>actionItem).keybinding = keybinding;
		}
		this.activityActionItems[action.id] = actionItem;
		return action;
	});
}
// Disposes both action bars (if created) and clears the references so a
// repeated dispose() is a no-op, then delegates to the base part.
public dispose(): void {
if (this.viewletSwitcherBar) {
this.viewletSwitcherBar.dispose();
this.viewletSwitcherBar = null;
}
if (this.globalToolBar) {
this.globalToolBar.dispose();
this.globalToolBar = null;
}
super.dispose();
}
}
class ViewletActivityAction extends ActivityAction {
private static preventDoubleClickDelay = 300;
private static lastRun: number = 0;
private viewlet: ViewletDescriptor;
constructor(
id: string, viewlet: ViewletDescriptor,
@IViewletService private viewletService: IViewletService,
@IPartService private partService: IPartService
) {
super(id, viewlet.name, viewlet.cssClass);
this.viewlet = viewlet;
}
public run(): Promise {
// cheap trick to prevent accident trigger on a doubleclick (to help nervous people)
let now = new Date().getTime();
if (now - ViewletActivityAction.lastRun < ViewletActivityAction.preventDoubleClickDelay) {
return Promise.as(true);
}
ViewletActivityAction.lastRun = now;
let sideBarHidden = this.partService.isSideBarHidden();
let activeViewlet = this.viewletService.getActiveViewlet();
// Hide sidebar if selected viewlet already visible
if (!sideBarHidden && activeViewlet && activeViewlet.getId() === this.viewlet.id) {
this.partService.setSideBarHidden(true);
}
// Open viewlet and focus it
else {
this.viewletService.openViewlet(this.viewlet.id, true).done(null, errors.onUnexpectedError);
this.activate();
}
return Promise.as(true);
}
} | clearActivity | identifier_name |
// NOTE(review): the following dataset-viewer residue is not part of the source and should be removed:
// "Subsets and Splits / No community queries yet / The top public SQL queries from the community will appear here once available."