file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
Player.js | export default class | extends Phaser.Sprite {
constructor(game) {
super(game, 0, 0, 'hero');
// enable physics for the player
this.game.physics.arcade.enableBody(this);
this.scale.setTo(1.1);
this.body.collideWorldBounds = true;
this.game.physics.arcade.enable(this);
// camera follows player
this.game.camera.follow(this);
// walking animations for the player
this.animations.add("walk-left", [117, 118, 119, 120, 121, 122, 123, 124, 125], 8, true);
this.animations.add("walk-right", [143, 144, 145, 146, 147, 148, 149, 150, 151], 8, true);
this.animations.add("walk-up", [104, 105, 106, 107, 108, 109, 110, 111, 112], 8, true);
this.animations.add("walk-down", [130, 131, 132, 133, 134, 135, 136, 137, 138,], 8, true);
// create control inputs for player
this.cursors = this.game.input.keyboard.createCursorKeys();
}
update() {
this.body.velocity.x = 0;
this.body.velocity.y = 0;
// player moves in specified direction or stands still
if (this.cursors.left.isDown) {
this.animations.play("walk-left");
this.body.velocity.x =- 180;
} else if (this.cursors.right.isDown) {
this.animations.play("walk-right");
this.body.velocity.x =+ 180;
} else {
this.body.velocity.x = 0;
}
if (this.cursors.up.isDown) {
this.animations.play("walk-up");
this.body.velocity.y =- 180;
} else if (this.cursors.down.isDown) {
this.animations.play("walk-down");
this.body.velocity.y =+ 180;
} else {
this.body.velocity.y = 0;
}
if (this.body.velocity.x === 0 && this.body.velocity.y === 0) {
this.animations.stop();
}
}
}
| Player | identifier_name |
Player.js | export default class Player extends Phaser.Sprite {
constructor(game) {
super(game, 0, 0, 'hero');
// enable physics for the player
this.game.physics.arcade.enableBody(this);
this.scale.setTo(1.1);
this.body.collideWorldBounds = true;
this.game.physics.arcade.enable(this);
// camera follows player
this.game.camera.follow(this);
// walking animations for the player
this.animations.add("walk-left", [117, 118, 119, 120, 121, 122, 123, 124, 125], 8, true);
this.animations.add("walk-right", [143, 144, 145, 146, 147, 148, 149, 150, 151], 8, true);
this.animations.add("walk-up", [104, 105, 106, 107, 108, 109, 110, 111, 112], 8, true);
this.animations.add("walk-down", [130, 131, 132, 133, 134, 135, 136, 137, 138,], 8, true);
// create control inputs for player
this.cursors = this.game.input.keyboard.createCursorKeys();
}
update() {
this.body.velocity.x = 0;
this.body.velocity.y = 0;
// player moves in specified direction or stands still
if (this.cursors.left.isDown) {
this.animations.play("walk-left");
this.body.velocity.x =- 180;
} else if (this.cursors.right.isDown) {
this.animations.play("walk-right");
this.body.velocity.x =+ 180;
} else {
this.body.velocity.x = 0;
}
if (this.cursors.up.isDown) {
this.animations.play("walk-up");
this.body.velocity.y =- 180;
} else if (this.cursors.down.isDown) {
this.animations.play("walk-down");
this.body.velocity.y =+ 180;
} else { | this.animations.stop();
}
}
} | this.body.velocity.y = 0;
}
if (this.body.velocity.x === 0 && this.body.velocity.y === 0) { | random_line_split |
Player.js | export default class Player extends Phaser.Sprite {
constructor(game) {
super(game, 0, 0, 'hero');
// enable physics for the player
this.game.physics.arcade.enableBody(this);
this.scale.setTo(1.1);
this.body.collideWorldBounds = true;
this.game.physics.arcade.enable(this);
// camera follows player
this.game.camera.follow(this);
// walking animations for the player
this.animations.add("walk-left", [117, 118, 119, 120, 121, 122, 123, 124, 125], 8, true);
this.animations.add("walk-right", [143, 144, 145, 146, 147, 148, 149, 150, 151], 8, true);
this.animations.add("walk-up", [104, 105, 106, 107, 108, 109, 110, 111, 112], 8, true);
this.animations.add("walk-down", [130, 131, 132, 133, 134, 135, 136, 137, 138,], 8, true);
// create control inputs for player
this.cursors = this.game.input.keyboard.createCursorKeys();
}
update() |
}
| {
this.body.velocity.x = 0;
this.body.velocity.y = 0;
// player moves in specified direction or stands still
if (this.cursors.left.isDown) {
this.animations.play("walk-left");
this.body.velocity.x =- 180;
} else if (this.cursors.right.isDown) {
this.animations.play("walk-right");
this.body.velocity.x =+ 180;
} else {
this.body.velocity.x = 0;
}
if (this.cursors.up.isDown) {
this.animations.play("walk-up");
this.body.velocity.y =- 180;
} else if (this.cursors.down.isDown) {
this.animations.play("walk-down");
this.body.velocity.y =+ 180;
} else {
this.body.velocity.y = 0;
}
if (this.body.velocity.x === 0 && this.body.velocity.y === 0) {
this.animations.stop();
}
} | identifier_body |
Player.js | export default class Player extends Phaser.Sprite {
constructor(game) {
super(game, 0, 0, 'hero');
// enable physics for the player
this.game.physics.arcade.enableBody(this);
this.scale.setTo(1.1);
this.body.collideWorldBounds = true;
this.game.physics.arcade.enable(this);
// camera follows player
this.game.camera.follow(this);
// walking animations for the player
this.animations.add("walk-left", [117, 118, 119, 120, 121, 122, 123, 124, 125], 8, true);
this.animations.add("walk-right", [143, 144, 145, 146, 147, 148, 149, 150, 151], 8, true);
this.animations.add("walk-up", [104, 105, 106, 107, 108, 109, 110, 111, 112], 8, true);
this.animations.add("walk-down", [130, 131, 132, 133, 134, 135, 136, 137, 138,], 8, true);
// create control inputs for player
this.cursors = this.game.input.keyboard.createCursorKeys();
}
update() {
this.body.velocity.x = 0;
this.body.velocity.y = 0;
// player moves in specified direction or stands still
if (this.cursors.left.isDown) {
this.animations.play("walk-left");
this.body.velocity.x =- 180;
} else if (this.cursors.right.isDown) {
this.animations.play("walk-right");
this.body.velocity.x =+ 180;
} else {
this.body.velocity.x = 0;
}
if (this.cursors.up.isDown) {
this.animations.play("walk-up");
this.body.velocity.y =- 180;
} else if (this.cursors.down.isDown) {
this.animations.play("walk-down");
this.body.velocity.y =+ 180;
} else {
this.body.velocity.y = 0;
}
if (this.body.velocity.x === 0 && this.body.velocity.y === 0) |
}
}
| {
this.animations.stop();
} | conditional_block |
lambda-proxy.ts | export interface Event {
resource?: string;
path: string;
httpMethod: string;
headers: EventHeaders;
queryStringParameters?: EventQueryStringParameters;
pathParameters?: EventPathParameters;
stageVariables?: EventStageVariables;
requestContext?: {
accountId: string;
resourceId: string;
stage: string;
requestId: string;
identity: {
cognitoIdentityPoolId: string;
accountId: string;
cognitoIdentityId: string;
caller: string;
apiKey: string; | accessKey: string;
cognitoAuthenticationType: string;
cognitoAuthenticationProvider: string;
userArn: string;
userAgent: string;
user: string;
}
resourcePath: string;
httpMethod: string;
apiId: string;
};
isBase64Encoded?: boolean;
body?: string;
}
export interface EventHeaders {
[key: string]: string;
}
export interface EventQueryStringParameters {
[key: string]: string;
}
export interface EventPathParameters {
[key: string]: string;
}
export interface EventStageVariables {
[key: string]: string;
}
// Response
export interface Response {
statusCode: number;
headers: { [key: string]: string };
body: string;
}
export interface Context {
// Properties
functionName: string;
functionVersion: string;
invokedFunctionArn: string;
memoryLimitInMB: number;
awsRequestId: string;
logGroupName: string;
logStreamName: string;
identity?: CognitoIdentity;
clientContext?: ClientContext;
callbackWaitsForEmptyEventLoop?: boolean;
// Functions
getRemainingTimeInMillis(): number;
}
export interface CognitoIdentity {
cognito_identity_id: string;
cognito_identity_pool_id: string;
}
export interface ClientContext {
client: ClientContextClient;
Custom?: any;
env: ClientContextEnv;
}
export interface ClientContextClient {
installation_id: string;
app_title: string;
app_version_name: string;
app_version_code: string;
app_package_name: string;
}
export interface ClientContextEnv {
platform_version: string;
platform: string;
make: string;
model: string;
locale: string;
} | sourceIp: string, | random_line_split |
youtube-player.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// Workaround for: https://github.com/bazelbuild/rules_nodejs/issues/1265
/// <reference types="youtube" />
import {
AfterViewInit,
ChangeDetectionStrategy,
Component,
ElementRef,
Input,
NgZone,
OnDestroy,
OnInit,
Output,
ViewChild,
ViewEncapsulation,
Inject,
PLATFORM_ID,
} from '@angular/core';
import {isPlatformBrowser} from '@angular/common';
import {
combineLatest,
ConnectableObservable,
merge,
MonoTypeOperatorFunction,
Observable,
of as observableOf,
OperatorFunction,
pipe,
Subject,
of,
BehaviorSubject,
fromEventPattern,
} from 'rxjs';
import {
combineLatest as combineLatestOp,
distinctUntilChanged,
filter,
flatMap,
map,
publish,
scan,
skipWhile,
startWith,
take,
takeUntil,
withLatestFrom,
switchMap,
tap,
} from 'rxjs/operators';
declare global {
interface Window {
YT: typeof YT | undefined;
onYouTubeIframeAPIReady: (() => void) | undefined;
}
}
export const DEFAULT_PLAYER_WIDTH = 640;
export const DEFAULT_PLAYER_HEIGHT = 390;
// The native YT.Player doesn't expose the set videoId, but we need it for
// convenience.
interface Player extends YT.Player {
videoId?: string;
playerVars?: YT.PlayerVars;
}
// The player isn't fully initialized when it's constructed.
// The only field available is destroy and addEventListener.
type UninitializedPlayer = Pick<Player, 'videoId' | 'playerVars' | 'destroy' | 'addEventListener'>;
/**
* Object used to store the state of the player if the
* user tries to interact with the API before it has been loaded.
*/
interface PendingPlayerState {
playbackState?: YT.PlayerState.PLAYING | YT.PlayerState.PAUSED | YT.PlayerState.CUED;
playbackRate?: number;
volume?: number;
muted?: boolean;
seek?: {seconds: number, allowSeekAhead: boolean};
}
/**
* Angular component that renders a YouTube player via the YouTube player
* iframe API.
* @see https://developers.google.com/youtube/iframe_api_reference
*/
@Component({
selector: 'youtube-player',
changeDetection: ChangeDetectionStrategy.OnPush,
encapsulation: ViewEncapsulation.None,
// This div is *replaced* by the YouTube player embed.
template: '<div #youtubeContainer></div>',
})
export class YouTubePlayer implements AfterViewInit, OnDestroy, OnInit {
/** Whether we're currently rendering inside a browser. */
private _isBrowser: boolean;
private _youtubeContainer = new Subject<HTMLElement>();
private _destroyed = new Subject<void>();
private _player: Player | undefined;
private _existingApiReadyCallback: (() => void) | undefined;
private _pendingPlayerState: PendingPlayerState | undefined;
private _playerChanges = new BehaviorSubject<UninitializedPlayer | undefined>(undefined);
/** YouTube Video ID to view */
@Input()
get videoId(): string | undefined { return this._videoId.value; }
set videoId(videoId: string | undefined) {
this._videoId.next(videoId);
}
private _videoId = new BehaviorSubject<string | undefined>(undefined);
/** Height of video player */
@Input()
get height(): number | undefined { return this._height.value; }
set height(height: number | undefined) {
this._height.next(height || DEFAULT_PLAYER_HEIGHT);
}
private _height = new BehaviorSubject<number>(DEFAULT_PLAYER_HEIGHT);
/** Width of video player */
@Input()
get width(): number | undefined { return this._width.value; }
set width(width: number | undefined) {
this._width.next(width || DEFAULT_PLAYER_WIDTH);
}
private _width = new BehaviorSubject<number>(DEFAULT_PLAYER_WIDTH);
/** The moment when the player is supposed to start playing */
@Input()
set startSeconds(startSeconds: number | undefined) {
this._startSeconds.next(startSeconds);
}
private _startSeconds = new BehaviorSubject<number | undefined>(undefined);
/** The moment when the player is supposed to stop playing */
@Input()
set endSeconds(endSeconds: number | undefined) {
this._endSeconds.next(endSeconds);
}
private _endSeconds = new BehaviorSubject<number | undefined>(undefined);
/** The suggested quality of the player */
@Input()
set suggestedQuality(suggestedQuality: YT.SuggestedVideoQuality | undefined) {
this._suggestedQuality.next(suggestedQuality);
}
private _suggestedQuality = new BehaviorSubject<YT.SuggestedVideoQuality | undefined>(undefined);
/**
* Extra parameters used to configure the player. See:
* https://developers.google.com/youtube/player_parameters.html?playerVersion=HTML5#Parameters
*/
@Input()
get playerVars(): YT.PlayerVars | undefined { return this._playerVars.value; }
set playerVars(playerVars: YT.PlayerVars | undefined) {
this._playerVars.next(playerVars);
}
private _playerVars = new BehaviorSubject<YT.PlayerVars | undefined>(undefined);
/**
* Whether the iframe will attempt to load regardless of the status of the api on the
* page. Set this to true if you don't want the `onYouTubeIframeAPIReady` field to be
* set on the global window.
*/
@Input() showBeforeIframeApiLoads: boolean | undefined;
/** Outputs are direct proxies from the player itself. */
@Output() ready: Observable<YT.PlayerEvent> =
this._getLazyEmitter<YT.PlayerEvent>('onReady');
@Output() stateChange: Observable<YT.OnStateChangeEvent> =
this._getLazyEmitter<YT.OnStateChangeEvent>('onStateChange');
@Output() error: Observable<YT.OnErrorEvent> =
this._getLazyEmitter<YT.OnErrorEvent>('onError');
@Output() apiChange: Observable<YT.PlayerEvent> =
this._getLazyEmitter<YT.PlayerEvent>('onApiChange');
@Output() playbackQualityChange: Observable<YT.OnPlaybackQualityChangeEvent> =
this._getLazyEmitter<YT.OnPlaybackQualityChangeEvent>('onPlaybackQualityChange');
@Output() playbackRateChange: Observable<YT.OnPlaybackRateChangeEvent> =
this._getLazyEmitter<YT.OnPlaybackRateChangeEvent>('onPlaybackRateChange');
/** The element that will be replaced by the iframe. */
@ViewChild('youtubeContainer')
youtubeContainer: ElementRef<HTMLElement>;
constructor(private _ngZone: NgZone, @Inject(PLATFORM_ID) platformId: Object) {
this._isBrowser = isPlatformBrowser(platformId);
}
ngOnInit() {
// Don't do anything if we're not in a browser environment.
if (!this._isBrowser) {
return;
}
let iframeApiAvailableObs: Observable<boolean> = observableOf(true);
if (!window.YT) |
// An observable of the currently loaded player.
const playerObs =
createPlayerObservable(
this._youtubeContainer,
this._videoId,
iframeApiAvailableObs,
this._width,
this._height,
this._playerVars,
this._ngZone
).pipe(tap(player => {
// Emit this before the `waitUntilReady` call so that we can bind to
// events that happen as the player is being initialized (e.g. `onReady`).
this._playerChanges.next(player);
}), waitUntilReady(player => {
// Destroy the player if loading was aborted so that we don't end up leaking memory.
if (!playerIsReady(player)) {
player.destroy();
}
}), takeUntil(this._destroyed), publish());
// Set up side effects to bind inputs to the player.
playerObs.subscribe(player => {
this._player = player;
if (player && this._pendingPlayerState) {
this._initializePlayer(player, this._pendingPlayerState);
}
this._pendingPlayerState = undefined;
});
bindSizeToPlayer(playerObs, this._width, this._height);
bindSuggestedQualityToPlayer(playerObs, this._suggestedQuality);
bindCueVideoCall(
playerObs,
this._videoId,
this._startSeconds,
this._endSeconds,
this._suggestedQuality,
this._destroyed);
// After all of the subscriptions are set up, connect the observable.
(playerObs as ConnectableObservable<Player>).connect();
}
/**
* @deprecated No longer being used. To be removed.
* @breaking-change 11.0.0
*/
createEventsBoundInZone(): YT.Events {
return {};
}
ngAfterViewInit() {
this._youtubeContainer.next(this.youtubeContainer.nativeElement);
}
ngOnDestroy() {
if (this._player) {
this._player.destroy();
window.onYouTubeIframeAPIReady = this._existingApiReadyCallback;
}
this._playerChanges.complete();
this._videoId.complete();
this._height.complete();
this._width.complete();
this._startSeconds.complete();
this._endSeconds.complete();
this._suggestedQuality.complete();
this._youtubeContainer.complete();
this._playerVars.complete();
this._destroyed.next();
this._destroyed.complete();
}
/** See https://developers.google.com/youtube/iframe_api_reference#playVideo */
playVideo() {
if (this._player) {
this._player.playVideo();
} else {
this._getPendingState().playbackState = YT.PlayerState.PLAYING;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#pauseVideo */
pauseVideo() {
if (this._player) {
this._player.pauseVideo();
} else {
this._getPendingState().playbackState = YT.PlayerState.PAUSED;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#stopVideo */
stopVideo() {
if (this._player) {
this._player.stopVideo();
} else {
// It seems like YouTube sets the player to CUED when it's stopped.
this._getPendingState().playbackState = YT.PlayerState.CUED;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#seekTo */
seekTo(seconds: number, allowSeekAhead: boolean) {
if (this._player) {
this._player.seekTo(seconds, allowSeekAhead);
} else {
this._getPendingState().seek = {seconds, allowSeekAhead};
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#mute */
mute() {
if (this._player) {
this._player.mute();
} else {
this._getPendingState().muted = true;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#unMute */
unMute() {
if (this._player) {
this._player.unMute();
} else {
this._getPendingState().muted = false;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#isMuted */
isMuted(): boolean {
if (this._player) {
return this._player.isMuted();
}
if (this._pendingPlayerState) {
return !!this._pendingPlayerState.muted;
}
return false;
}
/** See https://developers.google.com/youtube/iframe_api_reference#setVolume */
setVolume(volume: number) {
if (this._player) {
this._player.setVolume(volume);
} else {
this._getPendingState().volume = volume;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVolume */
getVolume(): number {
if (this._player) {
return this._player.getVolume();
}
if (this._pendingPlayerState && this._pendingPlayerState.volume != null) {
return this._pendingPlayerState.volume;
}
return 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#setPlaybackRate */
setPlaybackRate(playbackRate: number) {
if (this._player) {
return this._player.setPlaybackRate(playbackRate);
} else {
this._getPendingState().playbackRate = playbackRate;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlaybackRate */
getPlaybackRate(): number {
if (this._player) {
return this._player.getPlaybackRate();
}
if (this._pendingPlayerState && this._pendingPlayerState.playbackRate != null) {
return this._pendingPlayerState.playbackRate;
}
return 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getAvailablePlaybackRates */
getAvailablePlaybackRates(): number[] {
return this._player ? this._player.getAvailablePlaybackRates() : [];
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoLoadedFraction */
getVideoLoadedFraction(): number {
return this._player ? this._player.getVideoLoadedFraction() : 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlayerState */
getPlayerState(): YT.PlayerState | undefined {
if (!this._isBrowser || !window.YT) {
return undefined;
}
if (this._player) {
return this._player.getPlayerState();
}
if (this._pendingPlayerState && this._pendingPlayerState.playbackState != null) {
return this._pendingPlayerState.playbackState;
}
return YT.PlayerState.UNSTARTED;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getCurrentTime */
getCurrentTime(): number {
if (this._player) {
return this._player.getCurrentTime();
}
if (this._pendingPlayerState && this._pendingPlayerState.seek) {
return this._pendingPlayerState.seek.seconds;
}
return 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlaybackQuality */
getPlaybackQuality(): YT.SuggestedVideoQuality {
return this._player ? this._player.getPlaybackQuality() : 'default';
}
/** See https://developers.google.com/youtube/iframe_api_reference#getAvailableQualityLevels */
getAvailableQualityLevels(): YT.SuggestedVideoQuality[] {
return this._player ? this._player.getAvailableQualityLevels() : [];
}
/** See https://developers.google.com/youtube/iframe_api_reference#getDuration */
getDuration(): number {
return this._player ? this._player.getDuration() : 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoUrl */
getVideoUrl(): string {
return this._player ? this._player.getVideoUrl() : '';
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoEmbedCode */
getVideoEmbedCode(): string {
return this._player ? this._player.getVideoEmbedCode() : '';
}
/** Gets an object that should be used to store the temporary API state. */
private _getPendingState(): PendingPlayerState {
if (!this._pendingPlayerState) {
this._pendingPlayerState = {};
}
return this._pendingPlayerState;
}
/** Initializes a player from a temporary state. */
private _initializePlayer(player: YT.Player, state: PendingPlayerState): void {
const {playbackState, playbackRate, volume, muted, seek} = state;
switch (playbackState) {
case YT.PlayerState.PLAYING: player.playVideo(); break;
case YT.PlayerState.PAUSED: player.pauseVideo(); break;
case YT.PlayerState.CUED: player.stopVideo(); break;
}
if (playbackRate != null) {
player.setPlaybackRate(playbackRate);
}
if (volume != null) {
player.setVolume(volume);
}
if (muted != null) {
muted ? player.mute() : player.unMute();
}
if (seek != null) {
player.seekTo(seek.seconds, seek.allowSeekAhead);
}
}
/** Gets an observable that adds an event listener to the player when a user subscribes to it. */
private _getLazyEmitter<T extends YT.PlayerEvent>(name: keyof YT.Events): Observable<T> {
// Start with the stream of players. This way the events will be transferred
// over to the new player if it gets swapped out under-the-hood.
return this._playerChanges.pipe(
// Switch to the bound event. `switchMap` ensures that the old event is removed when the
// player is changed. If there's no player, return an observable that never emits.
switchMap(player => {
return player ? fromEventPattern<T>((listener: (event: T) => void) => {
player.addEventListener(name, listener);
}, (listener: (event: T) => void) => {
// The API seems to throw when we try to unbind from a destroyed player and it doesn't
// expose whether the player has been destroyed so we have to wrap it in a try/catch to
// prevent the entire stream from erroring out.
try {
if ((player as Player).removeEventListener!) {
(player as Player).removeEventListener(name, listener);
}
} catch {}
}) : observableOf<T>();
}),
// By default we run all the API interactions outside the zone
// so we have to bring the events back in manually when they emit.
(source: Observable<T>) => new Observable<T>(observer => source.subscribe({
next: value => this._ngZone.run(() => observer.next(value)),
error: error => observer.error(error),
complete: () => observer.complete()
})),
// Ensures that everything is cleared out on destroy.
takeUntil(this._destroyed)
);
}
}
/** Listens to changes to the given width and height and sets it on the player. */
function bindSizeToPlayer(
playerObs: Observable<YT.Player | undefined>,
widthObs: Observable<number>,
heightObs: Observable<number>
) {
return combineLatest([playerObs, widthObs, heightObs])
.subscribe(([player, width, height]) => player && player.setSize(width, height));
}
/** Listens to changes from the suggested quality and sets it on the given player. */
function bindSuggestedQualityToPlayer(
playerObs: Observable<YT.Player | undefined>,
suggestedQualityObs: Observable<YT.SuggestedVideoQuality | undefined>
) {
return combineLatest([
playerObs,
suggestedQualityObs
]).subscribe(
([player, suggestedQuality]) =>
player && suggestedQuality && player.setPlaybackQuality(suggestedQuality));
}
/**
* Returns an observable that emits the loaded player once it's ready. Certain properties/methods
* won't be available until the iframe finishes loading.
* @param onAbort Callback function that will be invoked if the player loading was aborted before
* it was able to complete. Can be used to clean up any loose references.
*/
function waitUntilReady(onAbort: (player: UninitializedPlayer) => void):
OperatorFunction<UninitializedPlayer | undefined, Player | undefined> {
return flatMap(player => {
if (!player) {
return observableOf<Player|undefined>(undefined);
}
if (playerIsReady(player)) {
return observableOf(player as Player);
}
// Since removeEventListener is not on Player when it's initialized, we can't use fromEvent.
// The player is not initialized fully until the ready is called.
return new Observable<Player>(emitter => {
let aborted = false;
let resolved = false;
const onReady = (event: YT.PlayerEvent) => {
resolved = true;
if (!aborted) {
event.target.removeEventListener('onReady', onReady);
emitter.next(event.target);
}
};
player.addEventListener('onReady', onReady);
return () => {
aborted = true;
if (!resolved) {
onAbort(player);
}
};
}).pipe(take(1), startWith(undefined));
});
}
/** Create an observable for the player based on the given options. */
function createPlayerObservable(
youtubeContainer: Observable<HTMLElement>,
videoIdObs: Observable<string | undefined>,
iframeApiAvailableObs: Observable<boolean>,
widthObs: Observable<number>,
heightObs: Observable<number>,
playerVarsObs: Observable<YT.PlayerVars | undefined>,
ngZone: NgZone
): Observable<UninitializedPlayer | undefined> {
const playerOptions = combineLatest([videoIdObs, playerVarsObs]).pipe(
withLatestFrom(combineLatest([widthObs, heightObs])),
map(([constructorOptions, sizeOptions]) => {
const [videoId, playerVars] = constructorOptions;
const [width, height] = sizeOptions;
return videoId ? ({ videoId, playerVars, width, height }) : undefined;
}),
);
return combineLatest([youtubeContainer, playerOptions, of(ngZone)])
.pipe(
skipUntilRememberLatest(iframeApiAvailableObs),
scan(syncPlayerState, undefined),
distinctUntilChanged());
}
/** Skips the given observable until the other observable emits true, then emit the latest. */
function skipUntilRememberLatest<T>(notifier: Observable<boolean>): MonoTypeOperatorFunction<T> {
return pipe(
combineLatestOp(notifier),
skipWhile(([_, doneSkipping]) => !doneSkipping),
map(([value]) => value));
}
/** Destroy the player if there are no options, or create the player if there are options. */
function syncPlayerState(
player: UninitializedPlayer | undefined,
[container, videoOptions, ngZone]: [HTMLElement, YT.PlayerOptions | undefined, NgZone],
): UninitializedPlayer | undefined {
if (player && videoOptions && player.playerVars !== videoOptions.playerVars) {
// The player needs to be recreated if the playerVars are different.
player.destroy();
} else if (!videoOptions) {
if (player) {
// Destroy the player if the videoId was removed.
player.destroy();
}
return;
} else if (player) {
return player;
}
// Important! We need to create the Player object outside of the `NgZone`, because it kicks
// off a 250ms setInterval which will continually trigger change detection if we don't.
const newPlayer: UninitializedPlayer =
ngZone.runOutsideAngular(() => new YT.Player(container, videoOptions));
newPlayer.videoId = videoOptions.videoId;
newPlayer.playerVars = videoOptions.playerVars;
return newPlayer;
}
/**
* Call cueVideoById if the videoId changes, or when start or end seconds change. cueVideoById will
* change the loaded video id to the given videoId, and set the start and end times to the given
* start/end seconds.
*/
function bindCueVideoCall(
playerObs: Observable<Player | undefined>,
videoIdObs: Observable<string | undefined>,
startSecondsObs: Observable<number | undefined>,
endSecondsObs: Observable<number | undefined>,
suggestedQualityObs: Observable<YT.SuggestedVideoQuality | undefined>,
destroyed: Observable<void>,
) {
const cueOptionsObs = combineLatest([startSecondsObs, endSecondsObs])
.pipe(map(([startSeconds, endSeconds]) => ({startSeconds, endSeconds})));
// Only respond to changes in cue options if the player is not running.
const filteredCueOptions = cueOptionsObs
.pipe(filterOnOther(playerObs, player => !!player && !hasPlayerStarted(player)));
// If the video id changed, there's no reason to run 'cue' unless the player
// was initialized with a different video id.
const changedVideoId = videoIdObs
.pipe(filterOnOther(playerObs, (player, videoId) => !!player && player.videoId !== videoId));
// If the player changed, there's no reason to run 'cue' unless there are cue options.
const changedPlayer = playerObs.pipe(
filterOnOther(
combineLatest([videoIdObs, cueOptionsObs]),
([videoId, cueOptions], player) =>
!!player &&
(videoId != player.videoId || !!cueOptions.startSeconds || !!cueOptions.endSeconds)));
merge(changedPlayer, changedVideoId, filteredCueOptions)
.pipe(
withLatestFrom(combineLatest([playerObs, videoIdObs, cueOptionsObs, suggestedQualityObs])),
map(([_, values]) => values),
takeUntil(destroyed),
)
.subscribe(([player, videoId, cueOptions, suggestedQuality]) => {
if (!videoId || !player) {
return;
}
player.videoId = videoId;
player.cueVideoById({
videoId,
suggestedQuality,
...cueOptions,
});
});
}
function hasPlayerStarted(player: YT.Player): boolean {
const state = player.getPlayerState();
return state !== YT.PlayerState.UNSTARTED && state !== YT.PlayerState.CUED;
}
function playerIsReady(player: UninitializedPlayer): player is Player {
return 'getPlayerStatus' in player;
}
/** Combines the two observables temporarily for the filter function. */
function filterOnOther<R, T>(
otherObs: Observable<T>,
filterFn: (t: T, r?: R) => boolean,
): MonoTypeOperatorFunction<R> {
return pipe(
withLatestFrom(otherObs),
filter(([value, other]) => filterFn(other, value)),
map(([value]) => value),
);
}
| {
if (this.showBeforeIframeApiLoads) {
throw new Error('Namespace YT not found, cannot construct embedded youtube player. ' +
'Please install the YouTube Player API Reference for iframe Embeds: ' +
'https://developers.google.com/youtube/iframe_api_reference');
}
const iframeApiAvailableSubject = new Subject<boolean>();
this._existingApiReadyCallback = window.onYouTubeIframeAPIReady;
window.onYouTubeIframeAPIReady = () => {
if (this._existingApiReadyCallback) {
this._existingApiReadyCallback();
}
this._ngZone.run(() => iframeApiAvailableSubject.next(true));
};
iframeApiAvailableObs = iframeApiAvailableSubject.pipe(take(1), startWith(false));
} | conditional_block |
youtube-player.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// Workaround for: https://github.com/bazelbuild/rules_nodejs/issues/1265
/// <reference types="youtube" />
import {
AfterViewInit,
ChangeDetectionStrategy,
Component,
ElementRef,
Input,
NgZone,
OnDestroy,
OnInit,
Output,
ViewChild,
ViewEncapsulation,
Inject,
PLATFORM_ID,
} from '@angular/core';
import {isPlatformBrowser} from '@angular/common';
import {
combineLatest,
ConnectableObservable,
merge,
MonoTypeOperatorFunction,
Observable,
of as observableOf,
OperatorFunction,
pipe,
Subject,
of,
BehaviorSubject,
fromEventPattern,
} from 'rxjs';
import {
combineLatest as combineLatestOp,
distinctUntilChanged,
filter,
flatMap,
map,
publish,
scan,
skipWhile,
startWith,
take,
takeUntil,
withLatestFrom,
switchMap,
tap,
} from 'rxjs/operators';
declare global {
interface Window {
YT: typeof YT | undefined;
onYouTubeIframeAPIReady: (() => void) | undefined;
}
}
export const DEFAULT_PLAYER_WIDTH = 640;
export const DEFAULT_PLAYER_HEIGHT = 390;
// The native YT.Player doesn't expose the set videoId, but we need it for
// convenience.
interface Player extends YT.Player {
videoId?: string;
playerVars?: YT.PlayerVars;
}
// The player isn't fully initialized when it's constructed.
// The only field available is destroy and addEventListener.
type UninitializedPlayer = Pick<Player, 'videoId' | 'playerVars' | 'destroy' | 'addEventListener'>;
/**
* Object used to store the state of the player if the
* user tries to interact with the API before it has been loaded.
*/
interface PendingPlayerState {
playbackState?: YT.PlayerState.PLAYING | YT.PlayerState.PAUSED | YT.PlayerState.CUED;
playbackRate?: number;
volume?: number;
muted?: boolean;
seek?: {seconds: number, allowSeekAhead: boolean};
}
/**
* Angular component that renders a YouTube player via the YouTube player
* iframe API.
* @see https://developers.google.com/youtube/iframe_api_reference
*/
@Component({
selector: 'youtube-player',
changeDetection: ChangeDetectionStrategy.OnPush,
encapsulation: ViewEncapsulation.None,
// This div is *replaced* by the YouTube player embed.
template: '<div #youtubeContainer></div>',
})
export class YouTubePlayer implements AfterViewInit, OnDestroy, OnInit {
/** Whether we're currently rendering inside a browser. */
private _isBrowser: boolean;
private _youtubeContainer = new Subject<HTMLElement>();
private _destroyed = new Subject<void>();
private _player: Player | undefined;
private _existingApiReadyCallback: (() => void) | undefined;
private _pendingPlayerState: PendingPlayerState | undefined;
private _playerChanges = new BehaviorSubject<UninitializedPlayer | undefined>(undefined);
/** YouTube Video ID to view */
@Input()
get videoId(): string | undefined { return this._videoId.value; }
set videoId(videoId: string | undefined) {
this._videoId.next(videoId);
}
private _videoId = new BehaviorSubject<string | undefined>(undefined);
/** Height of video player */
@Input()
get height(): number | undefined { return this._height.value; }
set height(height: number | undefined) {
this._height.next(height || DEFAULT_PLAYER_HEIGHT);
}
private _height = new BehaviorSubject<number>(DEFAULT_PLAYER_HEIGHT);
/** Width of video player */
@Input()
get width(): number | undefined { return this._width.value; }
set width(width: number | undefined) {
this._width.next(width || DEFAULT_PLAYER_WIDTH);
}
private _width = new BehaviorSubject<number>(DEFAULT_PLAYER_WIDTH);
/** The moment when the player is supposed to start playing */
@Input()
set startSeconds(startSeconds: number | undefined) {
this._startSeconds.next(startSeconds);
}
private _startSeconds = new BehaviorSubject<number | undefined>(undefined);
/** The moment when the player is supposed to stop playing */
@Input()
set endSeconds(endSeconds: number | undefined) {
this._endSeconds.next(endSeconds);
}
private _endSeconds = new BehaviorSubject<number | undefined>(undefined);
/** The suggested quality of the player */
@Input()
set suggestedQuality(suggestedQuality: YT.SuggestedVideoQuality | undefined) {
this._suggestedQuality.next(suggestedQuality);
}
private _suggestedQuality = new BehaviorSubject<YT.SuggestedVideoQuality | undefined>(undefined);
/**
* Extra parameters used to configure the player. See:
* https://developers.google.com/youtube/player_parameters.html?playerVersion=HTML5#Parameters
*/
@Input()
get playerVars(): YT.PlayerVars | undefined { return this._playerVars.value; }
set playerVars(playerVars: YT.PlayerVars | undefined) {
this._playerVars.next(playerVars);
}
private _playerVars = new BehaviorSubject<YT.PlayerVars | undefined>(undefined);
/**
* Whether the iframe will attempt to load regardless of the status of the api on the
* page. Set this to true if you don't want the `onYouTubeIframeAPIReady` field to be
* set on the global window.
*/
@Input() showBeforeIframeApiLoads: boolean | undefined;
/** Outputs are direct proxies from the player itself. */
@Output() ready: Observable<YT.PlayerEvent> =
this._getLazyEmitter<YT.PlayerEvent>('onReady');
@Output() stateChange: Observable<YT.OnStateChangeEvent> =
this._getLazyEmitter<YT.OnStateChangeEvent>('onStateChange');
@Output() error: Observable<YT.OnErrorEvent> =
this._getLazyEmitter<YT.OnErrorEvent>('onError');
@Output() apiChange: Observable<YT.PlayerEvent> =
this._getLazyEmitter<YT.PlayerEvent>('onApiChange');
@Output() playbackQualityChange: Observable<YT.OnPlaybackQualityChangeEvent> =
this._getLazyEmitter<YT.OnPlaybackQualityChangeEvent>('onPlaybackQualityChange');
@Output() playbackRateChange: Observable<YT.OnPlaybackRateChangeEvent> =
this._getLazyEmitter<YT.OnPlaybackRateChangeEvent>('onPlaybackRateChange');
/** The element that will be replaced by the iframe. */
@ViewChild('youtubeContainer')
youtubeContainer: ElementRef<HTMLElement>;
constructor(private _ngZone: NgZone, @Inject(PLATFORM_ID) platformId: Object) {
this._isBrowser = isPlatformBrowser(platformId);
}
ngOnInit() {
// Don't do anything if we're not in a browser environment.
if (!this._isBrowser) {
return;
}
let iframeApiAvailableObs: Observable<boolean> = observableOf(true);
if (!window.YT) {
if (this.showBeforeIframeApiLoads) {
throw new Error('Namespace YT not found, cannot construct embedded youtube player. ' +
'Please install the YouTube Player API Reference for iframe Embeds: ' +
'https://developers.google.com/youtube/iframe_api_reference');
}
const iframeApiAvailableSubject = new Subject<boolean>();
this._existingApiReadyCallback = window.onYouTubeIframeAPIReady;
window.onYouTubeIframeAPIReady = () => {
if (this._existingApiReadyCallback) {
this._existingApiReadyCallback();
}
this._ngZone.run(() => iframeApiAvailableSubject.next(true));
};
iframeApiAvailableObs = iframeApiAvailableSubject.pipe(take(1), startWith(false));
}
// An observable of the currently loaded player.
const playerObs =
createPlayerObservable(
this._youtubeContainer,
this._videoId,
iframeApiAvailableObs,
this._width,
this._height,
this._playerVars,
this._ngZone
).pipe(tap(player => {
// Emit this before the `waitUntilReady` call so that we can bind to
// events that happen as the player is being initialized (e.g. `onReady`).
this._playerChanges.next(player);
}), waitUntilReady(player => {
// Destroy the player if loading was aborted so that we don't end up leaking memory.
if (!playerIsReady(player)) {
player.destroy();
}
}), takeUntil(this._destroyed), publish());
// Set up side effects to bind inputs to the player.
playerObs.subscribe(player => {
this._player = player;
if (player && this._pendingPlayerState) {
this._initializePlayer(player, this._pendingPlayerState);
}
this._pendingPlayerState = undefined;
});
bindSizeToPlayer(playerObs, this._width, this._height);
bindSuggestedQualityToPlayer(playerObs, this._suggestedQuality);
bindCueVideoCall(
playerObs,
this._videoId,
this._startSeconds,
this._endSeconds,
this._suggestedQuality,
this._destroyed);
// After all of the subscriptions are set up, connect the observable.
(playerObs as ConnectableObservable<Player>).connect();
}
/**
* @deprecated No longer being used. To be removed.
* @breaking-change 11.0.0
*/
createEventsBoundInZone(): YT.Events {
return {};
}
ngAfterViewInit() {
this._youtubeContainer.next(this.youtubeContainer.nativeElement);
}
ngOnDestroy() {
if (this._player) {
this._player.destroy();
window.onYouTubeIframeAPIReady = this._existingApiReadyCallback;
}
this._playerChanges.complete();
this._videoId.complete();
this._height.complete();
this._width.complete();
this._startSeconds.complete();
this._endSeconds.complete();
this._suggestedQuality.complete();
this._youtubeContainer.complete();
this._playerVars.complete();
this._destroyed.next();
this._destroyed.complete();
}
/** See https://developers.google.com/youtube/iframe_api_reference#playVideo */
playVideo() {
if (this._player) {
this._player.playVideo();
} else {
this._getPendingState().playbackState = YT.PlayerState.PLAYING;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#pauseVideo */
pauseVideo() {
if (this._player) {
this._player.pauseVideo();
} else {
this._getPendingState().playbackState = YT.PlayerState.PAUSED;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#stopVideo */
stopVideo() {
if (this._player) {
this._player.stopVideo();
} else {
// It seems like YouTube sets the player to CUED when it's stopped.
this._getPendingState().playbackState = YT.PlayerState.CUED;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#seekTo */
seekTo(seconds: number, allowSeekAhead: boolean) {
if (this._player) {
this._player.seekTo(seconds, allowSeekAhead);
} else {
this._getPendingState().seek = {seconds, allowSeekAhead};
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#mute */
mute() {
if (this._player) {
this._player.mute();
} else {
this._getPendingState().muted = true;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#unMute */
unMute() {
if (this._player) {
this._player.unMute();
} else {
this._getPendingState().muted = false;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#isMuted */
isMuted(): boolean {
if (this._player) {
return this._player.isMuted();
}
if (this._pendingPlayerState) {
return !!this._pendingPlayerState.muted;
}
return false;
}
/** See https://developers.google.com/youtube/iframe_api_reference#setVolume */
setVolume(volume: number) {
if (this._player) {
this._player.setVolume(volume);
} else {
this._getPendingState().volume = volume;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVolume */
getVolume(): number {
if (this._player) {
return this._player.getVolume();
}
if (this._pendingPlayerState && this._pendingPlayerState.volume != null) {
return this._pendingPlayerState.volume;
}
return 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#setPlaybackRate */
setPlaybackRate(playbackRate: number) {
if (this._player) {
return this._player.setPlaybackRate(playbackRate);
} else {
this._getPendingState().playbackRate = playbackRate;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlaybackRate */
getPlaybackRate(): number {
if (this._player) {
return this._player.getPlaybackRate();
}
if (this._pendingPlayerState && this._pendingPlayerState.playbackRate != null) {
return this._pendingPlayerState.playbackRate;
}
return 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getAvailablePlaybackRates */
getAvailablePlaybackRates(): number[] {
return this._player ? this._player.getAvailablePlaybackRates() : [];
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoLoadedFraction */
getVideoLoadedFraction(): number {
return this._player ? this._player.getVideoLoadedFraction() : 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlayerState */
getPlayerState(): YT.PlayerState | undefined {
if (!this._isBrowser || !window.YT) {
return undefined;
}
if (this._player) {
return this._player.getPlayerState();
}
if (this._pendingPlayerState && this._pendingPlayerState.playbackState != null) {
return this._pendingPlayerState.playbackState;
}
return YT.PlayerState.UNSTARTED;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getCurrentTime */
getCurrentTime(): number {
if (this._player) {
return this._player.getCurrentTime();
}
if (this._pendingPlayerState && this._pendingPlayerState.seek) {
return this._pendingPlayerState.seek.seconds;
}
return 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlaybackQuality */
getPlaybackQuality(): YT.SuggestedVideoQuality {
return this._player ? this._player.getPlaybackQuality() : 'default';
}
/** See https://developers.google.com/youtube/iframe_api_reference#getAvailableQualityLevels */
getAvailableQualityLevels(): YT.SuggestedVideoQuality[] {
return this._player ? this._player.getAvailableQualityLevels() : [];
}
/** See https://developers.google.com/youtube/iframe_api_reference#getDuration */
getDuration(): number {
return this._player ? this._player.getDuration() : 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoUrl */
getVideoUrl(): string {
return this._player ? this._player.getVideoUrl() : '';
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoEmbedCode */
getVideoEmbedCode(): string {
return this._player ? this._player.getVideoEmbedCode() : '';
}
/** Gets an object that should be used to store the temporary API state. */
private _getPendingState(): PendingPlayerState {
if (!this._pendingPlayerState) {
this._pendingPlayerState = {};
}
return this._pendingPlayerState;
}
/** Initializes a player from a temporary state. */
private _initializePlayer(player: YT.Player, state: PendingPlayerState): void {
const {playbackState, playbackRate, volume, muted, seek} = state;
switch (playbackState) {
case YT.PlayerState.PLAYING: player.playVideo(); break;
case YT.PlayerState.PAUSED: player.pauseVideo(); break;
case YT.PlayerState.CUED: player.stopVideo(); break;
}
if (playbackRate != null) {
player.setPlaybackRate(playbackRate);
}
if (volume != null) {
player.setVolume(volume);
}
if (muted != null) {
muted ? player.mute() : player.unMute();
}
if (seek != null) {
player.seekTo(seek.seconds, seek.allowSeekAhead);
}
}
/** Gets an observable that adds an event listener to the player when a user subscribes to it. */
private _getLazyEmitter<T extends YT.PlayerEvent>(name: keyof YT.Events): Observable<T> {
// Start with the stream of players. This way the events will be transferred
// over to the new player if it gets swapped out under-the-hood.
return this._playerChanges.pipe(
// Switch to the bound event. `switchMap` ensures that the old event is removed when the
// player is changed. If there's no player, return an observable that never emits.
switchMap(player => {
return player ? fromEventPattern<T>((listener: (event: T) => void) => {
player.addEventListener(name, listener);
}, (listener: (event: T) => void) => {
// The API seems to throw when we try to unbind from a destroyed player and it doesn't
// expose whether the player has been destroyed so we have to wrap it in a try/catch to
// prevent the entire stream from erroring out.
try {
if ((player as Player).removeEventListener!) {
(player as Player).removeEventListener(name, listener);
}
} catch {}
}) : observableOf<T>();
}),
// By default we run all the API interactions outside the zone
// so we have to bring the events back in manually when they emit.
(source: Observable<T>) => new Observable<T>(observer => source.subscribe({
next: value => this._ngZone.run(() => observer.next(value)),
error: error => observer.error(error),
complete: () => observer.complete()
})),
// Ensures that everything is cleared out on destroy.
takeUntil(this._destroyed)
);
}
}
/** Listens to changes to the given width and height and sets it on the player. */
function bindSizeToPlayer(
playerObs: Observable<YT.Player | undefined>,
widthObs: Observable<number>,
heightObs: Observable<number>
) {
return combineLatest([playerObs, widthObs, heightObs])
.subscribe(([player, width, height]) => player && player.setSize(width, height));
}
/** Listens to changes from the suggested quality and sets it on the given player. */
function bindSuggestedQualityToPlayer(
playerObs: Observable<YT.Player | undefined>,
suggestedQualityObs: Observable<YT.SuggestedVideoQuality | undefined>
) {
return combineLatest([
playerObs,
suggestedQualityObs
]).subscribe(
([player, suggestedQuality]) =>
player && suggestedQuality && player.setPlaybackQuality(suggestedQuality));
}
/**
* Returns an observable that emits the loaded player once it's ready. Certain properties/methods
* won't be available until the iframe finishes loading.
* @param onAbort Callback function that will be invoked if the player loading was aborted before
* it was able to complete. Can be used to clean up any loose references.
*/
function waitUntilReady(onAbort: (player: UninitializedPlayer) => void):
OperatorFunction<UninitializedPlayer | undefined, Player | undefined> {
return flatMap(player => {
if (!player) {
return observableOf<Player|undefined>(undefined);
}
if (playerIsReady(player)) {
return observableOf(player as Player);
}
// Since removeEventListener is not on Player when it's initialized, we can't use fromEvent.
// The player is not initialized fully until the ready is called.
return new Observable<Player>(emitter => {
let aborted = false;
let resolved = false;
const onReady = (event: YT.PlayerEvent) => {
resolved = true;
if (!aborted) {
event.target.removeEventListener('onReady', onReady);
emitter.next(event.target);
}
};
player.addEventListener('onReady', onReady);
return () => {
aborted = true;
if (!resolved) {
onAbort(player);
}
};
}).pipe(take(1), startWith(undefined));
});
}
/** Create an observable for the player based on the given options. */
function | (
youtubeContainer: Observable<HTMLElement>,
videoIdObs: Observable<string | undefined>,
iframeApiAvailableObs: Observable<boolean>,
widthObs: Observable<number>,
heightObs: Observable<number>,
playerVarsObs: Observable<YT.PlayerVars | undefined>,
ngZone: NgZone
): Observable<UninitializedPlayer | undefined> {
const playerOptions = combineLatest([videoIdObs, playerVarsObs]).pipe(
withLatestFrom(combineLatest([widthObs, heightObs])),
map(([constructorOptions, sizeOptions]) => {
const [videoId, playerVars] = constructorOptions;
const [width, height] = sizeOptions;
return videoId ? ({ videoId, playerVars, width, height }) : undefined;
}),
);
return combineLatest([youtubeContainer, playerOptions, of(ngZone)])
.pipe(
skipUntilRememberLatest(iframeApiAvailableObs),
scan(syncPlayerState, undefined),
distinctUntilChanged());
}
/** Skips the given observable until the other observable emits true, then emit the latest. */
function skipUntilRememberLatest<T>(notifier: Observable<boolean>): MonoTypeOperatorFunction<T> {
return pipe(
combineLatestOp(notifier),
skipWhile(([_, doneSkipping]) => !doneSkipping),
map(([value]) => value));
}
/** Destroy the player if there are no options, or create the player if there are options. */
function syncPlayerState(
player: UninitializedPlayer | undefined,
[container, videoOptions, ngZone]: [HTMLElement, YT.PlayerOptions | undefined, NgZone],
): UninitializedPlayer | undefined {
if (player && videoOptions && player.playerVars !== videoOptions.playerVars) {
// The player needs to be recreated if the playerVars are different.
player.destroy();
} else if (!videoOptions) {
if (player) {
// Destroy the player if the videoId was removed.
player.destroy();
}
return;
} else if (player) {
return player;
}
// Important! We need to create the Player object outside of the `NgZone`, because it kicks
// off a 250ms setInterval which will continually trigger change detection if we don't.
const newPlayer: UninitializedPlayer =
ngZone.runOutsideAngular(() => new YT.Player(container, videoOptions));
newPlayer.videoId = videoOptions.videoId;
newPlayer.playerVars = videoOptions.playerVars;
return newPlayer;
}
/**
* Call cueVideoById if the videoId changes, or when start or end seconds change. cueVideoById will
* change the loaded video id to the given videoId, and set the start and end times to the given
* start/end seconds.
*/
function bindCueVideoCall(
playerObs: Observable<Player | undefined>,
videoIdObs: Observable<string | undefined>,
startSecondsObs: Observable<number | undefined>,
endSecondsObs: Observable<number | undefined>,
suggestedQualityObs: Observable<YT.SuggestedVideoQuality | undefined>,
destroyed: Observable<void>,
) {
const cueOptionsObs = combineLatest([startSecondsObs, endSecondsObs])
.pipe(map(([startSeconds, endSeconds]) => ({startSeconds, endSeconds})));
// Only respond to changes in cue options if the player is not running.
const filteredCueOptions = cueOptionsObs
.pipe(filterOnOther(playerObs, player => !!player && !hasPlayerStarted(player)));
// If the video id changed, there's no reason to run 'cue' unless the player
// was initialized with a different video id.
const changedVideoId = videoIdObs
.pipe(filterOnOther(playerObs, (player, videoId) => !!player && player.videoId !== videoId));
// If the player changed, there's no reason to run 'cue' unless there are cue options.
const changedPlayer = playerObs.pipe(
filterOnOther(
combineLatest([videoIdObs, cueOptionsObs]),
([videoId, cueOptions], player) =>
!!player &&
(videoId != player.videoId || !!cueOptions.startSeconds || !!cueOptions.endSeconds)));
merge(changedPlayer, changedVideoId, filteredCueOptions)
.pipe(
withLatestFrom(combineLatest([playerObs, videoIdObs, cueOptionsObs, suggestedQualityObs])),
map(([_, values]) => values),
takeUntil(destroyed),
)
.subscribe(([player, videoId, cueOptions, suggestedQuality]) => {
if (!videoId || !player) {
return;
}
player.videoId = videoId;
player.cueVideoById({
videoId,
suggestedQuality,
...cueOptions,
});
});
}
function hasPlayerStarted(player: YT.Player): boolean {
const state = player.getPlayerState();
return state !== YT.PlayerState.UNSTARTED && state !== YT.PlayerState.CUED;
}
function playerIsReady(player: UninitializedPlayer): player is Player {
return 'getPlayerStatus' in player;
}
/** Combines the two observables temporarily for the filter function. */
function filterOnOther<R, T>(
otherObs: Observable<T>,
filterFn: (t: T, r?: R) => boolean,
): MonoTypeOperatorFunction<R> {
return pipe(
withLatestFrom(otherObs),
filter(([value, other]) => filterFn(other, value)),
map(([value]) => value),
);
}
| createPlayerObservable | identifier_name |
youtube-player.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// Workaround for: https://github.com/bazelbuild/rules_nodejs/issues/1265
/// <reference types="youtube" />
import {
AfterViewInit,
ChangeDetectionStrategy,
Component,
ElementRef,
Input,
NgZone,
OnDestroy,
OnInit,
Output,
ViewChild,
ViewEncapsulation,
Inject,
PLATFORM_ID,
} from '@angular/core';
import {isPlatformBrowser} from '@angular/common';
import {
combineLatest,
ConnectableObservable,
merge,
MonoTypeOperatorFunction,
Observable,
of as observableOf,
OperatorFunction,
pipe,
Subject,
of,
BehaviorSubject,
fromEventPattern,
} from 'rxjs';
import {
combineLatest as combineLatestOp,
distinctUntilChanged,
filter,
flatMap,
map,
publish,
scan,
skipWhile,
startWith,
take,
takeUntil,
withLatestFrom,
switchMap,
tap,
} from 'rxjs/operators';
declare global {
interface Window {
YT: typeof YT | undefined;
onYouTubeIframeAPIReady: (() => void) | undefined;
}
}
export const DEFAULT_PLAYER_WIDTH = 640;
export const DEFAULT_PLAYER_HEIGHT = 390;
// The native YT.Player doesn't expose the set videoId, but we need it for
// convenience.
interface Player extends YT.Player {
videoId?: string;
playerVars?: YT.PlayerVars;
}
// The player isn't fully initialized when it's constructed.
// The only field available is destroy and addEventListener.
type UninitializedPlayer = Pick<Player, 'videoId' | 'playerVars' | 'destroy' | 'addEventListener'>;
/**
* Object used to store the state of the player if the
* user tries to interact with the API before it has been loaded.
*/
interface PendingPlayerState {
playbackState?: YT.PlayerState.PLAYING | YT.PlayerState.PAUSED | YT.PlayerState.CUED;
playbackRate?: number;
volume?: number;
muted?: boolean;
seek?: {seconds: number, allowSeekAhead: boolean};
}
/**
* Angular component that renders a YouTube player via the YouTube player
* iframe API.
* @see https://developers.google.com/youtube/iframe_api_reference
*/
@Component({
selector: 'youtube-player',
changeDetection: ChangeDetectionStrategy.OnPush,
encapsulation: ViewEncapsulation.None,
// This div is *replaced* by the YouTube player embed.
template: '<div #youtubeContainer></div>',
})
export class YouTubePlayer implements AfterViewInit, OnDestroy, OnInit {
/** Whether we're currently rendering inside a browser. */
private _isBrowser: boolean;
private _youtubeContainer = new Subject<HTMLElement>();
private _destroyed = new Subject<void>();
private _player: Player | undefined;
private _existingApiReadyCallback: (() => void) | undefined;
private _pendingPlayerState: PendingPlayerState | undefined;
private _playerChanges = new BehaviorSubject<UninitializedPlayer | undefined>(undefined);
/** YouTube Video ID to view */
@Input()
get videoId(): string | undefined { return this._videoId.value; }
set videoId(videoId: string | undefined) {
this._videoId.next(videoId);
}
private _videoId = new BehaviorSubject<string | undefined>(undefined);
/** Height of video player */
@Input()
get height(): number | undefined { return this._height.value; }
set height(height: number | undefined) {
this._height.next(height || DEFAULT_PLAYER_HEIGHT);
}
private _height = new BehaviorSubject<number>(DEFAULT_PLAYER_HEIGHT);
/** Width of video player */
@Input()
get width(): number | undefined { return this._width.value; }
set width(width: number | undefined) {
this._width.next(width || DEFAULT_PLAYER_WIDTH);
}
private _width = new BehaviorSubject<number>(DEFAULT_PLAYER_WIDTH);
/** The moment when the player is supposed to start playing */
@Input()
set startSeconds(startSeconds: number | undefined) {
this._startSeconds.next(startSeconds);
}
private _startSeconds = new BehaviorSubject<number | undefined>(undefined);
/** The moment when the player is supposed to stop playing */
@Input()
set endSeconds(endSeconds: number | undefined) {
this._endSeconds.next(endSeconds);
}
private _endSeconds = new BehaviorSubject<number | undefined>(undefined);
/** The suggested quality of the player */
@Input()
set suggestedQuality(suggestedQuality: YT.SuggestedVideoQuality | undefined) {
this._suggestedQuality.next(suggestedQuality);
}
private _suggestedQuality = new BehaviorSubject<YT.SuggestedVideoQuality | undefined>(undefined);
/**
* Extra parameters used to configure the player. See:
* https://developers.google.com/youtube/player_parameters.html?playerVersion=HTML5#Parameters
*/
@Input()
get playerVars(): YT.PlayerVars | undefined { return this._playerVars.value; }
set playerVars(playerVars: YT.PlayerVars | undefined) {
this._playerVars.next(playerVars);
}
private _playerVars = new BehaviorSubject<YT.PlayerVars | undefined>(undefined);
/**
* Whether the iframe will attempt to load regardless of the status of the api on the
* page. Set this to true if you don't want the `onYouTubeIframeAPIReady` field to be
* set on the global window.
*/
@Input() showBeforeIframeApiLoads: boolean | undefined;
/** Outputs are direct proxies from the player itself. */
@Output() ready: Observable<YT.PlayerEvent> =
this._getLazyEmitter<YT.PlayerEvent>('onReady');
@Output() stateChange: Observable<YT.OnStateChangeEvent> =
this._getLazyEmitter<YT.OnStateChangeEvent>('onStateChange');
@Output() error: Observable<YT.OnErrorEvent> =
this._getLazyEmitter<YT.OnErrorEvent>('onError');
@Output() apiChange: Observable<YT.PlayerEvent> =
this._getLazyEmitter<YT.PlayerEvent>('onApiChange');
@Output() playbackQualityChange: Observable<YT.OnPlaybackQualityChangeEvent> =
this._getLazyEmitter<YT.OnPlaybackQualityChangeEvent>('onPlaybackQualityChange');
@Output() playbackRateChange: Observable<YT.OnPlaybackRateChangeEvent> =
this._getLazyEmitter<YT.OnPlaybackRateChangeEvent>('onPlaybackRateChange');
/** The element that will be replaced by the iframe. */
@ViewChild('youtubeContainer')
youtubeContainer: ElementRef<HTMLElement>;
constructor(private _ngZone: NgZone, @Inject(PLATFORM_ID) platformId: Object) {
this._isBrowser = isPlatformBrowser(platformId);
}
ngOnInit() {
// Don't do anything if we're not in a browser environment.
if (!this._isBrowser) {
return;
}
let iframeApiAvailableObs: Observable<boolean> = observableOf(true);
if (!window.YT) {
if (this.showBeforeIframeApiLoads) {
throw new Error('Namespace YT not found, cannot construct embedded youtube player. ' +
'Please install the YouTube Player API Reference for iframe Embeds: ' +
'https://developers.google.com/youtube/iframe_api_reference');
}
const iframeApiAvailableSubject = new Subject<boolean>();
this._existingApiReadyCallback = window.onYouTubeIframeAPIReady;
window.onYouTubeIframeAPIReady = () => {
if (this._existingApiReadyCallback) {
this._existingApiReadyCallback();
}
this._ngZone.run(() => iframeApiAvailableSubject.next(true));
};
iframeApiAvailableObs = iframeApiAvailableSubject.pipe(take(1), startWith(false));
}
// An observable of the currently loaded player.
const playerObs =
createPlayerObservable(
this._youtubeContainer,
this._videoId,
iframeApiAvailableObs,
this._width,
this._height,
this._playerVars,
this._ngZone
).pipe(tap(player => {
// Emit this before the `waitUntilReady` call so that we can bind to
// events that happen as the player is being initialized (e.g. `onReady`).
this._playerChanges.next(player);
}), waitUntilReady(player => {
// Destroy the player if loading was aborted so that we don't end up leaking memory.
if (!playerIsReady(player)) {
player.destroy();
}
}), takeUntil(this._destroyed), publish());
// Set up side effects to bind inputs to the player.
playerObs.subscribe(player => {
this._player = player;
if (player && this._pendingPlayerState) {
this._initializePlayer(player, this._pendingPlayerState);
}
this._pendingPlayerState = undefined;
});
bindSizeToPlayer(playerObs, this._width, this._height);
bindSuggestedQualityToPlayer(playerObs, this._suggestedQuality);
bindCueVideoCall(
playerObs,
this._videoId,
this._startSeconds,
this._endSeconds,
this._suggestedQuality,
this._destroyed);
// After all of the subscriptions are set up, connect the observable.
(playerObs as ConnectableObservable<Player>).connect();
}
/**
* @deprecated No longer being used. To be removed.
* @breaking-change 11.0.0
*/
createEventsBoundInZone(): YT.Events {
return {};
}
ngAfterViewInit() {
this._youtubeContainer.next(this.youtubeContainer.nativeElement);
}
ngOnDestroy() {
if (this._player) {
this._player.destroy();
window.onYouTubeIframeAPIReady = this._existingApiReadyCallback;
}
this._playerChanges.complete();
this._videoId.complete();
this._height.complete();
this._width.complete();
this._startSeconds.complete();
this._endSeconds.complete();
this._suggestedQuality.complete();
this._youtubeContainer.complete();
this._playerVars.complete();
this._destroyed.next();
this._destroyed.complete();
}
/** See https://developers.google.com/youtube/iframe_api_reference#playVideo */
playVideo() {
if (this._player) {
this._player.playVideo();
} else {
this._getPendingState().playbackState = YT.PlayerState.PLAYING;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#pauseVideo */
pauseVideo() {
if (this._player) {
this._player.pauseVideo();
} else {
this._getPendingState().playbackState = YT.PlayerState.PAUSED;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#stopVideo */
stopVideo() {
  if (!this._player) {
    // It seems like YouTube sets the player to CUED when it's stopped,
    // so that is what we record for the pending state.
    this._getPendingState().playbackState = YT.PlayerState.CUED;
    return;
  }
  this._player.stopVideo();
}
/** See https://developers.google.com/youtube/iframe_api_reference#seekTo */
seekTo(seconds: number, allowSeekAhead: boolean) {
  if (!this._player) {
    // Remember the desired position until the player is created.
    this._getPendingState().seek = {seconds, allowSeekAhead};
    return;
  }
  this._player.seekTo(seconds, allowSeekAhead);
}
/** See https://developers.google.com/youtube/iframe_api_reference#mute */
mute() {
  if (!this._player) {
    // Record the mute request until the player is created.
    this._getPendingState().muted = true;
    return;
  }
  this._player.mute();
}
/** See https://developers.google.com/youtube/iframe_api_reference#unMute */
unMute() {
  if (!this._player) {
    // Record the unmute request until the player is created.
    this._getPendingState().muted = false;
    return;
  }
  this._player.unMute();
}
/** See https://developers.google.com/youtube/iframe_api_reference#isMuted */
isMuted(): boolean {
  if (this._player) {
    return this._player.isMuted();
  }
  // Fall back to the queued pre-init state; default to unmuted.
  return this._pendingPlayerState ? !!this._pendingPlayerState.muted : false;
}
/** See https://developers.google.com/youtube/iframe_api_reference#setVolume */
setVolume(volume: number) {
  if (!this._player) {
    // Remember the volume until the player is created.
    this._getPendingState().volume = volume;
    return;
  }
  this._player.setVolume(volume);
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVolume */
getVolume(): number {
  if (this._player) {
    return this._player.getVolume();
  }
  // Fall back to any queued pre-init volume; 0 when nothing is known.
  const pending = this._pendingPlayerState;
  return pending && pending.volume != null ? pending.volume : 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#setPlaybackRate */
setPlaybackRate(playbackRate: number) {
  if (this._player) {
    // Unlike the sibling setters, this one previously `return`ed the (void)
    // result of the call; the `return` is dropped for consistency — the
    // method's observable behavior (returns undefined) is unchanged.
    this._player.setPlaybackRate(playbackRate);
  } else {
    // Remember the rate until the player is created.
    this._getPendingState().playbackRate = playbackRate;
  }
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlaybackRate */
getPlaybackRate(): number {
  if (this._player) {
    return this._player.getPlaybackRate();
  }
  // Fall back to any queued pre-init rate; 0 when nothing is known.
  const pending = this._pendingPlayerState;
  return pending && pending.playbackRate != null ? pending.playbackRate : 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getAvailablePlaybackRates */
getAvailablePlaybackRates(): number[] {
  // No rates are known until the underlying player exists.
  if (!this._player) {
    return [];
  }
  return this._player.getAvailablePlaybackRates();
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoLoadedFraction */
getVideoLoadedFraction(): number {
  // Nothing is buffered until the underlying player exists.
  if (!this._player) {
    return 0;
  }
  return this._player.getVideoLoadedFraction();
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlayerState */
getPlayerState(): YT.PlayerState | undefined {
  // Outside the browser, or before the iframe API script has loaded,
  // there is no state to report.
  if (!this._isBrowser || !window.YT) {
    return undefined;
  }
  if (this._player) {
    return this._player.getPlayerState();
  }
  // Fall back to any state queued before the player was created.
  const pending = this._pendingPlayerState;
  if (pending && pending.playbackState != null) {
    return pending.playbackState;
  }
  return YT.PlayerState.UNSTARTED;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getCurrentTime */
getCurrentTime(): number {
  if (this._player) {
    return this._player.getCurrentTime();
  }
  // A queued seek is the best approximation of the current position.
  const pending = this._pendingPlayerState;
  return pending && pending.seek ? pending.seek.seconds : 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlaybackQuality */
getPlaybackQuality(): YT.SuggestedVideoQuality {
  if (!this._player) {
    return 'default';
  }
  return this._player.getPlaybackQuality();
}
/** See https://developers.google.com/youtube/iframe_api_reference#getAvailableQualityLevels */
getAvailableQualityLevels(): YT.SuggestedVideoQuality[] {
  if (!this._player) {
    return [];
  }
  return this._player.getAvailableQualityLevels();
}
/** See https://developers.google.com/youtube/iframe_api_reference#getDuration */
getDuration(): number {
  if (!this._player) {
    return 0;
  }
  return this._player.getDuration();
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoUrl */
getVideoUrl(): string {
  if (!this._player) {
    return '';
  }
  return this._player.getVideoUrl();
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoEmbedCode */
getVideoEmbedCode(): string {
  if (!this._player) {
    return '';
  }
  return this._player.getVideoEmbedCode();
}
/** Gets an object that should be used to store the temporary API state. */
private _getPendingState(): PendingPlayerState {
  // Lazily create the holder the first time any API call arrives pre-init.
  return this._pendingPlayerState || (this._pendingPlayerState = {});
}
/**
 * Initializes a player from a temporary state.
 * Replays any API calls (play/pause/stop, rate, volume, mute, seek) that the
 * consumer issued before the underlying YT.Player existed.
 */
private _initializePlayer(player: YT.Player, state: PendingPlayerState): void {
const {playbackState, playbackRate, volume, muted, seek} = state;
// Playback state first, since it determines whether the video is loaded/playing
// when the remaining settings are applied.
switch (playbackState) {
case YT.PlayerState.PLAYING: player.playVideo(); break;
case YT.PlayerState.PAUSED: player.pauseVideo(); break;
case YT.PlayerState.CUED: player.stopVideo(); break;
}
if (playbackRate != null) {
player.setPlaybackRate(playbackRate);
}
if (volume != null) {
player.setVolume(volume);
}
// `muted` is a tri-state here: undefined means "never touched", so only
// apply it when the consumer explicitly muted or unmuted.
if (muted != null) {
muted ? player.mute() : player.unMute();
}
if (seek != null) {
player.seekTo(seek.seconds, seek.allowSeekAhead);
}
}
/**
 * Gets an observable that adds an event listener to the player when a user subscribes to it.
 * @param name Name of the YT.Events callback (e.g. `onReady`) to bridge into an observable.
 * @returns Stream of events of that type, re-entered into the Angular zone, that
 *     completes when the component is destroyed.
 */
private _getLazyEmitter<T extends YT.PlayerEvent>(name: keyof YT.Events): Observable<T> {
// Start with the stream of players. This way the events will be transferred
// over to the new player if it gets swapped out under-the-hood.
return this._playerChanges.pipe(
// Switch to the bound event. `switchMap` ensures that the old event is removed when the
// player is changed. If there's no player, return an observable that never emits.
switchMap(player => {
return player ? fromEventPattern<T>((listener: (event: T) => void) => {
player.addEventListener(name, listener);
}, (listener: (event: T) => void) => {
// The API seems to throw when we try to unbind from a destroyed player and it doesn't
// expose whether the player has been destroyed so we have to wrap it in a try/catch to
// prevent the entire stream from erroring out.
try {
if ((player as Player).removeEventListener!) {
(player as Player).removeEventListener(name, listener);
}
} catch {}
}) : observableOf<T>();
}),
// By default we run all the API interactions outside the zone
// so we have to bring the events back in manually when they emit.
(source: Observable<T>) => new Observable<T>(observer => source.subscribe({
next: value => this._ngZone.run(() => observer.next(value)),
error: error => observer.error(error),
complete: () => observer.complete()
})),
// Ensures that everything is cleared out on destroy.
takeUntil(this._destroyed)
);
}
}
/** Listens to changes to the given width and height and sets it on the player. */
function bindSizeToPlayer(
  playerObs: Observable<YT.Player | undefined>,
  widthObs: Observable<number>,
  heightObs: Observable<number>
) {
  return combineLatest([playerObs, widthObs, heightObs]).subscribe(([player, width, height]) => {
    // No-op until a player instance exists.
    if (player) {
      player.setSize(width, height);
    }
  });
}
/** Listens to changes from the suggested quality and sets it on the given player. */
function bindSuggestedQualityToPlayer(
  playerObs: Observable<YT.Player | undefined>,
  suggestedQualityObs: Observable<YT.SuggestedVideoQuality | undefined>
) {
  return combineLatest([playerObs, suggestedQualityObs]).subscribe(
    ([player, suggestedQuality]) => {
      // Only apply once both the player and an explicit quality are available.
      if (player && suggestedQuality) {
        player.setPlaybackQuality(suggestedQuality);
      }
    });
}
/**
 * Returns an observable that emits the loaded player once it's ready. Certain properties/methods
 * won't be available until the iframe finishes loading.
 * @param onAbort Callback function that will be invoked if the player loading was aborted before
 * it was able to complete. Can be used to clean up any loose references.
 */
function waitUntilReady(onAbort: (player: UninitializedPlayer) => void):
OperatorFunction<UninitializedPlayer | undefined, Player | undefined> {
return flatMap(player => {
if (!player) {
return observableOf<Player|undefined>(undefined);
}
if (playerIsReady(player)) {
return observableOf(player as Player);
}
// Since removeEventListener is not on Player when it's initialized, we can't use fromEvent.
// The player is not initialized fully until the ready is called.
return new Observable<Player>(emitter => {
// `aborted` / `resolved` track whether teardown happened before `onReady` fired,
// so we can tell a normal completion apart from an abandoned load.
let aborted = false;
let resolved = false;
const onReady = (event: YT.PlayerEvent) => {
resolved = true;
if (!aborted) {
event.target.removeEventListener('onReady', onReady);
emitter.next(event.target);
}
};
player.addEventListener('onReady', onReady);
// Teardown: if the subscriber went away before the player became ready,
// give the caller a chance to destroy the half-initialized player.
return () => {
aborted = true;
if (!resolved) {
onAbort(player);
}
};
// `startWith(undefined)` clears the downstream player reference immediately
// while the new one loads; `take(1)` completes after the ready emission.
}).pipe(take(1), startWith(undefined));
});
}
/**
 * Create an observable for the player based on the given options.
 * Emits a new (or destroyed/`undefined`) player whenever the video id or
 * playerVars change; width/height are sampled but do not by themselves
 * trigger re-creation (see `withLatestFrom` below).
 */
function createPlayerObservable(
youtubeContainer: Observable<HTMLElement>,
videoIdObs: Observable<string | undefined>,
iframeApiAvailableObs: Observable<boolean>,
widthObs: Observable<number>,
heightObs: Observable<number>,
playerVarsObs: Observable<YT.PlayerVars | undefined>,
ngZone: NgZone
): Observable<UninitializedPlayer | undefined> {
const playerOptions = combineLatest([videoIdObs, playerVarsObs]).pipe(
// `withLatestFrom` (not `combineLatest`) so size changes alone don't emit new options.
withLatestFrom(combineLatest([widthObs, heightObs])),
map(([constructorOptions, sizeOptions]) => {
const [videoId, playerVars] = constructorOptions;
const [width, height] = sizeOptions;
// No videoId means no player should exist.
return videoId ? ({ videoId, playerVars, width, height }) : undefined;
}),
);
return combineLatest([youtubeContainer, playerOptions, of(ngZone)])
.pipe(
// Hold everything back until the iframe API script has loaded.
skipUntilRememberLatest(iframeApiAvailableObs),
// `scan` carries the previous player forward so it can be destroyed/reused.
scan(syncPlayerState, undefined),
distinctUntilChanged());
}
/** Skips the given observable until the other observable emits true, then emit the latest. */
function skipUntilRememberLatest<T>(notifier: Observable<boolean>): MonoTypeOperatorFunction<T> {
  return pipe(
    // Pair each source value with the notifier's latest boolean…
    combineLatestOp(notifier),
    // …drop pairs while the notifier still says false…
    skipWhile(([_, doneSkipping]) => doneSkipping === false),
    // …then unwrap back to the source value.
    map(([value]) => value),
  );
}
/** Destroy the player if there are no options, or create the player if there are options. */
function syncPlayerState(
player: UninitializedPlayer | undefined,
[container, videoOptions, ngZone]: [HTMLElement, YT.PlayerOptions | undefined, NgZone],
): UninitializedPlayer | undefined {
if (player && videoOptions && player.playerVars !== videoOptions.playerVars) {
// The player needs to be recreated if the playerVars are different.
player.destroy();
} else if (!videoOptions) {
if (player) {
// Destroy the player if the videoId was removed.
player.destroy();
}
return;
} else if (player) {
return player;
}
// Important! We need to create the Player object outside of the `NgZone`, because it kicks
// off a 250ms setInterval which will continually trigger change detection if we don't.
const newPlayer: UninitializedPlayer =
ngZone.runOutsideAngular(() => new YT.Player(container, videoOptions));
newPlayer.videoId = videoOptions.videoId;
newPlayer.playerVars = videoOptions.playerVars; | return newPlayer;
}
/**
 * Call cueVideoById if the videoId changes, or when start or end seconds change. cueVideoById will
 * change the loaded video id to the given videoId, and set the start and end times to the given
 * start/end seconds.
 */
function bindCueVideoCall(
playerObs: Observable<Player | undefined>,
videoIdObs: Observable<string | undefined>,
startSecondsObs: Observable<number | undefined>,
endSecondsObs: Observable<number | undefined>,
suggestedQualityObs: Observable<YT.SuggestedVideoQuality | undefined>,
destroyed: Observable<void>,
) {
// Collapse start/end seconds into a single options object.
const cueOptionsObs = combineLatest([startSecondsObs, endSecondsObs])
.pipe(map(([startSeconds, endSeconds]) => ({startSeconds, endSeconds})));
// Only respond to changes in cue options if the player is not running.
const filteredCueOptions = cueOptionsObs
.pipe(filterOnOther(playerObs, player => !!player && !hasPlayerStarted(player)));
// If the video id changed, there's no reason to run 'cue' unless the player
// was initialized with a different video id.
const changedVideoId = videoIdObs
.pipe(filterOnOther(playerObs, (player, videoId) => !!player && player.videoId !== videoId));
// If the player changed, there's no reason to run 'cue' unless there are cue options.
const changedPlayer = playerObs.pipe(
filterOnOther(
combineLatest([videoIdObs, cueOptionsObs]),
([videoId, cueOptions], player) =>
!!player &&
(videoId != player.videoId || !!cueOptions.startSeconds || !!cueOptions.endSeconds)));
// Any of the three triggers issues a cue with the latest values of everything.
merge(changedPlayer, changedVideoId, filteredCueOptions)
.pipe(
withLatestFrom(combineLatest([playerObs, videoIdObs, cueOptionsObs, suggestedQualityObs])),
map(([_, values]) => values),
takeUntil(destroyed),
)
.subscribe(([player, videoId, cueOptions, suggestedQuality]) => {
if (!videoId || !player) {
return;
}
// Track the cued id on the player so the triggers above can compare against it.
player.videoId = videoId;
player.cueVideoById({
videoId,
suggestedQuality,
...cueOptions,
});
});
}
/** Whether playback on the given player has begun (neither unstarted nor merely cued). */
function hasPlayerStarted(player: YT.Player): boolean {
  const state = player.getPlayerState();
  return ![YT.PlayerState.UNSTARTED, YT.PlayerState.CUED].includes(state);
}
function playerIsReady(player: UninitializedPlayer): player is Player {
return 'getPlayerStatus' in player;
}
/** Combines the two observables temporarily for the filter function. */
function filterOnOther<R, T>(
otherObs: Observable<T>,
filterFn: (t: T, r?: R) => boolean,
): MonoTypeOperatorFunction<R> {
return pipe(
withLatestFrom(otherObs),
filter(([value, other]) => filterFn(other, value)),
map(([value]) => value),
);
} | random_line_split | |
youtube-player.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// Workaround for: https://github.com/bazelbuild/rules_nodejs/issues/1265
/// <reference types="youtube" />
import {
AfterViewInit,
ChangeDetectionStrategy,
Component,
ElementRef,
Input,
NgZone,
OnDestroy,
OnInit,
Output,
ViewChild,
ViewEncapsulation,
Inject,
PLATFORM_ID,
} from '@angular/core';
import {isPlatformBrowser} from '@angular/common';
import {
combineLatest,
ConnectableObservable,
merge,
MonoTypeOperatorFunction,
Observable,
of as observableOf,
OperatorFunction,
pipe,
Subject,
of,
BehaviorSubject,
fromEventPattern,
} from 'rxjs';
import {
combineLatest as combineLatestOp,
distinctUntilChanged,
filter,
flatMap,
map,
publish,
scan,
skipWhile,
startWith,
take,
takeUntil,
withLatestFrom,
switchMap,
tap,
} from 'rxjs/operators';
declare global {
interface Window {
YT: typeof YT | undefined;
onYouTubeIframeAPIReady: (() => void) | undefined;
}
}
export const DEFAULT_PLAYER_WIDTH = 640;
export const DEFAULT_PLAYER_HEIGHT = 390;
// The native YT.Player doesn't expose the set videoId, but we need it for
// convenience.
interface Player extends YT.Player {
videoId?: string;
playerVars?: YT.PlayerVars;
}
// The player isn't fully initialized when it's constructed.
// The only field available is destroy and addEventListener.
type UninitializedPlayer = Pick<Player, 'videoId' | 'playerVars' | 'destroy' | 'addEventListener'>;
/**
* Object used to store the state of the player if the
* user tries to interact with the API before it has been loaded.
*/
interface PendingPlayerState {
playbackState?: YT.PlayerState.PLAYING | YT.PlayerState.PAUSED | YT.PlayerState.CUED;
playbackRate?: number;
volume?: number;
muted?: boolean;
seek?: {seconds: number, allowSeekAhead: boolean};
}
/**
* Angular component that renders a YouTube player via the YouTube player
* iframe API.
* @see https://developers.google.com/youtube/iframe_api_reference
*/
@Component({
selector: 'youtube-player',
changeDetection: ChangeDetectionStrategy.OnPush,
encapsulation: ViewEncapsulation.None,
// This div is *replaced* by the YouTube player embed.
template: '<div #youtubeContainer></div>',
})
export class YouTubePlayer implements AfterViewInit, OnDestroy, OnInit {
/** Whether we're currently rendering inside a browser. */
private _isBrowser: boolean;
private _youtubeContainer = new Subject<HTMLElement>();
private _destroyed = new Subject<void>();
private _player: Player | undefined;
private _existingApiReadyCallback: (() => void) | undefined;
private _pendingPlayerState: PendingPlayerState | undefined;
private _playerChanges = new BehaviorSubject<UninitializedPlayer | undefined>(undefined);
/** YouTube Video ID to view */
@Input()
get videoId(): string | undefined { return this._videoId.value; }
set videoId(videoId: string | undefined) {
this._videoId.next(videoId);
}
private _videoId = new BehaviorSubject<string | undefined>(undefined);
/** Height of video player */
@Input()
get height(): number | undefined { return this._height.value; }
set height(height: number | undefined) {
this._height.next(height || DEFAULT_PLAYER_HEIGHT);
}
private _height = new BehaviorSubject<number>(DEFAULT_PLAYER_HEIGHT);
/** Width of video player */
@Input()
get width(): number | undefined { return this._width.value; }
set width(width: number | undefined) {
this._width.next(width || DEFAULT_PLAYER_WIDTH);
}
private _width = new BehaviorSubject<number>(DEFAULT_PLAYER_WIDTH);
/** The moment when the player is supposed to start playing */
@Input()
set startSeconds(startSeconds: number | undefined) {
this._startSeconds.next(startSeconds);
}
private _startSeconds = new BehaviorSubject<number | undefined>(undefined);
/** The moment when the player is supposed to stop playing */
@Input()
set endSeconds(endSeconds: number | undefined) {
this._endSeconds.next(endSeconds);
}
private _endSeconds = new BehaviorSubject<number | undefined>(undefined);
/** The suggested quality of the player */
@Input()
set suggestedQuality(suggestedQuality: YT.SuggestedVideoQuality | undefined) {
this._suggestedQuality.next(suggestedQuality);
}
private _suggestedQuality = new BehaviorSubject<YT.SuggestedVideoQuality | undefined>(undefined);
/**
* Extra parameters used to configure the player. See:
* https://developers.google.com/youtube/player_parameters.html?playerVersion=HTML5#Parameters
*/
@Input()
get playerVars(): YT.PlayerVars | undefined { return this._playerVars.value; }
set playerVars(playerVars: YT.PlayerVars | undefined) {
this._playerVars.next(playerVars);
}
private _playerVars = new BehaviorSubject<YT.PlayerVars | undefined>(undefined);
/**
* Whether the iframe will attempt to load regardless of the status of the api on the
* page. Set this to true if you don't want the `onYouTubeIframeAPIReady` field to be
* set on the global window.
*/
@Input() showBeforeIframeApiLoads: boolean | undefined;
/** Outputs are direct proxies from the player itself. */
@Output() ready: Observable<YT.PlayerEvent> =
this._getLazyEmitter<YT.PlayerEvent>('onReady');
@Output() stateChange: Observable<YT.OnStateChangeEvent> =
this._getLazyEmitter<YT.OnStateChangeEvent>('onStateChange');
@Output() error: Observable<YT.OnErrorEvent> =
this._getLazyEmitter<YT.OnErrorEvent>('onError');
@Output() apiChange: Observable<YT.PlayerEvent> =
this._getLazyEmitter<YT.PlayerEvent>('onApiChange');
@Output() playbackQualityChange: Observable<YT.OnPlaybackQualityChangeEvent> =
this._getLazyEmitter<YT.OnPlaybackQualityChangeEvent>('onPlaybackQualityChange');
@Output() playbackRateChange: Observable<YT.OnPlaybackRateChangeEvent> =
this._getLazyEmitter<YT.OnPlaybackRateChangeEvent>('onPlaybackRateChange');
/** The element that will be replaced by the iframe. */
@ViewChild('youtubeContainer')
youtubeContainer: ElementRef<HTMLElement>;
constructor(private _ngZone: NgZone, @Inject(PLATFORM_ID) platformId: Object) {
this._isBrowser = isPlatformBrowser(platformId);
}
ngOnInit() {
// Don't do anything if we're not in a browser environment.
if (!this._isBrowser) {
return;
}
// Assume the API is available; overridden below when the script hasn't loaded yet.
let iframeApiAvailableObs: Observable<boolean> = observableOf(true);
if (!window.YT) {
// Consumers opting into `showBeforeIframeApiLoads` promise to load the API
// themselves, so a missing `YT` namespace is a hard error for them.
if (this.showBeforeIframeApiLoads) {
throw new Error('Namespace YT not found, cannot construct embedded youtube player. ' +
'Please install the YouTube Player API Reference for iframe Embeds: ' +
'https://developers.google.com/youtube/iframe_api_reference');
}
const iframeApiAvailableSubject = new Subject<boolean>();
// Chain (don't clobber) any `onYouTubeIframeAPIReady` callback another consumer installed.
this._existingApiReadyCallback = window.onYouTubeIframeAPIReady;
window.onYouTubeIframeAPIReady = () => {
if (this._existingApiReadyCallback) {
this._existingApiReadyCallback();
}
// The callback fires outside Angular; re-enter the zone before notifying.
this._ngZone.run(() => iframeApiAvailableSubject.next(true));
};
iframeApiAvailableObs = iframeApiAvailableSubject.pipe(take(1), startWith(false));
}
// An observable of the currently loaded player.
const playerObs =
createPlayerObservable(
this._youtubeContainer,
this._videoId,
iframeApiAvailableObs,
this._width,
this._height,
this._playerVars,
this._ngZone
).pipe(tap(player => {
// Emit this before the `waitUntilReady` call so that we can bind to
// events that happen as the player is being initialized (e.g. `onReady`).
this._playerChanges.next(player);
}), waitUntilReady(player => {
// Destroy the player if loading was aborted so that we don't end up leaking memory.
if (!playerIsReady(player)) {
player.destroy();
}
}), takeUntil(this._destroyed), publish());
// Set up side effects to bind inputs to the player.
playerObs.subscribe(player => {
this._player = player;
if (player && this._pendingPlayerState) {
this._initializePlayer(player, this._pendingPlayerState);
}
this._pendingPlayerState = undefined;
});
bindSizeToPlayer(playerObs, this._width, this._height);
bindSuggestedQualityToPlayer(playerObs, this._suggestedQuality);
bindCueVideoCall(
playerObs,
this._videoId,
this._startSeconds,
this._endSeconds,
this._suggestedQuality,
this._destroyed);
// After all of the subscriptions are set up, connect the observable.
(playerObs as ConnectableObservable<Player>).connect();
}
/**
* @deprecated No longer being used. To be removed.
* @breaking-change 11.0.0
*/
createEventsBoundInZone(): YT.Events {
return {};
}
ngAfterViewInit() {
this._youtubeContainer.next(this.youtubeContainer.nativeElement);
}
ngOnDestroy() {
if (this._player) {
this._player.destroy();
window.onYouTubeIframeAPIReady = this._existingApiReadyCallback;
}
this._playerChanges.complete();
this._videoId.complete();
this._height.complete();
this._width.complete();
this._startSeconds.complete();
this._endSeconds.complete();
this._suggestedQuality.complete();
this._youtubeContainer.complete();
this._playerVars.complete();
this._destroyed.next();
this._destroyed.complete();
}
/** See https://developers.google.com/youtube/iframe_api_reference#playVideo */
playVideo() {
if (this._player) {
this._player.playVideo();
} else {
this._getPendingState().playbackState = YT.PlayerState.PLAYING;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#pauseVideo */
pauseVideo() {
if (this._player) {
this._player.pauseVideo();
} else {
this._getPendingState().playbackState = YT.PlayerState.PAUSED;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#stopVideo */
stopVideo() {
if (this._player) {
this._player.stopVideo();
} else {
// It seems like YouTube sets the player to CUED when it's stopped.
this._getPendingState().playbackState = YT.PlayerState.CUED;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#seekTo */
seekTo(seconds: number, allowSeekAhead: boolean) {
if (this._player) {
this._player.seekTo(seconds, allowSeekAhead);
} else {
this._getPendingState().seek = {seconds, allowSeekAhead};
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#mute */
mute() {
if (this._player) {
this._player.mute();
} else {
this._getPendingState().muted = true;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#unMute */
unMute() {
if (this._player) {
this._player.unMute();
} else {
this._getPendingState().muted = false;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#isMuted */
isMuted(): boolean {
if (this._player) {
return this._player.isMuted();
}
if (this._pendingPlayerState) {
return !!this._pendingPlayerState.muted;
}
return false;
}
/** See https://developers.google.com/youtube/iframe_api_reference#setVolume */
setVolume(volume: number) {
if (this._player) {
this._player.setVolume(volume);
} else {
this._getPendingState().volume = volume;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVolume */
getVolume(): number {
if (this._player) {
return this._player.getVolume();
}
if (this._pendingPlayerState && this._pendingPlayerState.volume != null) {
return this._pendingPlayerState.volume;
}
return 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#setPlaybackRate */
setPlaybackRate(playbackRate: number) {
if (this._player) {
return this._player.setPlaybackRate(playbackRate);
} else {
this._getPendingState().playbackRate = playbackRate;
}
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlaybackRate */
getPlaybackRate(): number {
if (this._player) {
return this._player.getPlaybackRate();
}
if (this._pendingPlayerState && this._pendingPlayerState.playbackRate != null) {
return this._pendingPlayerState.playbackRate;
}
return 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getAvailablePlaybackRates */
getAvailablePlaybackRates(): number[] |
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoLoadedFraction */
getVideoLoadedFraction(): number {
return this._player ? this._player.getVideoLoadedFraction() : 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlayerState */
getPlayerState(): YT.PlayerState | undefined {
if (!this._isBrowser || !window.YT) {
return undefined;
}
if (this._player) {
return this._player.getPlayerState();
}
if (this._pendingPlayerState && this._pendingPlayerState.playbackState != null) {
return this._pendingPlayerState.playbackState;
}
return YT.PlayerState.UNSTARTED;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getCurrentTime */
getCurrentTime(): number {
if (this._player) {
return this._player.getCurrentTime();
}
if (this._pendingPlayerState && this._pendingPlayerState.seek) {
return this._pendingPlayerState.seek.seconds;
}
return 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getPlaybackQuality */
getPlaybackQuality(): YT.SuggestedVideoQuality {
return this._player ? this._player.getPlaybackQuality() : 'default';
}
/** See https://developers.google.com/youtube/iframe_api_reference#getAvailableQualityLevels */
getAvailableQualityLevels(): YT.SuggestedVideoQuality[] {
return this._player ? this._player.getAvailableQualityLevels() : [];
}
/** See https://developers.google.com/youtube/iframe_api_reference#getDuration */
getDuration(): number {
return this._player ? this._player.getDuration() : 0;
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoUrl */
getVideoUrl(): string {
return this._player ? this._player.getVideoUrl() : '';
}
/** See https://developers.google.com/youtube/iframe_api_reference#getVideoEmbedCode */
getVideoEmbedCode(): string {
return this._player ? this._player.getVideoEmbedCode() : '';
}
/** Gets an object that should be used to store the temporary API state. */
private _getPendingState(): PendingPlayerState {
if (!this._pendingPlayerState) {
this._pendingPlayerState = {};
}
return this._pendingPlayerState;
}
/** Initializes a player from a temporary state. */
private _initializePlayer(player: YT.Player, state: PendingPlayerState): void {
const {playbackState, playbackRate, volume, muted, seek} = state;
switch (playbackState) {
case YT.PlayerState.PLAYING: player.playVideo(); break;
case YT.PlayerState.PAUSED: player.pauseVideo(); break;
case YT.PlayerState.CUED: player.stopVideo(); break;
}
if (playbackRate != null) {
player.setPlaybackRate(playbackRate);
}
if (volume != null) {
player.setVolume(volume);
}
if (muted != null) {
muted ? player.mute() : player.unMute();
}
if (seek != null) {
player.seekTo(seek.seconds, seek.allowSeekAhead);
}
}
/** Gets an observable that adds an event listener to the player when a user subscribes to it. */
private _getLazyEmitter<T extends YT.PlayerEvent>(name: keyof YT.Events): Observable<T> {
// Start with the stream of players. This way the events will be transferred
// over to the new player if it gets swapped out under-the-hood.
return this._playerChanges.pipe(
// Switch to the bound event. `switchMap` ensures that the old event is removed when the
// player is changed. If there's no player, return an observable that never emits.
switchMap(player => {
return player ? fromEventPattern<T>((listener: (event: T) => void) => {
player.addEventListener(name, listener);
}, (listener: (event: T) => void) => {
// The API seems to throw when we try to unbind from a destroyed player and it doesn't
// expose whether the player has been destroyed so we have to wrap it in a try/catch to
// prevent the entire stream from erroring out.
try {
if ((player as Player).removeEventListener!) {
(player as Player).removeEventListener(name, listener);
}
} catch {}
}) : observableOf<T>();
}),
// By default we run all the API interactions outside the zone
// so we have to bring the events back in manually when they emit.
(source: Observable<T>) => new Observable<T>(observer => source.subscribe({
next: value => this._ngZone.run(() => observer.next(value)),
error: error => observer.error(error),
complete: () => observer.complete()
})),
// Ensures that everything is cleared out on destroy.
takeUntil(this._destroyed)
);
}
}
/** Listens to changes to the given width and height and sets it on the player. */
function bindSizeToPlayer(
playerObs: Observable<YT.Player | undefined>,
widthObs: Observable<number>,
heightObs: Observable<number>
) {
return combineLatest([playerObs, widthObs, heightObs])
.subscribe(([player, width, height]) => player && player.setSize(width, height));
}
/** Listens to changes from the suggested quality and sets it on the given player. */
function bindSuggestedQualityToPlayer(
playerObs: Observable<YT.Player | undefined>,
suggestedQualityObs: Observable<YT.SuggestedVideoQuality | undefined>
) {
return combineLatest([
playerObs,
suggestedQualityObs
]).subscribe(
([player, suggestedQuality]) =>
player && suggestedQuality && player.setPlaybackQuality(suggestedQuality));
}
/**
* Returns an observable that emits the loaded player once it's ready. Certain properties/methods
* won't be available until the iframe finishes loading.
* @param onAbort Callback function that will be invoked if the player loading was aborted before
* it was able to complete. Can be used to clean up any loose references.
*/
function waitUntilReady(onAbort: (player: UninitializedPlayer) => void):
OperatorFunction<UninitializedPlayer | undefined, Player | undefined> {
return flatMap(player => {
if (!player) {
return observableOf<Player|undefined>(undefined);
}
if (playerIsReady(player)) {
return observableOf(player as Player);
}
// Since removeEventListener is not on Player when it's initialized, we can't use fromEvent.
// The player is not initialized fully until the ready is called.
return new Observable<Player>(emitter => {
let aborted = false;
let resolved = false;
const onReady = (event: YT.PlayerEvent) => {
resolved = true;
if (!aborted) {
event.target.removeEventListener('onReady', onReady);
emitter.next(event.target);
}
};
player.addEventListener('onReady', onReady);
return () => {
aborted = true;
if (!resolved) {
onAbort(player);
}
};
}).pipe(take(1), startWith(undefined));
});
}
/** Create an observable for the player based on the given options. */
function createPlayerObservable(
youtubeContainer: Observable<HTMLElement>,
videoIdObs: Observable<string | undefined>,
iframeApiAvailableObs: Observable<boolean>,
widthObs: Observable<number>,
heightObs: Observable<number>,
playerVarsObs: Observable<YT.PlayerVars | undefined>,
ngZone: NgZone
): Observable<UninitializedPlayer | undefined> {
const playerOptions = combineLatest([videoIdObs, playerVarsObs]).pipe(
withLatestFrom(combineLatest([widthObs, heightObs])),
map(([constructorOptions, sizeOptions]) => {
const [videoId, playerVars] = constructorOptions;
const [width, height] = sizeOptions;
return videoId ? ({ videoId, playerVars, width, height }) : undefined;
}),
);
return combineLatest([youtubeContainer, playerOptions, of(ngZone)])
.pipe(
skipUntilRememberLatest(iframeApiAvailableObs),
scan(syncPlayerState, undefined),
distinctUntilChanged());
}
/** Skips the given observable until the other observable emits true, then emit the latest. */
function skipUntilRememberLatest<T>(notifier: Observable<boolean>): MonoTypeOperatorFunction<T> {
return pipe(
combineLatestOp(notifier),
skipWhile(([_, doneSkipping]) => !doneSkipping),
map(([value]) => value));
}
/** Destroy the player if there are no options, or create the player if there are options. */
function syncPlayerState(
player: UninitializedPlayer | undefined,
[container, videoOptions, ngZone]: [HTMLElement, YT.PlayerOptions | undefined, NgZone],
): UninitializedPlayer | undefined {
if (player && videoOptions && player.playerVars !== videoOptions.playerVars) {
// The player needs to be recreated if the playerVars are different.
player.destroy();
} else if (!videoOptions) {
if (player) {
// Destroy the player if the videoId was removed.
player.destroy();
}
return;
} else if (player) {
return player;
}
// Important! We need to create the Player object outside of the `NgZone`, because it kicks
// off a 250ms setInterval which will continually trigger change detection if we don't.
const newPlayer: UninitializedPlayer =
ngZone.runOutsideAngular(() => new YT.Player(container, videoOptions));
newPlayer.videoId = videoOptions.videoId;
newPlayer.playerVars = videoOptions.playerVars;
return newPlayer;
}
/**
* Call cueVideoById if the videoId changes, or when start or end seconds change. cueVideoById will
* change the loaded video id to the given videoId, and set the start and end times to the given
* start/end seconds.
*/
function bindCueVideoCall(
playerObs: Observable<Player | undefined>,
videoIdObs: Observable<string | undefined>,
startSecondsObs: Observable<number | undefined>,
endSecondsObs: Observable<number | undefined>,
suggestedQualityObs: Observable<YT.SuggestedVideoQuality | undefined>,
destroyed: Observable<void>,
) {
const cueOptionsObs = combineLatest([startSecondsObs, endSecondsObs])
.pipe(map(([startSeconds, endSeconds]) => ({startSeconds, endSeconds})));
// Only respond to changes in cue options if the player is not running.
const filteredCueOptions = cueOptionsObs
.pipe(filterOnOther(playerObs, player => !!player && !hasPlayerStarted(player)));
// If the video id changed, there's no reason to run 'cue' unless the player
// was initialized with a different video id.
const changedVideoId = videoIdObs
.pipe(filterOnOther(playerObs, (player, videoId) => !!player && player.videoId !== videoId));
// If the player changed, there's no reason to run 'cue' unless there are cue options.
const changedPlayer = playerObs.pipe(
filterOnOther(
combineLatest([videoIdObs, cueOptionsObs]),
([videoId, cueOptions], player) =>
!!player &&
(videoId != player.videoId || !!cueOptions.startSeconds || !!cueOptions.endSeconds)));
merge(changedPlayer, changedVideoId, filteredCueOptions)
.pipe(
withLatestFrom(combineLatest([playerObs, videoIdObs, cueOptionsObs, suggestedQualityObs])),
map(([_, values]) => values),
takeUntil(destroyed),
)
.subscribe(([player, videoId, cueOptions, suggestedQuality]) => {
if (!videoId || !player) {
return;
}
player.videoId = videoId;
player.cueVideoById({
videoId,
suggestedQuality,
...cueOptions,
});
});
}
function hasPlayerStarted(player: YT.Player): boolean {
const state = player.getPlayerState();
return state !== YT.PlayerState.UNSTARTED && state !== YT.PlayerState.CUED;
}
function playerIsReady(player: UninitializedPlayer): player is Player {
return 'getPlayerStatus' in player;
}
/** Combines the two observables temporarily for the filter function. */
function filterOnOther<R, T>(
otherObs: Observable<T>,
filterFn: (t: T, r?: R) => boolean,
): MonoTypeOperatorFunction<R> {
return pipe(
withLatestFrom(otherObs),
filter(([value, other]) => filterFn(other, value)),
map(([value]) => value),
);
}
| {
return this._player ? this._player.getAvailablePlaybackRates() : [];
} | identifier_body |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/// A random number generator which shares one instance of an `OsRng`.
///
/// A problem with `OsRng`, which is inherited by `StdRng` and so
/// `ThreadRng`, is that it reads from `/dev/random`, and so consumes
/// a file descriptor. For multi-threaded applications like Servo,
/// it is easy to exhaust the supply of file descriptors this way.
///
/// This crate fixes that, by only using one `OsRng`, which is just
/// used to seed and re-seed an `ServoRng`.
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[cfg(target_pointer_width = "64")]
use rand::isaac::Isaac64Rng as IsaacWordRng;
#[cfg(target_pointer_width = "32")]
use rand::isaac::IsaacRng as IsaacWordRng;
use rand::os::OsRng;
use rand::reseeding::{Reseeder, ReseedingRng};
pub use rand::{Rand, Rng, SeedableRng};
use std::cell::RefCell;
use std::mem;
use std::rc::Rc;
use std::sync::Mutex;
use std::u64;
use uuid::Uuid;
// Slightly annoying having to cast between sizes.
#[cfg(target_pointer_width = "64")]
fn as_isaac_seed(seed: &[usize]) -> &[u64] {
unsafe { mem::transmute(seed) }
}
#[cfg(target_pointer_width = "32")]
fn as_isaac_seed(seed: &[usize]) -> &[u32] {
unsafe { mem::transmute(seed) }
}
// The shared RNG which may hold on to a file descriptor
lazy_static! {
static ref OS_RNG: Mutex<OsRng> = match OsRng::new() {
Ok(r) => Mutex::new(r),
Err(e) => panic!("Failed to seed OsRng: {}", e),
};
}
// Generate 32K of data between reseedings
const RESEED_THRESHOLD: u64 = 32_768;
// An in-memory RNG that only uses the shared file descriptor for seeding and reseeding.
pub struct ServoRng {
rng: ReseedingRng<IsaacWordRng, ServoReseeder>,
}
impl Rng for ServoRng {
#[inline]
fn next_u32(&mut self) -> u32 {
self.rng.next_u32()
}
#[inline]
fn next_u64(&mut self) -> u64 {
self.rng.next_u64()
}
}
impl<'a> SeedableRng<&'a [usize]> for ServoRng {
/// Create a manually-reseeding instane of `ServoRng`.
///
/// Note that this RNG does not reseed itself, so care is needed to reseed the RNG
/// is required to be cryptographically sound.
fn from_seed(seed: &[usize]) -> ServoRng {
trace!("Creating new manually-reseeded ServoRng.");
let isaac_rng = IsaacWordRng::from_seed(as_isaac_seed(seed));
let reseeding_rng = ReseedingRng::new(isaac_rng, u64::MAX, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
/// Reseed the RNG.
fn reseed(&mut self, seed: &'a [usize]) {
trace!("Manually reseeding ServoRng.");
self.rng.reseed((ServoReseeder, as_isaac_seed(seed)))
}
}
impl ServoRng {
/// Create an auto-reseeding instance of `ServoRng`.
///
/// This uses the shared `OsRng`, so avoids consuming
/// a file descriptor.
pub fn new() -> ServoRng {
trace!("Creating new ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
let isaac_rng = IsaacWordRng::rand(&mut *os_rng);
let reseeding_rng = ReseedingRng::new(isaac_rng, RESEED_THRESHOLD, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
}
// The reseeder for the in-memory RNG.
struct ServoReseeder;
impl Reseeder<IsaacWordRng> for ServoReseeder {
fn reseed(&mut self, rng: &mut IsaacWordRng) {
trace!("Reseeding ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
*rng = IsaacWordRng::rand(&mut *os_rng);
}
}
impl Default for ServoReseeder {
fn default() -> ServoReseeder {
ServoReseeder
}
}
// A thread-local RNG, designed as a drop-in replacement for rand::ThreadRng.
#[derive(Clone)]
pub struct | {
rng: Rc<RefCell<ServoRng>>,
}
// A thread-local RNG, designed as a drop-in replacement for rand::thread_rng.
pub fn thread_rng() -> ServoThreadRng {
SERVO_THREAD_RNG.with(|t| t.clone())
}
thread_local! {
static SERVO_THREAD_RNG: ServoThreadRng = ServoThreadRng { rng: Rc::new(RefCell::new(ServoRng::new())) };
}
impl Rng for ServoThreadRng {
fn next_u32(&mut self) -> u32 {
self.rng.borrow_mut().next_u32()
}
fn next_u64(&mut self) -> u64 {
self.rng.borrow_mut().next_u64()
}
#[inline]
fn fill_bytes(&mut self, bytes: &mut [u8]) {
self.rng.borrow_mut().fill_bytes(bytes)
}
}
// Generates a random value using the thread-local random number generator.
// A drop-in replacement for rand::random.
#[inline]
pub fn random<T: Rand>() -> T {
thread_rng().gen()
}
// TODO(eijebong): Replace calls to this by random once `uuid::Uuid` implements `rand::Rand` again.
#[inline]
pub fn random_uuid() -> Uuid {
let mut bytes = [0; 16];
thread_rng().fill_bytes(&mut bytes);
Uuid::from_random_bytes(bytes)
}
| ServoThreadRng | identifier_name |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/// A random number generator which shares one instance of an `OsRng`.
///
/// A problem with `OsRng`, which is inherited by `StdRng` and so
/// `ThreadRng`, is that it reads from `/dev/random`, and so consumes
/// a file descriptor. For multi-threaded applications like Servo,
/// it is easy to exhaust the supply of file descriptors this way.
///
/// This crate fixes that, by only using one `OsRng`, which is just
/// used to seed and re-seed an `ServoRng`.
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[cfg(target_pointer_width = "64")]
use rand::isaac::Isaac64Rng as IsaacWordRng;
#[cfg(target_pointer_width = "32")]
use rand::isaac::IsaacRng as IsaacWordRng;
use rand::os::OsRng;
use rand::reseeding::{Reseeder, ReseedingRng};
pub use rand::{Rand, Rng, SeedableRng};
use std::cell::RefCell;
use std::mem;
use std::rc::Rc;
use std::sync::Mutex;
use std::u64;
use uuid::Uuid;
// Slightly annoying having to cast between sizes.
#[cfg(target_pointer_width = "64")]
fn as_isaac_seed(seed: &[usize]) -> &[u64] {
unsafe { mem::transmute(seed) }
}
#[cfg(target_pointer_width = "32")]
fn as_isaac_seed(seed: &[usize]) -> &[u32] {
unsafe { mem::transmute(seed) }
}
// The shared RNG which may hold on to a file descriptor
lazy_static! {
static ref OS_RNG: Mutex<OsRng> = match OsRng::new() {
Ok(r) => Mutex::new(r),
Err(e) => panic!("Failed to seed OsRng: {}", e),
};
}
// Generate 32K of data between reseedings
const RESEED_THRESHOLD: u64 = 32_768;
// An in-memory RNG that only uses the shared file descriptor for seeding and reseeding.
pub struct ServoRng {
rng: ReseedingRng<IsaacWordRng, ServoReseeder>,
}
impl Rng for ServoRng {
#[inline]
fn next_u32(&mut self) -> u32 {
self.rng.next_u32()
}
#[inline]
fn next_u64(&mut self) -> u64 {
self.rng.next_u64()
}
}
impl<'a> SeedableRng<&'a [usize]> for ServoRng {
/// Create a manually-reseeding instane of `ServoRng`.
///
/// Note that this RNG does not reseed itself, so care is needed to reseed the RNG
/// is required to be cryptographically sound.
fn from_seed(seed: &[usize]) -> ServoRng {
trace!("Creating new manually-reseeded ServoRng.");
let isaac_rng = IsaacWordRng::from_seed(as_isaac_seed(seed));
let reseeding_rng = ReseedingRng::new(isaac_rng, u64::MAX, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
/// Reseed the RNG.
fn reseed(&mut self, seed: &'a [usize]) {
trace!("Manually reseeding ServoRng.");
self.rng.reseed((ServoReseeder, as_isaac_seed(seed)))
}
}
impl ServoRng {
/// Create an auto-reseeding instance of `ServoRng`.
///
/// This uses the shared `OsRng`, so avoids consuming
/// a file descriptor.
pub fn new() -> ServoRng {
trace!("Creating new ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
let isaac_rng = IsaacWordRng::rand(&mut *os_rng);
let reseeding_rng = ReseedingRng::new(isaac_rng, RESEED_THRESHOLD, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
}
// The reseeder for the in-memory RNG.
struct ServoReseeder;
impl Reseeder<IsaacWordRng> for ServoReseeder {
fn reseed(&mut self, rng: &mut IsaacWordRng) {
trace!("Reseeding ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
*rng = IsaacWordRng::rand(&mut *os_rng);
}
}
impl Default for ServoReseeder {
fn default() -> ServoReseeder {
ServoReseeder
}
}
// A thread-local RNG, designed as a drop-in replacement for rand::ThreadRng.
#[derive(Clone)]
pub struct ServoThreadRng {
rng: Rc<RefCell<ServoRng>>,
}
// A thread-local RNG, designed as a drop-in replacement for rand::thread_rng.
pub fn thread_rng() -> ServoThreadRng {
SERVO_THREAD_RNG.with(|t| t.clone())
}
thread_local! {
static SERVO_THREAD_RNG: ServoThreadRng = ServoThreadRng { rng: Rc::new(RefCell::new(ServoRng::new())) };
}
impl Rng for ServoThreadRng {
fn next_u32(&mut self) -> u32 {
self.rng.borrow_mut().next_u32()
}
fn next_u64(&mut self) -> u64 {
self.rng.borrow_mut().next_u64()
}
#[inline]
fn fill_bytes(&mut self, bytes: &mut [u8]) {
self.rng.borrow_mut().fill_bytes(bytes)
}
}
// Generates a random value using the thread-local random number generator.
// A drop-in replacement for rand::random.
#[inline]
pub fn random<T: Rand>() -> T {
thread_rng().gen()
}
// TODO(eijebong): Replace calls to this by random once `uuid::Uuid` implements `rand::Rand` again.
#[inline]
pub fn random_uuid() -> Uuid | {
let mut bytes = [0; 16];
thread_rng().fill_bytes(&mut bytes);
Uuid::from_random_bytes(bytes)
} | identifier_body | |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/// A random number generator which shares one instance of an `OsRng`.
///
/// A problem with `OsRng`, which is inherited by `StdRng` and so
/// `ThreadRng`, is that it reads from `/dev/random`, and so consumes
/// a file descriptor. For multi-threaded applications like Servo,
/// it is easy to exhaust the supply of file descriptors this way.
///
/// This crate fixes that, by only using one `OsRng`, which is just
/// used to seed and re-seed an `ServoRng`.
| #[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[cfg(target_pointer_width = "64")]
use rand::isaac::Isaac64Rng as IsaacWordRng;
#[cfg(target_pointer_width = "32")]
use rand::isaac::IsaacRng as IsaacWordRng;
use rand::os::OsRng;
use rand::reseeding::{Reseeder, ReseedingRng};
pub use rand::{Rand, Rng, SeedableRng};
use std::cell::RefCell;
use std::mem;
use std::rc::Rc;
use std::sync::Mutex;
use std::u64;
use uuid::Uuid;
// Slightly annoying having to cast between sizes.
#[cfg(target_pointer_width = "64")]
fn as_isaac_seed(seed: &[usize]) -> &[u64] {
unsafe { mem::transmute(seed) }
}
#[cfg(target_pointer_width = "32")]
fn as_isaac_seed(seed: &[usize]) -> &[u32] {
unsafe { mem::transmute(seed) }
}
// The shared RNG which may hold on to a file descriptor
lazy_static! {
static ref OS_RNG: Mutex<OsRng> = match OsRng::new() {
Ok(r) => Mutex::new(r),
Err(e) => panic!("Failed to seed OsRng: {}", e),
};
}
// Generate 32K of data between reseedings
const RESEED_THRESHOLD: u64 = 32_768;
// An in-memory RNG that only uses the shared file descriptor for seeding and reseeding.
pub struct ServoRng {
rng: ReseedingRng<IsaacWordRng, ServoReseeder>,
}
impl Rng for ServoRng {
#[inline]
fn next_u32(&mut self) -> u32 {
self.rng.next_u32()
}
#[inline]
fn next_u64(&mut self) -> u64 {
self.rng.next_u64()
}
}
impl<'a> SeedableRng<&'a [usize]> for ServoRng {
/// Create a manually-reseeding instane of `ServoRng`.
///
/// Note that this RNG does not reseed itself, so care is needed to reseed the RNG
/// is required to be cryptographically sound.
fn from_seed(seed: &[usize]) -> ServoRng {
trace!("Creating new manually-reseeded ServoRng.");
let isaac_rng = IsaacWordRng::from_seed(as_isaac_seed(seed));
let reseeding_rng = ReseedingRng::new(isaac_rng, u64::MAX, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
/// Reseed the RNG.
fn reseed(&mut self, seed: &'a [usize]) {
trace!("Manually reseeding ServoRng.");
self.rng.reseed((ServoReseeder, as_isaac_seed(seed)))
}
}
impl ServoRng {
/// Create an auto-reseeding instance of `ServoRng`.
///
/// This uses the shared `OsRng`, so avoids consuming
/// a file descriptor.
pub fn new() -> ServoRng {
trace!("Creating new ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
let isaac_rng = IsaacWordRng::rand(&mut *os_rng);
let reseeding_rng = ReseedingRng::new(isaac_rng, RESEED_THRESHOLD, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
}
// The reseeder for the in-memory RNG.
struct ServoReseeder;
impl Reseeder<IsaacWordRng> for ServoReseeder {
fn reseed(&mut self, rng: &mut IsaacWordRng) {
trace!("Reseeding ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
*rng = IsaacWordRng::rand(&mut *os_rng);
}
}
impl Default for ServoReseeder {
fn default() -> ServoReseeder {
ServoReseeder
}
}
// A thread-local RNG, designed as a drop-in replacement for rand::ThreadRng.
#[derive(Clone)]
pub struct ServoThreadRng {
rng: Rc<RefCell<ServoRng>>,
}
// A thread-local RNG, designed as a drop-in replacement for rand::thread_rng.
pub fn thread_rng() -> ServoThreadRng {
SERVO_THREAD_RNG.with(|t| t.clone())
}
thread_local! {
static SERVO_THREAD_RNG: ServoThreadRng = ServoThreadRng { rng: Rc::new(RefCell::new(ServoRng::new())) };
}
impl Rng for ServoThreadRng {
fn next_u32(&mut self) -> u32 {
self.rng.borrow_mut().next_u32()
}
fn next_u64(&mut self) -> u64 {
self.rng.borrow_mut().next_u64()
}
#[inline]
fn fill_bytes(&mut self, bytes: &mut [u8]) {
self.rng.borrow_mut().fill_bytes(bytes)
}
}
// Generates a random value using the thread-local random number generator.
// A drop-in replacement for rand::random.
#[inline]
pub fn random<T: Rand>() -> T {
thread_rng().gen()
}
// TODO(eijebong): Replace calls to this by random once `uuid::Uuid` implements `rand::Rand` again.
#[inline]
pub fn random_uuid() -> Uuid {
let mut bytes = [0; 16];
thread_rng().fill_bytes(&mut bytes);
Uuid::from_random_bytes(bytes)
} | random_line_split | |
widgets.py | # Copyright (c) 2008 Joost Cassee
# Licensed under the terms of the MIT License (see LICENSE.txt)
"""
This TinyMCE widget was copied and extended from this code by John D'Agostino:
http://code.djangoproject.com/wiki/CustomWidgetsTinyMCE
"""
from django import forms
from django.conf import settings
from django.contrib.admin import widgets as admin_widgets
from django.core.urlresolvers import reverse
from django.forms.widgets import flatatt
from django.utils.encoding import smart_unicode
from django.utils.html import escape
from django.utils import simplejson
from django.utils.datastructures import SortedDict
from django.utils.safestring import mark_safe
from django.utils.translation import get_language, ugettext as _
import tinymce.settings
class TinyMCE(forms.Textarea):
"""
TinyMCE widget. Set settings.TINYMCE_JS_URL to set the location of the
javascript file. Default is "MEDIA_URL + 'js/tiny_mce/tiny_mce.js'".
You can customize the configuration with the mce_attrs argument to the
constructor.
In addition to the standard configuration you can set the
'content_language' parameter. It takes the value of the 'language'
parameter by default.
In addition to the default settings from settings.TINYMCE_DEFAULT_CONFIG,
this widget sets the 'language', 'directionality' and
'spellchecker_languages' parameters by default. The first is derived from
the current Django language, the others from the 'content_language'
parameter.
"""
def __init__(self, content_language=None, attrs=None, mce_attrs={}):
|
def render(self, name, value, attrs=None):
if value is None: value = ''
value = smart_unicode(value)
final_attrs = self.build_attrs(attrs)
final_attrs['name'] = name
assert 'id' in final_attrs, "TinyMCE widget attributes must contain 'id'"
mce_config = tinymce.settings.DEFAULT_CONFIG.copy()
mce_config.update(get_language_config(self.content_language))
if tinymce.settings.USE_FILEBROWSER:
mce_config['file_browser_callback'] = "djangoFileBrowser"
mce_config.update(self.mce_attrs)
mce_config['mode'] = 'exact'
mce_config['elements'] = final_attrs['id']
mce_config['strict_loading_mode'] = 1
mce_json = simplejson.dumps(mce_config)
html = [u'<textarea%s>%s</textarea>' % (flatatt(final_attrs), escape(value))]
if tinymce.settings.USE_COMPRESSOR:
compressor_config = {
'plugins': mce_config.get('plugins', ''),
'themes': mce_config.get('theme', 'advanced'),
'languages': mce_config.get('language', ''),
'diskcache': True,
'debug': False,
}
compressor_json = simplejson.dumps(compressor_config)
html.append(u'<script type="text/javascript">tinyMCE_GZ.init(%s)</script>' % compressor_json)
html.append(u'<script type="text/javascript">tinyMCE.init(%s)</script>' % mce_json)
return mark_safe(u'\n'.join(html))
def _media(self):
if tinymce.settings.USE_COMPRESSOR:
js = [reverse('tinymce-compressor')]
else:
js = [tinymce.settings.JS_URL]
if tinymce.settings.USE_FILEBROWSER:
js.append(reverse('tinymce-filebrowser'))
return forms.Media(js=js)
media = property(_media)
class AdminTinyMCE(admin_widgets.AdminTextareaWidget, TinyMCE):
pass
def get_language_config(content_language=None):
language = get_language()[:2]
if content_language:
content_language = content_language[:2]
else:
content_language = language
config = {}
config['language'] = language
lang_names = SortedDict()
for lang, name in settings.LANGUAGES:
if lang[:2] not in lang_names: lang_names[lang[:2]] = []
lang_names[lang[:2]].append(_(name))
sp_langs = []
for lang, names in lang_names.items():
if lang == content_language:
default = '+'
else:
default = ''
sp_langs.append(u'%s%s=%s' % (default, ' / '.join(names), lang))
config['spellchecker_languages'] = ','.join(sp_langs)
if content_language in settings.LANGUAGES_BIDI:
config['directionality'] = 'rtl'
else:
config['directionality'] = 'ltr'
if tinymce.settings.USE_SPELLCHECKER:
config['spellchecker_rpc_url'] = reverse('tinymce.views.spell_check')
return config
| super(TinyMCE, self).__init__(attrs)
self.mce_attrs = mce_attrs
if content_language is None:
content_language = mce_attrs.get('language', None)
self.content_language = content_language | identifier_body |
widgets.py | # Copyright (c) 2008 Joost Cassee
# Licensed under the terms of the MIT License (see LICENSE.txt)
"""
This TinyMCE widget was copied and extended from this code by John D'Agostino:
http://code.djangoproject.com/wiki/CustomWidgetsTinyMCE
"""
from django import forms
from django.conf import settings
from django.contrib.admin import widgets as admin_widgets
from django.core.urlresolvers import reverse
from django.forms.widgets import flatatt
from django.utils.encoding import smart_unicode
from django.utils.html import escape
from django.utils import simplejson
from django.utils.datastructures import SortedDict
from django.utils.safestring import mark_safe
from django.utils.translation import get_language, ugettext as _
import tinymce.settings
class TinyMCE(forms.Textarea):
"""
TinyMCE widget. Set settings.TINYMCE_JS_URL to set the location of the
javascript file. Default is "MEDIA_URL + 'js/tiny_mce/tiny_mce.js'".
You can customize the configuration with the mce_attrs argument to the
constructor.
In addition to the standard configuration you can set the
'content_language' parameter. It takes the value of the 'language'
parameter by default.
In addition to the default settings from settings.TINYMCE_DEFAULT_CONFIG,
this widget sets the 'language', 'directionality' and
'spellchecker_languages' parameters by default. The first is derived from
the current Django language, the others from the 'content_language'
parameter.
"""
def __init__(self, content_language=None, attrs=None, mce_attrs={}):
super(TinyMCE, self).__init__(attrs)
self.mce_attrs = mce_attrs
if content_language is None:
content_language = mce_attrs.get('language', None)
self.content_language = content_language
def | (self, name, value, attrs=None):
if value is None: value = ''
value = smart_unicode(value)
final_attrs = self.build_attrs(attrs)
final_attrs['name'] = name
assert 'id' in final_attrs, "TinyMCE widget attributes must contain 'id'"
mce_config = tinymce.settings.DEFAULT_CONFIG.copy()
mce_config.update(get_language_config(self.content_language))
if tinymce.settings.USE_FILEBROWSER:
mce_config['file_browser_callback'] = "djangoFileBrowser"
mce_config.update(self.mce_attrs)
mce_config['mode'] = 'exact'
mce_config['elements'] = final_attrs['id']
mce_config['strict_loading_mode'] = 1
mce_json = simplejson.dumps(mce_config)
html = [u'<textarea%s>%s</textarea>' % (flatatt(final_attrs), escape(value))]
if tinymce.settings.USE_COMPRESSOR:
compressor_config = {
'plugins': mce_config.get('plugins', ''),
'themes': mce_config.get('theme', 'advanced'),
'languages': mce_config.get('language', ''),
'diskcache': True,
'debug': False,
}
compressor_json = simplejson.dumps(compressor_config)
html.append(u'<script type="text/javascript">tinyMCE_GZ.init(%s)</script>' % compressor_json)
html.append(u'<script type="text/javascript">tinyMCE.init(%s)</script>' % mce_json)
return mark_safe(u'\n'.join(html))
def _media(self):
if tinymce.settings.USE_COMPRESSOR:
js = [reverse('tinymce-compressor')]
else:
js = [tinymce.settings.JS_URL]
if tinymce.settings.USE_FILEBROWSER:
js.append(reverse('tinymce-filebrowser'))
return forms.Media(js=js)
media = property(_media)
class AdminTinyMCE(admin_widgets.AdminTextareaWidget, TinyMCE):
pass
def get_language_config(content_language=None):
language = get_language()[:2]
if content_language:
content_language = content_language[:2]
else:
content_language = language
config = {}
config['language'] = language
lang_names = SortedDict()
for lang, name in settings.LANGUAGES:
if lang[:2] not in lang_names: lang_names[lang[:2]] = []
lang_names[lang[:2]].append(_(name))
sp_langs = []
for lang, names in lang_names.items():
if lang == content_language:
default = '+'
else:
default = ''
sp_langs.append(u'%s%s=%s' % (default, ' / '.join(names), lang))
config['spellchecker_languages'] = ','.join(sp_langs)
if content_language in settings.LANGUAGES_BIDI:
config['directionality'] = 'rtl'
else:
config['directionality'] = 'ltr'
if tinymce.settings.USE_SPELLCHECKER:
config['spellchecker_rpc_url'] = reverse('tinymce.views.spell_check')
return config
| render | identifier_name |
widgets.py | # Copyright (c) 2008 Joost Cassee
# Licensed under the terms of the MIT License (see LICENSE.txt)
"""
This TinyMCE widget was copied and extended from this code by John D'Agostino:
http://code.djangoproject.com/wiki/CustomWidgetsTinyMCE
"""
from django import forms
from django.conf import settings
from django.contrib.admin import widgets as admin_widgets
from django.core.urlresolvers import reverse
from django.forms.widgets import flatatt
from django.utils.encoding import smart_unicode
from django.utils.html import escape
from django.utils import simplejson
from django.utils.datastructures import SortedDict
from django.utils.safestring import mark_safe
from django.utils.translation import get_language, ugettext as _
import tinymce.settings
class TinyMCE(forms.Textarea):
"""
TinyMCE widget. Set settings.TINYMCE_JS_URL to set the location of the
javascript file. Default is "MEDIA_URL + 'js/tiny_mce/tiny_mce.js'".
You can customize the configuration with the mce_attrs argument to the
constructor.
In addition to the standard configuration you can set the
'content_language' parameter. It takes the value of the 'language'
parameter by default.
In addition to the default settings from settings.TINYMCE_DEFAULT_CONFIG,
this widget sets the 'language', 'directionality' and
'spellchecker_languages' parameters by default. The first is derived from
the current Django language, the others from the 'content_language'
parameter.
"""
def __init__(self, content_language=None, attrs=None, mce_attrs={}):
super(TinyMCE, self).__init__(attrs)
self.mce_attrs = mce_attrs
if content_language is None:
content_language = mce_attrs.get('language', None)
self.content_language = content_language
def render(self, name, value, attrs=None):
if value is None: value = ''
value = smart_unicode(value)
final_attrs = self.build_attrs(attrs)
final_attrs['name'] = name
assert 'id' in final_attrs, "TinyMCE widget attributes must contain 'id'"
mce_config = tinymce.settings.DEFAULT_CONFIG.copy()
mce_config.update(get_language_config(self.content_language))
if tinymce.settings.USE_FILEBROWSER:
mce_config['file_browser_callback'] = "djangoFileBrowser"
mce_config.update(self.mce_attrs)
mce_config['mode'] = 'exact'
mce_config['elements'] = final_attrs['id']
mce_config['strict_loading_mode'] = 1
mce_json = simplejson.dumps(mce_config)
html = [u'<textarea%s>%s</textarea>' % (flatatt(final_attrs), escape(value))]
if tinymce.settings.USE_COMPRESSOR:
compressor_config = {
'plugins': mce_config.get('plugins', ''),
'themes': mce_config.get('theme', 'advanced'),
'languages': mce_config.get('language', ''),
'diskcache': True,
'debug': False,
}
compressor_json = simplejson.dumps(compressor_config)
html.append(u'<script type="text/javascript">tinyMCE_GZ.init(%s)</script>' % compressor_json)
html.append(u'<script type="text/javascript">tinyMCE.init(%s)</script>' % mce_json)
return mark_safe(u'\n'.join(html))
def _media(self):
if tinymce.settings.USE_COMPRESSOR:
js = [reverse('tinymce-compressor')]
else:
js = [tinymce.settings.JS_URL]
if tinymce.settings.USE_FILEBROWSER:
js.append(reverse('tinymce-filebrowser'))
return forms.Media(js=js)
media = property(_media)
class AdminTinyMCE(admin_widgets.AdminTextareaWidget, TinyMCE):
pass
def get_language_config(content_language=None):
language = get_language()[:2]
if content_language:
content_language = content_language[:2]
else:
content_language = language
config = {}
config['language'] = language
lang_names = SortedDict()
for lang, name in settings.LANGUAGES:
if lang[:2] not in lang_names: lang_names[lang[:2]] = []
lang_names[lang[:2]].append(_(name))
sp_langs = []
for lang, names in lang_names.items():
if lang == content_language:
|
else:
default = ''
sp_langs.append(u'%s%s=%s' % (default, ' / '.join(names), lang))
config['spellchecker_languages'] = ','.join(sp_langs)
if content_language in settings.LANGUAGES_BIDI:
config['directionality'] = 'rtl'
else:
config['directionality'] = 'ltr'
if tinymce.settings.USE_SPELLCHECKER:
config['spellchecker_rpc_url'] = reverse('tinymce.views.spell_check')
return config
| default = '+' | conditional_block |
widgets.py | # Copyright (c) 2008 Joost Cassee
# Licensed under the terms of the MIT License (see LICENSE.txt)
"""
This TinyMCE widget was copied and extended from this code by John D'Agostino:
http://code.djangoproject.com/wiki/CustomWidgetsTinyMCE
"""
from django import forms
from django.conf import settings
from django.contrib.admin import widgets as admin_widgets
from django.core.urlresolvers import reverse
from django.forms.widgets import flatatt
from django.utils.encoding import smart_unicode
from django.utils.html import escape
from django.utils import simplejson
from django.utils.datastructures import SortedDict
from django.utils.safestring import mark_safe
from django.utils.translation import get_language, ugettext as _
import tinymce.settings
class TinyMCE(forms.Textarea):
"""
TinyMCE widget. Set settings.TINYMCE_JS_URL to set the location of the
javascript file. Default is "MEDIA_URL + 'js/tiny_mce/tiny_mce.js'".
You can customize the configuration with the mce_attrs argument to the
constructor.
In addition to the standard configuration you can set the
'content_language' parameter. It takes the value of the 'language'
parameter by default. |
In addition to the default settings from settings.TINYMCE_DEFAULT_CONFIG,
this widget sets the 'language', 'directionality' and
'spellchecker_languages' parameters by default. The first is derived from
the current Django language, the others from the 'content_language'
parameter.
"""
def __init__(self, content_language=None, attrs=None, mce_attrs={}):
super(TinyMCE, self).__init__(attrs)
self.mce_attrs = mce_attrs
if content_language is None:
content_language = mce_attrs.get('language', None)
self.content_language = content_language
def render(self, name, value, attrs=None):
if value is None: value = ''
value = smart_unicode(value)
final_attrs = self.build_attrs(attrs)
final_attrs['name'] = name
assert 'id' in final_attrs, "TinyMCE widget attributes must contain 'id'"
mce_config = tinymce.settings.DEFAULT_CONFIG.copy()
mce_config.update(get_language_config(self.content_language))
if tinymce.settings.USE_FILEBROWSER:
mce_config['file_browser_callback'] = "djangoFileBrowser"
mce_config.update(self.mce_attrs)
mce_config['mode'] = 'exact'
mce_config['elements'] = final_attrs['id']
mce_config['strict_loading_mode'] = 1
mce_json = simplejson.dumps(mce_config)
html = [u'<textarea%s>%s</textarea>' % (flatatt(final_attrs), escape(value))]
if tinymce.settings.USE_COMPRESSOR:
compressor_config = {
'plugins': mce_config.get('plugins', ''),
'themes': mce_config.get('theme', 'advanced'),
'languages': mce_config.get('language', ''),
'diskcache': True,
'debug': False,
}
compressor_json = simplejson.dumps(compressor_config)
html.append(u'<script type="text/javascript">tinyMCE_GZ.init(%s)</script>' % compressor_json)
html.append(u'<script type="text/javascript">tinyMCE.init(%s)</script>' % mce_json)
return mark_safe(u'\n'.join(html))
def _media(self):
if tinymce.settings.USE_COMPRESSOR:
js = [reverse('tinymce-compressor')]
else:
js = [tinymce.settings.JS_URL]
if tinymce.settings.USE_FILEBROWSER:
js.append(reverse('tinymce-filebrowser'))
return forms.Media(js=js)
media = property(_media)
class AdminTinyMCE(admin_widgets.AdminTextareaWidget, TinyMCE):
pass
def get_language_config(content_language=None):
language = get_language()[:2]
if content_language:
content_language = content_language[:2]
else:
content_language = language
config = {}
config['language'] = language
lang_names = SortedDict()
for lang, name in settings.LANGUAGES:
if lang[:2] not in lang_names: lang_names[lang[:2]] = []
lang_names[lang[:2]].append(_(name))
sp_langs = []
for lang, names in lang_names.items():
if lang == content_language:
default = '+'
else:
default = ''
sp_langs.append(u'%s%s=%s' % (default, ' / '.join(names), lang))
config['spellchecker_languages'] = ','.join(sp_langs)
if content_language in settings.LANGUAGES_BIDI:
config['directionality'] = 'rtl'
else:
config['directionality'] = 'ltr'
if tinymce.settings.USE_SPELLCHECKER:
config['spellchecker_rpc_url'] = reverse('tinymce.views.spell_check')
return config | random_line_split | |
UserSettingsEditComponent.ts | import {Input, Component} from 'angular2/core';
import {Router, RouterLink, CanActivate} from 'angular2/router';
import {UserSettingsComponent} from '../userSettings/UserSettingsComponent';
import {UserSettingsService} from '../../shared/services/UserSettingsService';
import {AuthService} from '../../shared/services/AuthService';
import {AlertingService} from '../../shared/services/AlertingService';
import {UserSettings} from '../../shared/models/UserSettings';
import {
PointerType,
PointerSize,
PointerColor,
BackgroundColor} from '../../shared/enums/UserSettingsEnums';
import {TranslatePipe} from 'ng2-translate/ng2-translate';
import {appInjector} from '../../../appInjector';
@Component({
directives: [RouterLink, UserSettingsComponent],
templateUrl: './app/components/userSettingsEdit/userSettingsEdit.html',
pipes: [TranslatePipe]
})
@CanActivate(
(nextInstr: any, currInstr: any) => {
let injector: any = appInjector();
let authService: AuthService = injector.get(AuthService);
let router: Router = injector.get(Router);
let isLogged = authService.isLogged();
if (!isLogged) {
router.navigate(['/Login']);
}
return isLogged;
}
)
export class UserSettingsEditComponent {
public userName: string;
public userSettings: UserSettings;
public userSettingsForJar: string;
constructor(
private alertingService: AlertingService,
private authService: AuthService,
private userSettingsService: UserSettingsService) {
this.userName = authService.getLoggedUser();
this.userSettingsService.getUserSettingsFor(this.userName)
.subscribe(data => this.userSettings = data);
this.userSettingsService.getUserSettingsForJar(this.userName)
.subscribe(data => this.userSettingsForJar = data);
}
| (): void {
this.userSettingsService.saveUserSettingsForUser(this.userName, this.userSettings)
.subscribe(data => {
this.alertingService.addSuccess('SAVE_USER_SETTINGS_SUCCESS_MESSAGE');
}, err => {
this.alertingService.addDanger('SAVE_USER_SETTINGS_ERROR_MESSAGE');
});
}
}
| saveUserSettings | identifier_name |
UserSettingsEditComponent.ts | import {Input, Component} from 'angular2/core';
import {Router, RouterLink, CanActivate} from 'angular2/router';
import {UserSettingsComponent} from '../userSettings/UserSettingsComponent';
import {UserSettingsService} from '../../shared/services/UserSettingsService';
import {AuthService} from '../../shared/services/AuthService';
import {AlertingService} from '../../shared/services/AlertingService';
import {UserSettings} from '../../shared/models/UserSettings';
import {
PointerType,
PointerSize,
PointerColor,
BackgroundColor} from '../../shared/enums/UserSettingsEnums';
import {TranslatePipe} from 'ng2-translate/ng2-translate';
import {appInjector} from '../../../appInjector';
@Component({
directives: [RouterLink, UserSettingsComponent],
templateUrl: './app/components/userSettingsEdit/userSettingsEdit.html',
pipes: [TranslatePipe]
})
@CanActivate(
(nextInstr: any, currInstr: any) => {
let injector: any = appInjector();
let authService: AuthService = injector.get(AuthService);
let router: Router = injector.get(Router);
let isLogged = authService.isLogged();
if (!isLogged) |
return isLogged;
}
)
export class UserSettingsEditComponent {
public userName: string;
public userSettings: UserSettings;
public userSettingsForJar: string;
constructor(
private alertingService: AlertingService,
private authService: AuthService,
private userSettingsService: UserSettingsService) {
this.userName = authService.getLoggedUser();
this.userSettingsService.getUserSettingsFor(this.userName)
.subscribe(data => this.userSettings = data);
this.userSettingsService.getUserSettingsForJar(this.userName)
.subscribe(data => this.userSettingsForJar = data);
}
saveUserSettings(): void {
this.userSettingsService.saveUserSettingsForUser(this.userName, this.userSettings)
.subscribe(data => {
this.alertingService.addSuccess('SAVE_USER_SETTINGS_SUCCESS_MESSAGE');
}, err => {
this.alertingService.addDanger('SAVE_USER_SETTINGS_ERROR_MESSAGE');
});
}
}
| {
router.navigate(['/Login']);
} | conditional_block |
UserSettingsEditComponent.ts | import {Input, Component} from 'angular2/core';
import {Router, RouterLink, CanActivate} from 'angular2/router';
import {UserSettingsComponent} from '../userSettings/UserSettingsComponent';
import {UserSettingsService} from '../../shared/services/UserSettingsService';
import {AuthService} from '../../shared/services/AuthService';
import {AlertingService} from '../../shared/services/AlertingService';
import {UserSettings} from '../../shared/models/UserSettings';
import {
PointerType,
PointerSize,
PointerColor,
BackgroundColor} from '../../shared/enums/UserSettingsEnums';
import {TranslatePipe} from 'ng2-translate/ng2-translate';
import {appInjector} from '../../../appInjector';
@Component({
directives: [RouterLink, UserSettingsComponent],
templateUrl: './app/components/userSettingsEdit/userSettingsEdit.html',
pipes: [TranslatePipe]
})
@CanActivate(
(nextInstr: any, currInstr: any) => {
let injector: any = appInjector();
let authService: AuthService = injector.get(AuthService);
let router: Router = injector.get(Router);
let isLogged = authService.isLogged();
if (!isLogged) {
router.navigate(['/Login']);
}
return isLogged;
}
)
export class UserSettingsEditComponent {
public userName: string;
public userSettings: UserSettings;
public userSettingsForJar: string;
constructor(
private alertingService: AlertingService,
private authService: AuthService,
private userSettingsService: UserSettingsService) |
saveUserSettings(): void {
this.userSettingsService.saveUserSettingsForUser(this.userName, this.userSettings)
.subscribe(data => {
this.alertingService.addSuccess('SAVE_USER_SETTINGS_SUCCESS_MESSAGE');
}, err => {
this.alertingService.addDanger('SAVE_USER_SETTINGS_ERROR_MESSAGE');
});
}
}
| {
this.userName = authService.getLoggedUser();
this.userSettingsService.getUserSettingsFor(this.userName)
.subscribe(data => this.userSettings = data);
this.userSettingsService.getUserSettingsForJar(this.userName)
.subscribe(data => this.userSettingsForJar = data);
} | identifier_body |
UserSettingsEditComponent.ts | import {Input, Component} from 'angular2/core';
import {Router, RouterLink, CanActivate} from 'angular2/router';
import {UserSettingsComponent} from '../userSettings/UserSettingsComponent';
import {UserSettingsService} from '../../shared/services/UserSettingsService';
import {AuthService} from '../../shared/services/AuthService';
import {AlertingService} from '../../shared/services/AlertingService';
import {UserSettings} from '../../shared/models/UserSettings';
import {
PointerType,
PointerSize,
PointerColor,
BackgroundColor} from '../../shared/enums/UserSettingsEnums';
import {TranslatePipe} from 'ng2-translate/ng2-translate';
import {appInjector} from '../../../appInjector';
@Component({
directives: [RouterLink, UserSettingsComponent],
templateUrl: './app/components/userSettingsEdit/userSettingsEdit.html',
pipes: [TranslatePipe]
})
@CanActivate(
(nextInstr: any, currInstr: any) => {
let injector: any = appInjector();
let authService: AuthService = injector.get(AuthService);
let router: Router = injector.get(Router);
let isLogged = authService.isLogged();
if (!isLogged) {
router.navigate(['/Login']);
}
return isLogged;
}
)
export class UserSettingsEditComponent {
public userName: string;
public userSettings: UserSettings;
public userSettingsForJar: string; | constructor(
private alertingService: AlertingService,
private authService: AuthService,
private userSettingsService: UserSettingsService) {
this.userName = authService.getLoggedUser();
this.userSettingsService.getUserSettingsFor(this.userName)
.subscribe(data => this.userSettings = data);
this.userSettingsService.getUserSettingsForJar(this.userName)
.subscribe(data => this.userSettingsForJar = data);
}
saveUserSettings(): void {
this.userSettingsService.saveUserSettingsForUser(this.userName, this.userSettings)
.subscribe(data => {
this.alertingService.addSuccess('SAVE_USER_SETTINGS_SUCCESS_MESSAGE');
}, err => {
this.alertingService.addDanger('SAVE_USER_SETTINGS_ERROR_MESSAGE');
});
}
} | random_line_split | |
XMLname.py | import re
from six import text_type
"""Translate strings to and from SOAP 1.2 XML name encoding
Implements rules for mapping application defined name to XML names
specified by the w3 SOAP working group for SOAP version 1.2 in
Appendix A of "SOAP Version 1.2 Part 2: Adjuncts", W3C Working Draft
17, December 2001, <http://www.w3.org/TR/soap12-part2/#namemap>
Also see <http://www.w3.org/2000/xp/Group/xmlp-issues>.
Author: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>
Date:: 2002-04-25
Version 0.9.0
"""
ident = "$Id$"
def _NCNameChar(x):
return x.isalpha() or x.isdigit() or x == "." or x == '-' or x == "_"
def _NCNameStartChar(x):
return x.isalpha() or x == "_"
def _toUnicodeHex(x):
hexval = hex(ord(x[0]))[2:]
hexlen = len(hexval)
# Make hexval have either 4 or 8 digits by prepending 0's
if (hexlen == 1):
hexval = "000" + hexval
elif (hexlen == 2):
hexval = "00" + hexval
elif (hexlen == 3):
hexval = "0" + hexval
elif (hexlen == 4):
hexval = "" + hexval
elif (hexlen == 5):
hexval = "000" + hexval
elif (hexlen == 6):
hexval = "00" + hexval
elif (hexlen == 7):
hexval = "0" + hexval
elif (hexlen == 8):
hexval = "" + hexval
else:
raise Exception("Illegal Value returned from hex(ord(x))")
return "_x" + hexval + "_"
def _fromUnicodeHex(x):
return eval(r'u"\u' + x[2:-1] + '"')
def toXMLname(string):
"""Convert string to a XML name."""
if string.find(':') != -1:
(prefix, localname) = string.split(':', 1)
else:
prefix = None
localname = string
T = text_type(localname)
N = len(localname)
X = []
for i in range(N):
if i < N - 1 and T[i] == u'_' and T[i + 1] == u'x':
X.append(u'_x005F_')
elif i == 0 and N >= 3 and \
(T[0] == u'x' or T[0] == u'X') and \
(T[1] == u'm' or T[1] == u'M') and \
(T[2] == u'l' or T[2] == u'L'):
X.append(u'_xFFFF_' + T[0])
elif (not _NCNameChar(T[i])) or (i == 0 and not _NCNameStartChar(T[i])):
X.append(_toUnicodeHex(T[i]))
else:
X.append(T[i])
if prefix:
return "%s:%s" % (prefix, u''.join(X))
return u''.join(X)
def fromXMLname(string):
"""Convert XML name to unicode string."""
|
retval = re.sub(r'_x[0-9A-Fa-f]{4}_', fun, retval)
return retval | retval = re.sub(r'_xFFFF_', '', string)
def fun(matchobj):
return _fromUnicodeHex(matchobj.group(0)) | random_line_split |
XMLname.py | import re
from six import text_type
"""Translate strings to and from SOAP 1.2 XML name encoding
Implements rules for mapping application defined name to XML names
specified by the w3 SOAP working group for SOAP version 1.2 in
Appendix A of "SOAP Version 1.2 Part 2: Adjuncts", W3C Working Draft
17, December 2001, <http://www.w3.org/TR/soap12-part2/#namemap>
Also see <http://www.w3.org/2000/xp/Group/xmlp-issues>.
Author: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>
Date:: 2002-04-25
Version 0.9.0
"""
ident = "$Id$"
def _NCNameChar(x):
return x.isalpha() or x.isdigit() or x == "." or x == '-' or x == "_"
def _NCNameStartChar(x):
return x.isalpha() or x == "_"
def _toUnicodeHex(x):
hexval = hex(ord(x[0]))[2:]
hexlen = len(hexval)
# Make hexval have either 4 or 8 digits by prepending 0's
if (hexlen == 1):
hexval = "000" + hexval
elif (hexlen == 2):
hexval = "00" + hexval
elif (hexlen == 3):
hexval = "0" + hexval
elif (hexlen == 4):
hexval = "" + hexval
elif (hexlen == 5):
hexval = "000" + hexval
elif (hexlen == 6):
hexval = "00" + hexval
elif (hexlen == 7):
hexval = "0" + hexval
elif (hexlen == 8):
hexval = "" + hexval
else:
raise Exception("Illegal Value returned from hex(ord(x))")
return "_x" + hexval + "_"
def _fromUnicodeHex(x):
return eval(r'u"\u' + x[2:-1] + '"')
def toXMLname(string):
"""Convert string to a XML name."""
if string.find(':') != -1:
(prefix, localname) = string.split(':', 1)
else:
prefix = None
localname = string
T = text_type(localname)
N = len(localname)
X = []
for i in range(N):
if i < N - 1 and T[i] == u'_' and T[i + 1] == u'x':
X.append(u'_x005F_')
elif i == 0 and N >= 3 and \
(T[0] == u'x' or T[0] == u'X') and \
(T[1] == u'm' or T[1] == u'M') and \
(T[2] == u'l' or T[2] == u'L'):
X.append(u'_xFFFF_' + T[0])
elif (not _NCNameChar(T[i])) or (i == 0 and not _NCNameStartChar(T[i])):
X.append(_toUnicodeHex(T[i]))
else:
X.append(T[i])
if prefix:
|
return u''.join(X)
def fromXMLname(string):
"""Convert XML name to unicode string."""
retval = re.sub(r'_xFFFF_', '', string)
def fun(matchobj):
return _fromUnicodeHex(matchobj.group(0))
retval = re.sub(r'_x[0-9A-Fa-f]{4}_', fun, retval)
return retval
| return "%s:%s" % (prefix, u''.join(X)) | conditional_block |
XMLname.py | import re
from six import text_type
"""Translate strings to and from SOAP 1.2 XML name encoding
Implements rules for mapping application defined name to XML names
specified by the w3 SOAP working group for SOAP version 1.2 in
Appendix A of "SOAP Version 1.2 Part 2: Adjuncts", W3C Working Draft
17, December 2001, <http://www.w3.org/TR/soap12-part2/#namemap>
Also see <http://www.w3.org/2000/xp/Group/xmlp-issues>.
Author: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>
Date:: 2002-04-25
Version 0.9.0
"""
ident = "$Id$"
def _NCNameChar(x):
return x.isalpha() or x.isdigit() or x == "." or x == '-' or x == "_"
def _NCNameStartChar(x):
return x.isalpha() or x == "_"
def _toUnicodeHex(x):
hexval = hex(ord(x[0]))[2:]
hexlen = len(hexval)
# Make hexval have either 4 or 8 digits by prepending 0's
if (hexlen == 1):
hexval = "000" + hexval
elif (hexlen == 2):
hexval = "00" + hexval
elif (hexlen == 3):
hexval = "0" + hexval
elif (hexlen == 4):
hexval = "" + hexval
elif (hexlen == 5):
hexval = "000" + hexval
elif (hexlen == 6):
hexval = "00" + hexval
elif (hexlen == 7):
hexval = "0" + hexval
elif (hexlen == 8):
hexval = "" + hexval
else:
raise Exception("Illegal Value returned from hex(ord(x))")
return "_x" + hexval + "_"
def _fromUnicodeHex(x):
return eval(r'u"\u' + x[2:-1] + '"')
def toXMLname(string):
"""Convert string to a XML name."""
if string.find(':') != -1:
(prefix, localname) = string.split(':', 1)
else:
prefix = None
localname = string
T = text_type(localname)
N = len(localname)
X = []
for i in range(N):
if i < N - 1 and T[i] == u'_' and T[i + 1] == u'x':
X.append(u'_x005F_')
elif i == 0 and N >= 3 and \
(T[0] == u'x' or T[0] == u'X') and \
(T[1] == u'm' or T[1] == u'M') and \
(T[2] == u'l' or T[2] == u'L'):
X.append(u'_xFFFF_' + T[0])
elif (not _NCNameChar(T[i])) or (i == 0 and not _NCNameStartChar(T[i])):
X.append(_toUnicodeHex(T[i]))
else:
X.append(T[i])
if prefix:
return "%s:%s" % (prefix, u''.join(X))
return u''.join(X)
def fromXMLname(string):
| """Convert XML name to unicode string."""
retval = re.sub(r'_xFFFF_', '', string)
def fun(matchobj):
return _fromUnicodeHex(matchobj.group(0))
retval = re.sub(r'_x[0-9A-Fa-f]{4}_', fun, retval)
return retval | identifier_body | |
XMLname.py | import re
from six import text_type
"""Translate strings to and from SOAP 1.2 XML name encoding
Implements rules for mapping application defined name to XML names
specified by the w3 SOAP working group for SOAP version 1.2 in
Appendix A of "SOAP Version 1.2 Part 2: Adjuncts", W3C Working Draft
17, December 2001, <http://www.w3.org/TR/soap12-part2/#namemap>
Also see <http://www.w3.org/2000/xp/Group/xmlp-issues>.
Author: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>
Date:: 2002-04-25
Version 0.9.0
"""
ident = "$Id$"
def _NCNameChar(x):
return x.isalpha() or x.isdigit() or x == "." or x == '-' or x == "_"
def _NCNameStartChar(x):
return x.isalpha() or x == "_"
def _toUnicodeHex(x):
hexval = hex(ord(x[0]))[2:]
hexlen = len(hexval)
# Make hexval have either 4 or 8 digits by prepending 0's
if (hexlen == 1):
hexval = "000" + hexval
elif (hexlen == 2):
hexval = "00" + hexval
elif (hexlen == 3):
hexval = "0" + hexval
elif (hexlen == 4):
hexval = "" + hexval
elif (hexlen == 5):
hexval = "000" + hexval
elif (hexlen == 6):
hexval = "00" + hexval
elif (hexlen == 7):
hexval = "0" + hexval
elif (hexlen == 8):
hexval = "" + hexval
else:
raise Exception("Illegal Value returned from hex(ord(x))")
return "_x" + hexval + "_"
def _fromUnicodeHex(x):
return eval(r'u"\u' + x[2:-1] + '"')
def toXMLname(string):
"""Convert string to a XML name."""
if string.find(':') != -1:
(prefix, localname) = string.split(':', 1)
else:
prefix = None
localname = string
T = text_type(localname)
N = len(localname)
X = []
for i in range(N):
if i < N - 1 and T[i] == u'_' and T[i + 1] == u'x':
X.append(u'_x005F_')
elif i == 0 and N >= 3 and \
(T[0] == u'x' or T[0] == u'X') and \
(T[1] == u'm' or T[1] == u'M') and \
(T[2] == u'l' or T[2] == u'L'):
X.append(u'_xFFFF_' + T[0])
elif (not _NCNameChar(T[i])) or (i == 0 and not _NCNameStartChar(T[i])):
X.append(_toUnicodeHex(T[i]))
else:
X.append(T[i])
if prefix:
return "%s:%s" % (prefix, u''.join(X))
return u''.join(X)
def fromXMLname(string):
"""Convert XML name to unicode string."""
retval = re.sub(r'_xFFFF_', '', string)
def | (matchobj):
return _fromUnicodeHex(matchobj.group(0))
retval = re.sub(r'_x[0-9A-Fa-f]{4}_', fun, retval)
return retval
| fun | identifier_name |
measure-inequality-filter.ts | /* | * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {AdvancedFilter} from './advanced-filter';
import {AggregationType} from '../field/measure-field';
export class MeasureInequalityFilter extends AdvancedFilter {
/**
* 집계 타입
*/
public aggregation: AggregationType;
/**
* 조건
*/
public inequality: InequalityType;
/**
* 조건 값, aggreationType(field) conditionType value
* ex. SUM(param) > 10
*/
public value: number;
constructor() {
super();
this.type = 'measure_inequality';
}
}
export enum InequalityType {
EQUAL_TO = 'EQUAL_TO',
GREATER_THAN = 'GREATER_THAN',
LESS_THAN = 'LESS_THAN',
EQUAL_GREATER_THAN = 'EQUAL_GREATER_THAN',
EQUAL_LESS_THAN = 'EQUAL_LESS_THAN',
} | random_line_split | |
measure-inequality-filter.ts | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {AdvancedFilter} from './advanced-filter';
import {AggregationType} from '../field/measure-field';
export class MeasureInequalityFilter extends AdvancedFilter {
/**
* 집계 타입
*/
public aggregation: AggregationType;
/**
* 조건
*/
public inequality: InequalityType;
/**
* 조건 값, aggreationType(field) conditionType value
* ex. SUM(param) > 10
*/
public value: number;
constructor() {
| this.type = 'measure_inequality';
}
}
export enum InequalityType {
EQUAL_TO = 'EQUAL_TO',
GREATER_THAN = 'GREATER_THAN',
LESS_THAN = 'LESS_THAN',
EQUAL_GREATER_THAN = 'EQUAL_GREATER_THAN',
EQUAL_LESS_THAN = 'EQUAL_LESS_THAN',
}
| super();
| identifier_name |
channel.rs | use futures::channel::mpsc;
use futures::executor::block_on;
use futures::future::poll_fn;
use futures::sink::SinkExt;
use futures::stream::StreamExt;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
#[test]
fn sequence() {
let (tx, rx) = mpsc::channel(1);
let amt = 20;
let t = thread::spawn(move || block_on(send_sequence(amt, tx)));
let list: Vec<_> = block_on(rx.collect());
let mut list = list.into_iter();
for i in (1..=amt).rev() {
assert_eq!(list.next(), Some(i));
}
assert_eq!(list.next(), None);
t.join().unwrap();
}
async fn send_sequence(n: u32, mut sender: mpsc::Sender<u32>) {
for x in 0..n {
sender.send(n - x).await.unwrap();
}
}
#[test]
fn drop_sender() |
#[test]
fn drop_rx() {
let (mut tx, rx) = mpsc::channel::<u32>(1);
block_on(tx.send(1)).unwrap();
drop(rx);
assert!(block_on(tx.send(1)).is_err());
}
#[test]
fn drop_order() {
static DROPS: AtomicUsize = AtomicUsize::new(0);
let (mut tx, rx) = mpsc::channel(1);
struct A;
impl Drop for A {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
}
}
block_on(tx.send(A)).unwrap();
assert_eq!(DROPS.load(Ordering::SeqCst), 0);
drop(rx);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
assert!(block_on(tx.send(A)).is_err());
assert_eq!(DROPS.load(Ordering::SeqCst), 2);
}
| {
let (tx, mut rx) = mpsc::channel::<u32>(1);
drop(tx);
let f = poll_fn(|cx| rx.poll_next_unpin(cx));
assert_eq!(block_on(f), None)
} | identifier_body |
channel.rs | use futures::channel::mpsc;
use futures::executor::block_on;
use futures::future::poll_fn;
use futures::sink::SinkExt;
use futures::stream::StreamExt;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
#[test]
fn | () {
let (tx, rx) = mpsc::channel(1);
let amt = 20;
let t = thread::spawn(move || block_on(send_sequence(amt, tx)));
let list: Vec<_> = block_on(rx.collect());
let mut list = list.into_iter();
for i in (1..=amt).rev() {
assert_eq!(list.next(), Some(i));
}
assert_eq!(list.next(), None);
t.join().unwrap();
}
async fn send_sequence(n: u32, mut sender: mpsc::Sender<u32>) {
for x in 0..n {
sender.send(n - x).await.unwrap();
}
}
#[test]
fn drop_sender() {
let (tx, mut rx) = mpsc::channel::<u32>(1);
drop(tx);
let f = poll_fn(|cx| rx.poll_next_unpin(cx));
assert_eq!(block_on(f), None)
}
#[test]
fn drop_rx() {
let (mut tx, rx) = mpsc::channel::<u32>(1);
block_on(tx.send(1)).unwrap();
drop(rx);
assert!(block_on(tx.send(1)).is_err());
}
#[test]
fn drop_order() {
static DROPS: AtomicUsize = AtomicUsize::new(0);
let (mut tx, rx) = mpsc::channel(1);
struct A;
impl Drop for A {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
}
}
block_on(tx.send(A)).unwrap();
assert_eq!(DROPS.load(Ordering::SeqCst), 0);
drop(rx);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
assert!(block_on(tx.send(A)).is_err());
assert_eq!(DROPS.load(Ordering::SeqCst), 2);
}
| sequence | identifier_name |
channel.rs | use futures::channel::mpsc;
use futures::executor::block_on;
use futures::future::poll_fn;
use futures::sink::SinkExt;
use futures::stream::StreamExt;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
#[test]
fn sequence() {
let (tx, rx) = mpsc::channel(1);
let amt = 20;
let t = thread::spawn(move || block_on(send_sequence(amt, tx)));
let list: Vec<_> = block_on(rx.collect());
let mut list = list.into_iter();
for i in (1..=amt).rev() {
assert_eq!(list.next(), Some(i));
}
assert_eq!(list.next(), None);
t.join().unwrap();
}
async fn send_sequence(n: u32, mut sender: mpsc::Sender<u32>) {
for x in 0..n {
sender.send(n - x).await.unwrap();
}
}
#[test]
fn drop_sender() {
let (tx, mut rx) = mpsc::channel::<u32>(1);
drop(tx);
let f = poll_fn(|cx| rx.poll_next_unpin(cx));
assert_eq!(block_on(f), None)
}
#[test]
fn drop_rx() {
let (mut tx, rx) = mpsc::channel::<u32>(1);
block_on(tx.send(1)).unwrap();
drop(rx);
assert!(block_on(tx.send(1)).is_err()); | static DROPS: AtomicUsize = AtomicUsize::new(0);
let (mut tx, rx) = mpsc::channel(1);
struct A;
impl Drop for A {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
}
}
block_on(tx.send(A)).unwrap();
assert_eq!(DROPS.load(Ordering::SeqCst), 0);
drop(rx);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
assert!(block_on(tx.send(A)).is_err());
assert_eq!(DROPS.load(Ordering::SeqCst), 2);
} | }
#[test]
fn drop_order() { | random_line_split |
netconsole.py | from argparse import ArgumentParser
import socket
import struct
import sys
import threading
import time
from ._fakeds import FakeDS
__all__ = ["Netconsole", "main", "run"]
def _output_fn(s):
sys.stdout.write(
s.encode(sys.stdout.encoding, errors="replace").decode(sys.stdout.encoding)
)
sys.stdout.write("\n")
class StreamEOF(IOError):
|
class Netconsole:
"""
Implements the 2018+ netconsole protocol
"""
TAG_ERROR = 11
TAG_INFO = 12
def __init__(self, printfn=_output_fn):
self.frames = {self.TAG_ERROR: self._onError, self.TAG_INFO: self._onInfo}
self.cond = threading.Condition()
self.sock = None
self.sockrfp = None
self.sockwfp = None
self.sockaddr = None
self.running = False
self.printfn = printfn
def start(self, address, port=1741, connect_event=None, block=True):
with self.cond:
if self.running:
raise ValueError("Cannot start without stopping first")
self.sockaddr = (address, port)
self.connect_event = connect_event
self.running = True
self._rt = threading.Thread(
target=self._readThread, name="nc-read-thread", daemon=True
)
self._rt.start()
if block:
self._keepAlive()
else:
self._kt = threading.Thread(
target=self._keepAlive, name="nc-keepalive-thread", daemon=True
)
self._kt.start()
@property
def connected(self):
return self.sockrfp is not None
def stop(self):
with self.cond:
self.running = False
self.cond.notify_all()
self.sock.close()
def _connectionDropped(self):
print(".. connection dropped", file=sys.stderr)
self.sock.close()
with self.cond:
self.sockrfp = None
self.cond.notify_all()
def _keepAliveReady(self):
if not self.running:
return -1
elif not self.connected:
return -2
def _keepAlive(self):
while self.running:
with self.cond:
ret = self.cond.wait_for(self._keepAliveReady, timeout=2.0)
if ret == -1:
return
elif ret == -2:
self._reconnect()
else:
try:
self.sockwfp.write(b"\x00\x00")
self.sockwfp.flush()
except IOError:
self._connectionDropped()
def _readThreadReady(self):
if not self.running:
return -1
return self.sockrfp
def _readThread(self):
while True:
with self.cond:
sockrfp = self.cond.wait_for(self._readThreadReady)
if sockrfp == -1:
return
try:
data = sockrfp.read(self._headerSz)
except IOError:
data = ""
if len(data) != self._headerSz:
self._connectionDropped()
continue
blen, tag = self._header.unpack(data)
blen -= 1
try:
buf = sockrfp.read(blen)
except IOError:
buf = ""
if len(buf) != blen:
self._connectionDropped()
continue
# process the frame
fn = self.frames.get(tag)
if fn:
fn(buf)
else:
print("ERROR: Unknown tag %s; Ignoring..." % tag, file=sys.stderr)
def _reconnect(self):
# returns once the socket is connected or an exit is requested
while self.running:
sys.stderr.write("Connecting to %s:%s..." % self.sockaddr)
try:
sock = socket.create_connection(self.sockaddr, timeout=3.0)
except IOError:
sys.stderr.write(" :(\n")
# don't busywait, just in case
time.sleep(1.0)
continue
else:
sys.stderr.write("OK\n")
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.settimeout(None)
sockrfp = sock.makefile("rb")
sockwfp = sock.makefile("wb")
if self.connect_event:
self.connect_event.set()
with self.cond:
self.sock = sock
self.sockrfp = sockrfp
self.sockwfp = sockwfp
self.cond.notify_all()
break
#
# Message
#
_header = struct.Struct(">Hb")
_headerSz = _header.size
_errorFrame = struct.Struct(">fHHiB")
_errorFrameSz = _errorFrame.size
_infoFrame = struct.Struct(">fH")
_infoFrameSz = _infoFrame.size
_slen = struct.Struct(">H")
_slenSz = _slen.size
def _onError(self, b):
ts, _seq, _numOcc, errorCode, flags = self._errorFrame.unpack_from(b, 0)
details, nidx = self._getStr(b, self._errorFrameSz)
location, nidx = self._getStr(b, nidx)
callStack, _ = self._getStr(b, nidx)
self.printfn(
"[%0.2f] %d %s %s %s" % (ts, errorCode, details, location, callStack)
)
def _getStr(self, b, idx):
sidx = idx + self._slenSz
(blen,) = self._slen.unpack_from(b, idx)
nextidx = sidx + blen
return b[sidx:nextidx].decode("utf-8", errors="replace"), nextidx
def _onInfo(self, b):
ts, _seq = self._infoFrame.unpack_from(b, 0)
msg = b[self._infoFrameSz :].decode("utf-8", errors="replace")
self.printfn("[%0.2f] %s" % (ts, msg))
def run(address, connect_event=None, fakeds=False):
"""
Starts the netconsole loop. Note that netconsole will only send output
if the DS is connected. If you don't have a DS available, the 'fakeds'
flag can be specified to fake a DS connection.
:param address: Address of the netconsole server
:param connect_event: a threading.event object, upon which the 'set'
function will be called when the connection has
succeeded.
:param fakeds: Fake a driver station connection
"""
if fakeds:
ds = FakeDS()
ds.start(address)
nc = Netconsole()
nc.start(address, connect_event=connect_event)
def main():
parser = ArgumentParser()
parser.add_argument("address", help="Address of Robot")
parser.add_argument(
"-f",
"--fakeds",
action="store_true",
default=False,
help="Fake a driver station connection to the robot",
)
args = parser.parse_args()
run(args.address, fakeds=args.fakeds)
| pass | identifier_body |
netconsole.py | from argparse import ArgumentParser
import socket
import struct
import sys
import threading
import time
from ._fakeds import FakeDS
__all__ = ["Netconsole", "main", "run"]
def _output_fn(s):
sys.stdout.write(
s.encode(sys.stdout.encoding, errors="replace").decode(sys.stdout.encoding)
)
sys.stdout.write("\n")
class StreamEOF(IOError):
pass
class Netconsole:
"""
Implements the 2018+ netconsole protocol
"""
TAG_ERROR = 11
TAG_INFO = 12
def __init__(self, printfn=_output_fn):
self.frames = {self.TAG_ERROR: self._onError, self.TAG_INFO: self._onInfo}
self.cond = threading.Condition()
self.sock = None
self.sockrfp = None
self.sockwfp = None
self.sockaddr = None
self.running = False
self.printfn = printfn
def start(self, address, port=1741, connect_event=None, block=True):
with self.cond:
if self.running:
raise ValueError("Cannot start without stopping first")
self.sockaddr = (address, port)
self.connect_event = connect_event
self.running = True
self._rt = threading.Thread(
target=self._readThread, name="nc-read-thread", daemon=True
)
self._rt.start()
if block:
self._keepAlive()
else:
self._kt = threading.Thread(
target=self._keepAlive, name="nc-keepalive-thread", daemon=True
)
self._kt.start()
@property
def connected(self):
return self.sockrfp is not None
def stop(self):
with self.cond:
self.running = False
self.cond.notify_all()
self.sock.close()
def | (self):
print(".. connection dropped", file=sys.stderr)
self.sock.close()
with self.cond:
self.sockrfp = None
self.cond.notify_all()
def _keepAliveReady(self):
if not self.running:
return -1
elif not self.connected:
return -2
def _keepAlive(self):
while self.running:
with self.cond:
ret = self.cond.wait_for(self._keepAliveReady, timeout=2.0)
if ret == -1:
return
elif ret == -2:
self._reconnect()
else:
try:
self.sockwfp.write(b"\x00\x00")
self.sockwfp.flush()
except IOError:
self._connectionDropped()
def _readThreadReady(self):
if not self.running:
return -1
return self.sockrfp
def _readThread(self):
while True:
with self.cond:
sockrfp = self.cond.wait_for(self._readThreadReady)
if sockrfp == -1:
return
try:
data = sockrfp.read(self._headerSz)
except IOError:
data = ""
if len(data) != self._headerSz:
self._connectionDropped()
continue
blen, tag = self._header.unpack(data)
blen -= 1
try:
buf = sockrfp.read(blen)
except IOError:
buf = ""
if len(buf) != blen:
self._connectionDropped()
continue
# process the frame
fn = self.frames.get(tag)
if fn:
fn(buf)
else:
print("ERROR: Unknown tag %s; Ignoring..." % tag, file=sys.stderr)
def _reconnect(self):
# returns once the socket is connected or an exit is requested
while self.running:
sys.stderr.write("Connecting to %s:%s..." % self.sockaddr)
try:
sock = socket.create_connection(self.sockaddr, timeout=3.0)
except IOError:
sys.stderr.write(" :(\n")
# don't busywait, just in case
time.sleep(1.0)
continue
else:
sys.stderr.write("OK\n")
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.settimeout(None)
sockrfp = sock.makefile("rb")
sockwfp = sock.makefile("wb")
if self.connect_event:
self.connect_event.set()
with self.cond:
self.sock = sock
self.sockrfp = sockrfp
self.sockwfp = sockwfp
self.cond.notify_all()
break
#
# Message
#
_header = struct.Struct(">Hb")
_headerSz = _header.size
_errorFrame = struct.Struct(">fHHiB")
_errorFrameSz = _errorFrame.size
_infoFrame = struct.Struct(">fH")
_infoFrameSz = _infoFrame.size
_slen = struct.Struct(">H")
_slenSz = _slen.size
def _onError(self, b):
ts, _seq, _numOcc, errorCode, flags = self._errorFrame.unpack_from(b, 0)
details, nidx = self._getStr(b, self._errorFrameSz)
location, nidx = self._getStr(b, nidx)
callStack, _ = self._getStr(b, nidx)
self.printfn(
"[%0.2f] %d %s %s %s" % (ts, errorCode, details, location, callStack)
)
def _getStr(self, b, idx):
sidx = idx + self._slenSz
(blen,) = self._slen.unpack_from(b, idx)
nextidx = sidx + blen
return b[sidx:nextidx].decode("utf-8", errors="replace"), nextidx
def _onInfo(self, b):
ts, _seq = self._infoFrame.unpack_from(b, 0)
msg = b[self._infoFrameSz :].decode("utf-8", errors="replace")
self.printfn("[%0.2f] %s" % (ts, msg))
def run(address, connect_event=None, fakeds=False):
"""
Starts the netconsole loop. Note that netconsole will only send output
if the DS is connected. If you don't have a DS available, the 'fakeds'
flag can be specified to fake a DS connection.
:param address: Address of the netconsole server
:param connect_event: a threading.event object, upon which the 'set'
function will be called when the connection has
succeeded.
:param fakeds: Fake a driver station connection
"""
if fakeds:
ds = FakeDS()
ds.start(address)
nc = Netconsole()
nc.start(address, connect_event=connect_event)
def main():
parser = ArgumentParser()
parser.add_argument("address", help="Address of Robot")
parser.add_argument(
"-f",
"--fakeds",
action="store_true",
default=False,
help="Fake a driver station connection to the robot",
)
args = parser.parse_args()
run(args.address, fakeds=args.fakeds)
| _connectionDropped | identifier_name |
netconsole.py | from argparse import ArgumentParser
import socket
import struct
import sys
import threading
import time
from ._fakeds import FakeDS
__all__ = ["Netconsole", "main", "run"]
def _output_fn(s):
sys.stdout.write(
s.encode(sys.stdout.encoding, errors="replace").decode(sys.stdout.encoding)
)
sys.stdout.write("\n")
class StreamEOF(IOError):
pass
class Netconsole:
"""
Implements the 2018+ netconsole protocol
"""
TAG_ERROR = 11
TAG_INFO = 12
def __init__(self, printfn=_output_fn):
self.frames = {self.TAG_ERROR: self._onError, self.TAG_INFO: self._onInfo}
self.cond = threading.Condition()
self.sock = None
self.sockrfp = None
self.sockwfp = None
self.sockaddr = None
self.running = False
self.printfn = printfn
def start(self, address, port=1741, connect_event=None, block=True):
with self.cond:
if self.running:
|
self.sockaddr = (address, port)
self.connect_event = connect_event
self.running = True
self._rt = threading.Thread(
target=self._readThread, name="nc-read-thread", daemon=True
)
self._rt.start()
if block:
self._keepAlive()
else:
self._kt = threading.Thread(
target=self._keepAlive, name="nc-keepalive-thread", daemon=True
)
self._kt.start()
@property
def connected(self):
return self.sockrfp is not None
def stop(self):
with self.cond:
self.running = False
self.cond.notify_all()
self.sock.close()
def _connectionDropped(self):
print(".. connection dropped", file=sys.stderr)
self.sock.close()
with self.cond:
self.sockrfp = None
self.cond.notify_all()
def _keepAliveReady(self):
if not self.running:
return -1
elif not self.connected:
return -2
def _keepAlive(self):
while self.running:
with self.cond:
ret = self.cond.wait_for(self._keepAliveReady, timeout=2.0)
if ret == -1:
return
elif ret == -2:
self._reconnect()
else:
try:
self.sockwfp.write(b"\x00\x00")
self.sockwfp.flush()
except IOError:
self._connectionDropped()
def _readThreadReady(self):
if not self.running:
return -1
return self.sockrfp
def _readThread(self):
while True:
with self.cond:
sockrfp = self.cond.wait_for(self._readThreadReady)
if sockrfp == -1:
return
try:
data = sockrfp.read(self._headerSz)
except IOError:
data = ""
if len(data) != self._headerSz:
self._connectionDropped()
continue
blen, tag = self._header.unpack(data)
blen -= 1
try:
buf = sockrfp.read(blen)
except IOError:
buf = ""
if len(buf) != blen:
self._connectionDropped()
continue
# process the frame
fn = self.frames.get(tag)
if fn:
fn(buf)
else:
print("ERROR: Unknown tag %s; Ignoring..." % tag, file=sys.stderr)
def _reconnect(self):
# returns once the socket is connected or an exit is requested
while self.running:
sys.stderr.write("Connecting to %s:%s..." % self.sockaddr)
try:
sock = socket.create_connection(self.sockaddr, timeout=3.0)
except IOError:
sys.stderr.write(" :(\n")
# don't busywait, just in case
time.sleep(1.0)
continue
else:
sys.stderr.write("OK\n")
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.settimeout(None)
sockrfp = sock.makefile("rb")
sockwfp = sock.makefile("wb")
if self.connect_event:
self.connect_event.set()
with self.cond:
self.sock = sock
self.sockrfp = sockrfp
self.sockwfp = sockwfp
self.cond.notify_all()
break
#
# Message
#
_header = struct.Struct(">Hb")
_headerSz = _header.size
_errorFrame = struct.Struct(">fHHiB")
_errorFrameSz = _errorFrame.size
_infoFrame = struct.Struct(">fH")
_infoFrameSz = _infoFrame.size
_slen = struct.Struct(">H")
_slenSz = _slen.size
def _onError(self, b):
ts, _seq, _numOcc, errorCode, flags = self._errorFrame.unpack_from(b, 0)
details, nidx = self._getStr(b, self._errorFrameSz)
location, nidx = self._getStr(b, nidx)
callStack, _ = self._getStr(b, nidx)
self.printfn(
"[%0.2f] %d %s %s %s" % (ts, errorCode, details, location, callStack)
)
def _getStr(self, b, idx):
sidx = idx + self._slenSz
(blen,) = self._slen.unpack_from(b, idx)
nextidx = sidx + blen
return b[sidx:nextidx].decode("utf-8", errors="replace"), nextidx
def _onInfo(self, b):
ts, _seq = self._infoFrame.unpack_from(b, 0)
msg = b[self._infoFrameSz :].decode("utf-8", errors="replace")
self.printfn("[%0.2f] %s" % (ts, msg))
def run(address, connect_event=None, fakeds=False):
"""
Starts the netconsole loop. Note that netconsole will only send output
if the DS is connected. If you don't have a DS available, the 'fakeds'
flag can be specified to fake a DS connection.
:param address: Address of the netconsole server
:param connect_event: a threading.event object, upon which the 'set'
function will be called when the connection has
succeeded.
:param fakeds: Fake a driver station connection
"""
if fakeds:
ds = FakeDS()
ds.start(address)
nc = Netconsole()
nc.start(address, connect_event=connect_event)
def main():
parser = ArgumentParser()
parser.add_argument("address", help="Address of Robot")
parser.add_argument(
"-f",
"--fakeds",
action="store_true",
default=False,
help="Fake a driver station connection to the robot",
)
args = parser.parse_args()
run(args.address, fakeds=args.fakeds)
| raise ValueError("Cannot start without stopping first") | conditional_block |
netconsole.py | from argparse import ArgumentParser
import socket
import struct
import sys
import threading
import time
from ._fakeds import FakeDS
__all__ = ["Netconsole", "main", "run"]
def _output_fn(s):
sys.stdout.write(
s.encode(sys.stdout.encoding, errors="replace").decode(sys.stdout.encoding)
)
sys.stdout.write("\n")
class StreamEOF(IOError):
pass
class Netconsole:
"""
Implements the 2018+ netconsole protocol
"""
TAG_ERROR = 11
TAG_INFO = 12
def __init__(self, printfn=_output_fn):
self.frames = {self.TAG_ERROR: self._onError, self.TAG_INFO: self._onInfo}
self.cond = threading.Condition()
self.sock = None
self.sockrfp = None
self.sockwfp = None
self.sockaddr = None
self.running = False
self.printfn = printfn
def start(self, address, port=1741, connect_event=None, block=True):
with self.cond:
if self.running:
raise ValueError("Cannot start without stopping first")
self.sockaddr = (address, port)
self.connect_event = connect_event
self.running = True
self._rt = threading.Thread(
target=self._readThread, name="nc-read-thread", daemon=True
)
self._rt.start()
if block:
self._keepAlive()
else:
self._kt = threading.Thread(
target=self._keepAlive, name="nc-keepalive-thread", daemon=True
)
self._kt.start()
@property
def connected(self):
return self.sockrfp is not None
def stop(self):
with self.cond:
self.running = False
self.cond.notify_all()
self.sock.close()
def _connectionDropped(self):
print(".. connection dropped", file=sys.stderr)
self.sock.close()
with self.cond:
self.sockrfp = None
self.cond.notify_all()
def _keepAliveReady(self):
if not self.running:
return -1
elif not self.connected:
return -2
def _keepAlive(self):
while self.running:
with self.cond:
ret = self.cond.wait_for(self._keepAliveReady, timeout=2.0)
if ret == -1:
return
elif ret == -2:
self._reconnect()
else:
try:
self.sockwfp.write(b"\x00\x00")
self.sockwfp.flush()
except IOError:
self._connectionDropped()
def _readThreadReady(self):
if not self.running:
return -1
return self.sockrfp
def _readThread(self):
while True:
with self.cond:
sockrfp = self.cond.wait_for(self._readThreadReady)
if sockrfp == -1:
return
try:
data = sockrfp.read(self._headerSz)
except IOError:
data = ""
if len(data) != self._headerSz:
self._connectionDropped()
continue
blen, tag = self._header.unpack(data)
blen -= 1
try:
buf = sockrfp.read(blen)
except IOError:
buf = ""
if len(buf) != blen:
self._connectionDropped()
continue
# process the frame
fn = self.frames.get(tag)
if fn:
fn(buf)
else:
print("ERROR: Unknown tag %s; Ignoring..." % tag, file=sys.stderr)
def _reconnect(self):
# returns once the socket is connected or an exit is requested
while self.running:
sys.stderr.write("Connecting to %s:%s..." % self.sockaddr)
try:
sock = socket.create_connection(self.sockaddr, timeout=3.0)
except IOError:
sys.stderr.write(" :(\n")
# don't busywait, just in case
time.sleep(1.0)
continue
else:
sys.stderr.write("OK\n")
| sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.settimeout(None)
sockrfp = sock.makefile("rb")
sockwfp = sock.makefile("wb")
if self.connect_event:
self.connect_event.set()
with self.cond:
self.sock = sock
self.sockrfp = sockrfp
self.sockwfp = sockwfp
self.cond.notify_all()
break
#
# Message
#
_header = struct.Struct(">Hb")
_headerSz = _header.size
_errorFrame = struct.Struct(">fHHiB")
_errorFrameSz = _errorFrame.size
_infoFrame = struct.Struct(">fH")
_infoFrameSz = _infoFrame.size
_slen = struct.Struct(">H")
_slenSz = _slen.size
def _onError(self, b):
ts, _seq, _numOcc, errorCode, flags = self._errorFrame.unpack_from(b, 0)
details, nidx = self._getStr(b, self._errorFrameSz)
location, nidx = self._getStr(b, nidx)
callStack, _ = self._getStr(b, nidx)
self.printfn(
"[%0.2f] %d %s %s %s" % (ts, errorCode, details, location, callStack)
)
def _getStr(self, b, idx):
sidx = idx + self._slenSz
(blen,) = self._slen.unpack_from(b, idx)
nextidx = sidx + blen
return b[sidx:nextidx].decode("utf-8", errors="replace"), nextidx
def _onInfo(self, b):
ts, _seq = self._infoFrame.unpack_from(b, 0)
msg = b[self._infoFrameSz :].decode("utf-8", errors="replace")
self.printfn("[%0.2f] %s" % (ts, msg))
def run(address, connect_event=None, fakeds=False):
"""
Starts the netconsole loop. Note that netconsole will only send output
if the DS is connected. If you don't have a DS available, the 'fakeds'
flag can be specified to fake a DS connection.
:param address: Address of the netconsole server
:param connect_event: a threading.event object, upon which the 'set'
function will be called when the connection has
succeeded.
:param fakeds: Fake a driver station connection
"""
if fakeds:
ds = FakeDS()
ds.start(address)
nc = Netconsole()
nc.start(address, connect_event=connect_event)
def main():
parser = ArgumentParser()
parser.add_argument("address", help="Address of Robot")
parser.add_argument(
"-f",
"--fakeds",
action="store_true",
default=False,
help="Fake a driver station connection to the robot",
)
args = parser.parse_args()
run(args.address, fakeds=args.fakeds) | random_line_split | |
env.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Runtime environment settings
use from_str::FromStr;
use option::{Some, None};
use os;
// Note that these are all accessed without any synchronization.
// They are expected to be initialized once then left alone.
static mut MIN_STACK: uint = 2 * 1024 * 1024;
static mut DEBUG_BORROW: bool = false;
static mut POISON_ON_FREE: bool = false;
pub fn init() {
unsafe {
match os::getenv("RUST_MIN_STACK") {
Some(s) => match FromStr::from_str(s) {
Some(i) => MIN_STACK = i,
None => ()
},
None => ()
}
match os::getenv("RUST_DEBUG_BORROW") {
Some(_) => DEBUG_BORROW = true,
None => ()
}
match os::getenv("RUST_POISON_ON_FREE") {
Some(_) => POISON_ON_FREE = true,
None => ()
}
}
}
pub fn | () -> uint {
unsafe { MIN_STACK }
}
pub fn debug_borrow() -> bool {
unsafe { DEBUG_BORROW }
}
pub fn poison_on_free() -> bool {
unsafe { POISON_ON_FREE }
}
| min_stack | identifier_name |
env.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Runtime environment settings
use from_str::FromStr;
use option::{Some, None};
use os;
// Note that these are all accessed without any synchronization.
// They are expected to be initialized once then left alone.
static mut MIN_STACK: uint = 2 * 1024 * 1024;
static mut DEBUG_BORROW: bool = false;
static mut POISON_ON_FREE: bool = false;
pub fn init() {
unsafe {
match os::getenv("RUST_MIN_STACK") {
Some(s) => match FromStr::from_str(s) {
Some(i) => MIN_STACK = i,
None => ()
},
None => ()
}
match os::getenv("RUST_DEBUG_BORROW") {
Some(_) => DEBUG_BORROW = true,
None => ()
}
match os::getenv("RUST_POISON_ON_FREE") {
Some(_) => POISON_ON_FREE = true,
None => ()
}
}
}
pub fn min_stack() -> uint {
unsafe { MIN_STACK }
}
pub fn debug_borrow() -> bool {
unsafe { DEBUG_BORROW }
}
pub fn poison_on_free() -> bool { | } | unsafe { POISON_ON_FREE } | random_line_split |
env.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Runtime environment settings
use from_str::FromStr;
use option::{Some, None};
use os;
// Note that these are all accessed without any synchronization.
// They are expected to be initialized once then left alone.
static mut MIN_STACK: uint = 2 * 1024 * 1024;
static mut DEBUG_BORROW: bool = false;
static mut POISON_ON_FREE: bool = false;
pub fn init() {
unsafe {
match os::getenv("RUST_MIN_STACK") {
Some(s) => match FromStr::from_str(s) {
Some(i) => MIN_STACK = i,
None => ()
},
None => ()
}
match os::getenv("RUST_DEBUG_BORROW") {
Some(_) => DEBUG_BORROW = true,
None => ()
}
match os::getenv("RUST_POISON_ON_FREE") {
Some(_) => POISON_ON_FREE = true,
None => ()
}
}
}
pub fn min_stack() -> uint {
unsafe { MIN_STACK }
}
pub fn debug_borrow() -> bool {
unsafe { DEBUG_BORROW }
}
pub fn poison_on_free() -> bool | {
unsafe { POISON_ON_FREE }
} | identifier_body | |
test_converter.py | __author__ = "John Kirkham <kirkhamj@janelia.hhmi.org>"
__date__ = "$Mar 30, 2015 08:25:33 EDT$"
import collections
import json
import os
import os.path
import shutil
import tempfile
import numpy
import h5py
import vigra
import vigra.impex
import nanshe.util.iters
import nanshe.util.xnumpy
import nanshe.io.xtiff
import nanshe.converter
class TestConverter(object):
def setup(self):
|
def test_main(self):
params = {
"axis" : 0,
"channel" : 0,
"z_index" : 0,
"pages_to_channel" : 1
}
config_filename = os.path.join(self.temp_dir, "config.json")
hdf5_filename = os.path.join(self.temp_dir, "test.h5")
hdf5_filepath = hdf5_filename + "/data"
with open(config_filename, "w") as fid:
json.dump(params, fid)
fid.write("\n")
main_args = ["./converter.py"] + ["tiff"] + [config_filename] + list(self.filedata.keys()) + [hdf5_filepath]
assert (nanshe.converter.main(*main_args) == 0)
assert os.path.exists(hdf5_filename)
data = None
with h5py.File(hdf5_filename, "r") as hdf5_handle:
data = hdf5_handle["data"].value
self_data_h5 = nanshe.util.xnumpy.tagging_reorder_array(self.data, to_axis_order="cztyx")[0, 0]
assert (data == self_data_h5).all()
os.remove(hdf5_filename)
def teardown(self):
shutil.rmtree(self.temp_dir)
self.temp_dir = ""
self.filedata = collections.OrderedDict()
self.data = None
| self.temp_dir = ""
self.filedata = collections.OrderedDict()
self.data = None
self.data = numpy.random.random_integers(0, 255, (1000, 1, 102, 101, 1)).astype(numpy.uint8)
self.temp_dir = tempfile.mkdtemp()
for i, i_str, (a_b, a_e) in nanshe.util.iters.filled_stringify_enumerate(
nanshe.util.iters.izip(
*nanshe.util.iters.lagged_generators(
nanshe.util.iters.irange(
0,
self.data.shape[0] + 100 - 1,
100
)
)
)
):
each_filename = os.path.join(self.temp_dir, "test_tiff_" + str(i) + ".tif")
each_data = self.data[a_b:a_e]
self.filedata[each_filename] = each_data
vigra.impex.writeVolume(nanshe.util.xnumpy.tagging_reorder_array(each_data, to_axis_order="czyxt")[0, 0],
os.path.join(self.temp_dir, "test_tiff_" + str(i) + ".tif"), "") | identifier_body |
test_converter.py | __author__ = "John Kirkham <kirkhamj@janelia.hhmi.org>"
__date__ = "$Mar 30, 2015 08:25:33 EDT$"
import collections
import json
import os
import os.path
import shutil
import tempfile
import numpy
import h5py
import vigra
import vigra.impex
import nanshe.util.iters
import nanshe.util.xnumpy
import nanshe.io.xtiff
import nanshe.converter
class TestConverter(object):
def setup(self):
self.temp_dir = ""
self.filedata = collections.OrderedDict()
self.data = None
self.data = numpy.random.random_integers(0, 255, (1000, 1, 102, 101, 1)).astype(numpy.uint8)
self.temp_dir = tempfile.mkdtemp()
for i, i_str, (a_b, a_e) in nanshe.util.iters.filled_stringify_enumerate(
nanshe.util.iters.izip(
*nanshe.util.iters.lagged_generators(
nanshe.util.iters.irange(
0,
self.data.shape[0] + 100 - 1,
100
)
)
)
):
|
def test_main(self):
params = {
"axis" : 0,
"channel" : 0,
"z_index" : 0,
"pages_to_channel" : 1
}
config_filename = os.path.join(self.temp_dir, "config.json")
hdf5_filename = os.path.join(self.temp_dir, "test.h5")
hdf5_filepath = hdf5_filename + "/data"
with open(config_filename, "w") as fid:
json.dump(params, fid)
fid.write("\n")
main_args = ["./converter.py"] + ["tiff"] + [config_filename] + list(self.filedata.keys()) + [hdf5_filepath]
assert (nanshe.converter.main(*main_args) == 0)
assert os.path.exists(hdf5_filename)
data = None
with h5py.File(hdf5_filename, "r") as hdf5_handle:
data = hdf5_handle["data"].value
self_data_h5 = nanshe.util.xnumpy.tagging_reorder_array(self.data, to_axis_order="cztyx")[0, 0]
assert (data == self_data_h5).all()
os.remove(hdf5_filename)
def teardown(self):
shutil.rmtree(self.temp_dir)
self.temp_dir = ""
self.filedata = collections.OrderedDict()
self.data = None
| each_filename = os.path.join(self.temp_dir, "test_tiff_" + str(i) + ".tif")
each_data = self.data[a_b:a_e]
self.filedata[each_filename] = each_data
vigra.impex.writeVolume(nanshe.util.xnumpy.tagging_reorder_array(each_data, to_axis_order="czyxt")[0, 0],
os.path.join(self.temp_dir, "test_tiff_" + str(i) + ".tif"), "") | conditional_block |
test_converter.py | __author__ = "John Kirkham <kirkhamj@janelia.hhmi.org>"
__date__ = "$Mar 30, 2015 08:25:33 EDT$"
import collections
import json
import os
import os.path
import shutil
import tempfile
import numpy
import h5py
import vigra
import vigra.impex
import nanshe.util.iters
import nanshe.util.xnumpy
import nanshe.io.xtiff
import nanshe.converter
class TestConverter(object):
def setup(self):
self.temp_dir = ""
self.filedata = collections.OrderedDict()
self.data = None
self.data = numpy.random.random_integers(0, 255, (1000, 1, 102, 101, 1)).astype(numpy.uint8)
self.temp_dir = tempfile.mkdtemp()
for i, i_str, (a_b, a_e) in nanshe.util.iters.filled_stringify_enumerate(
nanshe.util.iters.izip(
*nanshe.util.iters.lagged_generators(
nanshe.util.iters.irange(
0,
self.data.shape[0] + 100 - 1,
100
)
)
)
):
each_filename = os.path.join(self.temp_dir, "test_tiff_" + str(i) + ".tif")
each_data = self.data[a_b:a_e]
self.filedata[each_filename] = each_data
vigra.impex.writeVolume(nanshe.util.xnumpy.tagging_reorder_array(each_data, to_axis_order="czyxt")[0, 0],
os.path.join(self.temp_dir, "test_tiff_" + str(i) + ".tif"), "")
def test_main(self):
params = {
"axis" : 0,
"channel" : 0,
"z_index" : 0,
"pages_to_channel" : 1
}
config_filename = os.path.join(self.temp_dir, "config.json")
hdf5_filename = os.path.join(self.temp_dir, "test.h5")
hdf5_filepath = hdf5_filename + "/data"
with open(config_filename, "w") as fid:
json.dump(params, fid)
fid.write("\n")
main_args = ["./converter.py"] + ["tiff"] + [config_filename] + list(self.filedata.keys()) + [hdf5_filepath]
assert (nanshe.converter.main(*main_args) == 0)
assert os.path.exists(hdf5_filename)
data = None
with h5py.File(hdf5_filename, "r") as hdf5_handle:
data = hdf5_handle["data"].value
self_data_h5 = nanshe.util.xnumpy.tagging_reorder_array(self.data, to_axis_order="cztyx")[0, 0]
assert (data == self_data_h5).all()
|
self.temp_dir = ""
self.filedata = collections.OrderedDict()
self.data = None | os.remove(hdf5_filename)
def teardown(self):
shutil.rmtree(self.temp_dir) | random_line_split |
test_converter.py | __author__ = "John Kirkham <kirkhamj@janelia.hhmi.org>"
__date__ = "$Mar 30, 2015 08:25:33 EDT$"
import collections
import json
import os
import os.path
import shutil
import tempfile
import numpy
import h5py
import vigra
import vigra.impex
import nanshe.util.iters
import nanshe.util.xnumpy
import nanshe.io.xtiff
import nanshe.converter
class TestConverter(object):
def setup(self):
self.temp_dir = ""
self.filedata = collections.OrderedDict()
self.data = None
self.data = numpy.random.random_integers(0, 255, (1000, 1, 102, 101, 1)).astype(numpy.uint8)
self.temp_dir = tempfile.mkdtemp()
for i, i_str, (a_b, a_e) in nanshe.util.iters.filled_stringify_enumerate(
nanshe.util.iters.izip(
*nanshe.util.iters.lagged_generators(
nanshe.util.iters.irange(
0,
self.data.shape[0] + 100 - 1,
100
)
)
)
):
each_filename = os.path.join(self.temp_dir, "test_tiff_" + str(i) + ".tif")
each_data = self.data[a_b:a_e]
self.filedata[each_filename] = each_data
vigra.impex.writeVolume(nanshe.util.xnumpy.tagging_reorder_array(each_data, to_axis_order="czyxt")[0, 0],
os.path.join(self.temp_dir, "test_tiff_" + str(i) + ".tif"), "")
def | (self):
params = {
"axis" : 0,
"channel" : 0,
"z_index" : 0,
"pages_to_channel" : 1
}
config_filename = os.path.join(self.temp_dir, "config.json")
hdf5_filename = os.path.join(self.temp_dir, "test.h5")
hdf5_filepath = hdf5_filename + "/data"
with open(config_filename, "w") as fid:
json.dump(params, fid)
fid.write("\n")
main_args = ["./converter.py"] + ["tiff"] + [config_filename] + list(self.filedata.keys()) + [hdf5_filepath]
assert (nanshe.converter.main(*main_args) == 0)
assert os.path.exists(hdf5_filename)
data = None
with h5py.File(hdf5_filename, "r") as hdf5_handle:
data = hdf5_handle["data"].value
self_data_h5 = nanshe.util.xnumpy.tagging_reorder_array(self.data, to_axis_order="cztyx")[0, 0]
assert (data == self_data_h5).all()
os.remove(hdf5_filename)
def teardown(self):
shutil.rmtree(self.temp_dir)
self.temp_dir = ""
self.filedata = collections.OrderedDict()
self.data = None
| test_main | identifier_name |
campus.py | from models.basemodel import BaseModel
class Campus(BaseModel):
CAMPUS_CODES = ['BD', 'DN', 'CS']
LONG_NAMES = {
'BD': 'University of Colorado, Boulder',
'DN': 'University of Colorado, Denver', |
def requiredFields(self):
return ['campus', 'fcqs', 'courses', 'instructors', 'departments', 'colleges', 'id']
def fields(self):
return {
'campus': (self.is_in_list(self.CAMPUS_CODES), ),
'fcqs': (self.is_list, self.schema_list_check(self.is_string, )),
'grades': (self.is_list, self.schema_list_check(self.is_string, ),),
'courses': (self.is_list, self.schema_list_check(self.is_string, )),
'instructors': (self.is_list, self.schema_list_check(self.is_string, )),
'departments': (self.is_list, self.schema_list_check(self.is_string, )),
'colleges': (self.is_list, self.schema_list_check(self.is_string, )),
'id': (self.is_string, self.is_not_empty, ),
}
def default(self):
return {
'campus': '',
'fcqs': [],
'grades': [],
'courses': [],
'instructors': [],
'departments': [],
'colleges': [],
'id': '',
} | 'CS': 'University of Colorado, Colorado Springs'
} | random_line_split |
campus.py | from models.basemodel import BaseModel
class Campus(BaseModel):
CAMPUS_CODES = ['BD', 'DN', 'CS']
LONG_NAMES = {
'BD': 'University of Colorado, Boulder',
'DN': 'University of Colorado, Denver',
'CS': 'University of Colorado, Colorado Springs'
}
def | (self):
return ['campus', 'fcqs', 'courses', 'instructors', 'departments', 'colleges', 'id']
def fields(self):
return {
'campus': (self.is_in_list(self.CAMPUS_CODES), ),
'fcqs': (self.is_list, self.schema_list_check(self.is_string, )),
'grades': (self.is_list, self.schema_list_check(self.is_string, ),),
'courses': (self.is_list, self.schema_list_check(self.is_string, )),
'instructors': (self.is_list, self.schema_list_check(self.is_string, )),
'departments': (self.is_list, self.schema_list_check(self.is_string, )),
'colleges': (self.is_list, self.schema_list_check(self.is_string, )),
'id': (self.is_string, self.is_not_empty, ),
}
def default(self):
return {
'campus': '',
'fcqs': [],
'grades': [],
'courses': [],
'instructors': [],
'departments': [],
'colleges': [],
'id': '',
}
| requiredFields | identifier_name |
campus.py | from models.basemodel import BaseModel
class Campus(BaseModel):
CAMPUS_CODES = ['BD', 'DN', 'CS']
LONG_NAMES = {
'BD': 'University of Colorado, Boulder',
'DN': 'University of Colorado, Denver',
'CS': 'University of Colorado, Colorado Springs'
}
def requiredFields(self):
return ['campus', 'fcqs', 'courses', 'instructors', 'departments', 'colleges', 'id']
def fields(self):
return {
'campus': (self.is_in_list(self.CAMPUS_CODES), ),
'fcqs': (self.is_list, self.schema_list_check(self.is_string, )),
'grades': (self.is_list, self.schema_list_check(self.is_string, ),),
'courses': (self.is_list, self.schema_list_check(self.is_string, )),
'instructors': (self.is_list, self.schema_list_check(self.is_string, )),
'departments': (self.is_list, self.schema_list_check(self.is_string, )),
'colleges': (self.is_list, self.schema_list_check(self.is_string, )),
'id': (self.is_string, self.is_not_empty, ),
}
def default(self):
| return {
'campus': '',
'fcqs': [],
'grades': [],
'courses': [],
'instructors': [],
'departments': [],
'colleges': [],
'id': '',
} | identifier_body | |
string.js | // Compiled by ClojureScript 0.0-2138
goog.provide('clojure.string');
goog.require('cljs.core');
goog.require('goog.string.StringBuffer');
goog.require('goog.string.StringBuffer');
goog.require('goog.string');
goog.require('goog.string');
clojure.string.seq_reverse = (function seq_reverse(coll){return cljs.core.reduce.call(null,cljs.core.conj,cljs.core.List.EMPTY,coll);
});
/**
* Returns s with its characters reversed.
*/
clojure.string.reverse = (function reverse(s){return s.split("").reverse().join("");
});
/**
* Replaces all instance of match with replacement in s.
* match/replacement can be:
*
* string / string
* pattern / (string or function of match).
*/
clojure.string.replace = (function replace(s,match,replacement){if(typeof match === 'string')
{return s.replace((new RegExp(goog.string.regExpEscape(match),"g")),replacement);
} else
{if(cljs.core.truth_(match.hasOwnProperty("source")))
{return s.replace((new RegExp(match.source,"g")),replacement);
} else
{if(new cljs.core.Keyword(null,"else","else",1017020587))
{throw [cljs.core.str("Invalid match arg: "),cljs.core.str(match)].join('');
} else
{return null;
}
}
}
});
/**
* Replaces the first instance of match with replacement in s.
* match/replacement can be:
*
* string / string
* pattern / (string or function of match).
*/
clojure.string.replace_first = (function replace_first(s,match,replacement){return s.replace(match,replacement);
});
/**
* Returns a string of all elements in coll, as returned by (seq coll),
* separated by an optional separator.
*/
clojure.string.join = (function() {
var join = null;
var join__1 = (function (coll){return cljs.core.apply.call(null,cljs.core.str,coll);
});
var join__2 = (function (separator,coll){return cljs.core.apply.call(null,cljs.core.str,cljs.core.interpose.call(null,separator,coll));
});
join = function(separator,coll){
switch(arguments.length){
case 1:
return join__1.call(this,separator);
case 2:
return join__2.call(this,separator,coll);
}
throw(new Error('Invalid arity: ' + arguments.length));
};
join.cljs$core$IFn$_invoke$arity$1 = join__1;
join.cljs$core$IFn$_invoke$arity$2 = join__2;
return join;
})()
;
/**
* Converts string to all upper-case.
*/
clojure.string.upper_case = (function upper_case(s){return s.toUpperCase();
});
/**
* Converts string to all lower-case.
*/
clojure.string.lower_case = (function lower_case(s){return s.toLowerCase();
});
/**
* Converts first character of the string to upper-case, all other
* characters to lower-case.
*/
clojure.string.capitalize = (function capitalize(s){if((cljs.core.count.call(null,s) < 2))
{return clojure.string.upper_case.call(null,s);
} else
{return [cljs.core.str(clojure.string.upper_case.call(null,cljs.core.subs.call(null,s,0,1))),cljs.core.str(clojure.string.lower_case.call(null,cljs.core.subs.call(null,s,1)))].join('');
}
});
clojure.string.pop_last_while_empty = (function pop_last_while_empty(v){var v__$1 = v;while(true) |
});
clojure.string.discard_trailing_if_needed = (function discard_trailing_if_needed(limit,v){if(cljs.core._EQ_.call(null,0,limit))
{return clojure.string.pop_last_while_empty.call(null,v);
} else
{return v;
}
});
clojure.string.split_with_empty_regex = (function split_with_empty_regex(s,limit){if(((limit <= 0)) || ((limit >= (2 + cljs.core.count.call(null,s)))))
{return cljs.core.conj.call(null,cljs.core.vec.call(null,cljs.core.cons.call(null,"",cljs.core.map.call(null,cljs.core.str,cljs.core.seq.call(null,s)))),"");
} else
{var pred__22157 = cljs.core._EQ_;var expr__22158 = limit;if(cljs.core.truth_(pred__22157.call(null,1,expr__22158)))
{return (new cljs.core.PersistentVector(null,1,5,cljs.core.PersistentVector.EMPTY_NODE,[s],null));
} else
{if(cljs.core.truth_(pred__22157.call(null,2,expr__22158)))
{return (new cljs.core.PersistentVector(null,2,5,cljs.core.PersistentVector.EMPTY_NODE,["",s],null));
} else
{var c = (limit - 2);return cljs.core.conj.call(null,cljs.core.vec.call(null,cljs.core.cons.call(null,"",cljs.core.subvec.call(null,cljs.core.vec.call(null,cljs.core.map.call(null,cljs.core.str,cljs.core.seq.call(null,s))),0,c))),cljs.core.subs.call(null,s,c));
}
}
}
});
/**
* Splits string on a regular expression. Optional argument limit is
* the maximum number of splits. Not lazy. Returns vector of the splits.
*/
clojure.string.split = (function() {
var split = null;
var split__2 = (function (s,re){return split.call(null,s,re,0);
});
var split__3 = (function (s,re,limit){return clojure.string.discard_trailing_if_needed.call(null,limit,((cljs.core._EQ_.call(null,[cljs.core.str(re)].join(''),"/(?:)/"))?clojure.string.split_with_empty_regex.call(null,s,limit):(((limit < 1))?cljs.core.vec.call(null,[cljs.core.str(s)].join('').split(re)):(function (){var s__$1 = s;var limit__$1 = limit;var parts = cljs.core.PersistentVector.EMPTY;while(true){
if(cljs.core._EQ_.call(null,limit__$1,1))
{return cljs.core.conj.call(null,parts,s__$1);
} else
{var temp__4090__auto__ = cljs.core.re_find.call(null,re,s__$1);if(cljs.core.truth_(temp__4090__auto__))
{var m = temp__4090__auto__;var index = s__$1.indexOf(m);{
var G__22160 = s__$1.substring((index + cljs.core.count.call(null,m)));
var G__22161 = (limit__$1 - 1);
var G__22162 = cljs.core.conj.call(null,parts,s__$1.substring(0,index));
s__$1 = G__22160;
limit__$1 = G__22161;
parts = G__22162;
continue;
}
} else
{return cljs.core.conj.call(null,parts,s__$1);
}
}
break;
}
})())));
});
split = function(s,re,limit){
switch(arguments.length){
case 2:
return split__2.call(this,s,re);
case 3:
return split__3.call(this,s,re,limit);
}
throw(new Error('Invalid arity: ' + arguments.length));
};
split.cljs$core$IFn$_invoke$arity$2 = split__2;
split.cljs$core$IFn$_invoke$arity$3 = split__3;
return split;
})()
;
/**
* Splits s on
* or
* .
*/
clojure.string.split_lines = (function split_lines(s){return clojure.string.split.call(null,s,/\n|\r\n/);
});
/**
* Removes whitespace from both ends of string.
*/
clojure.string.trim = (function trim(s){return goog.string.trim(s);
});
/**
* Removes whitespace from the left side of string.
*/
clojure.string.triml = (function triml(s){return goog.string.trimLeft(s);
});
/**
* Removes whitespace from the right side of string.
*/
clojure.string.trimr = (function trimr(s){return goog.string.trimRight(s);
});
/**
* Removes all trailing newline \n or return \r characters from
* string. Similar to Perl's chomp.
*/
clojure.string.trim_newline = (function trim_newline(s){var index = s.length;while(true){
if((index === 0))
{return "";
} else
{var ch = cljs.core.get.call(null,s,(index - 1));if((cljs.core._EQ_.call(null,ch,"\n")) || (cljs.core._EQ_.call(null,ch,"\r")))
{{
var G__22163 = (index - 1);
index = G__22163;
continue;
}
} else
{return s.substring(0,index);
}
}
break;
}
});
/**
* True is s is nil, empty, or contains only whitespace.
*/
clojure.string.blank_QMARK_ = (function blank_QMARK_(s){return goog.string.isEmptySafe(s);
});
/**
* Return a new string, using cmap to escape each character ch
* from s as follows:
*
* If (cmap ch) is nil, append ch to the new string.
* If (cmap ch) is non-nil, append (str (cmap ch)) instead.
*/
clojure.string.escape = (function escape__$1(s,cmap){var buffer = (new goog.string.StringBuffer());var length = s.length;var index = 0;while(true){
if(cljs.core._EQ_.call(null,length,index))
{return buffer.toString();
} else
{var ch = s.charAt(index);var temp__4090__auto___22164 = cljs.core.get.call(null,cmap,ch);if(cljs.core.truth_(temp__4090__auto___22164))
{var replacement_22165 = temp__4090__auto___22164;buffer.append([cljs.core.str(replacement_22165)].join(''));
} else
{buffer.append(ch);
}
{
var G__22166 = (index + 1);
index = G__22166;
continue;
}
}
break;
}
});
| {
if(cljs.core._EQ_.call(null,"",cljs.core.peek.call(null,v__$1)))
{{
var G__22153 = cljs.core.pop.call(null,v__$1);
v__$1 = G__22153;
continue;
}
} else
{return v__$1;
}
break;
} | conditional_block |
string.js | // Compiled by ClojureScript 0.0-2138
goog.provide('clojure.string');
goog.require('cljs.core');
goog.require('goog.string.StringBuffer');
goog.require('goog.string.StringBuffer');
goog.require('goog.string');
goog.require('goog.string');
clojure.string.seq_reverse = (function seq_reverse(coll){return cljs.core.reduce.call(null,cljs.core.conj,cljs.core.List.EMPTY,coll);
});
/**
* Returns s with its characters reversed.
*/
clojure.string.reverse = (function reverse(s){return s.split("").reverse().join("");
});
/**
* Replaces all instance of match with replacement in s.
* match/replacement can be:
*
* string / string
* pattern / (string or function of match).
*/
clojure.string.replace = (function replace(s,match,replacement){if(typeof match === 'string')
{return s.replace((new RegExp(goog.string.regExpEscape(match),"g")),replacement);
} else
{if(cljs.core.truth_(match.hasOwnProperty("source")))
{return s.replace((new RegExp(match.source,"g")),replacement);
} else
{if(new cljs.core.Keyword(null,"else","else",1017020587))
{throw [cljs.core.str("Invalid match arg: "),cljs.core.str(match)].join('');
} else
{return null;
}
}
}
});
/**
* Replaces the first instance of match with replacement in s.
* match/replacement can be:
*
* string / string
* pattern / (string or function of match).
*/
clojure.string.replace_first = (function replace_first(s,match,replacement){return s.replace(match,replacement);
});
/**
* Returns a string of all elements in coll, as returned by (seq coll),
* separated by an optional separator.
*/
clojure.string.join = (function() {
var join = null;
var join__1 = (function (coll){return cljs.core.apply.call(null,cljs.core.str,coll);
});
var join__2 = (function (separator,coll){return cljs.core.apply.call(null,cljs.core.str,cljs.core.interpose.call(null,separator,coll));
});
join = function(separator,coll){
switch(arguments.length){
case 1:
return join__1.call(this,separator);
case 2:
return join__2.call(this,separator,coll);
}
throw(new Error('Invalid arity: ' + arguments.length));
};
join.cljs$core$IFn$_invoke$arity$1 = join__1;
join.cljs$core$IFn$_invoke$arity$2 = join__2;
return join;
})()
;
/**
* Converts string to all upper-case.
*/
clojure.string.upper_case = (function upper_case(s){return s.toUpperCase();
});
/**
* Converts string to all lower-case.
*/
clojure.string.lower_case = (function lower_case(s){return s.toLowerCase();
});
/**
* Converts first character of the string to upper-case, all other
* characters to lower-case.
*/
clojure.string.capitalize = (function capitalize(s){if((cljs.core.count.call(null,s) < 2))
{return clojure.string.upper_case.call(null,s);
} else
{return [cljs.core.str(clojure.string.upper_case.call(null,cljs.core.subs.call(null,s,0,1))),cljs.core.str(clojure.string.lower_case.call(null,cljs.core.subs.call(null,s,1)))].join('');
}
});
clojure.string.pop_last_while_empty = (function pop_last_while_empty(v){var v__$1 = v;while(true){
if(cljs.core._EQ_.call(null,"",cljs.core.peek.call(null,v__$1)))
{{
var G__22153 = cljs.core.pop.call(null,v__$1);
v__$1 = G__22153;
continue;
}
} else
{return v__$1;
}
break;
}
});
clojure.string.discard_trailing_if_needed = (function discard_trailing_if_needed(limit,v){if(cljs.core._EQ_.call(null,0,limit))
{return clojure.string.pop_last_while_empty.call(null,v);
} else
{return v;
}
});
| {return cljs.core.conj.call(null,cljs.core.vec.call(null,cljs.core.cons.call(null,"",cljs.core.map.call(null,cljs.core.str,cljs.core.seq.call(null,s)))),"");
} else
{var pred__22157 = cljs.core._EQ_;var expr__22158 = limit;if(cljs.core.truth_(pred__22157.call(null,1,expr__22158)))
{return (new cljs.core.PersistentVector(null,1,5,cljs.core.PersistentVector.EMPTY_NODE,[s],null));
} else
{if(cljs.core.truth_(pred__22157.call(null,2,expr__22158)))
{return (new cljs.core.PersistentVector(null,2,5,cljs.core.PersistentVector.EMPTY_NODE,["",s],null));
} else
{var c = (limit - 2);return cljs.core.conj.call(null,cljs.core.vec.call(null,cljs.core.cons.call(null,"",cljs.core.subvec.call(null,cljs.core.vec.call(null,cljs.core.map.call(null,cljs.core.str,cljs.core.seq.call(null,s))),0,c))),cljs.core.subs.call(null,s,c));
}
}
}
});
/**
* Splits string on a regular expression. Optional argument limit is
* the maximum number of splits. Not lazy. Returns vector of the splits.
*/
clojure.string.split = (function() {
var split = null;
var split__2 = (function (s,re){return split.call(null,s,re,0);
});
var split__3 = (function (s,re,limit){return clojure.string.discard_trailing_if_needed.call(null,limit,((cljs.core._EQ_.call(null,[cljs.core.str(re)].join(''),"/(?:)/"))?clojure.string.split_with_empty_regex.call(null,s,limit):(((limit < 1))?cljs.core.vec.call(null,[cljs.core.str(s)].join('').split(re)):(function (){var s__$1 = s;var limit__$1 = limit;var parts = cljs.core.PersistentVector.EMPTY;while(true){
if(cljs.core._EQ_.call(null,limit__$1,1))
{return cljs.core.conj.call(null,parts,s__$1);
} else
{var temp__4090__auto__ = cljs.core.re_find.call(null,re,s__$1);if(cljs.core.truth_(temp__4090__auto__))
{var m = temp__4090__auto__;var index = s__$1.indexOf(m);{
var G__22160 = s__$1.substring((index + cljs.core.count.call(null,m)));
var G__22161 = (limit__$1 - 1);
var G__22162 = cljs.core.conj.call(null,parts,s__$1.substring(0,index));
s__$1 = G__22160;
limit__$1 = G__22161;
parts = G__22162;
continue;
}
} else
{return cljs.core.conj.call(null,parts,s__$1);
}
}
break;
}
})())));
});
split = function(s,re,limit){
switch(arguments.length){
case 2:
return split__2.call(this,s,re);
case 3:
return split__3.call(this,s,re,limit);
}
throw(new Error('Invalid arity: ' + arguments.length));
};
split.cljs$core$IFn$_invoke$arity$2 = split__2;
split.cljs$core$IFn$_invoke$arity$3 = split__3;
return split;
})()
;
/**
* Splits s on
* or
* .
*/
clojure.string.split_lines = (function split_lines(s){return clojure.string.split.call(null,s,/\n|\r\n/);
});
/**
* Removes whitespace from both ends of string.
*/
clojure.string.trim = (function trim(s){return goog.string.trim(s);
});
/**
* Removes whitespace from the left side of string.
*/
clojure.string.triml = (function triml(s){return goog.string.trimLeft(s);
});
/**
* Removes whitespace from the right side of string.
*/
clojure.string.trimr = (function trimr(s){return goog.string.trimRight(s);
});
/**
* Removes all trailing newline \n or return \r characters from
* string. Similar to Perl's chomp.
*/
clojure.string.trim_newline = (function trim_newline(s){var index = s.length;while(true){
if((index === 0))
{return "";
} else
{var ch = cljs.core.get.call(null,s,(index - 1));if((cljs.core._EQ_.call(null,ch,"\n")) || (cljs.core._EQ_.call(null,ch,"\r")))
{{
var G__22163 = (index - 1);
index = G__22163;
continue;
}
} else
{return s.substring(0,index);
}
}
break;
}
});
/**
* True is s is nil, empty, or contains only whitespace.
*/
clojure.string.blank_QMARK_ = (function blank_QMARK_(s){return goog.string.isEmptySafe(s);
});
/**
* Return a new string, using cmap to escape each character ch
* from s as follows:
*
* If (cmap ch) is nil, append ch to the new string.
* If (cmap ch) is non-nil, append (str (cmap ch)) instead.
*/
clojure.string.escape = (function escape__$1(s,cmap){var buffer = (new goog.string.StringBuffer());var length = s.length;var index = 0;while(true){
if(cljs.core._EQ_.call(null,length,index))
{return buffer.toString();
} else
{var ch = s.charAt(index);var temp__4090__auto___22164 = cljs.core.get.call(null,cmap,ch);if(cljs.core.truth_(temp__4090__auto___22164))
{var replacement_22165 = temp__4090__auto___22164;buffer.append([cljs.core.str(replacement_22165)].join(''));
} else
{buffer.append(ch);
}
{
var G__22166 = (index + 1);
index = G__22166;
continue;
}
}
break;
}
}); | clojure.string.split_with_empty_regex = (function split_with_empty_regex(s,limit){if(((limit <= 0)) || ((limit >= (2 + cljs.core.count.call(null,s)))))
| random_line_split |
cemiFactory.py | # -*- coding: utf-8 -*-
""" Python KNX framework
License
=======
- B{PyKNyX} (U{https://github.com/knxd/pyknyx}) is Copyright:
- © 2016-2017 Matthias Urlichs
- PyKNyX is a fork of pKNyX
- © 2013-2015 Frédéric Mantegazza
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
or see:
- U{http://www.gnu.org/licenses/gpl.html}
Module purpose | cEMI frame management
Implements
==========
- B{CEMIFactory}
- B{CEMIFactoryValueError}
Documentation
=============
Usage
=====
@author: Frédéric Mantegazza
@author: B. Malinowsky
@copyright: (C) 2013-2015 Frédéric Mantegazza
@copyright: (C) 2006, 2011 B. Malinowsky
@license: GPL
"""
from pyknyx.common.exception import PyKNyXValueError
from pyknyx.services.logger import logging; logger = logging.getLogger(__name__)
from pyknyx.stack.cemi.cemi import CEMIValueError
class CEMIFactoryValueError(PyKNyXValueError):
"""
"""
class CEMIFactory(object):
""" cEMI frame creation handling class
"""
def __init__(self):
"""
"""
super(CEMIFactory, self).__init__() | ==============
| random_line_split |
cemiFactory.py | # -*- coding: utf-8 -*-
""" Python KNX framework
License
=======
- B{PyKNyX} (U{https://github.com/knxd/pyknyx}) is Copyright:
- © 2016-2017 Matthias Urlichs
- PyKNyX is a fork of pKNyX
- © 2013-2015 Frédéric Mantegazza
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
or see:
- U{http://www.gnu.org/licenses/gpl.html}
Module purpose
==============
cEMI frame management
Implements
==========
- B{CEMIFactory}
- B{CEMIFactoryValueError}
Documentation
=============
Usage
=====
@author: Frédéric Mantegazza
@author: B. Malinowsky
@copyright: (C) 2013-2015 Frédéric Mantegazza
@copyright: (C) 2006, 2011 B. Malinowsky
@license: GPL
"""
from pyknyx.common.exception import PyKNyXValueError
from pyknyx.services.logger import logging; logger = logging.getLogger(__name__)
from pyknyx.stack.cemi.cemi import CEMIValueError
class CEMIFactoryValueError(PyKNyXValueError):
"""
"""
class CEMIFactory(object):
""" cEMI | frame creation handling class
"""
def __init__(self):
"""
"""
super(CEMIFactory, self).__init__()
| identifier_body | |
cemiFactory.py | # -*- coding: utf-8 -*-
""" Python KNX framework
License
=======
- B{PyKNyX} (U{https://github.com/knxd/pyknyx}) is Copyright:
- © 2016-2017 Matthias Urlichs
- PyKNyX is a fork of pKNyX
- © 2013-2015 Frédéric Mantegazza
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
or see:
- U{http://www.gnu.org/licenses/gpl.html}
Module purpose
==============
cEMI frame management
Implements
==========
- B{CEMIFactory}
- B{CEMIFactoryValueError}
Documentation
=============
Usage
=====
@author: Frédéric Mantegazza
@author: B. Malinowsky
@copyright: (C) 2013-2015 Frédéric Mantegazza
@copyright: (C) 2006, 2011 B. Malinowsky
@license: GPL
"""
from pyknyx.common.exception import PyKNyXValueError
from pyknyx.services.logger import logging; logger = logging.getLogger(__name__)
from pyknyx.stack.cemi.cemi import CEMIValueError
class CEMIFactoryValueError(PyKNyXValueError):
"""
"""
class CEMIFact | :
""" cEMI frame creation handling class
"""
def __init__(self):
"""
"""
super(CEMIFactory, self).__init__()
| ory(object) | identifier_name |
tail.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::{Context, Error};
use bookmarks::{BookmarkUpdateLog, BookmarkUpdateLogEntry, Freshness};
use cloned::cloned;
use context::CoreContext;
use futures::{
future::{self, FutureExt},
stream::{self, StreamExt},
TryStreamExt,
};
use mononoke_types::RepositoryId;
use scuba_ext::MononokeScubaSampleBuilder;
use slog::debug;
use std::sync::Arc;
use std::time::Duration;
use crate::reporting::log_noop_iteration_to_scuba;
const SLEEP_SECS: u64 = 10;
const SIGNLE_DB_QUERY_ENTRIES_LIMIT: u64 = 10;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct QueueSize(pub usize);
/// Adds remaining number of changesets to sync to each entry
/// The idea is to indicate the number of changesets, left to sync
/// *after* a given changeset has been synced, therefore `n-i-1`
/// For example, `[c1, c2, c3]` will turn into `[(c1, 2), (c2, 1), (c3, 0)]`
fn add_queue_sizes<T>(
items: Vec<T>,
initial_queue_size: usize,
) -> impl Iterator<Item = (T, QueueSize)> {
items
.into_iter()
.enumerate()
.map(move |(i, item)| (item, QueueSize(initial_queue_size - i - 1)))
}
/// Run a queue size query, consume the produced `Result` and turn it
/// into an `Option`, suitable for `unfold`
async fn query_queue_size(
ctx: CoreContext,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
current_id: u64,
) -> Result<u64, Error> {
bookmark_update_log
.count_further_bookmark_log_entries(ctx.clone(), current_id, None)
.await
.map(|queue_size| {
debug!(ctx.logger(), "queue size query returned: {}", queue_size);
queue_size
})
}
/// Produce an infinite stream of `Result` from a fallible item factory
/// Two differences with the normal `unfold`:
/// - this one does not expect the item factory (`f`) to return an `Option`,
/// so there's no way to terminate a stream from within `f`
/// - this one expects `f` to return a `Result`, which is threaded downstream
/// and allows the consumer of the stream to terminate it on `Err`
/// The main motivation for this is to be able to use `?` in the item factory
fn unfold_forever<T, F, Fut, Item>(
init: T,
mut f: F,
) -> impl stream::Stream<Item = Result<Item, Error>>
where
T: Copy,
F: FnMut(T) -> Fut,
Fut: future::Future<Output = Result<(Item, T), Error>>,
{
stream::unfold(init, move |iteration_value| {
f(iteration_value).then(move |result| {
match result {
Ok((item, next_it_val)) => future::ready(Some((Ok(item), next_it_val))),
Err(e) => future::ready(Some((Err(e), iteration_value))),
}
})
})
}
pub(crate) fn tail_entries(
ctx: CoreContext,
start_id: u64,
repo_id: RepositoryId,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
scuba_sample: MononokeScubaSampleBuilder,
) -> impl stream::Stream<Item = Result<(BookmarkUpdateLogEntry, QueueSize), Error>> {
unfold_forever((0, start_id), move |(iteration, current_id)| {
cloned!(ctx, bookmark_update_log, scuba_sample);
async move {
let entries: Vec<Result<_, Error>> = bookmark_update_log
.read_next_bookmark_log_entries(
ctx.clone(),
current_id,
SIGNLE_DB_QUERY_ENTRIES_LIMIT,
Freshness::MaybeStale,
)
.collect()
.await;
let entries: Result<Vec<_>, Error> = entries.into_iter().collect();
let entries: Vec<_> = entries.context("While querying bookmarks_update_log")?;
let queue_size =
query_queue_size(ctx.clone(), bookmark_update_log.clone(), current_id).await?;
match entries.last().map(|last_item_ref| last_item_ref.id) {
Some(last_entry_id) => |
None => {
debug!(
ctx.logger(),
"tail_entries: no more entries during iteration {}. Sleeping.", iteration
);
log_noop_iteration_to_scuba(scuba_sample, repo_id);
tokio::time::sleep(Duration::new(SLEEP_SECS, 0)).await;
Ok((stream::empty().boxed(), (iteration + 1, current_id)))
}
}
}
})
.try_flatten()
}
| {
debug!(
ctx.logger(),
"tail_entries generating, iteration {}", iteration
);
let entries_with_queue_size: std::iter::Map<_, _> =
add_queue_sizes(entries, queue_size as usize).map(Ok);
Ok((
stream::iter(entries_with_queue_size).boxed(),
(iteration + 1, last_entry_id as u64),
))
} | conditional_block |
tail.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::{Context, Error};
use bookmarks::{BookmarkUpdateLog, BookmarkUpdateLogEntry, Freshness};
use cloned::cloned;
use context::CoreContext;
use futures::{
future::{self, FutureExt},
stream::{self, StreamExt},
TryStreamExt,
};
use mononoke_types::RepositoryId;
use scuba_ext::MononokeScubaSampleBuilder;
use slog::debug;
use std::sync::Arc;
use std::time::Duration;
use crate::reporting::log_noop_iteration_to_scuba;
const SLEEP_SECS: u64 = 10;
const SIGNLE_DB_QUERY_ENTRIES_LIMIT: u64 = 10;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct QueueSize(pub usize);
/// Adds remaining number of changesets to sync to each entry
/// The idea is to indicate the number of changesets, left to sync
/// *after* a given changeset has been synced, therefore `n-i-1`
/// For example, `[c1, c2, c3]` will turn into `[(c1, 2), (c2, 1), (c3, 0)]`
fn add_queue_sizes<T>(
items: Vec<T>,
initial_queue_size: usize,
) -> impl Iterator<Item = (T, QueueSize)> {
items
.into_iter()
.enumerate()
.map(move |(i, item)| (item, QueueSize(initial_queue_size - i - 1)))
}
/// Run a queue size query, consume the produced `Result` and turn it
/// into an `Option`, suitable for `unfold`
async fn query_queue_size(
ctx: CoreContext,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
current_id: u64,
) -> Result<u64, Error> {
bookmark_update_log
.count_further_bookmark_log_entries(ctx.clone(), current_id, None)
.await
.map(|queue_size| {
debug!(ctx.logger(), "queue size query returned: {}", queue_size);
queue_size
})
}
/// Produce an infinite stream of `Result` from a fallible item factory
/// Two differences with the normal `unfold`:
/// - this one does not expect the item factory (`f`) to return an `Option`,
/// so there's no way to terminate a stream from within `f`
/// - this one expects `f` to return a `Result`, which is threaded downstream
/// and allows the consumer of the stream to terminate it on `Err`
/// The main motivation for this is to be able to use `?` in the item factory
fn unfold_forever<T, F, Fut, Item>(
init: T,
mut f: F,
) -> impl stream::Stream<Item = Result<Item, Error>>
where
T: Copy,
F: FnMut(T) -> Fut,
Fut: future::Future<Output = Result<(Item, T), Error>>,
{
stream::unfold(init, move |iteration_value| {
f(iteration_value).then(move |result| {
match result {
Ok((item, next_it_val)) => future::ready(Some((Ok(item), next_it_val))),
Err(e) => future::ready(Some((Err(e), iteration_value))),
}
})
})
}
pub(crate) fn tail_entries(
ctx: CoreContext,
start_id: u64,
repo_id: RepositoryId,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
scuba_sample: MononokeScubaSampleBuilder,
) -> impl stream::Stream<Item = Result<(BookmarkUpdateLogEntry, QueueSize), Error>> | {
unfold_forever((0, start_id), move |(iteration, current_id)| {
cloned!(ctx, bookmark_update_log, scuba_sample);
async move {
let entries: Vec<Result<_, Error>> = bookmark_update_log
.read_next_bookmark_log_entries(
ctx.clone(),
current_id,
SIGNLE_DB_QUERY_ENTRIES_LIMIT,
Freshness::MaybeStale,
)
.collect()
.await;
let entries: Result<Vec<_>, Error> = entries.into_iter().collect();
let entries: Vec<_> = entries.context("While querying bookmarks_update_log")?;
let queue_size =
query_queue_size(ctx.clone(), bookmark_update_log.clone(), current_id).await?;
match entries.last().map(|last_item_ref| last_item_ref.id) {
Some(last_entry_id) => {
debug!(
ctx.logger(),
"tail_entries generating, iteration {}", iteration
);
let entries_with_queue_size: std::iter::Map<_, _> =
add_queue_sizes(entries, queue_size as usize).map(Ok);
Ok((
stream::iter(entries_with_queue_size).boxed(),
(iteration + 1, last_entry_id as u64),
))
}
None => {
debug!(
ctx.logger(),
"tail_entries: no more entries during iteration {}. Sleeping.", iteration
);
log_noop_iteration_to_scuba(scuba_sample, repo_id);
tokio::time::sleep(Duration::new(SLEEP_SECS, 0)).await;
Ok((stream::empty().boxed(), (iteration + 1, current_id)))
}
}
}
})
.try_flatten()
} | identifier_body | |
tail.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::{Context, Error};
use bookmarks::{BookmarkUpdateLog, BookmarkUpdateLogEntry, Freshness};
use cloned::cloned;
use context::CoreContext;
use futures::{
future::{self, FutureExt},
stream::{self, StreamExt},
TryStreamExt,
};
use mononoke_types::RepositoryId;
use scuba_ext::MononokeScubaSampleBuilder;
use slog::debug;
use std::sync::Arc;
use std::time::Duration;
use crate::reporting::log_noop_iteration_to_scuba;
const SLEEP_SECS: u64 = 10;
const SIGNLE_DB_QUERY_ENTRIES_LIMIT: u64 = 10;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct QueueSize(pub usize);
/// Adds remaining number of changesets to sync to each entry
/// The idea is to indicate the number of changesets, left to sync
/// *after* a given changeset has been synced, therefore `n-i-1`
/// For example, `[c1, c2, c3]` will turn into `[(c1, 2), (c2, 1), (c3, 0)]`
fn add_queue_sizes<T>(
items: Vec<T>,
initial_queue_size: usize,
) -> impl Iterator<Item = (T, QueueSize)> {
items
.into_iter()
.enumerate()
.map(move |(i, item)| (item, QueueSize(initial_queue_size - i - 1)))
}
/// Run a queue size query, consume the produced `Result` and turn it
/// into an `Option`, suitable for `unfold`
async fn query_queue_size(
ctx: CoreContext,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
current_id: u64,
) -> Result<u64, Error> {
bookmark_update_log
.count_further_bookmark_log_entries(ctx.clone(), current_id, None)
.await
.map(|queue_size| {
debug!(ctx.logger(), "queue size query returned: {}", queue_size);
queue_size
})
}
/// Produce an infinite stream of `Result` from a fallible item factory
/// Two differences with the normal `unfold`:
/// - this one does not expect the item factory (`f`) to return an `Option`,
/// so there's no way to terminate a stream from within `f`
/// - this one expects `f` to return a `Result`, which is threaded downstream
/// and allows the consumer of the stream to terminate it on `Err`
/// The main motivation for this is to be able to use `?` in the item factory
fn | <T, F, Fut, Item>(
init: T,
mut f: F,
) -> impl stream::Stream<Item = Result<Item, Error>>
where
T: Copy,
F: FnMut(T) -> Fut,
Fut: future::Future<Output = Result<(Item, T), Error>>,
{
stream::unfold(init, move |iteration_value| {
f(iteration_value).then(move |result| {
match result {
Ok((item, next_it_val)) => future::ready(Some((Ok(item), next_it_val))),
Err(e) => future::ready(Some((Err(e), iteration_value))),
}
})
})
}
pub(crate) fn tail_entries(
ctx: CoreContext,
start_id: u64,
repo_id: RepositoryId,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
scuba_sample: MononokeScubaSampleBuilder,
) -> impl stream::Stream<Item = Result<(BookmarkUpdateLogEntry, QueueSize), Error>> {
unfold_forever((0, start_id), move |(iteration, current_id)| {
cloned!(ctx, bookmark_update_log, scuba_sample);
async move {
let entries: Vec<Result<_, Error>> = bookmark_update_log
.read_next_bookmark_log_entries(
ctx.clone(),
current_id,
SIGNLE_DB_QUERY_ENTRIES_LIMIT,
Freshness::MaybeStale,
)
.collect()
.await;
let entries: Result<Vec<_>, Error> = entries.into_iter().collect();
let entries: Vec<_> = entries.context("While querying bookmarks_update_log")?;
let queue_size =
query_queue_size(ctx.clone(), bookmark_update_log.clone(), current_id).await?;
match entries.last().map(|last_item_ref| last_item_ref.id) {
Some(last_entry_id) => {
debug!(
ctx.logger(),
"tail_entries generating, iteration {}", iteration
);
let entries_with_queue_size: std::iter::Map<_, _> =
add_queue_sizes(entries, queue_size as usize).map(Ok);
Ok((
stream::iter(entries_with_queue_size).boxed(),
(iteration + 1, last_entry_id as u64),
))
}
None => {
debug!(
ctx.logger(),
"tail_entries: no more entries during iteration {}. Sleeping.", iteration
);
log_noop_iteration_to_scuba(scuba_sample, repo_id);
tokio::time::sleep(Duration::new(SLEEP_SECS, 0)).await;
Ok((stream::empty().boxed(), (iteration + 1, current_id)))
}
}
}
})
.try_flatten()
}
| unfold_forever | identifier_name |
tail.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::{Context, Error};
use bookmarks::{BookmarkUpdateLog, BookmarkUpdateLogEntry, Freshness};
use cloned::cloned;
use context::CoreContext;
use futures::{
future::{self, FutureExt},
stream::{self, StreamExt},
TryStreamExt,
};
use mononoke_types::RepositoryId;
use scuba_ext::MononokeScubaSampleBuilder;
use slog::debug;
use std::sync::Arc;
use std::time::Duration;
use crate::reporting::log_noop_iteration_to_scuba;
const SLEEP_SECS: u64 = 10;
const SIGNLE_DB_QUERY_ENTRIES_LIMIT: u64 = 10;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct QueueSize(pub usize);
/// Adds remaining number of changesets to sync to each entry
/// The idea is to indicate the number of changesets, left to sync
/// *after* a given changeset has been synced, therefore `n-i-1`
/// For example, `[c1, c2, c3]` will turn into `[(c1, 2), (c2, 1), (c3, 0)]`
fn add_queue_sizes<T>(
items: Vec<T>,
initial_queue_size: usize,
) -> impl Iterator<Item = (T, QueueSize)> {
items
.into_iter()
.enumerate()
.map(move |(i, item)| (item, QueueSize(initial_queue_size - i - 1)))
}
/// Run a queue size query, consume the produced `Result` and turn it
/// into an `Option`, suitable for `unfold`
async fn query_queue_size(
ctx: CoreContext,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
current_id: u64,
) -> Result<u64, Error> {
bookmark_update_log
.count_further_bookmark_log_entries(ctx.clone(), current_id, None)
.await
.map(|queue_size| {
debug!(ctx.logger(), "queue size query returned: {}", queue_size);
queue_size
})
}
/// Produce an infinite stream of `Result` from a fallible item factory
/// Two differences with the normal `unfold`:
/// - this one does not expect the item factory (`f`) to return an `Option`,
/// so there's no way to terminate a stream from within `f`
/// - this one expects `f` to return a `Result`, which is threaded downstream
/// and allows the consumer of the stream to terminate it on `Err`
/// The main motivation for this is to be able to use `?` in the item factory
fn unfold_forever<T, F, Fut, Item>(
init: T,
mut f: F,
) -> impl stream::Stream<Item = Result<Item, Error>>
where
T: Copy,
F: FnMut(T) -> Fut,
Fut: future::Future<Output = Result<(Item, T), Error>>,
{
stream::unfold(init, move |iteration_value| {
f(iteration_value).then(move |result| {
match result {
Ok((item, next_it_val)) => future::ready(Some((Ok(item), next_it_val))),
Err(e) => future::ready(Some((Err(e), iteration_value))),
}
})
}) |
pub(crate) fn tail_entries(
ctx: CoreContext,
start_id: u64,
repo_id: RepositoryId,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
scuba_sample: MononokeScubaSampleBuilder,
) -> impl stream::Stream<Item = Result<(BookmarkUpdateLogEntry, QueueSize), Error>> {
unfold_forever((0, start_id), move |(iteration, current_id)| {
cloned!(ctx, bookmark_update_log, scuba_sample);
async move {
let entries: Vec<Result<_, Error>> = bookmark_update_log
.read_next_bookmark_log_entries(
ctx.clone(),
current_id,
SIGNLE_DB_QUERY_ENTRIES_LIMIT,
Freshness::MaybeStale,
)
.collect()
.await;
let entries: Result<Vec<_>, Error> = entries.into_iter().collect();
let entries: Vec<_> = entries.context("While querying bookmarks_update_log")?;
let queue_size =
query_queue_size(ctx.clone(), bookmark_update_log.clone(), current_id).await?;
match entries.last().map(|last_item_ref| last_item_ref.id) {
Some(last_entry_id) => {
debug!(
ctx.logger(),
"tail_entries generating, iteration {}", iteration
);
let entries_with_queue_size: std::iter::Map<_, _> =
add_queue_sizes(entries, queue_size as usize).map(Ok);
Ok((
stream::iter(entries_with_queue_size).boxed(),
(iteration + 1, last_entry_id as u64),
))
}
None => {
debug!(
ctx.logger(),
"tail_entries: no more entries during iteration {}. Sleeping.", iteration
);
log_noop_iteration_to_scuba(scuba_sample, repo_id);
tokio::time::sleep(Duration::new(SLEEP_SECS, 0)).await;
Ok((stream::empty().boxed(), (iteration + 1, current_id)))
}
}
}
})
.try_flatten()
} | } | random_line_split |
droptarget.py | __author__ = 'Exter, 0xBADDCAFE'
import wx
class FTDropTarget(wx.DropTarget):
"""
Implements drop target functionality to receive files and text
receiver - any WX class that can bind to events
evt - class that comes from wx.lib.newevent.NewCommandEvent call
class variable ID_DROP_FILE | def __init__(self, receiver, evt):
"""
receiver - any WX class that can bind to events
evt - class that comes from wx.lib.newevent.NewCommandEvent call
"""
wx.DropTarget.__init__(self)
self.composite = wx.DataObjectComposite()
self.text_do = wx.TextDataObject()
self.file_do = wx.FileDataObject()
self.composite.Add(self.text_do)
self.composite.Add(self.file_do)
self.SetDataObject(self.composite)
self.receiver = receiver
self.evt = evt
def OnData(self, x, y, result):
"""Handles dropping files/text """
if self.GetData():
drop_type = self.composite.GetReceivedFormat().GetType()
if drop_type in (wx.DF_TEXT, wx.DF_UNICODETEXT):
wx.PostEvent(self.receiver, self.evt(id=self.ID_DROP_TEXT, text=self.text_do.GetText()))
elif drop_type == wx.DF_FILENAME:
wx.PostEvent(self.receiver, self.evt(id=self.ID_DROP_FILE, files=self.file_do.GetFilenames()))
assert isinstance(result, object)
return result | class variable ID_DROP_TEXT
"""
ID_DROP_FILE = wx.NewId()
ID_DROP_TEXT = wx.NewId()
| random_line_split |
droptarget.py | __author__ = 'Exter, 0xBADDCAFE'
import wx
class FTDropTarget(wx.DropTarget):
| """
Implements drop target functionality to receive files and text
receiver - any WX class that can bind to events
evt - class that comes from wx.lib.newevent.NewCommandEvent call
class variable ID_DROP_FILE
class variable ID_DROP_TEXT
"""
ID_DROP_FILE = wx.NewId()
ID_DROP_TEXT = wx.NewId()
def __init__(self, receiver, evt):
"""
receiver - any WX class that can bind to events
evt - class that comes from wx.lib.newevent.NewCommandEvent call
"""
wx.DropTarget.__init__(self)
self.composite = wx.DataObjectComposite()
self.text_do = wx.TextDataObject()
self.file_do = wx.FileDataObject()
self.composite.Add(self.text_do)
self.composite.Add(self.file_do)
self.SetDataObject(self.composite)
self.receiver = receiver
self.evt = evt
def OnData(self, x, y, result):
"""Handles dropping files/text """
if self.GetData():
drop_type = self.composite.GetReceivedFormat().GetType()
if drop_type in (wx.DF_TEXT, wx.DF_UNICODETEXT):
wx.PostEvent(self.receiver, self.evt(id=self.ID_DROP_TEXT, text=self.text_do.GetText()))
elif drop_type == wx.DF_FILENAME:
wx.PostEvent(self.receiver, self.evt(id=self.ID_DROP_FILE, files=self.file_do.GetFilenames()))
assert isinstance(result, object)
return result | identifier_body | |
droptarget.py | __author__ = 'Exter, 0xBADDCAFE'
import wx
class FTDropTarget(wx.DropTarget):
"""
Implements drop target functionality to receive files and text
receiver - any WX class that can bind to events
evt - class that comes from wx.lib.newevent.NewCommandEvent call
class variable ID_DROP_FILE
class variable ID_DROP_TEXT
"""
ID_DROP_FILE = wx.NewId()
ID_DROP_TEXT = wx.NewId()
def __init__(self, receiver, evt):
"""
receiver - any WX class that can bind to events
evt - class that comes from wx.lib.newevent.NewCommandEvent call
"""
wx.DropTarget.__init__(self)
self.composite = wx.DataObjectComposite()
self.text_do = wx.TextDataObject()
self.file_do = wx.FileDataObject()
self.composite.Add(self.text_do)
self.composite.Add(self.file_do)
self.SetDataObject(self.composite)
self.receiver = receiver
self.evt = evt
def OnData(self, x, y, result):
"""Handles dropping files/text """
if self.GetData():
drop_type = self.composite.GetReceivedFormat().GetType()
if drop_type in (wx.DF_TEXT, wx.DF_UNICODETEXT):
|
elif drop_type == wx.DF_FILENAME:
wx.PostEvent(self.receiver, self.evt(id=self.ID_DROP_FILE, files=self.file_do.GetFilenames()))
assert isinstance(result, object)
return result
| wx.PostEvent(self.receiver, self.evt(id=self.ID_DROP_TEXT, text=self.text_do.GetText())) | conditional_block |
droptarget.py | __author__ = 'Exter, 0xBADDCAFE'
import wx
class | (wx.DropTarget):
"""
Implements drop target functionality to receive files and text
receiver - any WX class that can bind to events
evt - class that comes from wx.lib.newevent.NewCommandEvent call
class variable ID_DROP_FILE
class variable ID_DROP_TEXT
"""
ID_DROP_FILE = wx.NewId()
ID_DROP_TEXT = wx.NewId()
def __init__(self, receiver, evt):
"""
receiver - any WX class that can bind to events
evt - class that comes from wx.lib.newevent.NewCommandEvent call
"""
wx.DropTarget.__init__(self)
self.composite = wx.DataObjectComposite()
self.text_do = wx.TextDataObject()
self.file_do = wx.FileDataObject()
self.composite.Add(self.text_do)
self.composite.Add(self.file_do)
self.SetDataObject(self.composite)
self.receiver = receiver
self.evt = evt
def OnData(self, x, y, result):
"""Handles dropping files/text """
if self.GetData():
drop_type = self.composite.GetReceivedFormat().GetType()
if drop_type in (wx.DF_TEXT, wx.DF_UNICODETEXT):
wx.PostEvent(self.receiver, self.evt(id=self.ID_DROP_TEXT, text=self.text_do.GetText()))
elif drop_type == wx.DF_FILENAME:
wx.PostEvent(self.receiver, self.evt(id=self.ID_DROP_FILE, files=self.file_do.GetFilenames()))
assert isinstance(result, object)
return result
| FTDropTarget | identifier_name |
math-transaction.ts | /*
* math-transaction.ts
*
* Copyright (C) 2020 by RStudio, PBC
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
import { Node as ProsemirrorNode } from 'prosemirror-model';
import { findChildrenByMark } from 'prosemirror-utils';
import { getMarkRange, getMarkAttrs } from '../../api/mark';
import { AppendMarkTransactionHandler, MarkTransaction } from '../../api/transaction';
import { delimiterForType, MathType } from '../../api/math';
import { selectionIsWithinRange } from '../../api/selection';
export function mathAppendMarkTransaction(): AppendMarkTransactionHandler | {
return {
name: 'math-marks',
filter: node => node.isTextblock && node.type.allowsMarkType(node.type.schema.marks.math),
append: (tr: MarkTransaction, node: ProsemirrorNode, pos: number) => {
// find all math blocks and convert them to text if they no longer conform
const schema = node.type.schema;
const maths = findChildrenByMark(node, schema.marks.math, true);
for (const math of maths) {
const from = pos + 1 + math.pos;
const mathRange = getMarkRange(tr.doc.resolve(from), schema.marks.math);
if (mathRange) {
const mathAttr = getMarkAttrs(tr.doc, mathRange, schema.marks.math);
if (mathAttr.type === MathType.Inline) {
const mathDelim = delimiterForType(mathAttr.type);
const mathText = tr.doc.textBetween(mathRange.from, mathRange.to);
const charAfter = tr.doc.textBetween(mathRange.to, mathRange.to + 1);
const noDelims = !mathText.startsWith(mathDelim) || !mathText.endsWith(mathDelim);
const selectionIsWithin = selectionIsWithinRange(tr.selection, mathRange);
const spaceAtLeft =
!selectionIsWithin && mathAttr.type === MathType.Inline && mathText.startsWith(mathDelim + ' ');
const spaceAtRight =
!selectionIsWithin && mathAttr.type === MathType.Inline && mathText.endsWith(' ' + mathDelim);
const numberAfter = mathAttr.type === MathType.Inline && /\d/.test(charAfter);
if (noDelims || spaceAtLeft || spaceAtRight || numberAfter) {
tr.removeMark(mathRange.from, mathRange.to, schema.marks.math);
tr.removeStoredMark(schema.marks.math);
}
}
}
}
},
};
} | identifier_body | |
math-transaction.ts | /*
* math-transaction.ts
*
* Copyright (C) 2020 by RStudio, PBC
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
import { Node as ProsemirrorNode } from 'prosemirror-model';
import { findChildrenByMark } from 'prosemirror-utils';
import { getMarkRange, getMarkAttrs } from '../../api/mark';
import { AppendMarkTransactionHandler, MarkTransaction } from '../../api/transaction';
import { delimiterForType, MathType } from '../../api/math';
import { selectionIsWithinRange } from '../../api/selection';
export function mathAppendMarkTransaction(): AppendMarkTransactionHandler {
return {
name: 'math-marks',
filter: node => node.isTextblock && node.type.allowsMarkType(node.type.schema.marks.math),
append: (tr: MarkTransaction, node: ProsemirrorNode, pos: number) => {
// find all math blocks and convert them to text if they no longer conform
const schema = node.type.schema;
const maths = findChildrenByMark(node, schema.marks.math, true);
for (const math of maths) {
const from = pos + 1 + math.pos;
const mathRange = getMarkRange(tr.doc.resolve(from), schema.marks.math);
if (mathRange) {
const mathAttr = getMarkAttrs(tr.doc, mathRange, schema.marks.math);
if (mathAttr.type === MathType.Inline) |
}
}
},
};
}
| {
const mathDelim = delimiterForType(mathAttr.type);
const mathText = tr.doc.textBetween(mathRange.from, mathRange.to);
const charAfter = tr.doc.textBetween(mathRange.to, mathRange.to + 1);
const noDelims = !mathText.startsWith(mathDelim) || !mathText.endsWith(mathDelim);
const selectionIsWithin = selectionIsWithinRange(tr.selection, mathRange);
const spaceAtLeft =
!selectionIsWithin && mathAttr.type === MathType.Inline && mathText.startsWith(mathDelim + ' ');
const spaceAtRight =
!selectionIsWithin && mathAttr.type === MathType.Inline && mathText.endsWith(' ' + mathDelim);
const numberAfter = mathAttr.type === MathType.Inline && /\d/.test(charAfter);
if (noDelims || spaceAtLeft || spaceAtRight || numberAfter) {
tr.removeMark(mathRange.from, mathRange.to, schema.marks.math);
tr.removeStoredMark(schema.marks.math);
}
} | conditional_block |
math-transaction.ts | /*
* math-transaction.ts
*
* Copyright (C) 2020 by RStudio, PBC
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
import { Node as ProsemirrorNode } from 'prosemirror-model';
import { findChildrenByMark } from 'prosemirror-utils';
import { getMarkRange, getMarkAttrs } from '../../api/mark';
import { AppendMarkTransactionHandler, MarkTransaction } from '../../api/transaction';
import { delimiterForType, MathType } from '../../api/math';
import { selectionIsWithinRange } from '../../api/selection';
export function mathAppendMarkTransaction(): AppendMarkTransactionHandler {
return {
name: 'math-marks',
filter: node => node.isTextblock && node.type.allowsMarkType(node.type.schema.marks.math),
append: (tr: MarkTransaction, node: ProsemirrorNode, pos: number) => { | const maths = findChildrenByMark(node, schema.marks.math, true);
for (const math of maths) {
const from = pos + 1 + math.pos;
const mathRange = getMarkRange(tr.doc.resolve(from), schema.marks.math);
if (mathRange) {
const mathAttr = getMarkAttrs(tr.doc, mathRange, schema.marks.math);
if (mathAttr.type === MathType.Inline) {
const mathDelim = delimiterForType(mathAttr.type);
const mathText = tr.doc.textBetween(mathRange.from, mathRange.to);
const charAfter = tr.doc.textBetween(mathRange.to, mathRange.to + 1);
const noDelims = !mathText.startsWith(mathDelim) || !mathText.endsWith(mathDelim);
const selectionIsWithin = selectionIsWithinRange(tr.selection, mathRange);
const spaceAtLeft =
!selectionIsWithin && mathAttr.type === MathType.Inline && mathText.startsWith(mathDelim + ' ');
const spaceAtRight =
!selectionIsWithin && mathAttr.type === MathType.Inline && mathText.endsWith(' ' + mathDelim);
const numberAfter = mathAttr.type === MathType.Inline && /\d/.test(charAfter);
if (noDelims || spaceAtLeft || spaceAtRight || numberAfter) {
tr.removeMark(mathRange.from, mathRange.to, schema.marks.math);
tr.removeStoredMark(schema.marks.math);
}
}
}
}
},
};
} | // find all math blocks and convert them to text if they no longer conform
const schema = node.type.schema; | random_line_split |
math-transaction.ts | /*
* math-transaction.ts
*
* Copyright (C) 2020 by RStudio, PBC
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
import { Node as ProsemirrorNode } from 'prosemirror-model';
import { findChildrenByMark } from 'prosemirror-utils';
import { getMarkRange, getMarkAttrs } from '../../api/mark';
import { AppendMarkTransactionHandler, MarkTransaction } from '../../api/transaction';
import { delimiterForType, MathType } from '../../api/math';
import { selectionIsWithinRange } from '../../api/selection';
export function | (): AppendMarkTransactionHandler {
return {
name: 'math-marks',
filter: node => node.isTextblock && node.type.allowsMarkType(node.type.schema.marks.math),
append: (tr: MarkTransaction, node: ProsemirrorNode, pos: number) => {
// find all math blocks and convert them to text if they no longer conform
const schema = node.type.schema;
const maths = findChildrenByMark(node, schema.marks.math, true);
for (const math of maths) {
const from = pos + 1 + math.pos;
const mathRange = getMarkRange(tr.doc.resolve(from), schema.marks.math);
if (mathRange) {
const mathAttr = getMarkAttrs(tr.doc, mathRange, schema.marks.math);
if (mathAttr.type === MathType.Inline) {
const mathDelim = delimiterForType(mathAttr.type);
const mathText = tr.doc.textBetween(mathRange.from, mathRange.to);
const charAfter = tr.doc.textBetween(mathRange.to, mathRange.to + 1);
const noDelims = !mathText.startsWith(mathDelim) || !mathText.endsWith(mathDelim);
const selectionIsWithin = selectionIsWithinRange(tr.selection, mathRange);
const spaceAtLeft =
!selectionIsWithin && mathAttr.type === MathType.Inline && mathText.startsWith(mathDelim + ' ');
const spaceAtRight =
!selectionIsWithin && mathAttr.type === MathType.Inline && mathText.endsWith(' ' + mathDelim);
const numberAfter = mathAttr.type === MathType.Inline && /\d/.test(charAfter);
if (noDelims || spaceAtLeft || spaceAtRight || numberAfter) {
tr.removeMark(mathRange.from, mathRange.to, schema.marks.math);
tr.removeStoredMark(schema.marks.math);
}
}
}
}
},
};
}
| mathAppendMarkTransaction | identifier_name |
test.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import copy
import logging
from . import transform
from ..util.yaml import load_yaml
logger = logging.getLogger(__name__)
class TestTask(transform.TransformTask):
"""
A task implementing a Gecko test.
"""
@classmethod
def | (cls, kind, path, config, params, loaded_tasks):
# the kind on which this one depends
if len(config.get('kind-dependencies', [])) != 1:
raise Exception("TestTask kinds must have exactly one item in kind-dependencies")
dep_kind = config['kind-dependencies'][0]
# get build tasks, keyed by build platform
builds_by_platform = cls.get_builds_by_platform(dep_kind, loaded_tasks)
# get the test platforms for those build tasks
test_platforms_cfg = load_yaml(path, 'test-platforms.yml')
test_platforms = cls.get_test_platforms(test_platforms_cfg, builds_by_platform)
# expand the test sets for each of those platforms
test_sets_cfg = load_yaml(path, 'test-sets.yml')
test_platforms = cls.expand_tests(test_sets_cfg, test_platforms)
# load the test descriptions
test_descriptions = load_yaml(path, 'tests.yml')
# generate all tests for all test platforms
for test_platform_name, test_platform in test_platforms.iteritems():
for test_name in test_platform['test-names']:
test = copy.deepcopy(test_descriptions[test_name])
test['build-platform'] = test_platform['build-platform']
test['test-platform'] = test_platform_name
test['build-label'] = test_platform['build-label']
test['test-name'] = test_name
if test_platform['nightly']:
test.setdefault('attributes', {})['nightly'] = True
logger.debug("Generating tasks for test {} on platform {}".format(
test_name, test['test-platform']))
yield test
@classmethod
def get_builds_by_platform(cls, dep_kind, loaded_tasks):
"""Find the build tasks on which tests will depend, keyed by
platform/type. Returns a dictionary mapping build platform to task."""
builds_by_platform = {}
for task in loaded_tasks:
if task.kind != dep_kind:
continue
build_platform = task.attributes.get('build_platform')
build_type = task.attributes.get('build_type')
if not build_platform or not build_type:
continue
platform = "{}/{}".format(build_platform, build_type)
if platform in builds_by_platform:
raise Exception("multiple build jobs for " + platform)
builds_by_platform[platform] = task
return builds_by_platform
@classmethod
def get_test_platforms(cls, test_platforms_cfg, builds_by_platform):
"""Get the test platforms for which test tasks should be generated,
based on the available build platforms. Returns a dictionary mapping
test platform to {test-set, build-platform, build-label}."""
test_platforms = {}
for test_platform, cfg in test_platforms_cfg.iteritems():
build_platform = cfg['build-platform']
if build_platform not in builds_by_platform:
logger.warning(
"No build task with platform {}; ignoring test platform {}".format(
build_platform, test_platform))
continue
test_platforms[test_platform] = {
'nightly': builds_by_platform[build_platform].attributes.get('nightly', False),
'build-platform': build_platform,
'build-label': builds_by_platform[build_platform].label,
}
test_platforms[test_platform].update(cfg)
return test_platforms
@classmethod
def expand_tests(cls, test_sets_cfg, test_platforms):
"""Expand the test sets in `test_platforms` out to sets of test names.
Returns a dictionary like `get_test_platforms`, with an additional
`test-names` key for each test platform, containing a set of test
names."""
rv = {}
for test_platform, cfg in test_platforms.iteritems():
test_sets = cfg['test-sets']
if not set(test_sets) < set(test_sets_cfg):
raise Exception(
"Test sets {} for test platform {} are not defined".format(
', '.join(test_sets), test_platform))
test_names = set()
for test_set in test_sets:
test_names.update(test_sets_cfg[test_set])
rv[test_platform] = cfg.copy()
rv[test_platform]['test-names'] = test_names
return rv
| get_inputs | identifier_name |
test.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import copy
import logging
from . import transform
from ..util.yaml import load_yaml
logger = logging.getLogger(__name__)
class TestTask(transform.TransformTask):
"""
A task implementing a Gecko test.
"""
@classmethod
def get_inputs(cls, kind, path, config, params, loaded_tasks):
# the kind on which this one depends
if len(config.get('kind-dependencies', [])) != 1:
raise Exception("TestTask kinds must have exactly one item in kind-dependencies")
dep_kind = config['kind-dependencies'][0]
# get build tasks, keyed by build platform
builds_by_platform = cls.get_builds_by_platform(dep_kind, loaded_tasks)
# get the test platforms for those build tasks
test_platforms_cfg = load_yaml(path, 'test-platforms.yml')
test_platforms = cls.get_test_platforms(test_platforms_cfg, builds_by_platform)
# expand the test sets for each of those platforms
test_sets_cfg = load_yaml(path, 'test-sets.yml')
test_platforms = cls.expand_tests(test_sets_cfg, test_platforms)
# load the test descriptions
test_descriptions = load_yaml(path, 'tests.yml')
# generate all tests for all test platforms
for test_platform_name, test_platform in test_platforms.iteritems():
for test_name in test_platform['test-names']:
test = copy.deepcopy(test_descriptions[test_name])
test['build-platform'] = test_platform['build-platform']
test['test-platform'] = test_platform_name
test['build-label'] = test_platform['build-label']
test['test-name'] = test_name
if test_platform['nightly']:
test.setdefault('attributes', {})['nightly'] = True
logger.debug("Generating tasks for test {} on platform {}".format(
test_name, test['test-platform']))
yield test
@classmethod
def get_builds_by_platform(cls, dep_kind, loaded_tasks):
"""Find the build tasks on which tests will depend, keyed by
platform/type. Returns a dictionary mapping build platform to task."""
builds_by_platform = {}
for task in loaded_tasks:
if task.kind != dep_kind:
continue
build_platform = task.attributes.get('build_platform')
build_type = task.attributes.get('build_type')
if not build_platform or not build_type:
continue
platform = "{}/{}".format(build_platform, build_type)
if platform in builds_by_platform:
raise Exception("multiple build jobs for " + platform)
builds_by_platform[platform] = task
return builds_by_platform
@classmethod
def get_test_platforms(cls, test_platforms_cfg, builds_by_platform):
"""Get the test platforms for which test tasks should be generated,
based on the available build platforms. Returns a dictionary mapping
test platform to {test-set, build-platform, build-label}."""
test_platforms = {}
for test_platform, cfg in test_platforms_cfg.iteritems():
build_platform = cfg['build-platform']
if build_platform not in builds_by_platform:
logger.warning(
"No build task with platform {}; ignoring test platform {}".format(
build_platform, test_platform))
continue
test_platforms[test_platform] = {
'nightly': builds_by_platform[build_platform].attributes.get('nightly', False),
'build-platform': build_platform,
'build-label': builds_by_platform[build_platform].label,
}
test_platforms[test_platform].update(cfg)
return test_platforms
@classmethod
def expand_tests(cls, test_sets_cfg, test_platforms):
"""Expand the test sets in `test_platforms` out to sets of test names.
Returns a dictionary like `get_test_platforms`, with an additional
`test-names` key for each test platform, containing a set of test
names."""
rv = {}
for test_platform, cfg in test_platforms.iteritems():
test_sets = cfg['test-sets']
if not set(test_sets) < set(test_sets_cfg):
raise Exception( | test_names.update(test_sets_cfg[test_set])
rv[test_platform] = cfg.copy()
rv[test_platform]['test-names'] = test_names
return rv | "Test sets {} for test platform {} are not defined".format(
', '.join(test_sets), test_platform))
test_names = set()
for test_set in test_sets: | random_line_split |
test.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import copy
import logging
from . import transform
from ..util.yaml import load_yaml
logger = logging.getLogger(__name__)
class TestTask(transform.TransformTask):
"""
A task implementing a Gecko test.
"""
@classmethod
def get_inputs(cls, kind, path, config, params, loaded_tasks):
# the kind on which this one depends
if len(config.get('kind-dependencies', [])) != 1:
raise Exception("TestTask kinds must have exactly one item in kind-dependencies")
dep_kind = config['kind-dependencies'][0]
# get build tasks, keyed by build platform
builds_by_platform = cls.get_builds_by_platform(dep_kind, loaded_tasks)
# get the test platforms for those build tasks
test_platforms_cfg = load_yaml(path, 'test-platforms.yml')
test_platforms = cls.get_test_platforms(test_platforms_cfg, builds_by_platform)
# expand the test sets for each of those platforms
test_sets_cfg = load_yaml(path, 'test-sets.yml')
test_platforms = cls.expand_tests(test_sets_cfg, test_platforms)
# load the test descriptions
test_descriptions = load_yaml(path, 'tests.yml')
# generate all tests for all test platforms
for test_platform_name, test_platform in test_platforms.iteritems():
for test_name in test_platform['test-names']:
test = copy.deepcopy(test_descriptions[test_name])
test['build-platform'] = test_platform['build-platform']
test['test-platform'] = test_platform_name
test['build-label'] = test_platform['build-label']
test['test-name'] = test_name
if test_platform['nightly']:
test.setdefault('attributes', {})['nightly'] = True
logger.debug("Generating tasks for test {} on platform {}".format(
test_name, test['test-platform']))
yield test
@classmethod
def get_builds_by_platform(cls, dep_kind, loaded_tasks):
"""Find the build tasks on which tests will depend, keyed by
platform/type. Returns a dictionary mapping build platform to task."""
builds_by_platform = {}
for task in loaded_tasks:
if task.kind != dep_kind:
continue
build_platform = task.attributes.get('build_platform')
build_type = task.attributes.get('build_type')
if not build_platform or not build_type:
continue
platform = "{}/{}".format(build_platform, build_type)
if platform in builds_by_platform:
|
builds_by_platform[platform] = task
return builds_by_platform
@classmethod
def get_test_platforms(cls, test_platforms_cfg, builds_by_platform):
"""Get the test platforms for which test tasks should be generated,
based on the available build platforms. Returns a dictionary mapping
test platform to {test-set, build-platform, build-label}."""
test_platforms = {}
for test_platform, cfg in test_platforms_cfg.iteritems():
build_platform = cfg['build-platform']
if build_platform not in builds_by_platform:
logger.warning(
"No build task with platform {}; ignoring test platform {}".format(
build_platform, test_platform))
continue
test_platforms[test_platform] = {
'nightly': builds_by_platform[build_platform].attributes.get('nightly', False),
'build-platform': build_platform,
'build-label': builds_by_platform[build_platform].label,
}
test_platforms[test_platform].update(cfg)
return test_platforms
@classmethod
def expand_tests(cls, test_sets_cfg, test_platforms):
"""Expand the test sets in `test_platforms` out to sets of test names.
Returns a dictionary like `get_test_platforms`, with an additional
`test-names` key for each test platform, containing a set of test
names."""
rv = {}
for test_platform, cfg in test_platforms.iteritems():
test_sets = cfg['test-sets']
if not set(test_sets) < set(test_sets_cfg):
raise Exception(
"Test sets {} for test platform {} are not defined".format(
', '.join(test_sets), test_platform))
test_names = set()
for test_set in test_sets:
test_names.update(test_sets_cfg[test_set])
rv[test_platform] = cfg.copy()
rv[test_platform]['test-names'] = test_names
return rv
| raise Exception("multiple build jobs for " + platform) | conditional_block |
test.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import copy
import logging
from . import transform
from ..util.yaml import load_yaml
logger = logging.getLogger(__name__)
class TestTask(transform.TransformTask):
| """
A task implementing a Gecko test.
"""
@classmethod
def get_inputs(cls, kind, path, config, params, loaded_tasks):
# the kind on which this one depends
if len(config.get('kind-dependencies', [])) != 1:
raise Exception("TestTask kinds must have exactly one item in kind-dependencies")
dep_kind = config['kind-dependencies'][0]
# get build tasks, keyed by build platform
builds_by_platform = cls.get_builds_by_platform(dep_kind, loaded_tasks)
# get the test platforms for those build tasks
test_platforms_cfg = load_yaml(path, 'test-platforms.yml')
test_platforms = cls.get_test_platforms(test_platforms_cfg, builds_by_platform)
# expand the test sets for each of those platforms
test_sets_cfg = load_yaml(path, 'test-sets.yml')
test_platforms = cls.expand_tests(test_sets_cfg, test_platforms)
# load the test descriptions
test_descriptions = load_yaml(path, 'tests.yml')
# generate all tests for all test platforms
for test_platform_name, test_platform in test_platforms.iteritems():
for test_name in test_platform['test-names']:
test = copy.deepcopy(test_descriptions[test_name])
test['build-platform'] = test_platform['build-platform']
test['test-platform'] = test_platform_name
test['build-label'] = test_platform['build-label']
test['test-name'] = test_name
if test_platform['nightly']:
test.setdefault('attributes', {})['nightly'] = True
logger.debug("Generating tasks for test {} on platform {}".format(
test_name, test['test-platform']))
yield test
@classmethod
def get_builds_by_platform(cls, dep_kind, loaded_tasks):
"""Find the build tasks on which tests will depend, keyed by
platform/type. Returns a dictionary mapping build platform to task."""
builds_by_platform = {}
for task in loaded_tasks:
if task.kind != dep_kind:
continue
build_platform = task.attributes.get('build_platform')
build_type = task.attributes.get('build_type')
if not build_platform or not build_type:
continue
platform = "{}/{}".format(build_platform, build_type)
if platform in builds_by_platform:
raise Exception("multiple build jobs for " + platform)
builds_by_platform[platform] = task
return builds_by_platform
@classmethod
def get_test_platforms(cls, test_platforms_cfg, builds_by_platform):
"""Get the test platforms for which test tasks should be generated,
based on the available build platforms. Returns a dictionary mapping
test platform to {test-set, build-platform, build-label}."""
test_platforms = {}
for test_platform, cfg in test_platforms_cfg.iteritems():
build_platform = cfg['build-platform']
if build_platform not in builds_by_platform:
logger.warning(
"No build task with platform {}; ignoring test platform {}".format(
build_platform, test_platform))
continue
test_platforms[test_platform] = {
'nightly': builds_by_platform[build_platform].attributes.get('nightly', False),
'build-platform': build_platform,
'build-label': builds_by_platform[build_platform].label,
}
test_platforms[test_platform].update(cfg)
return test_platforms
@classmethod
def expand_tests(cls, test_sets_cfg, test_platforms):
"""Expand the test sets in `test_platforms` out to sets of test names.
Returns a dictionary like `get_test_platforms`, with an additional
`test-names` key for each test platform, containing a set of test
names."""
rv = {}
for test_platform, cfg in test_platforms.iteritems():
test_sets = cfg['test-sets']
if not set(test_sets) < set(test_sets_cfg):
raise Exception(
"Test sets {} for test platform {} are not defined".format(
', '.join(test_sets), test_platform))
test_names = set()
for test_set in test_sets:
test_names.update(test_sets_cfg[test_set])
rv[test_platform] = cfg.copy()
rv[test_platform]['test-names'] = test_names
return rv | identifier_body | |
keycodes.rs | // https://stackoverflow.
// com/questions/3202629/where-can-i-find-a-list-of-mac-virtual-key-codes
/* keycodes for keys that are independent of keyboard layout */
#![allow(non_upper_case_globals)]
#![allow(dead_code)]
pub const kVK_Return: u16 = 0x24;
pub const kVK_Tab: u16 = 0x30;
pub const kVK_Space: u16 = 0x31;
pub const kVK_Delete: u16 = 0x33;
pub const kVK_Escape: u16 = 0x35;
pub const kVK_Command: u16 = 0x37;
pub const kVK_Shift: u16 = 0x38;
pub const kVK_CapsLock: u16 = 0x39;
pub const kVK_Option: u16 = 0x3A;
pub const kVK_Control: u16 = 0x3B;
pub const kVK_RightShift: u16 = 0x3C;
pub const kVK_RightOption: u16 = 0x3D;
pub const kVK_RightControl: u16 = 0x3E;
pub const kVK_Function: u16 = 0x3F;
pub const kVK_F17: u16 = 0x40;
pub const kVK_VolumeUp: u16 = 0x48;
pub const kVK_VolumeDown: u16 = 0x49; | pub const kVK_F5: u16 = 0x60;
pub const kVK_F6: u16 = 0x61;
pub const kVK_F7: u16 = 0x62;
pub const kVK_F3: u16 = 0x63;
pub const kVK_F8: u16 = 0x64;
pub const kVK_F9: u16 = 0x65;
pub const kVK_F11: u16 = 0x67;
pub const kVK_F13: u16 = 0x69;
pub const kVK_F16: u16 = 0x6A;
pub const kVK_F14: u16 = 0x6B;
pub const kVK_F10: u16 = 0x6D;
pub const kVK_F12: u16 = 0x6F;
pub const kVK_F15: u16 = 0x71;
pub const kVK_Help: u16 = 0x72;
pub const kVK_Home: u16 = 0x73;
pub const kVK_PageUp: u16 = 0x74;
pub const kVK_ForwardDelete: u16 = 0x75;
pub const kVK_F4: u16 = 0x76;
pub const kVK_End: u16 = 0x77;
pub const kVK_F2: u16 = 0x78;
pub const kVK_PageDown: u16 = 0x79;
pub const kVK_F1: u16 = 0x7A;
pub const kVK_LeftArrow: u16 = 0x7B;
pub const kVK_RightArrow: u16 = 0x7C;
pub const kVK_DownArrow: u16 = 0x7D;
pub const kVK_UpArrow: u16 = 0x7E; | pub const kVK_Mute: u16 = 0x4A;
pub const kVK_F18: u16 = 0x4F;
pub const kVK_F19: u16 = 0x50;
pub const kVK_F20: u16 = 0x5A; | random_line_split |
conf_manager.py | #!/usr/bin/env python
# -*- coding: iso-8859-15 -*-
"""
Raspymc is a multimedia centre exposed via a http server built with bottlepy
Copyright (C) 2013 Giancarlo Fringuello
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os, inspect, ConfigParser, pickle
from utils import *
from logger import*
from track_obj import *
CNF_SERVER_PATH = sys.path[0]
CNF_FOLDER_PATH = ""
CNF_PLAYLIST_PATH = CNF_SERVER_PATH + "/config/playlist.pkl"
CNF_FOLDER_PATH = CNF_SERVER_PATH + "/config/"
CNF_CONFIG_FILE = CNF_FOLDER_PATH + "config.ini"
#
# Loads the saved playlist from file
def get_playlist():
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::load_playlist()")
l_playlist = []
try:
with open(CNF_PLAYLIST_PATH, 'rb') as l_input:
l_playlist = pickle.load(l_input)
except:
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_playlist()", "unexisting playlist file: " + CNF_PLAYLIST_PATH)
return l_playlist
def store_playlist(p_list):
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::store_playlist()")
try:
with open(CNF_PLAYLIST_PATH, 'wb') as l_output:
pickle.dump(p_list, l_output, pickle.HIGHEST_PROTOCOL)
except:
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::store_playlist()", "unexisting playlist file: " + CNF_PLAYLIST_PATH)
#
# Loads the configuration from file
def get_folder_path():
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()")
global CNF_FOLDER_PATH
global CNF_PLAYLIST_PATH
global SERVER_PATH
l_config_parser = ConfigParser.ConfigParser()
l_clean_configuration = False
if not os.path.isdir(CNF_FOLDER_PATH): # if config directory does not exist, create it
os.makedirs(CNF_FOLDER_PATH)
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", CNF_FOLDER_PATH + " did not exist, it has been created")
if os.path.isfile(CNF_CONFIG_FILE):
try:
l_config_parser.read(CNF_CONFIG_FILE)
if l_config_parser.has_section("PATH"):
if l_config_parser.has_option("PATH", "CNF_FOLDER_PATH"):
CNF_FOLDER_PATH = l_config_parser.get("PATH","CNF_FOLDER_PATH")
else:
l_clean_configuration = True
else:
# if section does not exist
l_clean_configuration = True
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "unable to load CNF_FOLDER_PATH, using home as default, new config.ini will be generated.")
except:
# if unable to read file (e.g. file damaged)
l_clean_configuration = True
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "exception: unable to load CNF_FOLDER_PATH from " + CNF_CONFIG_FILE + ", using home path as default, new config.ini will be generated.")
else:
l_clean_configuration = True
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "no configuration file found, new config.ini will be generated.")
if l_clean_configuration:
# cleanup config file
for l_section in l_config_parser.sections():
l_config_parser.remove_section(l_section)
l_config_parser.add_section("PATH")
l_config_parser.set("PATH", "CNF_FOLDER_PATH", os.path.expanduser("~"))
l_config_parser.write(file(CNF_CONFIG_FILE, 'w'))
if "" == CNF_FOLDER_PATH:
CNF_FOLDER_PATH = os.path.expanduser("~")
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_FOLDER_PATH = " + CNF_FOLDER_PATH)
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_PLAYLIST_PATH = " + CNF_PLAYLIST_PATH)
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_SERVER_PATH = " + CNF_SERVER_PATH)
return CNF_FOLDER_PATH
def | ():
return SERVER_PATH
def get_playlist_path():
return CNF_PLAYLIST_PATH | get_server_path | identifier_name |
conf_manager.py | #!/usr/bin/env python
# -*- coding: iso-8859-15 -*-
"""
Raspymc is a multimedia centre exposed via a http server built with bottlepy
Copyright (C) 2013 Giancarlo Fringuello
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os, inspect, ConfigParser, pickle
from utils import *
from logger import*
from track_obj import *
CNF_SERVER_PATH = sys.path[0]
CNF_FOLDER_PATH = ""
CNF_PLAYLIST_PATH = CNF_SERVER_PATH + "/config/playlist.pkl"
CNF_FOLDER_PATH = CNF_SERVER_PATH + "/config/"
CNF_CONFIG_FILE = CNF_FOLDER_PATH + "config.ini"
#
# Loads the saved playlist from file
def get_playlist():
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::load_playlist()")
l_playlist = []
try:
with open(CNF_PLAYLIST_PATH, 'rb') as l_input:
l_playlist = pickle.load(l_input)
except:
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_playlist()", "unexisting playlist file: " + CNF_PLAYLIST_PATH)
return l_playlist
def store_playlist(p_list):
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::store_playlist()")
try:
with open(CNF_PLAYLIST_PATH, 'wb') as l_output:
pickle.dump(p_list, l_output, pickle.HIGHEST_PROTOCOL)
except:
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::store_playlist()", "unexisting playlist file: " + CNF_PLAYLIST_PATH)
#
# Loads the configuration from file
def get_folder_path():
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()")
global CNF_FOLDER_PATH
global CNF_PLAYLIST_PATH
global SERVER_PATH
l_config_parser = ConfigParser.ConfigParser()
l_clean_configuration = False
if not os.path.isdir(CNF_FOLDER_PATH): # if config directory does not exist, create it
os.makedirs(CNF_FOLDER_PATH)
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", CNF_FOLDER_PATH + " did not exist, it has been created")
if os.path.isfile(CNF_CONFIG_FILE):
try:
l_config_parser.read(CNF_CONFIG_FILE)
if l_config_parser.has_section("PATH"):
if l_config_parser.has_option("PATH", "CNF_FOLDER_PATH"):
CNF_FOLDER_PATH = l_config_parser.get("PATH","CNF_FOLDER_PATH")
else:
l_clean_configuration = True
else:
# if section does not exist
|
except:
# if unable to read file (e.g. file damaged)
l_clean_configuration = True
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "exception: unable to load CNF_FOLDER_PATH from " + CNF_CONFIG_FILE + ", using home path as default, new config.ini will be generated.")
else:
l_clean_configuration = True
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "no configuration file found, new config.ini will be generated.")
if l_clean_configuration:
# cleanup config file
for l_section in l_config_parser.sections():
l_config_parser.remove_section(l_section)
l_config_parser.add_section("PATH")
l_config_parser.set("PATH", "CNF_FOLDER_PATH", os.path.expanduser("~"))
l_config_parser.write(file(CNF_CONFIG_FILE, 'w'))
if "" == CNF_FOLDER_PATH:
CNF_FOLDER_PATH = os.path.expanduser("~")
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_FOLDER_PATH = " + CNF_FOLDER_PATH)
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_PLAYLIST_PATH = " + CNF_PLAYLIST_PATH)
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_SERVER_PATH = " + CNF_SERVER_PATH)
return CNF_FOLDER_PATH
def get_server_path():
return SERVER_PATH
def get_playlist_path():
return CNF_PLAYLIST_PATH | l_clean_configuration = True
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "unable to load CNF_FOLDER_PATH, using home as default, new config.ini will be generated.") | conditional_block |
conf_manager.py | #!/usr/bin/env python
# -*- coding: iso-8859-15 -*-
"""
Raspymc is a multimedia centre exposed via a http server built with bottlepy
Copyright (C) 2013 Giancarlo Fringuello
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os, inspect, ConfigParser, pickle
from utils import *
from logger import*
from track_obj import *
CNF_SERVER_PATH = sys.path[0]
CNF_FOLDER_PATH = ""
CNF_PLAYLIST_PATH = CNF_SERVER_PATH + "/config/playlist.pkl"
CNF_FOLDER_PATH = CNF_SERVER_PATH + "/config/"
CNF_CONFIG_FILE = CNF_FOLDER_PATH + "config.ini"
#
# Loads the saved playlist from file
def get_playlist():
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::load_playlist()")
l_playlist = []
try:
with open(CNF_PLAYLIST_PATH, 'rb') as l_input:
l_playlist = pickle.load(l_input)
except:
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_playlist()", "unexisting playlist file: " + CNF_PLAYLIST_PATH)
return l_playlist
def store_playlist(p_list):
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::store_playlist()")
try:
with open(CNF_PLAYLIST_PATH, 'wb') as l_output:
pickle.dump(p_list, l_output, pickle.HIGHEST_PROTOCOL)
except:
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::store_playlist()", "unexisting playlist file: " + CNF_PLAYLIST_PATH)
#
# Loads the configuration from file
def get_folder_path():
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()")
global CNF_FOLDER_PATH
global CNF_PLAYLIST_PATH
global SERVER_PATH
l_config_parser = ConfigParser.ConfigParser()
l_clean_configuration = False
if not os.path.isdir(CNF_FOLDER_PATH): # if config directory does not exist, create it
os.makedirs(CNF_FOLDER_PATH)
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", CNF_FOLDER_PATH + " did not exist, it has been created")
if os.path.isfile(CNF_CONFIG_FILE):
try:
l_config_parser.read(CNF_CONFIG_FILE)
if l_config_parser.has_section("PATH"):
if l_config_parser.has_option("PATH", "CNF_FOLDER_PATH"):
CNF_FOLDER_PATH = l_config_parser.get("PATH","CNF_FOLDER_PATH")
else:
l_clean_configuration = True
else:
# if section does not exist
l_clean_configuration = True
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "unable to load CNF_FOLDER_PATH, using home as default, new config.ini will be generated.")
except:
# if unable to read file (e.g. file damaged)
l_clean_configuration = True
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "exception: unable to load CNF_FOLDER_PATH from " + CNF_CONFIG_FILE + ", using home path as default, new config.ini will be generated.")
else:
l_clean_configuration = True
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "no configuration file found, new config.ini will be generated.")
if l_clean_configuration:
# cleanup config file
for l_section in l_config_parser.sections():
l_config_parser.remove_section(l_section)
l_config_parser.add_section("PATH")
l_config_parser.set("PATH", "CNF_FOLDER_PATH", os.path.expanduser("~"))
l_config_parser.write(file(CNF_CONFIG_FILE, 'w'))
|
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_FOLDER_PATH = " + CNF_FOLDER_PATH)
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_PLAYLIST_PATH = " + CNF_PLAYLIST_PATH)
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_SERVER_PATH = " + CNF_SERVER_PATH)
return CNF_FOLDER_PATH
def get_server_path():
return SERVER_PATH
def get_playlist_path():
return CNF_PLAYLIST_PATH | if "" == CNF_FOLDER_PATH:
CNF_FOLDER_PATH = os.path.expanduser("~")
| random_line_split |
conf_manager.py | #!/usr/bin/env python
# -*- coding: iso-8859-15 -*-
"""
Raspymc is a multimedia centre exposed via a http server built with bottlepy
Copyright (C) 2013 Giancarlo Fringuello
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os, inspect, ConfigParser, pickle
from utils import *
from logger import*
from track_obj import *
CNF_SERVER_PATH = sys.path[0]
CNF_FOLDER_PATH = ""
CNF_PLAYLIST_PATH = CNF_SERVER_PATH + "/config/playlist.pkl"
CNF_FOLDER_PATH = CNF_SERVER_PATH + "/config/"
CNF_CONFIG_FILE = CNF_FOLDER_PATH + "config.ini"
#
# Loads the saved playlist from file
def get_playlist():
|
def store_playlist(p_list):
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::store_playlist()")
try:
with open(CNF_PLAYLIST_PATH, 'wb') as l_output:
pickle.dump(p_list, l_output, pickle.HIGHEST_PROTOCOL)
except:
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::store_playlist()", "unexisting playlist file: " + CNF_PLAYLIST_PATH)
#
# Loads the configuration from file
def get_folder_path():
log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()")
global CNF_FOLDER_PATH
global CNF_PLAYLIST_PATH
global SERVER_PATH
l_config_parser = ConfigParser.ConfigParser()
l_clean_configuration = False
if not os.path.isdir(CNF_FOLDER_PATH): # if config directory does not exist, create it
os.makedirs(CNF_FOLDER_PATH)
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", CNF_FOLDER_PATH + " did not exist, it has been created")
if os.path.isfile(CNF_CONFIG_FILE):
try:
l_config_parser.read(CNF_CONFIG_FILE)
if l_config_parser.has_section("PATH"):
if l_config_parser.has_option("PATH", "CNF_FOLDER_PATH"):
CNF_FOLDER_PATH = l_config_parser.get("PATH","CNF_FOLDER_PATH")
else:
l_clean_configuration = True
else:
# if section does not exist
l_clean_configuration = True
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "unable to load CNF_FOLDER_PATH, using home as default, new config.ini will be generated.")
except:
# if unable to read file (e.g. file damaged)
l_clean_configuration = True
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "exception: unable to load CNF_FOLDER_PATH from " + CNF_CONFIG_FILE + ", using home path as default, new config.ini will be generated.")
else:
l_clean_configuration = True
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "no configuration file found, new config.ini will be generated.")
if l_clean_configuration:
# cleanup config file
for l_section in l_config_parser.sections():
l_config_parser.remove_section(l_section)
l_config_parser.add_section("PATH")
l_config_parser.set("PATH", "CNF_FOLDER_PATH", os.path.expanduser("~"))
l_config_parser.write(file(CNF_CONFIG_FILE, 'w'))
if "" == CNF_FOLDER_PATH:
CNF_FOLDER_PATH = os.path.expanduser("~")
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_FOLDER_PATH = " + CNF_FOLDER_PATH)
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_PLAYLIST_PATH = " + CNF_PLAYLIST_PATH)
log(LOG_VERBOSE, inspect.currentframe().f_lineno, "conf_manager.py::load_configuration()", "CNF_SERVER_PATH = " + CNF_SERVER_PATH)
return CNF_FOLDER_PATH
def get_server_path():
return SERVER_PATH
def get_playlist_path():
return CNF_PLAYLIST_PATH | log(LOG_INFO, inspect.currentframe().f_lineno, "conf_manager.py::load_playlist()")
l_playlist = []
try:
with open(CNF_PLAYLIST_PATH, 'rb') as l_input:
l_playlist = pickle.load(l_input)
except:
log(LOG_WARNING, inspect.currentframe().f_lineno, "conf_manager.py::load_playlist()", "unexisting playlist file: " + CNF_PLAYLIST_PATH)
return l_playlist | identifier_body |
lb.config.ts | /* eslint-disable */
/**
* @module LoopBackConfig
* @description
*
* The LoopBackConfig module help developers to externally
* configure the base url and api version for loopback.io
*
* Example
*
* import { LoopBackConfig } from './sdk';
*
* @Component() // No metadata needed for this module
*
* export class MyApp {
* constructor() {
* LoopBackConfig.setBaseURL('http://localhost:3000');
* LoopBackConfig.setApiVersion('api');
* }
* }
**/
export class LoopBackConfig {
private static path: string = '//0.0.0.0:3000';
private static version: string | number = 'api/v3';
private static authPrefix: string = '';
private static debug: boolean = true;
private static filterOn: string = 'headers';
private static whereOn: string = 'headers';
private static secure: boolean = false;
private static withCredentials: boolean = false;
public static setApiVersion(version: string = 'api'): void {
LoopBackConfig.version = version;
}
public static getApiVersion(): string | number {
return LoopBackConfig.version;
}
public static setBaseURL(url: string = '/'): void {
LoopBackConfig.path = url;
}
public static getPath(): string {
return LoopBackConfig.path;
}
public static setAuthPrefix(authPrefix: string = ''): void {
LoopBackConfig.authPrefix = authPrefix;
}
public static getAuthPrefix(): string {
return LoopBackConfig.authPrefix;
}
| LoopBackConfig.debug = isEnabled;
}
public static debuggable(): boolean {
return LoopBackConfig.debug;
}
public static filterOnUrl(): void {
LoopBackConfig.filterOn = 'url';
}
public static filterOnHeaders(): void {
LoopBackConfig.filterOn = 'headers';
}
public static whereOnUrl(): void {
LoopBackConfig.whereOn = 'url';
}
public static whereOnHeaders(): void {
LoopBackConfig.whereOn = 'headers';
}
public static isHeadersFilteringSet(): boolean {
return (LoopBackConfig.filterOn === 'headers');
}
public static isHeadersWhereSet(): boolean {
return (LoopBackConfig.whereOn === 'headers');
}
public static setSecureWebSockets(): void {
LoopBackConfig.secure = true;
}
public static unsetSecureWebSockets(): void {
LoopBackConfig.secure = false;
}
public static isSecureWebSocketsSet(): boolean {
return LoopBackConfig.secure;
}
public static setRequestOptionsCredentials(withCredentials: boolean = false): void {
LoopBackConfig.withCredentials = withCredentials;
}
public static getRequestOptionsCredentials(): boolean {
return LoopBackConfig.withCredentials;
}
} | public static setDebugMode(isEnabled: boolean): void { | random_line_split |
lb.config.ts | /* eslint-disable */
/**
* @module LoopBackConfig
* @description
*
* The LoopBackConfig module help developers to externally
* configure the base url and api version for loopback.io
*
* Example
*
* import { LoopBackConfig } from './sdk';
*
* @Component() // No metadata needed for this module
*
* export class MyApp {
* constructor() {
* LoopBackConfig.setBaseURL('http://localhost:3000');
* LoopBackConfig.setApiVersion('api');
* }
* }
**/
export class LoopBackConfig {
private static path: string = '//0.0.0.0:3000';
private static version: string | number = 'api/v3';
private static authPrefix: string = '';
private static debug: boolean = true;
private static filterOn: string = 'headers';
private static whereOn: string = 'headers';
private static secure: boolean = false;
private static withCredentials: boolean = false;
public static setApiVersion(version: string = 'api'): void {
LoopBackConfig.version = version;
}
public static getApiVersion(): string | number {
return LoopBackConfig.version;
}
public static setBaseURL(url: string = '/'): void {
LoopBackConfig.path = url;
}
public static getPath(): string {
return LoopBackConfig.path;
}
public static setAuthPrefix(authPrefix: string = ''): void {
LoopBackConfig.authPrefix = authPrefix;
}
public static getAuthPrefix(): string {
return LoopBackConfig.authPrefix;
}
public static setDebugMode(isEnabled: boolean): void {
LoopBackConfig.debug = isEnabled;
}
public static debuggable(): boolean {
return LoopBackConfig.debug;
}
public static filterOnUrl(): void {
LoopBackConfig.filterOn = 'url';
}
public static filterOnHeaders(): void {
LoopBackConfig.filterOn = 'headers';
}
public static whereOnUrl(): void {
LoopBackConfig.whereOn = 'url';
}
public static w | ): void {
LoopBackConfig.whereOn = 'headers';
}
public static isHeadersFilteringSet(): boolean {
return (LoopBackConfig.filterOn === 'headers');
}
public static isHeadersWhereSet(): boolean {
return (LoopBackConfig.whereOn === 'headers');
}
public static setSecureWebSockets(): void {
LoopBackConfig.secure = true;
}
public static unsetSecureWebSockets(): void {
LoopBackConfig.secure = false;
}
public static isSecureWebSocketsSet(): boolean {
return LoopBackConfig.secure;
}
public static setRequestOptionsCredentials(withCredentials: boolean = false): void {
LoopBackConfig.withCredentials = withCredentials;
}
public static getRequestOptionsCredentials(): boolean {
return LoopBackConfig.withCredentials;
}
}
| hereOnHeaders( | identifier_name |
lb.config.ts | /* eslint-disable */
/**
* @module LoopBackConfig
* @description
*
* The LoopBackConfig module help developers to externally
* configure the base url and api version for loopback.io
*
* Example
*
* import { LoopBackConfig } from './sdk';
*
* @Component() // No metadata needed for this module
*
* export class MyApp {
* constructor() {
* LoopBackConfig.setBaseURL('http://localhost:3000');
* LoopBackConfig.setApiVersion('api');
* }
* }
**/
export class LoopBackConfig {
private static path: string = '//0.0.0.0:3000';
private static version: string | number = 'api/v3';
private static authPrefix: string = '';
private static debug: boolean = true;
private static filterOn: string = 'headers';
private static whereOn: string = 'headers';
private static secure: boolean = false;
private static withCredentials: boolean = false;
public static setApiVersion(version: string = 'api'): void {
LoopBackConfig.version = version;
}
public static getApiVersion(): string | number {
return LoopBackConfig.version;
}
public static setBaseURL(url: string = '/'): void { |
public static getPath(): string {
return LoopBackConfig.path;
}
public static setAuthPrefix(authPrefix: string = ''): void {
LoopBackConfig.authPrefix = authPrefix;
}
public static getAuthPrefix(): string {
return LoopBackConfig.authPrefix;
}
public static setDebugMode(isEnabled: boolean): void {
LoopBackConfig.debug = isEnabled;
}
public static debuggable(): boolean {
return LoopBackConfig.debug;
}
public static filterOnUrl(): void {
LoopBackConfig.filterOn = 'url';
}
public static filterOnHeaders(): void {
LoopBackConfig.filterOn = 'headers';
}
public static whereOnUrl(): void {
LoopBackConfig.whereOn = 'url';
}
public static whereOnHeaders(): void {
LoopBackConfig.whereOn = 'headers';
}
public static isHeadersFilteringSet(): boolean {
return (LoopBackConfig.filterOn === 'headers');
}
public static isHeadersWhereSet(): boolean {
return (LoopBackConfig.whereOn === 'headers');
}
public static setSecureWebSockets(): void {
LoopBackConfig.secure = true;
}
public static unsetSecureWebSockets(): void {
LoopBackConfig.secure = false;
}
public static isSecureWebSocketsSet(): boolean {
return LoopBackConfig.secure;
}
public static setRequestOptionsCredentials(withCredentials: boolean = false): void {
LoopBackConfig.withCredentials = withCredentials;
}
public static getRequestOptionsCredentials(): boolean {
return LoopBackConfig.withCredentials;
}
}
|
LoopBackConfig.path = url;
}
| identifier_body |
index.tsx | import { FC, useMemo } from 'react';
type Img = '20x20' | '30x30' | '36x60' | '40x40' | '82x20';
type ImageMap = {
[key in Img]: {
src: string;
height: number;
width: number;
};
};
export type LINEButtonProps = {
text?: string;
image?: Img;
alt?: string;
};
const imgSet: ImageMap = {
'20x20': {
src: 'http://i.imgur.com/voMN0NH.png', | height: 30,
width: 30,
},
'36x60': {
src: 'http://i.imgur.com/5sEp1TC.png',
height: 60,
width: 36,
},
'40x40': {
src: 'http://i.imgur.com/ZoU91JG.png',
height: 40,
width: 40,
},
'82x20': {
src: 'http://i.imgur.com/cfjCxrh.png',
height: 20,
width: 82,
},
};
const LINEButton: FC<LINEButtonProps> = ({ image = '20x20', text = '', alt = 'LINEで送る' }) => {
const currentImg = useMemo(() => imgSet[image], [image]);
return (
<div>
<a href={`http://line.me/R/msg/text/?${encodeURIComponent(text)}`}>
<img src={currentImg.src} style={{ height: currentImg.height, width: currentImg.width }} alt={alt} />
</a>
</div>
);
};
export default LINEButton; | height: 20,
width: 20,
},
'30x30': {
src: 'http://i.imgur.com/Lkq9vFO.png', | random_line_split |
self_authentication.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// https://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
//! Self-authentication example.
// For explanation of lint checks, run `rustc -W help`.
#![deny(unsafe_code)]
#![warn(
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
#[macro_use]
extern crate unwrap;
use rand::thread_rng;
use safe_app::CoreError;
use safe_authenticator::{AuthError, Authenticator};
use safe_nd::{ClientFullId, Error as SndError};
fn main() {
unwrap!(safe_core::utils::logging::init(true));
let mut secret_0 = String::new();
let mut secret_1 = String::new();
println!("\nDo you already have an account created (enter Y for yes)?");
let mut user_option = String::new();
let _ = std::io::stdin().read_line(&mut user_option);
user_option = user_option.trim().to_string();
if user_option != "Y" && user_option != "y" {
println!("\n\tAccount Creation");
println!("\t================");
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
| // FIXME - pass secret key of the wallet as an argument
let client_id = ClientFullId::new_bls(&mut thread_rng());
// Account Creation
println!("\nTrying to create an account...");
match Authenticator::create_acc(secret_0.as_str(), secret_1.as_str(), client_id, || ()) {
Ok(_) => (),
Err(AuthError::CoreError(CoreError::DataError(SndError::LoginPacketExists))) => {
println!(
"ERROR: This domain is already taken. Please retry with different \
locator and/or password"
);
return;
}
Err(err) => panic!("{:?}", err),
}
println!("Account created successfully!");
println!("\n\n\tAuto Account Login");
println!("\t==================");
// Log into the created account.
{
println!("\nTrying to log into the created account using supplied credentials...");
let _ = unwrap!(Authenticator::login(secret_0, secret_1, || ()));
println!("Account login successful!");
}
}
println!("\n\n\tManual Account Login");
println!("\t====================");
loop {
secret_0 = String::new();
secret_1 = String::new();
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// Log into the created account.
{
println!("\nTrying to log in...");
match Authenticator::login(secret_0, secret_1, || ()) {
Ok(_) => {
println!("Account login successful!");
break;
}
Err(error) => println!("ERROR: Account login failed!\nReason: {:?}\n\n", error),
}
}
}
} | println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
| random_line_split |
self_authentication.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// https://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
//! Self-authentication example.
// For explanation of lint checks, run `rustc -W help`.
#![deny(unsafe_code)]
#![warn(
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
#[macro_use]
extern crate unwrap;
use rand::thread_rng;
use safe_app::CoreError;
use safe_authenticator::{AuthError, Authenticator};
use safe_nd::{ClientFullId, Error as SndError};
fn main() | {
unwrap!(safe_core::utils::logging::init(true));
let mut secret_0 = String::new();
let mut secret_1 = String::new();
println!("\nDo you already have an account created (enter Y for yes)?");
let mut user_option = String::new();
let _ = std::io::stdin().read_line(&mut user_option);
user_option = user_option.trim().to_string();
if user_option != "Y" && user_option != "y" {
println!("\n\tAccount Creation");
println!("\t================");
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// FIXME - pass secret key of the wallet as an argument
let client_id = ClientFullId::new_bls(&mut thread_rng());
// Account Creation
println!("\nTrying to create an account...");
match Authenticator::create_acc(secret_0.as_str(), secret_1.as_str(), client_id, || ()) {
Ok(_) => (),
Err(AuthError::CoreError(CoreError::DataError(SndError::LoginPacketExists))) => {
println!(
"ERROR: This domain is already taken. Please retry with different \
locator and/or password"
);
return;
}
Err(err) => panic!("{:?}", err),
}
println!("Account created successfully!");
println!("\n\n\tAuto Account Login");
println!("\t==================");
// Log into the created account.
{
println!("\nTrying to log into the created account using supplied credentials...");
let _ = unwrap!(Authenticator::login(secret_0, secret_1, || ()));
println!("Account login successful!");
}
}
println!("\n\n\tManual Account Login");
println!("\t====================");
loop {
secret_0 = String::new();
secret_1 = String::new();
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// Log into the created account.
{
println!("\nTrying to log in...");
match Authenticator::login(secret_0, secret_1, || ()) {
Ok(_) => {
println!("Account login successful!");
break;
}
Err(error) => println!("ERROR: Account login failed!\nReason: {:?}\n\n", error),
}
}
}
} | identifier_body | |
self_authentication.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// https://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
//! Self-authentication example.
// For explanation of lint checks, run `rustc -W help`.
#![deny(unsafe_code)]
#![warn(
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
#[macro_use]
extern crate unwrap;
use rand::thread_rng;
use safe_app::CoreError;
use safe_authenticator::{AuthError, Authenticator};
use safe_nd::{ClientFullId, Error as SndError};
fn | () {
unwrap!(safe_core::utils::logging::init(true));
let mut secret_0 = String::new();
let mut secret_1 = String::new();
println!("\nDo you already have an account created (enter Y for yes)?");
let mut user_option = String::new();
let _ = std::io::stdin().read_line(&mut user_option);
user_option = user_option.trim().to_string();
if user_option != "Y" && user_option != "y" {
println!("\n\tAccount Creation");
println!("\t================");
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// FIXME - pass secret key of the wallet as an argument
let client_id = ClientFullId::new_bls(&mut thread_rng());
// Account Creation
println!("\nTrying to create an account...");
match Authenticator::create_acc(secret_0.as_str(), secret_1.as_str(), client_id, || ()) {
Ok(_) => (),
Err(AuthError::CoreError(CoreError::DataError(SndError::LoginPacketExists))) => {
println!(
"ERROR: This domain is already taken. Please retry with different \
locator and/or password"
);
return;
}
Err(err) => panic!("{:?}", err),
}
println!("Account created successfully!");
println!("\n\n\tAuto Account Login");
println!("\t==================");
// Log into the created account.
{
println!("\nTrying to log into the created account using supplied credentials...");
let _ = unwrap!(Authenticator::login(secret_0, secret_1, || ()));
println!("Account login successful!");
}
}
println!("\n\n\tManual Account Login");
println!("\t====================");
loop {
secret_0 = String::new();
secret_1 = String::new();
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// Log into the created account.
{
println!("\nTrying to log in...");
match Authenticator::login(secret_0, secret_1, || ()) {
Ok(_) => {
println!("Account login successful!");
break;
}
Err(error) => println!("ERROR: Account login failed!\nReason: {:?}\n\n", error),
}
}
}
}
| main | identifier_name |
Lab.EmployeeCollection.js | (function (window, undefined) {
'use strict';
namespace('Lab');
function EmployeeCollection(json) {
/**
*
* @type {Array<Lab.Employee>}
*/
this.collection = [];
this.init(json);
}
EmployeeCollection.prototype = {
constructor : EmployeeCollection,
/**
*
* @param json {Array}
*/
init : function (json) {
| this.json = json;
for (var i = 0, ii = json.length; i < ii; i++) {
this.collection.push(new Lab.Employee(json[i].properties));
}
},
getCollection : function () {
return this.collection;
},
getSize : function () {
return this.collection.length;
},
getMaxDistanceToWork : function () {
var collection = this.getCollection(),
item = _.max(collection, function (item) {
return parseFloat(item.getDistanceToWork());
});
return item.getDistanceToWork();
},
getMinDistanceToWork : function () {
var collection = this.getCollection(),
item = _.min(collection, function (item) {
return parseFloat(item.getDistanceToWork());
});
return item.getDistanceToWork();
}
};
Lab.EmployeeCollection = EmployeeCollection;
}(window)); | random_line_split | |
Lab.EmployeeCollection.js | (function (window, undefined) {
'use strict';
namespace('Lab');
function | (json) {
/**
*
* @type {Array<Lab.Employee>}
*/
this.collection = [];
this.init(json);
}
EmployeeCollection.prototype = {
constructor : EmployeeCollection,
/**
*
* @param json {Array}
*/
init : function (json) {
this.json = json;
for (var i = 0, ii = json.length; i < ii; i++) {
this.collection.push(new Lab.Employee(json[i].properties));
}
},
getCollection : function () {
return this.collection;
},
getSize : function () {
return this.collection.length;
},
getMaxDistanceToWork : function () {
var collection = this.getCollection(),
item = _.max(collection, function (item) {
return parseFloat(item.getDistanceToWork());
});
return item.getDistanceToWork();
},
getMinDistanceToWork : function () {
var collection = this.getCollection(),
item = _.min(collection, function (item) {
return parseFloat(item.getDistanceToWork());
});
return item.getDistanceToWork();
}
};
Lab.EmployeeCollection = EmployeeCollection;
}(window)); | EmployeeCollection | identifier_name |
Lab.EmployeeCollection.js | (function (window, undefined) {
'use strict';
namespace('Lab');
function EmployeeCollection(json) |
EmployeeCollection.prototype = {
constructor : EmployeeCollection,
/**
*
* @param json {Array}
*/
init : function (json) {
this.json = json;
for (var i = 0, ii = json.length; i < ii; i++) {
this.collection.push(new Lab.Employee(json[i].properties));
}
},
getCollection : function () {
return this.collection;
},
getSize : function () {
return this.collection.length;
},
getMaxDistanceToWork : function () {
var collection = this.getCollection(),
item = _.max(collection, function (item) {
return parseFloat(item.getDistanceToWork());
});
return item.getDistanceToWork();
},
getMinDistanceToWork : function () {
var collection = this.getCollection(),
item = _.min(collection, function (item) {
return parseFloat(item.getDistanceToWork());
});
return item.getDistanceToWork();
}
};
Lab.EmployeeCollection = EmployeeCollection;
}(window)); | {
/**
*
* @type {Array<Lab.Employee>}
*/
this.collection = [];
this.init(json);
} | identifier_body |
Lab.EmployeeCollection.js | (function (window, undefined) {
'use strict';
namespace('Lab');
function EmployeeCollection(json) {
/**
*
* @type {Array<Lab.Employee>}
*/
this.collection = [];
this.init(json);
}
EmployeeCollection.prototype = {
constructor : EmployeeCollection,
/**
*
* @param json {Array}
*/
init : function (json) {
this.json = json;
for (var i = 0, ii = json.length; i < ii; i++) |
},
getCollection : function () {
return this.collection;
},
getSize : function () {
return this.collection.length;
},
getMaxDistanceToWork : function () {
var collection = this.getCollection(),
item = _.max(collection, function (item) {
return parseFloat(item.getDistanceToWork());
});
return item.getDistanceToWork();
},
getMinDistanceToWork : function () {
var collection = this.getCollection(),
item = _.min(collection, function (item) {
return parseFloat(item.getDistanceToWork());
});
return item.getDistanceToWork();
}
};
Lab.EmployeeCollection = EmployeeCollection;
}(window)); | {
this.collection.push(new Lab.Employee(json[i].properties));
} | conditional_block |
bezier.py | # pygsear
# Copyright (C) 2003 Lee Harr
#
#
# This file is part of pygsear.
#
# pygsear is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pygsear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygsear; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import copy
import pygame
from pygame.locals import K_RETURN, K_a, K_s, K_x, K_z, K_UP, K_DOWN
from pygsear.Drawable import Square, Layer
from pygsear.Game import Game
from pygsear.Event import MOUSEBUTTONDOWN_Event, MOUSEBUTTONUP_Event, KEYUP_Event
from pygsear.locals import WHITE, BLACK, COLORS, BLUE, RED, GREEN, ORANGE, YELLOW, TRANSPARENT, LGREEN, LBLUE
class BGame(Game):
def initialize(self):
s = Bezier()
s.center()
self.s = s
self.sprites.add(s)
self.events.add(MOUSEBUTTONDOWN_Event(callback=s.mouseClicked))
self.events.add(MOUSEBUTTONUP_Event(callback=s.mouseReleased))
self.events.add(KEYUP_Event(key=K_RETURN, callback=s.report))
self.events.add(KEYUP_Event(key=K_a, callback=s.add_point_front))
self.events.add(KEYUP_Event(key=K_s, callback=s.add_point_end))
self.events.add(KEYUP_Event(key=K_x, callback=s.remove_point))
self.events.add(KEYUP_Event(key=K_z, callback=s.toggle_fun_display))
self.events.add(KEYUP_Event(key=K_UP, callback=s.add_report_point))
self.events.add(KEYUP_Event(key=K_DOWN, callback=s.remove_report_point))
def checkEvents(self):
Game.checkEvents(self)
self.s.move_point()
class Bezier(Layer):
def __init__(self, points=None):
Layer.__init__(self)
self.image.set_colorkey(TRANSPARENT)
w, h = self.image.get_size()
self.w = w
self.h = h
self.selected = 0
self.button_held = 0
self.display_fun = 1
self.n_report = 10
if points is None:
points = [[30, 53], [265, 571], [462, 445], [695, 56], [781, 572]]
self.points = points
self.drawSpline()
self.drawFun()
def report(self, ev):
step = int(len(self.spline) / self.n_report)
points = self.spline[::step]
points.append(self.spline[-1])
print [(int(x), int(y)) for x, y in points]
self.show_report_points()
def add_report_point(self, ev):
self.n_report += 1
self.drawSpline()
self.show_report_points()
def remove_report_point(self, ev):
self.n_report -= 1
self.drawSpline()
self.show_report_points()
def show_report_points(self):
step = int(len(self.spline) / self.n_report)
points = self.spline[::step]
points.append(self.spline[-1])
for p in points:
pygame.draw.rect(self.image, WHITE, (p, (3, 3)))
def add_point_front(self, ev):
self.points.insert(0, [250, 250])
self.drawSpline()
self.drawFun()
def add_point_end(self, ev):
self.points.append([250, 250])
self.drawSpline()
self.drawFun()
def remove_point(self, ev):
if len(self.points) > 1:
try:
self.points.pop(self.selected)
except IndexError:
try:
self.points.pop()
except IndexError:
pass
self.drawSpline()
self.drawFun()
def toggle_fun_display(self, ev):
self.display_fun = not self.display_fun
self.drawFun()
def mouseClicked(self, ev):
self.button_held = 1
x, y = pygame.mouse.get_pos()
selected = None
rmin = 1000000
for point in self.points:
px, py = point
dx, dy = x-px, y-py
r = dx*dx + dy*dy
if r < rmin:
selected = point
rmin = r
self.selected = self.points.index(selected)
def mouseReleased(self, ev):
self.button_held = 0
def move_point(self):
if self.button_held:
self.points[self.selected][:] = list(pygame.mouse.get_pos())
self.drawSpline()
def drawSpline(self):
self.image.fill(TRANSPARENT)
self.spline = []
# Draw the points
for p in self.points:
|
# Draw the control lines
if len(self.points) > 2:
pygame.draw.lines(self.image, GREEN, False, self.points)
# Draw the curve
step = 1.0 / self.w
t = step
pold = self.points[0]
for k in range(self.w):
pi = copy.deepcopy(self.points)
for j in range(len(self.points)-1, 0, -1):
for i in range(j):
pi[i][0] = (1-t)*pi[i][0] + t*pi[i+1][0]
pi[i][1] = (1-t)*pi[i][1] + t*pi[i+1][1]
p = pi[0]
self.spline.append(p)
pygame.draw.line(self.image, RED, pold, p)
pold = p
t += step
def drawFun(self):
self.window.bg.fill(BLACK)
if self.display_fun:
step = 1.0 / self.w
t = step
n = len(self.points)
B = [0 for x in range(n)]
Bold = copy.copy(B)
B[0] = self.h
for k in range(self.w):
Bold = copy.copy(B)
for j in range(n):
for i in range(j, 0, -1):
B[i] = (1-t)*B[i] + t*B[i-1]
for m in range(n):
color = [RED, GREEN, ORANGE, BLUE, WHITE, YELLOW, LGREEN, LBLUE][m % 8]
p1 = (k, Bold[m])
p2 = (k, B[m])
pygame.draw.line(self.window.bg, color, p1, p2)
t += step
if __name__ == '__main__':
g = BGame()
g.mainloop()
| pygame.draw.rect(self.image, BLUE, (p, (3, 3))) | conditional_block |
bezier.py | # pygsear
# Copyright (C) 2003 Lee Harr
#
#
# This file is part of pygsear.
#
# pygsear is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pygsear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygsear; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import copy
import pygame
from pygame.locals import K_RETURN, K_a, K_s, K_x, K_z, K_UP, K_DOWN
from pygsear.Drawable import Square, Layer
from pygsear.Game import Game
from pygsear.Event import MOUSEBUTTONDOWN_Event, MOUSEBUTTONUP_Event, KEYUP_Event
from pygsear.locals import WHITE, BLACK, COLORS, BLUE, RED, GREEN, ORANGE, YELLOW, TRANSPARENT, LGREEN, LBLUE
class BGame(Game):
def initialize(self):
s = Bezier()
s.center()
self.s = s
self.sprites.add(s)
self.events.add(MOUSEBUTTONDOWN_Event(callback=s.mouseClicked))
self.events.add(MOUSEBUTTONUP_Event(callback=s.mouseReleased))
self.events.add(KEYUP_Event(key=K_RETURN, callback=s.report))
self.events.add(KEYUP_Event(key=K_a, callback=s.add_point_front))
self.events.add(KEYUP_Event(key=K_s, callback=s.add_point_end))
self.events.add(KEYUP_Event(key=K_x, callback=s.remove_point))
self.events.add(KEYUP_Event(key=K_z, callback=s.toggle_fun_display))
self.events.add(KEYUP_Event(key=K_UP, callback=s.add_report_point))
self.events.add(KEYUP_Event(key=K_DOWN, callback=s.remove_report_point))
def checkEvents(self):
Game.checkEvents(self)
self.s.move_point()
class Bezier(Layer):
def __init__(self, points=None):
Layer.__init__(self)
self.image.set_colorkey(TRANSPARENT)
w, h = self.image.get_size()
self.w = w
self.h = h
self.selected = 0
self.button_held = 0
self.display_fun = 1
self.n_report = 10
if points is None:
points = [[30, 53], [265, 571], [462, 445], [695, 56], [781, 572]]
self.points = points
self.drawSpline()
self.drawFun()
def report(self, ev):
step = int(len(self.spline) / self.n_report)
points = self.spline[::step]
points.append(self.spline[-1])
print [(int(x), int(y)) for x, y in points]
self.show_report_points()
def add_report_point(self, ev):
self.n_report += 1
self.drawSpline()
self.show_report_points()
def remove_report_point(self, ev):
self.n_report -= 1
self.drawSpline()
self.show_report_points()
def show_report_points(self):
step = int(len(self.spline) / self.n_report)
points = self.spline[::step]
points.append(self.spline[-1])
for p in points:
pygame.draw.rect(self.image, WHITE, (p, (3, 3)))
def add_point_front(self, ev):
self.points.insert(0, [250, 250])
self.drawSpline()
self.drawFun()
def | (self, ev):
self.points.append([250, 250])
self.drawSpline()
self.drawFun()
def remove_point(self, ev):
if len(self.points) > 1:
try:
self.points.pop(self.selected)
except IndexError:
try:
self.points.pop()
except IndexError:
pass
self.drawSpline()
self.drawFun()
def toggle_fun_display(self, ev):
self.display_fun = not self.display_fun
self.drawFun()
def mouseClicked(self, ev):
self.button_held = 1
x, y = pygame.mouse.get_pos()
selected = None
rmin = 1000000
for point in self.points:
px, py = point
dx, dy = x-px, y-py
r = dx*dx + dy*dy
if r < rmin:
selected = point
rmin = r
self.selected = self.points.index(selected)
def mouseReleased(self, ev):
self.button_held = 0
def move_point(self):
if self.button_held:
self.points[self.selected][:] = list(pygame.mouse.get_pos())
self.drawSpline()
def drawSpline(self):
self.image.fill(TRANSPARENT)
self.spline = []
# Draw the points
for p in self.points:
pygame.draw.rect(self.image, BLUE, (p, (3, 3)))
# Draw the control lines
if len(self.points) > 2:
pygame.draw.lines(self.image, GREEN, False, self.points)
# Draw the curve
step = 1.0 / self.w
t = step
pold = self.points[0]
for k in range(self.w):
pi = copy.deepcopy(self.points)
for j in range(len(self.points)-1, 0, -1):
for i in range(j):
pi[i][0] = (1-t)*pi[i][0] + t*pi[i+1][0]
pi[i][1] = (1-t)*pi[i][1] + t*pi[i+1][1]
p = pi[0]
self.spline.append(p)
pygame.draw.line(self.image, RED, pold, p)
pold = p
t += step
def drawFun(self):
self.window.bg.fill(BLACK)
if self.display_fun:
step = 1.0 / self.w
t = step
n = len(self.points)
B = [0 for x in range(n)]
Bold = copy.copy(B)
B[0] = self.h
for k in range(self.w):
Bold = copy.copy(B)
for j in range(n):
for i in range(j, 0, -1):
B[i] = (1-t)*B[i] + t*B[i-1]
for m in range(n):
color = [RED, GREEN, ORANGE, BLUE, WHITE, YELLOW, LGREEN, LBLUE][m % 8]
p1 = (k, Bold[m])
p2 = (k, B[m])
pygame.draw.line(self.window.bg, color, p1, p2)
t += step
if __name__ == '__main__':
g = BGame()
g.mainloop()
| add_point_end | identifier_name |
bezier.py | # pygsear
# Copyright (C) 2003 Lee Harr
#
#
# This file is part of pygsear.
#
# pygsear is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pygsear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygsear; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import copy
import pygame
from pygame.locals import K_RETURN, K_a, K_s, K_x, K_z, K_UP, K_DOWN
from pygsear.Drawable import Square, Layer
from pygsear.Game import Game
from pygsear.Event import MOUSEBUTTONDOWN_Event, MOUSEBUTTONUP_Event, KEYUP_Event
from pygsear.locals import WHITE, BLACK, COLORS, BLUE, RED, GREEN, ORANGE, YELLOW, TRANSPARENT, LGREEN, LBLUE
class BGame(Game):
def initialize(self):
s = Bezier()
s.center()
self.s = s
self.sprites.add(s)
self.events.add(MOUSEBUTTONDOWN_Event(callback=s.mouseClicked))
self.events.add(MOUSEBUTTONUP_Event(callback=s.mouseReleased))
self.events.add(KEYUP_Event(key=K_RETURN, callback=s.report))
self.events.add(KEYUP_Event(key=K_a, callback=s.add_point_front))
self.events.add(KEYUP_Event(key=K_s, callback=s.add_point_end))
self.events.add(KEYUP_Event(key=K_x, callback=s.remove_point))
self.events.add(KEYUP_Event(key=K_z, callback=s.toggle_fun_display))
self.events.add(KEYUP_Event(key=K_UP, callback=s.add_report_point))
self.events.add(KEYUP_Event(key=K_DOWN, callback=s.remove_report_point))
def checkEvents(self):
Game.checkEvents(self)
self.s.move_point()
class Bezier(Layer):
def __init__(self, points=None):
Layer.__init__(self)
self.image.set_colorkey(TRANSPARENT)
w, h = self.image.get_size()
self.w = w
self.h = h
self.selected = 0
self.button_held = 0
self.display_fun = 1
self.n_report = 10
if points is None:
points = [[30, 53], [265, 571], [462, 445], [695, 56], [781, 572]]
self.points = points
self.drawSpline()
self.drawFun()
def report(self, ev):
step = int(len(self.spline) / self.n_report)
points = self.spline[::step]
points.append(self.spline[-1])
print [(int(x), int(y)) for x, y in points]
self.show_report_points()
def add_report_point(self, ev):
self.n_report += 1
self.drawSpline()
self.show_report_points()
def remove_report_point(self, ev):
self.n_report -= 1
self.drawSpline()
self.show_report_points()
def show_report_points(self):
step = int(len(self.spline) / self.n_report)
points = self.spline[::step]
points.append(self.spline[-1])
for p in points:
pygame.draw.rect(self.image, WHITE, (p, (3, 3)))
def add_point_front(self, ev):
self.points.insert(0, [250, 250])
self.drawSpline()
self.drawFun()
def add_point_end(self, ev):
self.points.append([250, 250])
self.drawSpline()
self.drawFun()
def remove_point(self, ev):
|
def toggle_fun_display(self, ev):
self.display_fun = not self.display_fun
self.drawFun()
def mouseClicked(self, ev):
self.button_held = 1
x, y = pygame.mouse.get_pos()
selected = None
rmin = 1000000
for point in self.points:
px, py = point
dx, dy = x-px, y-py
r = dx*dx + dy*dy
if r < rmin:
selected = point
rmin = r
self.selected = self.points.index(selected)
def mouseReleased(self, ev):
self.button_held = 0
def move_point(self):
if self.button_held:
self.points[self.selected][:] = list(pygame.mouse.get_pos())
self.drawSpline()
def drawSpline(self):
self.image.fill(TRANSPARENT)
self.spline = []
# Draw the points
for p in self.points:
pygame.draw.rect(self.image, BLUE, (p, (3, 3)))
# Draw the control lines
if len(self.points) > 2:
pygame.draw.lines(self.image, GREEN, False, self.points)
# Draw the curve
step = 1.0 / self.w
t = step
pold = self.points[0]
for k in range(self.w):
pi = copy.deepcopy(self.points)
for j in range(len(self.points)-1, 0, -1):
for i in range(j):
pi[i][0] = (1-t)*pi[i][0] + t*pi[i+1][0]
pi[i][1] = (1-t)*pi[i][1] + t*pi[i+1][1]
p = pi[0]
self.spline.append(p)
pygame.draw.line(self.image, RED, pold, p)
pold = p
t += step
def drawFun(self):
self.window.bg.fill(BLACK)
if self.display_fun:
step = 1.0 / self.w
t = step
n = len(self.points)
B = [0 for x in range(n)]
Bold = copy.copy(B)
B[0] = self.h
for k in range(self.w):
Bold = copy.copy(B)
for j in range(n):
for i in range(j, 0, -1):
B[i] = (1-t)*B[i] + t*B[i-1]
for m in range(n):
color = [RED, GREEN, ORANGE, BLUE, WHITE, YELLOW, LGREEN, LBLUE][m % 8]
p1 = (k, Bold[m])
p2 = (k, B[m])
pygame.draw.line(self.window.bg, color, p1, p2)
t += step
if __name__ == '__main__':
g = BGame()
g.mainloop()
| if len(self.points) > 1:
try:
self.points.pop(self.selected)
except IndexError:
try:
self.points.pop()
except IndexError:
pass
self.drawSpline()
self.drawFun() | identifier_body |
bezier.py | # pygsear
# Copyright (C) 2003 Lee Harr
#
#
# This file is part of pygsear.
#
# pygsear is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pygsear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygsear; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import copy
import pygame
from pygame.locals import K_RETURN, K_a, K_s, K_x, K_z, K_UP, K_DOWN
from pygsear.Drawable import Square, Layer
from pygsear.Game import Game
from pygsear.Event import MOUSEBUTTONDOWN_Event, MOUSEBUTTONUP_Event, KEYUP_Event
from pygsear.locals import WHITE, BLACK, COLORS, BLUE, RED, GREEN, ORANGE, YELLOW, TRANSPARENT, LGREEN, LBLUE
class BGame(Game):
def initialize(self):
s = Bezier()
s.center()
self.s = s
self.sprites.add(s)
self.events.add(MOUSEBUTTONDOWN_Event(callback=s.mouseClicked))
self.events.add(MOUSEBUTTONUP_Event(callback=s.mouseReleased))
self.events.add(KEYUP_Event(key=K_RETURN, callback=s.report))
self.events.add(KEYUP_Event(key=K_a, callback=s.add_point_front))
self.events.add(KEYUP_Event(key=K_s, callback=s.add_point_end))
self.events.add(KEYUP_Event(key=K_x, callback=s.remove_point))
self.events.add(KEYUP_Event(key=K_z, callback=s.toggle_fun_display))
self.events.add(KEYUP_Event(key=K_UP, callback=s.add_report_point))
self.events.add(KEYUP_Event(key=K_DOWN, callback=s.remove_report_point))
def checkEvents(self):
Game.checkEvents(self)
self.s.move_point()
class Bezier(Layer):
def __init__(self, points=None):
Layer.__init__(self)
self.image.set_colorkey(TRANSPARENT)
w, h = self.image.get_size()
self.w = w
self.h = h
self.selected = 0
self.button_held = 0
self.display_fun = 1
self.n_report = 10
if points is None:
points = [[30, 53], [265, 571], [462, 445], [695, 56], [781, 572]]
self.points = points
self.drawSpline()
self.drawFun()
def report(self, ev):
step = int(len(self.spline) / self.n_report)
points = self.spline[::step]
points.append(self.spline[-1])
print [(int(x), int(y)) for x, y in points]
self.show_report_points()
def add_report_point(self, ev):
self.n_report += 1
self.drawSpline()
self.show_report_points()
def remove_report_point(self, ev):
self.n_report -= 1
self.drawSpline()
self.show_report_points()
def show_report_points(self):
step = int(len(self.spline) / self.n_report)
points = self.spline[::step]
points.append(self.spline[-1])
for p in points:
pygame.draw.rect(self.image, WHITE, (p, (3, 3)))
def add_point_front(self, ev):
self.points.insert(0, [250, 250])
self.drawSpline()
self.drawFun()
def add_point_end(self, ev):
self.points.append([250, 250])
self.drawSpline()
self.drawFun()
def remove_point(self, ev):
if len(self.points) > 1:
try:
self.points.pop(self.selected)
except IndexError:
try:
self.points.pop()
except IndexError:
pass
self.drawSpline()
self.drawFun()
def toggle_fun_display(self, ev):
self.display_fun = not self.display_fun
self.drawFun()
def mouseClicked(self, ev):
self.button_held = 1
x, y = pygame.mouse.get_pos()
selected = None
rmin = 1000000
for point in self.points:
px, py = point | dx, dy = x-px, y-py
r = dx*dx + dy*dy
if r < rmin:
selected = point
rmin = r
self.selected = self.points.index(selected)
def mouseReleased(self, ev):
self.button_held = 0
def move_point(self):
if self.button_held:
self.points[self.selected][:] = list(pygame.mouse.get_pos())
self.drawSpline()
def drawSpline(self):
self.image.fill(TRANSPARENT)
self.spline = []
# Draw the points
for p in self.points:
pygame.draw.rect(self.image, BLUE, (p, (3, 3)))
# Draw the control lines
if len(self.points) > 2:
pygame.draw.lines(self.image, GREEN, False, self.points)
# Draw the curve
step = 1.0 / self.w
t = step
pold = self.points[0]
for k in range(self.w):
pi = copy.deepcopy(self.points)
for j in range(len(self.points)-1, 0, -1):
for i in range(j):
pi[i][0] = (1-t)*pi[i][0] + t*pi[i+1][0]
pi[i][1] = (1-t)*pi[i][1] + t*pi[i+1][1]
p = pi[0]
self.spline.append(p)
pygame.draw.line(self.image, RED, pold, p)
pold = p
t += step
def drawFun(self):
self.window.bg.fill(BLACK)
if self.display_fun:
step = 1.0 / self.w
t = step
n = len(self.points)
B = [0 for x in range(n)]
Bold = copy.copy(B)
B[0] = self.h
for k in range(self.w):
Bold = copy.copy(B)
for j in range(n):
for i in range(j, 0, -1):
B[i] = (1-t)*B[i] + t*B[i-1]
for m in range(n):
color = [RED, GREEN, ORANGE, BLUE, WHITE, YELLOW, LGREEN, LBLUE][m % 8]
p1 = (k, Bold[m])
p2 = (k, B[m])
pygame.draw.line(self.window.bg, color, p1, p2)
t += step
if __name__ == '__main__':
g = BGame()
g.mainloop() | random_line_split | |
database.rs | // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::err::Result;
use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
use std::collections::HashMap;
use std::path::Path;
fn trace(s: &str) {
println!("sql: {}", s)
}
pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
let mut db = Connection::open(path)?;
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
}
db.pragma_update(None, "page_size", &4096)?;
db.pragma_update(None, "legacy_file_format", &false)?;
db.pragma_update_and_check(None, "journal_mode", &"wal", |_| Ok(()))?;
initial_db_setup(&mut db)?;
Ok(db)
}
fn initial_db_setup(db: &mut Connection) -> Result<()> {
// tables already exist?
if db
.prepare("select null from sqlite_master where type = 'table' and name = 'media'")?
.exists(NO_PARAMS)?
{
return Ok(());
}
db.execute("begin", NO_PARAMS)?;
db.execute_batch(include_str!("schema.sql"))?;
db.execute_batch("commit; vacuum; analyze;")?;
Ok(())
}
#[derive(Debug, PartialEq)]
pub struct MediaEntry {
pub fname: String,
/// If None, file has been deleted
pub sha1: Option<[u8; 20]>,
// Modification time; 0 if deleted
pub mtime: i64,
/// True if changed since last sync
pub sync_required: bool,
}
#[derive(Debug, PartialEq)]
pub struct MediaDatabaseMetadata {
pub folder_mtime: i64,
pub last_sync_usn: i32,
}
/// Helper to prepare a statement, or return a previously prepared one.
macro_rules! cached_sql {
( $label:expr, $db:expr, $sql:expr ) => {{
if $label.is_none() {
$label = Some($db.prepare($sql)?);
}
$label.as_mut().unwrap()
}};
}
pub struct MediaDatabaseContext<'a> {
db: &'a Connection,
get_entry_stmt: Option<Statement<'a>>,
update_entry_stmt: Option<Statement<'a>>,
remove_entry_stmt: Option<Statement<'a>>,
}
impl MediaDatabaseContext<'_> {
pub(super) fn new(db: &Connection) -> MediaDatabaseContext {
MediaDatabaseContext {
db,
get_entry_stmt: None,
update_entry_stmt: None,
remove_entry_stmt: None,
}
}
/// Execute the provided closure in a transaction, rolling back if
/// an error is returned.
pub(super) fn transact<F, R>(&mut self, func: F) -> Result<R>
where
F: FnOnce(&mut MediaDatabaseContext) -> Result<R>,
{
self.begin()?;
let mut res = func(self);
if res.is_ok() {
if let Err(e) = self.commit() {
res = Err(e);
}
}
if res.is_err() {
self.rollback()?;
}
res
}
fn begin(&mut self) -> Result<()> {
self.db.execute_batch("begin immediate").map_err(Into::into)
}
fn commit(&mut self) -> Result<()> {
self.db.execute_batch("commit").map_err(Into::into)
}
fn rollback(&mut self) -> Result<()> {
self.db.execute_batch("rollback").map_err(Into::into)
}
pub(super) fn get_entry(&mut self, fname: &str) -> Result<Option<MediaEntry>> {
let stmt = cached_sql!(
self.get_entry_stmt,
self.db,
"
select fname, csum, mtime, dirty from media where fname=?"
);
stmt.query_row(params![fname], row_to_entry)
.optional()
.map_err(Into::into)
}
pub(super) fn | (&mut self, entry: &MediaEntry) -> Result<()> {
let stmt = cached_sql!(
self.update_entry_stmt,
self.db,
"
insert or replace into media (fname, csum, mtime, dirty)
values (?, ?, ?, ?)"
);
let sha1_str = entry.sha1.map(hex::encode);
stmt.execute(params![
entry.fname,
sha1_str,
entry.mtime,
entry.sync_required
])?;
Ok(())
}
pub(super) fn remove_entry(&mut self, fname: &str) -> Result<()> {
let stmt = cached_sql!(
self.remove_entry_stmt,
self.db,
"
delete from media where fname=?"
);
stmt.execute(params![fname])?;
Ok(())
}
pub(super) fn get_meta(&mut self) -> Result<MediaDatabaseMetadata> {
let mut stmt = self.db.prepare("select dirMod, lastUsn from meta")?;
stmt.query_row(NO_PARAMS, |row| {
Ok(MediaDatabaseMetadata {
folder_mtime: row.get(0)?,
last_sync_usn: row.get(1)?,
})
})
.map_err(Into::into)
}
pub(super) fn set_meta(&mut self, meta: &MediaDatabaseMetadata) -> Result<()> {
let mut stmt = self.db.prepare("update meta set dirMod = ?, lastUsn = ?")?;
stmt.execute(params![meta.folder_mtime, meta.last_sync_usn])?;
Ok(())
}
pub(super) fn count(&mut self) -> Result<u32> {
self.db
.query_row(
"select count(*) from media where csum is not null",
NO_PARAMS,
|row| Ok(row.get(0)?),
)
.map_err(Into::into)
}
pub(super) fn get_pending_uploads(&mut self, max_entries: u32) -> Result<Vec<MediaEntry>> {
let mut stmt = self
.db
.prepare("select fname from media where dirty=1 limit ?")?;
let results: Result<Vec<_>> = stmt
.query_and_then(params![max_entries], |row| {
let fname = row.get_raw(0).as_str()?;
Ok(self.get_entry(fname)?.unwrap())
})?
.collect();
results
}
pub(super) fn all_mtimes(&mut self) -> Result<HashMap<String, i64>> {
let mut stmt = self
.db
.prepare("select fname, mtime from media where csum is not null")?;
let map: std::result::Result<HashMap<String, i64>, rusqlite::Error> = stmt
.query_map(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))?
.collect();
Ok(map?)
}
pub(super) fn force_resync(&mut self) -> Result<()> {
self.db
.execute_batch("delete from media; update meta set lastUsn = 0, dirMod = 0")
.map_err(Into::into)
}
}
fn row_to_entry(row: &Row) -> rusqlite::Result<MediaEntry> {
// map the string checksum into bytes
let sha1_str: Option<String> = row.get(1)?;
let sha1_array = if let Some(s) = sha1_str {
let mut arr = [0; 20];
match hex::decode_to_slice(s, arr.as_mut()) {
Ok(_) => Some(arr),
_ => None,
}
} else {
None
};
// and return the entry
Ok(MediaEntry {
fname: row.get(0)?,
sha1: sha1_array,
mtime: row.get(2)?,
sync_required: row.get(3)?,
})
}
#[cfg(test)]
mod test {
use crate::err::Result;
use crate::media::database::MediaEntry;
use crate::media::files::sha1_of_data;
use crate::media::MediaManager;
use tempfile::NamedTempFile;
#[test]
fn database() -> Result<()> {
let db_file = NamedTempFile::new()?;
let db_file_path = db_file.path().to_str().unwrap();
let mut mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
ctx.transact(|ctx| {
// no entry exists yet
assert_eq!(ctx.get_entry("test.mp3")?, None);
// add one
let mut entry = MediaEntry {
fname: "test.mp3".into(),
sha1: None,
mtime: 0,
sync_required: false,
};
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
// update it
entry.sha1 = Some(sha1_of_data(b"hello"));
entry.mtime = 123;
entry.sync_required = true;
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
assert_eq!(ctx.get_pending_uploads(25)?, vec![entry]);
let mut meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 0);
assert_eq!(meta.last_sync_usn, 0);
meta.folder_mtime = 123;
meta.last_sync_usn = 321;
ctx.set_meta(&meta)?;
meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
assert_eq!(meta.last_sync_usn, 321);
Ok(())
})?;
// reopen database and ensure data was committed
drop(ctx);
drop(mgr);
mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
let meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
Ok(())
}
}
| set_entry | identifier_name |
database.rs | // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::err::Result;
use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
use std::collections::HashMap;
use std::path::Path;
fn trace(s: &str) {
println!("sql: {}", s)
}
pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
let mut db = Connection::open(path)?;
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
}
db.pragma_update(None, "page_size", &4096)?;
db.pragma_update(None, "legacy_file_format", &false)?;
db.pragma_update_and_check(None, "journal_mode", &"wal", |_| Ok(()))?;
initial_db_setup(&mut db)?;
Ok(db)
}
fn initial_db_setup(db: &mut Connection) -> Result<()> {
// tables already exist?
if db
.prepare("select null from sqlite_master where type = 'table' and name = 'media'")?
.exists(NO_PARAMS)?
{
return Ok(());
}
db.execute("begin", NO_PARAMS)?;
db.execute_batch(include_str!("schema.sql"))?;
db.execute_batch("commit; vacuum; analyze;")?;
Ok(())
}
#[derive(Debug, PartialEq)]
pub struct MediaEntry {
pub fname: String,
/// If None, file has been deleted
pub sha1: Option<[u8; 20]>,
// Modification time; 0 if deleted
pub mtime: i64,
/// True if changed since last sync
pub sync_required: bool,
}
#[derive(Debug, PartialEq)]
pub struct MediaDatabaseMetadata {
pub folder_mtime: i64,
pub last_sync_usn: i32,
}
/// Helper to prepare a statement, or return a previously prepared one.
macro_rules! cached_sql {
( $label:expr, $db:expr, $sql:expr ) => {{
if $label.is_none() {
$label = Some($db.prepare($sql)?);
}
$label.as_mut().unwrap()
}};
}
pub struct MediaDatabaseContext<'a> {
db: &'a Connection,
get_entry_stmt: Option<Statement<'a>>,
update_entry_stmt: Option<Statement<'a>>,
remove_entry_stmt: Option<Statement<'a>>,
}
impl MediaDatabaseContext<'_> {
pub(super) fn new(db: &Connection) -> MediaDatabaseContext {
MediaDatabaseContext {
db,
get_entry_stmt: None,
update_entry_stmt: None,
remove_entry_stmt: None,
}
}
/// Execute the provided closure in a transaction, rolling back if
/// an error is returned.
pub(super) fn transact<F, R>(&mut self, func: F) -> Result<R>
where
F: FnOnce(&mut MediaDatabaseContext) -> Result<R>,
{
self.begin()?;
let mut res = func(self);
if res.is_ok() {
if let Err(e) = self.commit() |
}
if res.is_err() {
self.rollback()?;
}
res
}
fn begin(&mut self) -> Result<()> {
self.db.execute_batch("begin immediate").map_err(Into::into)
}
fn commit(&mut self) -> Result<()> {
self.db.execute_batch("commit").map_err(Into::into)
}
fn rollback(&mut self) -> Result<()> {
self.db.execute_batch("rollback").map_err(Into::into)
}
pub(super) fn get_entry(&mut self, fname: &str) -> Result<Option<MediaEntry>> {
let stmt = cached_sql!(
self.get_entry_stmt,
self.db,
"
select fname, csum, mtime, dirty from media where fname=?"
);
stmt.query_row(params![fname], row_to_entry)
.optional()
.map_err(Into::into)
}
pub(super) fn set_entry(&mut self, entry: &MediaEntry) -> Result<()> {
let stmt = cached_sql!(
self.update_entry_stmt,
self.db,
"
insert or replace into media (fname, csum, mtime, dirty)
values (?, ?, ?, ?)"
);
let sha1_str = entry.sha1.map(hex::encode);
stmt.execute(params![
entry.fname,
sha1_str,
entry.mtime,
entry.sync_required
])?;
Ok(())
}
pub(super) fn remove_entry(&mut self, fname: &str) -> Result<()> {
let stmt = cached_sql!(
self.remove_entry_stmt,
self.db,
"
delete from media where fname=?"
);
stmt.execute(params![fname])?;
Ok(())
}
pub(super) fn get_meta(&mut self) -> Result<MediaDatabaseMetadata> {
let mut stmt = self.db.prepare("select dirMod, lastUsn from meta")?;
stmt.query_row(NO_PARAMS, |row| {
Ok(MediaDatabaseMetadata {
folder_mtime: row.get(0)?,
last_sync_usn: row.get(1)?,
})
})
.map_err(Into::into)
}
pub(super) fn set_meta(&mut self, meta: &MediaDatabaseMetadata) -> Result<()> {
let mut stmt = self.db.prepare("update meta set dirMod = ?, lastUsn = ?")?;
stmt.execute(params![meta.folder_mtime, meta.last_sync_usn])?;
Ok(())
}
pub(super) fn count(&mut self) -> Result<u32> {
self.db
.query_row(
"select count(*) from media where csum is not null",
NO_PARAMS,
|row| Ok(row.get(0)?),
)
.map_err(Into::into)
}
pub(super) fn get_pending_uploads(&mut self, max_entries: u32) -> Result<Vec<MediaEntry>> {
let mut stmt = self
.db
.prepare("select fname from media where dirty=1 limit ?")?;
let results: Result<Vec<_>> = stmt
.query_and_then(params![max_entries], |row| {
let fname = row.get_raw(0).as_str()?;
Ok(self.get_entry(fname)?.unwrap())
})?
.collect();
results
}
pub(super) fn all_mtimes(&mut self) -> Result<HashMap<String, i64>> {
let mut stmt = self
.db
.prepare("select fname, mtime from media where csum is not null")?;
let map: std::result::Result<HashMap<String, i64>, rusqlite::Error> = stmt
.query_map(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))?
.collect();
Ok(map?)
}
pub(super) fn force_resync(&mut self) -> Result<()> {
self.db
.execute_batch("delete from media; update meta set lastUsn = 0, dirMod = 0")
.map_err(Into::into)
}
}
fn row_to_entry(row: &Row) -> rusqlite::Result<MediaEntry> {
// map the string checksum into bytes
let sha1_str: Option<String> = row.get(1)?;
let sha1_array = if let Some(s) = sha1_str {
let mut arr = [0; 20];
match hex::decode_to_slice(s, arr.as_mut()) {
Ok(_) => Some(arr),
_ => None,
}
} else {
None
};
// and return the entry
Ok(MediaEntry {
fname: row.get(0)?,
sha1: sha1_array,
mtime: row.get(2)?,
sync_required: row.get(3)?,
})
}
#[cfg(test)]
mod test {
use crate::err::Result;
use crate::media::database::MediaEntry;
use crate::media::files::sha1_of_data;
use crate::media::MediaManager;
use tempfile::NamedTempFile;
#[test]
fn database() -> Result<()> {
let db_file = NamedTempFile::new()?;
let db_file_path = db_file.path().to_str().unwrap();
let mut mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
ctx.transact(|ctx| {
// no entry exists yet
assert_eq!(ctx.get_entry("test.mp3")?, None);
// add one
let mut entry = MediaEntry {
fname: "test.mp3".into(),
sha1: None,
mtime: 0,
sync_required: false,
};
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
// update it
entry.sha1 = Some(sha1_of_data(b"hello"));
entry.mtime = 123;
entry.sync_required = true;
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
assert_eq!(ctx.get_pending_uploads(25)?, vec![entry]);
let mut meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 0);
assert_eq!(meta.last_sync_usn, 0);
meta.folder_mtime = 123;
meta.last_sync_usn = 321;
ctx.set_meta(&meta)?;
meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
assert_eq!(meta.last_sync_usn, 321);
Ok(())
})?;
// reopen database and ensure data was committed
drop(ctx);
drop(mgr);
mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
let meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
Ok(())
}
}
| {
res = Err(e);
} | conditional_block |
database.rs | // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::err::Result;
use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
use std::collections::HashMap;
use std::path::Path;
fn trace(s: &str) {
println!("sql: {}", s)
}
pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
let mut db = Connection::open(path)?; |
db.pragma_update(None, "page_size", &4096)?;
db.pragma_update(None, "legacy_file_format", &false)?;
db.pragma_update_and_check(None, "journal_mode", &"wal", |_| Ok(()))?;
initial_db_setup(&mut db)?;
Ok(db)
}
fn initial_db_setup(db: &mut Connection) -> Result<()> {
// tables already exist?
if db
.prepare("select null from sqlite_master where type = 'table' and name = 'media'")?
.exists(NO_PARAMS)?
{
return Ok(());
}
db.execute("begin", NO_PARAMS)?;
db.execute_batch(include_str!("schema.sql"))?;
db.execute_batch("commit; vacuum; analyze;")?;
Ok(())
}
#[derive(Debug, PartialEq)]
pub struct MediaEntry {
pub fname: String,
/// If None, file has been deleted
pub sha1: Option<[u8; 20]>,
// Modification time; 0 if deleted
pub mtime: i64,
/// True if changed since last sync
pub sync_required: bool,
}
#[derive(Debug, PartialEq)]
pub struct MediaDatabaseMetadata {
pub folder_mtime: i64,
pub last_sync_usn: i32,
}
/// Helper to prepare a statement, or return a previously prepared one.
macro_rules! cached_sql {
( $label:expr, $db:expr, $sql:expr ) => {{
if $label.is_none() {
$label = Some($db.prepare($sql)?);
}
$label.as_mut().unwrap()
}};
}
pub struct MediaDatabaseContext<'a> {
db: &'a Connection,
get_entry_stmt: Option<Statement<'a>>,
update_entry_stmt: Option<Statement<'a>>,
remove_entry_stmt: Option<Statement<'a>>,
}
impl MediaDatabaseContext<'_> {
pub(super) fn new(db: &Connection) -> MediaDatabaseContext {
MediaDatabaseContext {
db,
get_entry_stmt: None,
update_entry_stmt: None,
remove_entry_stmt: None,
}
}
/// Execute the provided closure in a transaction, rolling back if
/// an error is returned.
pub(super) fn transact<F, R>(&mut self, func: F) -> Result<R>
where
F: FnOnce(&mut MediaDatabaseContext) -> Result<R>,
{
self.begin()?;
let mut res = func(self);
if res.is_ok() {
if let Err(e) = self.commit() {
res = Err(e);
}
}
if res.is_err() {
self.rollback()?;
}
res
}
fn begin(&mut self) -> Result<()> {
self.db.execute_batch("begin immediate").map_err(Into::into)
}
fn commit(&mut self) -> Result<()> {
self.db.execute_batch("commit").map_err(Into::into)
}
fn rollback(&mut self) -> Result<()> {
self.db.execute_batch("rollback").map_err(Into::into)
}
pub(super) fn get_entry(&mut self, fname: &str) -> Result<Option<MediaEntry>> {
let stmt = cached_sql!(
self.get_entry_stmt,
self.db,
"
select fname, csum, mtime, dirty from media where fname=?"
);
stmt.query_row(params![fname], row_to_entry)
.optional()
.map_err(Into::into)
}
pub(super) fn set_entry(&mut self, entry: &MediaEntry) -> Result<()> {
let stmt = cached_sql!(
self.update_entry_stmt,
self.db,
"
insert or replace into media (fname, csum, mtime, dirty)
values (?, ?, ?, ?)"
);
let sha1_str = entry.sha1.map(hex::encode);
stmt.execute(params![
entry.fname,
sha1_str,
entry.mtime,
entry.sync_required
])?;
Ok(())
}
pub(super) fn remove_entry(&mut self, fname: &str) -> Result<()> {
let stmt = cached_sql!(
self.remove_entry_stmt,
self.db,
"
delete from media where fname=?"
);
stmt.execute(params![fname])?;
Ok(())
}
pub(super) fn get_meta(&mut self) -> Result<MediaDatabaseMetadata> {
let mut stmt = self.db.prepare("select dirMod, lastUsn from meta")?;
stmt.query_row(NO_PARAMS, |row| {
Ok(MediaDatabaseMetadata {
folder_mtime: row.get(0)?,
last_sync_usn: row.get(1)?,
})
})
.map_err(Into::into)
}
pub(super) fn set_meta(&mut self, meta: &MediaDatabaseMetadata) -> Result<()> {
let mut stmt = self.db.prepare("update meta set dirMod = ?, lastUsn = ?")?;
stmt.execute(params![meta.folder_mtime, meta.last_sync_usn])?;
Ok(())
}
pub(super) fn count(&mut self) -> Result<u32> {
self.db
.query_row(
"select count(*) from media where csum is not null",
NO_PARAMS,
|row| Ok(row.get(0)?),
)
.map_err(Into::into)
}
pub(super) fn get_pending_uploads(&mut self, max_entries: u32) -> Result<Vec<MediaEntry>> {
let mut stmt = self
.db
.prepare("select fname from media where dirty=1 limit ?")?;
let results: Result<Vec<_>> = stmt
.query_and_then(params![max_entries], |row| {
let fname = row.get_raw(0).as_str()?;
Ok(self.get_entry(fname)?.unwrap())
})?
.collect();
results
}
pub(super) fn all_mtimes(&mut self) -> Result<HashMap<String, i64>> {
let mut stmt = self
.db
.prepare("select fname, mtime from media where csum is not null")?;
let map: std::result::Result<HashMap<String, i64>, rusqlite::Error> = stmt
.query_map(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))?
.collect();
Ok(map?)
}
pub(super) fn force_resync(&mut self) -> Result<()> {
self.db
.execute_batch("delete from media; update meta set lastUsn = 0, dirMod = 0")
.map_err(Into::into)
}
}
fn row_to_entry(row: &Row) -> rusqlite::Result<MediaEntry> {
// map the string checksum into bytes
let sha1_str: Option<String> = row.get(1)?;
let sha1_array = if let Some(s) = sha1_str {
let mut arr = [0; 20];
match hex::decode_to_slice(s, arr.as_mut()) {
Ok(_) => Some(arr),
_ => None,
}
} else {
None
};
// and return the entry
Ok(MediaEntry {
fname: row.get(0)?,
sha1: sha1_array,
mtime: row.get(2)?,
sync_required: row.get(3)?,
})
}
#[cfg(test)]
mod test {
use crate::err::Result;
use crate::media::database::MediaEntry;
use crate::media::files::sha1_of_data;
use crate::media::MediaManager;
use tempfile::NamedTempFile;
#[test]
fn database() -> Result<()> {
let db_file = NamedTempFile::new()?;
let db_file_path = db_file.path().to_str().unwrap();
let mut mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
ctx.transact(|ctx| {
// no entry exists yet
assert_eq!(ctx.get_entry("test.mp3")?, None);
// add one
let mut entry = MediaEntry {
fname: "test.mp3".into(),
sha1: None,
mtime: 0,
sync_required: false,
};
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
// update it
entry.sha1 = Some(sha1_of_data(b"hello"));
entry.mtime = 123;
entry.sync_required = true;
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
assert_eq!(ctx.get_pending_uploads(25)?, vec![entry]);
let mut meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 0);
assert_eq!(meta.last_sync_usn, 0);
meta.folder_mtime = 123;
meta.last_sync_usn = 321;
ctx.set_meta(&meta)?;
meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
assert_eq!(meta.last_sync_usn, 321);
Ok(())
})?;
// reopen database and ensure data was committed
drop(ctx);
drop(mgr);
mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
let meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
Ok(())
}
} |
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
} | random_line_split |
database.rs | // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::err::Result;
use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
use std::collections::HashMap;
use std::path::Path;
fn trace(s: &str) {
println!("sql: {}", s)
}
pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
let mut db = Connection::open(path)?;
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
}
db.pragma_update(None, "page_size", &4096)?;
db.pragma_update(None, "legacy_file_format", &false)?;
db.pragma_update_and_check(None, "journal_mode", &"wal", |_| Ok(()))?;
initial_db_setup(&mut db)?;
Ok(db)
}
fn initial_db_setup(db: &mut Connection) -> Result<()> {
// tables already exist?
if db
.prepare("select null from sqlite_master where type = 'table' and name = 'media'")?
.exists(NO_PARAMS)?
{
return Ok(());
}
db.execute("begin", NO_PARAMS)?;
db.execute_batch(include_str!("schema.sql"))?;
db.execute_batch("commit; vacuum; analyze;")?;
Ok(())
}
#[derive(Debug, PartialEq)]
pub struct MediaEntry {
pub fname: String,
/// If None, file has been deleted
pub sha1: Option<[u8; 20]>,
// Modification time; 0 if deleted
pub mtime: i64,
/// True if changed since last sync
pub sync_required: bool,
}
#[derive(Debug, PartialEq)]
pub struct MediaDatabaseMetadata {
pub folder_mtime: i64,
pub last_sync_usn: i32,
}
/// Helper to prepare a statement, or return a previously prepared one.
macro_rules! cached_sql {
( $label:expr, $db:expr, $sql:expr ) => {{
if $label.is_none() {
$label = Some($db.prepare($sql)?);
}
$label.as_mut().unwrap()
}};
}
pub struct MediaDatabaseContext<'a> {
db: &'a Connection,
get_entry_stmt: Option<Statement<'a>>,
update_entry_stmt: Option<Statement<'a>>,
remove_entry_stmt: Option<Statement<'a>>,
}
impl MediaDatabaseContext<'_> {
pub(super) fn new(db: &Connection) -> MediaDatabaseContext {
MediaDatabaseContext {
db,
get_entry_stmt: None,
update_entry_stmt: None,
remove_entry_stmt: None,
}
}
/// Execute the provided closure in a transaction, rolling back if
/// an error is returned.
pub(super) fn transact<F, R>(&mut self, func: F) -> Result<R>
where
F: FnOnce(&mut MediaDatabaseContext) -> Result<R>,
{
self.begin()?;
let mut res = func(self);
if res.is_ok() {
if let Err(e) = self.commit() {
res = Err(e);
}
}
if res.is_err() {
self.rollback()?;
}
res
}
fn begin(&mut self) -> Result<()> {
self.db.execute_batch("begin immediate").map_err(Into::into)
}
fn commit(&mut self) -> Result<()> {
self.db.execute_batch("commit").map_err(Into::into)
}
fn rollback(&mut self) -> Result<()> {
self.db.execute_batch("rollback").map_err(Into::into)
}
pub(super) fn get_entry(&mut self, fname: &str) -> Result<Option<MediaEntry>> {
let stmt = cached_sql!(
self.get_entry_stmt,
self.db,
"
select fname, csum, mtime, dirty from media where fname=?"
);
stmt.query_row(params![fname], row_to_entry)
.optional()
.map_err(Into::into)
}
pub(super) fn set_entry(&mut self, entry: &MediaEntry) -> Result<()> {
let stmt = cached_sql!(
self.update_entry_stmt,
self.db,
"
insert or replace into media (fname, csum, mtime, dirty)
values (?, ?, ?, ?)"
);
let sha1_str = entry.sha1.map(hex::encode);
stmt.execute(params![
entry.fname,
sha1_str,
entry.mtime,
entry.sync_required
])?;
Ok(())
}
pub(super) fn remove_entry(&mut self, fname: &str) -> Result<()> {
let stmt = cached_sql!(
self.remove_entry_stmt,
self.db,
"
delete from media where fname=?"
);
stmt.execute(params![fname])?;
Ok(())
}
pub(super) fn get_meta(&mut self) -> Result<MediaDatabaseMetadata> {
let mut stmt = self.db.prepare("select dirMod, lastUsn from meta")?;
stmt.query_row(NO_PARAMS, |row| {
Ok(MediaDatabaseMetadata {
folder_mtime: row.get(0)?,
last_sync_usn: row.get(1)?,
})
})
.map_err(Into::into)
}
pub(super) fn set_meta(&mut self, meta: &MediaDatabaseMetadata) -> Result<()> {
let mut stmt = self.db.prepare("update meta set dirMod = ?, lastUsn = ?")?;
stmt.execute(params![meta.folder_mtime, meta.last_sync_usn])?;
Ok(())
}
pub(super) fn count(&mut self) -> Result<u32> {
self.db
.query_row(
"select count(*) from media where csum is not null",
NO_PARAMS,
|row| Ok(row.get(0)?),
)
.map_err(Into::into)
}
pub(super) fn get_pending_uploads(&mut self, max_entries: u32) -> Result<Vec<MediaEntry>> {
let mut stmt = self
.db
.prepare("select fname from media where dirty=1 limit ?")?;
let results: Result<Vec<_>> = stmt
.query_and_then(params![max_entries], |row| {
let fname = row.get_raw(0).as_str()?;
Ok(self.get_entry(fname)?.unwrap())
})?
.collect();
results
}
pub(super) fn all_mtimes(&mut self) -> Result<HashMap<String, i64>> {
let mut stmt = self
.db
.prepare("select fname, mtime from media where csum is not null")?;
let map: std::result::Result<HashMap<String, i64>, rusqlite::Error> = stmt
.query_map(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))?
.collect();
Ok(map?)
}
pub(super) fn force_resync(&mut self) -> Result<()> |
}
fn row_to_entry(row: &Row) -> rusqlite::Result<MediaEntry> {
// map the string checksum into bytes
let sha1_str: Option<String> = row.get(1)?;
let sha1_array = if let Some(s) = sha1_str {
let mut arr = [0; 20];
match hex::decode_to_slice(s, arr.as_mut()) {
Ok(_) => Some(arr),
_ => None,
}
} else {
None
};
// and return the entry
Ok(MediaEntry {
fname: row.get(0)?,
sha1: sha1_array,
mtime: row.get(2)?,
sync_required: row.get(3)?,
})
}
#[cfg(test)]
mod test {
use crate::err::Result;
use crate::media::database::MediaEntry;
use crate::media::files::sha1_of_data;
use crate::media::MediaManager;
use tempfile::NamedTempFile;
#[test]
fn database() -> Result<()> {
let db_file = NamedTempFile::new()?;
let db_file_path = db_file.path().to_str().unwrap();
let mut mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
ctx.transact(|ctx| {
// no entry exists yet
assert_eq!(ctx.get_entry("test.mp3")?, None);
// add one
let mut entry = MediaEntry {
fname: "test.mp3".into(),
sha1: None,
mtime: 0,
sync_required: false,
};
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
// update it
entry.sha1 = Some(sha1_of_data(b"hello"));
entry.mtime = 123;
entry.sync_required = true;
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
assert_eq!(ctx.get_pending_uploads(25)?, vec![entry]);
let mut meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 0);
assert_eq!(meta.last_sync_usn, 0);
meta.folder_mtime = 123;
meta.last_sync_usn = 321;
ctx.set_meta(&meta)?;
meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
assert_eq!(meta.last_sync_usn, 321);
Ok(())
})?;
// reopen database and ensure data was committed
drop(ctx);
drop(mgr);
mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
let meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
Ok(())
}
}
| {
self.db
.execute_batch("delete from media; update meta set lastUsn = 0, dirMod = 0")
.map_err(Into::into)
} | identifier_body |
Solution.py | """
Given the root of a binary tree, return the inorder traversal of its nodes' values.
Example 1:
Input: root = [1,null,2,3]
Output: [1,3,2]
Example 2:
Input: root = []
Output: []
Example 3:
Input: root = [1]
Output: [1]
Example 4:
Input: root = [1,2]
Output: [2,1]
Example 5:
Input: root = [1,null,2]
Output: [1,2]
Constraints:
The number of nodes in the tree is in the range [0, 100].
-100 <= Node.val <= 100
Follow up:
Recursive solution is trivial, could you do it iteratively?
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None | class Solution(object):
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
def inorder(node, ls):
if node is None:
return
inorder(node.left, ls)
ls.append(node.val)
inorder(node.right, ls)
ls = []
inorder(root, ls)
return ls | # self.right = None
| random_line_split |
Solution.py | """
Given the root of a binary tree, return the inorder traversal of its nodes' values.
Example 1:
Input: root = [1,null,2,3]
Output: [1,3,2]
Example 2:
Input: root = []
Output: []
Example 3:
Input: root = [1]
Output: [1]
Example 4:
Input: root = [1,2]
Output: [2,1]
Example 5:
Input: root = [1,null,2]
Output: [1,2]
Constraints:
The number of nodes in the tree is in the range [0, 100].
-100 <= Node.val <= 100
Follow up:
Recursive solution is trivial, could you do it iteratively?
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
def inorder(node, ls):
if node is None:
retu | inorder(node.left, ls)
ls.append(node.val)
inorder(node.right, ls)
ls = []
inorder(root, ls)
return ls | rn
| conditional_block |
Solution.py | """
Given the root of a binary tree, return the inorder traversal of its nodes' values.
Example 1:
Input: root = [1,null,2,3]
Output: [1,3,2]
Example 2:
Input: root = []
Output: []
Example 3:
Input: root = [1]
Output: [1]
Example 4:
Input: root = [1,2]
Output: [2,1]
Example 5:
Input: root = [1,null,2]
Output: [1,2]
Constraints:
The number of nodes in the tree is in the range [0, 100].
-100 <= Node.val <= 100
Follow up:
Recursive solution is trivial, could you do it iteratively?
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
def inor | e, ls):
if node is None:
return
inorder(node.left, ls)
ls.append(node.val)
inorder(node.right, ls)
ls = []
inorder(root, ls)
return ls | der(nod | identifier_name |
Solution.py | """
Given the root of a binary tree, return the inorder traversal of its nodes' values.
Example 1:
Input: root = [1,null,2,3]
Output: [1,3,2]
Example 2:
Input: root = []
Output: []
Example 3:
Input: root = [1]
Output: [1]
Example 4:
Input: root = [1,2]
Output: [2,1]
Example 5:
Input: root = [1,null,2]
Output: [1,2]
Constraints:
The number of nodes in the tree is in the range [0, 100].
-100 <= Node.val <= 100
Follow up:
Recursive solution is trivial, could you do it iteratively?
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
def inorder(node, ls):
if n |
ls = []
inorder(root, ls)
return ls | ode is None:
return
inorder(node.left, ls)
ls.append(node.val)
inorder(node.right, ls)
| identifier_body |
lib.rs | // Copyright 2018 Developers of the Rand project.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The PCG random number generators.
//!
//! This is a native Rust implementation of a small selection of PCG generators.
//! The primary goal of this crate is simple, minimal, well-tested code; in
//! other words it is explicitly not a goal to re-implement all of PCG.
//!
//! This crate provides:
//!
//! - `Pcg32` aka `Lcg64Xsh32`, officially known as `pcg32`, a general
//! purpose RNG. This is a good choice on both 32-bit and 64-bit CPUs
//! (for 32-bit output).
//! - `Pcg64` aka `Lcg128Xsl64`, officially known as `pcg64`, a general
//! purpose RNG. This is a good choice on 64-bit CPUs.
//! - `Pcg64Mcg` aka `Mcg128Xsl64`, officially known as `pcg64_fast`,
//! a general purpose RNG using 128-bit multiplications. This has poor
//! performance on 32-bit CPUs but is a good choice on 64-bit CPUs for
//! both 32-bit and 64-bit output.
//!
//! Both of these use 16 bytes of state and 128-bit seeds, and are considered
//! value-stable (i.e. any change affecting the output given a fixed seed would
//! be considered a breaking change to the crate).
#![doc(
html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk.png", | html_root_url = "https://rust-random.github.io/rand/"
)]
#![deny(missing_docs)]
#![deny(missing_debug_implementations)]
#![no_std]
#[cfg(not(target_os = "emscripten"))] mod pcg128;
mod pcg64;
#[cfg(not(target_os = "emscripten"))]
pub use self::pcg128::{Lcg128Xsl64, Mcg128Xsl64, Pcg64, Pcg64Mcg};
pub use self::pcg64::{Lcg64Xsh32, Pcg32}; | html_favicon_url = "https://www.rust-lang.org/favicon.ico", | random_line_split |
search_highlight.js | /* http://www.kryogenix.org/code/browser/searchhi/ */
/* Modified 20021006 to fix query string parsing and add case insensitivity */
/* Modified 20030227 by sgala@hisitech.com to skip words with "-" and cut %2B (+) preceding pages */
function highlightWord(node,word)
{
// Iterate into this nodes childNodes
if (node.hasChildNodes)
{
var hi_cn;
for (hi_cn=0;hi_cn<node.childNodes.length;hi_cn++)
{
highlightWord(node.childNodes[hi_cn],word);
}
}
// And do this node itself
if (node.nodeType == 3) | {
pn = node.parentNode;
if (pn.className != "searchword")
{
// word has not already been highlighted!
nv = node.nodeValue;
ni = tempNodeVal.indexOf(tempWordVal);
// Create a load of replacement nodes
before = document.createTextNode(nv.substr(0,ni));
docWordVal = nv.substr(ni,word.length);
// alert( "Found: " + docWordVal );
after = document.createTextNode(nv.substr(ni+word.length));
hiwordtext = document.createTextNode(docWordVal);
hiword = document.createElement("span");
hiword.className = "searchword";
hiword.appendChild(hiwordtext);
pn.insertBefore(before,node);
pn.insertBefore(hiword,node);
pn.insertBefore(after,node);
pn.removeChild(node);
}
}
}
}
function googleSearchHighlight()
{
if (!document.createElement) return;
ref = document.referrer; //or URL for highlighting in place
if (ref.indexOf('?') == -1) return;
qs = ref.substr(ref.indexOf('?')+1);
qsa = qs.split('&');
for (i=0;i<qsa.length;i++)
{
qsip = qsa[i].split('=');
if (qsip.length == 1) continue;
// q= for Google, p= for Yahoo
// query= for JSPWiki
if (qsip[0] == 'query' || qsip[0] == 'q')
{
words = qsip[1].replace(/%2B/g,'');
words = words.replace(/-\S+\s/g,'');
words = unescape(words.replace(/\+/g,' ')).split(/\s+/);
for (w=0;w<words.length;w++) {
highlightWord(document.getElementsByTagName("body")[0],words[w]);
}
}
}
}
window.onload = googleSearchHighlight; | { // text node
tempNodeVal = node.nodeValue.toLowerCase();
tempWordVal = word.toLowerCase();
if (tempNodeVal.indexOf(tempWordVal) != -1) | random_line_split |
search_highlight.js | /* http://www.kryogenix.org/code/browser/searchhi/ */
/* Modified 20021006 to fix query string parsing and add case insensitivity */
/* Modified 20030227 by sgala@hisitech.com to skip words with "-" and cut %2B (+) preceding pages */
function | (node,word)
{
// Iterate into this nodes childNodes
if (node.hasChildNodes)
{
var hi_cn;
for (hi_cn=0;hi_cn<node.childNodes.length;hi_cn++)
{
highlightWord(node.childNodes[hi_cn],word);
}
}
// And do this node itself
if (node.nodeType == 3)
{ // text node
tempNodeVal = node.nodeValue.toLowerCase();
tempWordVal = word.toLowerCase();
if (tempNodeVal.indexOf(tempWordVal) != -1)
{
pn = node.parentNode;
if (pn.className != "searchword")
{
// word has not already been highlighted!
nv = node.nodeValue;
ni = tempNodeVal.indexOf(tempWordVal);
// Create a load of replacement nodes
before = document.createTextNode(nv.substr(0,ni));
docWordVal = nv.substr(ni,word.length);
// alert( "Found: " + docWordVal );
after = document.createTextNode(nv.substr(ni+word.length));
hiwordtext = document.createTextNode(docWordVal);
hiword = document.createElement("span");
hiword.className = "searchword";
hiword.appendChild(hiwordtext);
pn.insertBefore(before,node);
pn.insertBefore(hiword,node);
pn.insertBefore(after,node);
pn.removeChild(node);
}
}
}
}
function googleSearchHighlight()
{
if (!document.createElement) return;
ref = document.referrer; //or URL for highlighting in place
if (ref.indexOf('?') == -1) return;
qs = ref.substr(ref.indexOf('?')+1);
qsa = qs.split('&');
for (i=0;i<qsa.length;i++)
{
qsip = qsa[i].split('=');
if (qsip.length == 1) continue;
// q= for Google, p= for Yahoo
// query= for JSPWiki
if (qsip[0] == 'query' || qsip[0] == 'q')
{
words = qsip[1].replace(/%2B/g,'');
words = words.replace(/-\S+\s/g,'');
words = unescape(words.replace(/\+/g,' ')).split(/\s+/);
for (w=0;w<words.length;w++) {
highlightWord(document.getElementsByTagName("body")[0],words[w]);
}
}
}
}
window.onload = googleSearchHighlight;
| highlightWord | identifier_name |
search_highlight.js | /* http://www.kryogenix.org/code/browser/searchhi/ */
/* Modified 20021006 to fix query string parsing and add case insensitivity */
/* Modified 20030227 by sgala@hisitech.com to skip words with "-" and cut %2B (+) preceding pages */
function highlightWord(node,word)
{
// Iterate into this nodes childNodes
if (node.hasChildNodes)
{
var hi_cn;
for (hi_cn=0;hi_cn<node.childNodes.length;hi_cn++)
{
highlightWord(node.childNodes[hi_cn],word);
}
}
// And do this node itself
if (node.nodeType == 3)
{ // text node
tempNodeVal = node.nodeValue.toLowerCase();
tempWordVal = word.toLowerCase();
if (tempNodeVal.indexOf(tempWordVal) != -1)
|
}
}
function googleSearchHighlight()
{
if (!document.createElement) return;
ref = document.referrer; //or URL for highlighting in place
if (ref.indexOf('?') == -1) return;
qs = ref.substr(ref.indexOf('?')+1);
qsa = qs.split('&');
for (i=0;i<qsa.length;i++)
{
qsip = qsa[i].split('=');
if (qsip.length == 1) continue;
// q= for Google, p= for Yahoo
// query= for JSPWiki
if (qsip[0] == 'query' || qsip[0] == 'q')
{
words = qsip[1].replace(/%2B/g,'');
words = words.replace(/-\S+\s/g,'');
words = unescape(words.replace(/\+/g,' ')).split(/\s+/);
for (w=0;w<words.length;w++) {
highlightWord(document.getElementsByTagName("body")[0],words[w]);
}
}
}
}
window.onload = googleSearchHighlight;
| {
pn = node.parentNode;
if (pn.className != "searchword")
{
// word has not already been highlighted!
nv = node.nodeValue;
ni = tempNodeVal.indexOf(tempWordVal);
// Create a load of replacement nodes
before = document.createTextNode(nv.substr(0,ni));
docWordVal = nv.substr(ni,word.length);
// alert( "Found: " + docWordVal );
after = document.createTextNode(nv.substr(ni+word.length));
hiwordtext = document.createTextNode(docWordVal);
hiword = document.createElement("span");
hiword.className = "searchword";
hiword.appendChild(hiwordtext);
pn.insertBefore(before,node);
pn.insertBefore(hiword,node);
pn.insertBefore(after,node);
pn.removeChild(node);
}
} | conditional_block |
search_highlight.js | /* http://www.kryogenix.org/code/browser/searchhi/ */
/* Modified 20021006 to fix query string parsing and add case insensitivity */
/* Modified 20030227 by sgala@hisitech.com to skip words with "-" and cut %2B (+) preceding pages */
function highlightWord(node,word)
{
// Iterate into this nodes childNodes
if (node.hasChildNodes)
{
var hi_cn;
for (hi_cn=0;hi_cn<node.childNodes.length;hi_cn++)
{
highlightWord(node.childNodes[hi_cn],word);
}
}
// And do this node itself
if (node.nodeType == 3)
{ // text node
tempNodeVal = node.nodeValue.toLowerCase();
tempWordVal = word.toLowerCase();
if (tempNodeVal.indexOf(tempWordVal) != -1)
{
pn = node.parentNode;
if (pn.className != "searchword")
{
// word has not already been highlighted!
nv = node.nodeValue;
ni = tempNodeVal.indexOf(tempWordVal);
// Create a load of replacement nodes
before = document.createTextNode(nv.substr(0,ni));
docWordVal = nv.substr(ni,word.length);
// alert( "Found: " + docWordVal );
after = document.createTextNode(nv.substr(ni+word.length));
hiwordtext = document.createTextNode(docWordVal);
hiword = document.createElement("span");
hiword.className = "searchword";
hiword.appendChild(hiwordtext);
pn.insertBefore(before,node);
pn.insertBefore(hiword,node);
pn.insertBefore(after,node);
pn.removeChild(node);
}
}
}
}
function googleSearchHighlight()
|
window.onload = googleSearchHighlight;
| {
if (!document.createElement) return;
ref = document.referrer; //or URL for highlighting in place
if (ref.indexOf('?') == -1) return;
qs = ref.substr(ref.indexOf('?')+1);
qsa = qs.split('&');
for (i=0;i<qsa.length;i++)
{
qsip = qsa[i].split('=');
if (qsip.length == 1) continue;
// q= for Google, p= for Yahoo
// query= for JSPWiki
if (qsip[0] == 'query' || qsip[0] == 'q')
{
words = qsip[1].replace(/%2B/g,'');
words = words.replace(/-\S+\s/g,'');
words = unescape(words.replace(/\+/g,' ')).split(/\s+/);
for (w=0;w<words.length;w++) {
highlightWord(document.getElementsByTagName("body")[0],words[w]);
}
}
}
} | identifier_body |
index.d.ts | /*
* @license Apache-2.0
*
* Copyright (c) 2019 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TypeScript Version: 2.0
/**
* Returns the median for a Fréchet distribution with shape `alpha`, scale `s`, and location `m`.
*
* ## Notes
*
* - If provided `alpha <= 0` or `s <= 0`, the function returns `NaN`.
*
* @param alpha - shape parameter
* @param s - scale parameter
* @param m - location parameter
* @returns median | * // returns ~2.152
*
* @example
* var y = median( 5.0, 2.0, -5.0 );
* // returns ~-2.848
*
* @example
* var y = median( 1.0, 1.0, 0.0 );
* // returns ~1.443
*
* @example
* var y = median( NaN, 1.0, 0.0 );
* // returns NaN
*
* @example
* var y = median( 1.0, NaN, 0.0 );
* // returns NaN
*
* @example
* var y = median( 1.0, 1.0, NaN );
* // returns NaN
*/
declare function median( alpha: number, s: number, m: number ): number;
// EXPORTS //
export = median; | *
* @example
* var y = median( 5.0, 2.0, 0.0 ); | random_line_split |
context.js | var url = require('url')
, path = require('path')
, fs = require('fs')
, utils = require('./utils')
, EventEmitter = require('events').EventEmitter
exports = module.exports = Context
function Context(app, req, res) {
var self = this
this.app = app
this.req = req
this.res = res
this.done = this.done.bind(this)
EventEmitter.call(this)
var socket = res.socket
res.on('finish', done)
socket.on('error', done)
socket.on('close', done)
function done(err) {
res.removeListener('finish', done)
socket.removeListener('error', done)
socket.removeListener('close', done)
self.done(err)
}
}
Context.prototype = {
done: function(err) {
if (this._notifiedDone === true) return
if (err) {
if (this.writable) {
this.resHeaders = {}
this.type = 'text/plain'
this.status = err.code === 'ENOENT' ? 404 : (err.status || 500)
this.length = Buffer.byteLength(err.message)
this.res.end(err.message)
}
this.app.emit('error', err)
}
this._notifiedDone = true
this.emit('done', err)
},
throw: function(status, err) {
status = status || 500
err = err || {}
err.status = status
err.message = err.message || status.toString()
this.done(err)
},
render: function *(view, locals) {
var app = this.app
, viewPath = path.join(app.viewRoot, view)
, ext = path.extname(viewPath)
, exts, engine, content, testPath, i, j
if (!ext || (yield utils.fileExists(viewPath))) {
for (i = 0; app.viewEngines[i]; i++) {
exts = (app.viewEngines[i].exts || ['.' + app.viewEngines[i].name.toLowerCase()])
if (ext) {
if (~exts.indexOf(ext)) {
engine = app.viewEngines[i]
break
}
continue
}
for (j = 0; exts[j]; j++) {
testPath = viewPath + exts[j]
if (yield utils.fileExists(testPath)) {
viewPath = testPath
engine = app.viewEngines[i]
break
}
}
}
}
if (!engine) return this.throw(500, new Error('View does not exist'))
return yield engine.render(viewPath, locals)
},
/*
* opts: { path: ..., domain: ..., expires: ..., maxAge: ..., httpOnly: ..., secure: ..., sign: ... }
*/
cookie: function(name, val, opts) {
if (!opts) opts = {}
if (typeof val == 'object') val = JSON.stringify(val)
if (this.secret && opts.sign) {
val = this.app.cookies.prefix + this.app.cookies.sign(val, this.secret)
}
var headerVal = name + '=' + val + '; Path=' + (opts.path || '/')
if (opts.domain) headerVal += '; Domain=' + opts.domain
if (opts.expires) {
if (typeof opts.expires === 'number') opts.expires = new Date(opts.expires)
if (opts.expires instanceof Date) opts.expires = opts.expires.toUTCString()
headerVal += '; Expires=' + opts.expires
}
if (opts.maxAge) headerVal += '; Max-Age=' + opts.maxAge
if (opts.httpOnly) headerVal += '; HttpOnly'
if (opts.secure) headerVal += '; Secure'
this.setResHeader('Set-Cookie', headerVal)
},
get writable() | ,
get path() {
return url.parse(this.url).pathname
},
set path(val) {
var obj = url.parse(this.url)
obj.pathname = val
this.url = url.format(obj)
},
get status() {
return this._status
},
set status(code) {
this._status = this.res.statusCode = code
},
get type() {
return this.getResHeader('Content-Type')
},
set type(val) {
if (val == null) return this.removeResHeader('Content-Type')
this.setResHeader('Content-Type', val)
},
get length() {
return this.getResHeader('Content-Length')
},
set length(val) {
if (val == null) return this.removeResHeader('Content-Length')
this.setResHeader('Content-Length', val)
},
get body() {
return this._body
},
set body(val) {
this._body = val
}
}
utils.extend(Context.prototype, EventEmitter.prototype)
utils.proxy(Context.prototype, {
req: {
method : 'access',
url : 'access',
secure : 'getter',
headers : ['getter', 'reqHeaders'],
},
res: {
_headers : ['access', 'resHeaders'],
getHeader : ['invoke', 'getResHeader'],
setHeader : ['invoke', 'setResHeader'],
removeHeader : ['invoke', 'removeResHeader']
}
}) | {
var socket = this.res.socket
return socket && socket.writable && !this.res.headersSent
} | identifier_body |
context.js | var url = require('url')
, path = require('path')
, fs = require('fs')
, utils = require('./utils')
, EventEmitter = require('events').EventEmitter
exports = module.exports = Context
function Context(app, req, res) {
var self = this
this.app = app
this.req = req
this.res = res
this.done = this.done.bind(this)
EventEmitter.call(this)
var socket = res.socket
res.on('finish', done)
socket.on('error', done)
socket.on('close', done)
function done(err) {
res.removeListener('finish', done)
socket.removeListener('error', done)
socket.removeListener('close', done)
self.done(err)
}
}
Context.prototype = {
done: function(err) {
if (this._notifiedDone === true) return
if (err) {
if (this.writable) {
this.resHeaders = {}
this.type = 'text/plain'
this.status = err.code === 'ENOENT' ? 404 : (err.status || 500)
this.length = Buffer.byteLength(err.message)
this.res.end(err.message)
}
this.app.emit('error', err)
}
this._notifiedDone = true
this.emit('done', err)
},
throw: function(status, err) {
status = status || 500
err = err || {}
err.status = status
err.message = err.message || status.toString()
this.done(err)
},
render: function *(view, locals) {
var app = this.app
, viewPath = path.join(app.viewRoot, view)
, ext = path.extname(viewPath)
, exts, engine, content, testPath, i, j
if (!ext || (yield utils.fileExists(viewPath))) {
for (i = 0; app.viewEngines[i]; i++) {
exts = (app.viewEngines[i].exts || ['.' + app.viewEngines[i].name.toLowerCase()])
if (ext) {
if (~exts.indexOf(ext)) {
engine = app.viewEngines[i]
break
}
continue
}
for (j = 0; exts[j]; j++) {
testPath = viewPath + exts[j]
if (yield utils.fileExists(testPath)) {
viewPath = testPath
engine = app.viewEngines[i]
break
}
}
}
}
if (!engine) return this.throw(500, new Error('View does not exist'))
return yield engine.render(viewPath, locals)
},
/*
* opts: { path: ..., domain: ..., expires: ..., maxAge: ..., httpOnly: ..., secure: ..., sign: ... }
*/
cookie: function(name, val, opts) {
if (!opts) opts = {}
if (typeof val == 'object') val = JSON.stringify(val)
if (this.secret && opts.sign) {
val = this.app.cookies.prefix + this.app.cookies.sign(val, this.secret)
}
var headerVal = name + '=' + val + '; Path=' + (opts.path || '/')
if (opts.domain) headerVal += '; Domain=' + opts.domain
if (opts.expires) {
if (typeof opts.expires === 'number') opts.expires = new Date(opts.expires)
if (opts.expires instanceof Date) opts.expires = opts.expires.toUTCString()
headerVal += '; Expires=' + opts.expires
}
if (opts.maxAge) headerVal += '; Max-Age=' + opts.maxAge
if (opts.httpOnly) headerVal += '; HttpOnly'
if (opts.secure) headerVal += '; Secure'
this.setResHeader('Set-Cookie', headerVal)
},
get writable() {
var socket = this.res.socket
return socket && socket.writable && !this.res.headersSent
},
get | () {
return url.parse(this.url).pathname
},
set path(val) {
var obj = url.parse(this.url)
obj.pathname = val
this.url = url.format(obj)
},
get status() {
return this._status
},
set status(code) {
this._status = this.res.statusCode = code
},
get type() {
return this.getResHeader('Content-Type')
},
set type(val) {
if (val == null) return this.removeResHeader('Content-Type')
this.setResHeader('Content-Type', val)
},
get length() {
return this.getResHeader('Content-Length')
},
set length(val) {
if (val == null) return this.removeResHeader('Content-Length')
this.setResHeader('Content-Length', val)
},
get body() {
return this._body
},
set body(val) {
this._body = val
}
}
utils.extend(Context.prototype, EventEmitter.prototype)
utils.proxy(Context.prototype, {
req: {
method : 'access',
url : 'access',
secure : 'getter',
headers : ['getter', 'reqHeaders'],
},
res: {
_headers : ['access', 'resHeaders'],
getHeader : ['invoke', 'getResHeader'],
setHeader : ['invoke', 'setResHeader'],
removeHeader : ['invoke', 'removeResHeader']
}
}) | path | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.