file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Python bindings for MLlib.
"""
from __future__ import absolute_import
# MLlib currently needs NumPy 1.4+, so complain if lower
import numpy
ver = [int(x) for x in numpy.version.version.split('.')[:2]]
if ver < [1, 4]:
|
__all__ = ['classification', 'clustering', 'feature', 'fpm', 'linalg', 'random',
'recommendation', 'regression', 'stat', 'tree', 'util']
| raise Exception("MLlib requires NumPy 1.4+") | conditional_block |
test_new_instance.py | from django.contrib.auth.models import User
from django.test import TestCase, Client
from django.urls import reverse
# Declaration of Username and Password
username = 'admin'
password = 'Test1234$'
"""
Method to replicate
~~~~~~~~~~~~~~~~~~~
1. Bring up a new instance of NearBeach (grab from fixtures)
2. Try and log in as the admin user
Expected Results
~~~~~~~~~~~~~~~~
User will log in with no issues, system will create all of the user's permission sets and groups
"""
def login_user(c: object, self: object) -> object:
response = c.post(
reverse('login'),
self.credentials,
follow=True,
)
self.assertTrue(response.context['user'].is_active)
class NewInstanceLoginTest(TestCase):
fixtures = ['NearBeach_no_setup.json']
def setUp(self):
|
def test_admin_login(self):
c = Client()
# User will be logged in
login_user(c, self)
# Make sure the admin user can open up the project
response = c.get(reverse('dashboard'))
self.assertEqual(response.status_code, 200)
| self.credentials = {
'username': username,
'password': password
} | identifier_body |
test_new_instance.py | from django.contrib.auth.models import User
from django.test import TestCase, Client
from django.urls import reverse
# Declaration of Username and Password
username = 'admin'
password = 'Test1234$'
"""
Method to replicate
~~~~~~~~~~~~~~~~~~~
1. Bring up a new instance of NearBeach (grab from fixtures)
2. Try and log in as the admin user
Expected Results
~~~~~~~~~~~~~~~~
User will log in with no issues, system will create all of the user's permission sets and groups
"""
def login_user(c: object, self: object) -> object:
response = c.post(
reverse('login'),
self.credentials,
follow=True,
)
self.assertTrue(response.context['user'].is_active)
class | (TestCase):
fixtures = ['NearBeach_no_setup.json']
def setUp(self):
self.credentials = {
'username': username,
'password': password
}
def test_admin_login(self):
c = Client()
# User will be logged in
login_user(c, self)
# Make sure the admin user can open up the project
response = c.get(reverse('dashboard'))
self.assertEqual(response.status_code, 200)
| NewInstanceLoginTest | identifier_name |
test_new_instance.py | from django.contrib.auth.models import User
from django.test import TestCase, Client
from django.urls import reverse
# Declaration of Username and Password
username = 'admin' | 1. Bring up a new instance of NearBeach (grab from fixtures)
2. Try and log in as the admin user
Expected Results
~~~~~~~~~~~~~~~~
User will log in with no issues, system will create all of the user's permission sets and groups
"""
def login_user(c: object, self: object) -> object:
response = c.post(
reverse('login'),
self.credentials,
follow=True,
)
self.assertTrue(response.context['user'].is_active)
class NewInstanceLoginTest(TestCase):
fixtures = ['NearBeach_no_setup.json']
def setUp(self):
self.credentials = {
'username': username,
'password': password
}
def test_admin_login(self):
c = Client()
# User will be logged in
login_user(c, self)
# Make sure the admin user can open up the project
response = c.get(reverse('dashboard'))
self.assertEqual(response.status_code, 200) | password = 'Test1234$'
"""
Method to replicate
~~~~~~~~~~~~~~~~~~~ | random_line_split |
image.ts | /**
* Entry point for Embed Images
*/
/// <reference path="./embed.d.ts" />
// require('html5shiv');
//import _ = require('underscore');
let pym = require('pym.js');
import ConfigService from '../components/embed/config/service';
import ResizeEl from '../components/utils/resizeEl';
export default class Embed {
private configService;
private resourceOriginalWidth;
private resourceOriginalHeight;
private $embedContainer;
private $embedResource;
private resizer;
private pymChild;
constructor() {
let that = this;
this.configService = new ConfigService();
this.$embedContainer = document.getElementById('embed-content');
this.$embedResource = document.getElementById('embed-image');
this.resourceOriginalWidth = this.configService.get('resource.width');
this.resourceOriginalHeight = this.configService.get('resource.height');
if( this.configService.get('isStandalone') === true ) {
this.initResizer();
} else {
this.$embedResource.style.width = '100%';
this.$embedResource.style.height = 'auto';
this.pymChild = new (<any>pym).Child({id: 'phraseanet-embed-frame', renderCallback: function(windowWidth) {
let ratio = that.resourceOriginalHeight / that.resourceOriginalWidth;
that.$embedResource.style.width = '100%';
that.$embedResource.style.height = 'auto';
// send image calculated height
that.$embedContainer.style.height = windowWidth * ratio + 'px';
}});
if (this.pymChild.parentUrl === '') {
// no parent pym:
this.initResizer();
}
}
}
initResizer() {
this.resizer = new ResizeEl({
target: this.$embedResource,
container: this.$embedContainer,
resizeOnWindowChange: this.configService.get('resource.fitIn') === true ? true : false
});
this.resizer.setContainerDimensions({
width: <any>window.innerWidth,
height: <any>window.innerHeight
});
this.resizer.setTargetDimensions({
width: this.resourceOriginalWidth,
height: this.resourceOriginalHeight | }
}
(<any>window).embedPlugin = new Embed(); | });
this.resizer.resize(); | random_line_split |
image.ts | /**
* Entry point for Embed Images
*/
/// <reference path="./embed.d.ts" />
// require('html5shiv');
//import _ = require('underscore');
let pym = require('pym.js');
import ConfigService from '../components/embed/config/service';
import ResizeEl from '../components/utils/resizeEl';
export default class | {
private configService;
private resourceOriginalWidth;
private resourceOriginalHeight;
private $embedContainer;
private $embedResource;
private resizer;
private pymChild;
constructor() {
let that = this;
this.configService = new ConfigService();
this.$embedContainer = document.getElementById('embed-content');
this.$embedResource = document.getElementById('embed-image');
this.resourceOriginalWidth = this.configService.get('resource.width');
this.resourceOriginalHeight = this.configService.get('resource.height');
if( this.configService.get('isStandalone') === true ) {
this.initResizer();
} else {
this.$embedResource.style.width = '100%';
this.$embedResource.style.height = 'auto';
this.pymChild = new (<any>pym).Child({id: 'phraseanet-embed-frame', renderCallback: function(windowWidth) {
let ratio = that.resourceOriginalHeight / that.resourceOriginalWidth;
that.$embedResource.style.width = '100%';
that.$embedResource.style.height = 'auto';
// send image calculated height
that.$embedContainer.style.height = windowWidth * ratio + 'px';
}});
if (this.pymChild.parentUrl === '') {
// no parent pym:
this.initResizer();
}
}
}
initResizer() {
this.resizer = new ResizeEl({
target: this.$embedResource,
container: this.$embedContainer,
resizeOnWindowChange: this.configService.get('resource.fitIn') === true ? true : false
});
this.resizer.setContainerDimensions({
width: <any>window.innerWidth,
height: <any>window.innerHeight
});
this.resizer.setTargetDimensions({
width: this.resourceOriginalWidth,
height: this.resourceOriginalHeight
});
this.resizer.resize();
}
}
(<any>window).embedPlugin = new Embed();
| Embed | identifier_name |
image.ts | /**
* Entry point for Embed Images
*/
/// <reference path="./embed.d.ts" />
// require('html5shiv');
//import _ = require('underscore');
let pym = require('pym.js');
import ConfigService from '../components/embed/config/service';
import ResizeEl from '../components/utils/resizeEl';
export default class Embed {
private configService;
private resourceOriginalWidth;
private resourceOriginalHeight;
private $embedContainer;
private $embedResource;
private resizer;
private pymChild;
constructor() {
let that = this;
this.configService = new ConfigService();
this.$embedContainer = document.getElementById('embed-content');
this.$embedResource = document.getElementById('embed-image');
this.resourceOriginalWidth = this.configService.get('resource.width');
this.resourceOriginalHeight = this.configService.get('resource.height');
if( this.configService.get('isStandalone') === true ) | else {
this.$embedResource.style.width = '100%';
this.$embedResource.style.height = 'auto';
this.pymChild = new (<any>pym).Child({id: 'phraseanet-embed-frame', renderCallback: function(windowWidth) {
let ratio = that.resourceOriginalHeight / that.resourceOriginalWidth;
that.$embedResource.style.width = '100%';
that.$embedResource.style.height = 'auto';
// send image calculated height
that.$embedContainer.style.height = windowWidth * ratio + 'px';
}});
if (this.pymChild.parentUrl === '') {
// no parent pym:
this.initResizer();
}
}
}
initResizer() {
this.resizer = new ResizeEl({
target: this.$embedResource,
container: this.$embedContainer,
resizeOnWindowChange: this.configService.get('resource.fitIn') === true ? true : false
});
this.resizer.setContainerDimensions({
width: <any>window.innerWidth,
height: <any>window.innerHeight
});
this.resizer.setTargetDimensions({
width: this.resourceOriginalWidth,
height: this.resourceOriginalHeight
});
this.resizer.resize();
}
}
(<any>window).embedPlugin = new Embed();
| {
this.initResizer();
} | conditional_block |
image.ts | /**
* Entry point for Embed Images
*/
/// <reference path="./embed.d.ts" />
// require('html5shiv');
//import _ = require('underscore');
let pym = require('pym.js');
import ConfigService from '../components/embed/config/service';
import ResizeEl from '../components/utils/resizeEl';
export default class Embed {
private configService;
private resourceOriginalWidth;
private resourceOriginalHeight;
private $embedContainer;
private $embedResource;
private resizer;
private pymChild;
constructor() {
let that = this;
this.configService = new ConfigService();
this.$embedContainer = document.getElementById('embed-content');
this.$embedResource = document.getElementById('embed-image');
this.resourceOriginalWidth = this.configService.get('resource.width');
this.resourceOriginalHeight = this.configService.get('resource.height');
if( this.configService.get('isStandalone') === true ) {
this.initResizer();
} else {
this.$embedResource.style.width = '100%';
this.$embedResource.style.height = 'auto';
this.pymChild = new (<any>pym).Child({id: 'phraseanet-embed-frame', renderCallback: function(windowWidth) {
let ratio = that.resourceOriginalHeight / that.resourceOriginalWidth;
that.$embedResource.style.width = '100%';
that.$embedResource.style.height = 'auto';
// send image calculated height
that.$embedContainer.style.height = windowWidth * ratio + 'px';
}});
if (this.pymChild.parentUrl === '') {
// no parent pym:
this.initResizer();
}
}
}
initResizer() |
}
(<any>window).embedPlugin = new Embed();
| {
this.resizer = new ResizeEl({
target: this.$embedResource,
container: this.$embedContainer,
resizeOnWindowChange: this.configService.get('resource.fitIn') === true ? true : false
});
this.resizer.setContainerDimensions({
width: <any>window.innerWidth,
height: <any>window.innerHeight
});
this.resizer.setTargetDimensions({
width: this.resourceOriginalWidth,
height: this.resourceOriginalHeight
});
this.resizer.resize();
} | identifier_body |
RecipeReviewCard.js | import * as React from 'react';
import { experimentalStyled as styled } from '@material-ui/core/styles';
import Card from '@material-ui/core/Card';
import CardHeader from '@material-ui/core/CardHeader';
import CardMedia from '@material-ui/core/CardMedia';
import CardContent from '@material-ui/core/CardContent';
import CardActions from '@material-ui/core/CardActions';
import Collapse from '@material-ui/core/Collapse';
import Avatar from '@material-ui/core/Avatar';
import IconButton from '@material-ui/core/IconButton';
import Typography from '@material-ui/core/Typography';
import { red } from '@material-ui/core/colors';
import FavoriteIcon from '@material-ui/icons/Favorite';
import ShareIcon from '@material-ui/icons/Share';
import ExpandMoreIcon from '@material-ui/icons/ExpandMore';
import MoreVertIcon from '@material-ui/icons/MoreVert';
const ExpandMore = styled((props) => {
const { expand, ...other } = props;
return <IconButton {...other} />;
})(({ theme, expand }) => ({
transform: !expand ? 'rotate(0deg)' : 'rotate(180deg)',
marginLeft: 'auto',
transition: theme.transitions.create('transform', {
duration: theme.transitions.duration.shortest,
}),
}));
export default function RecipeReviewCard() {
const [expanded, setExpanded] = React.useState(false);
const handleExpandClick = () => {
setExpanded(!expanded);
};
return (
<Card sx={{ maxWidth: 345 }}>
<CardHeader
avatar={
<Avatar sx={{ bgcolor: red[500] }} aria-label="recipe">
R
</Avatar>
}
action={
<IconButton aria-label="settings"> | title="Shrimp and Chorizo Paella"
subheader="September 14, 2016"
/>
<CardMedia
sx={{
height: 0,
paddingTop: '56.25%', // 16:9
}}
image="/static/images/cards/paella.jpg"
title="Paella dish"
/>
<CardContent>
<Typography variant="body2" color="text.secondary">
This impressive paella is a perfect party dish and a fun meal to cook
together with your guests. Add 1 cup of frozen peas along with the mussels,
if you like.
</Typography>
</CardContent>
<CardActions disableSpacing>
<IconButton aria-label="add to favorites">
<FavoriteIcon />
</IconButton>
<IconButton aria-label="share">
<ShareIcon />
</IconButton>
<ExpandMore
expand={expanded}
onClick={handleExpandClick}
aria-expanded={expanded}
aria-label="show more"
>
<ExpandMoreIcon />
</ExpandMore>
</CardActions>
<Collapse in={expanded} timeout="auto" unmountOnExit>
<CardContent>
<Typography paragraph>Method:</Typography>
<Typography paragraph>
Heat 1/2 cup of the broth in a pot until simmering, add saffron and set
aside for 10 minutes.
</Typography>
<Typography paragraph>
Heat oil in a (14- to 16-inch) paella pan or a large, deep skillet over
medium-high heat. Add chicken, shrimp and chorizo, and cook, stirring
occasionally until lightly browned, 6 to 8 minutes. Transfer shrimp to a
large plate and set aside, leaving chicken and chorizo in the pan. Add
pimentón, bay leaves, garlic, tomatoes, onion, salt and pepper, and cook,
stirring often until thickened and fragrant, about 10 minutes. Add
saffron broth and remaining 4 1/2 cups chicken broth; bring to a boil.
</Typography>
<Typography paragraph>
Add rice and stir very gently to distribute. Top with artichokes and
peppers, and cook without stirring, until most of the liquid is absorbed,
15 to 18 minutes. Reduce heat to medium-low, add reserved shrimp and
mussels, tucking them down into the rice, and cook again without
stirring, until mussels have opened and rice is just tender, 5 to 7
minutes more. (Discard any mussels that don’t open.)
</Typography>
<Typography>
Set aside off of the heat to let rest for 10 minutes, and then serve.
</Typography>
</CardContent>
</Collapse>
</Card>
);
} | <MoreVertIcon />
</IconButton>
} | random_line_split |
RecipeReviewCard.js | import * as React from 'react';
import { experimentalStyled as styled } from '@material-ui/core/styles';
import Card from '@material-ui/core/Card';
import CardHeader from '@material-ui/core/CardHeader';
import CardMedia from '@material-ui/core/CardMedia';
import CardContent from '@material-ui/core/CardContent';
import CardActions from '@material-ui/core/CardActions';
import Collapse from '@material-ui/core/Collapse';
import Avatar from '@material-ui/core/Avatar';
import IconButton from '@material-ui/core/IconButton';
import Typography from '@material-ui/core/Typography';
import { red } from '@material-ui/core/colors';
import FavoriteIcon from '@material-ui/icons/Favorite';
import ShareIcon from '@material-ui/icons/Share';
import ExpandMoreIcon from '@material-ui/icons/ExpandMore';
import MoreVertIcon from '@material-ui/icons/MoreVert';
const ExpandMore = styled((props) => {
const { expand, ...other } = props;
return <IconButton {...other} />;
})(({ theme, expand }) => ({
transform: !expand ? 'rotate(0deg)' : 'rotate(180deg)',
marginLeft: 'auto',
transition: theme.transitions.create('transform', {
duration: theme.transitions.duration.shortest,
}),
}));
export default function RecipeReviewCard() | {
const [expanded, setExpanded] = React.useState(false);
const handleExpandClick = () => {
setExpanded(!expanded);
};
return (
<Card sx={{ maxWidth: 345 }}>
<CardHeader
avatar={
<Avatar sx={{ bgcolor: red[500] }} aria-label="recipe">
R
</Avatar>
}
action={
<IconButton aria-label="settings">
<MoreVertIcon />
</IconButton>
}
title="Shrimp and Chorizo Paella"
subheader="September 14, 2016"
/>
<CardMedia
sx={{
height: 0,
paddingTop: '56.25%', // 16:9
}}
image="/static/images/cards/paella.jpg"
title="Paella dish"
/>
<CardContent>
<Typography variant="body2" color="text.secondary">
This impressive paella is a perfect party dish and a fun meal to cook
together with your guests. Add 1 cup of frozen peas along with the mussels,
if you like.
</Typography>
</CardContent>
<CardActions disableSpacing>
<IconButton aria-label="add to favorites">
<FavoriteIcon />
</IconButton>
<IconButton aria-label="share">
<ShareIcon />
</IconButton>
<ExpandMore
expand={expanded}
onClick={handleExpandClick}
aria-expanded={expanded}
aria-label="show more"
>
<ExpandMoreIcon />
</ExpandMore>
</CardActions>
<Collapse in={expanded} timeout="auto" unmountOnExit>
<CardContent>
<Typography paragraph>Method:</Typography>
<Typography paragraph>
Heat 1/2 cup of the broth in a pot until simmering, add saffron and set
aside for 10 minutes.
</Typography>
<Typography paragraph>
Heat oil in a (14- to 16-inch) paella pan or a large, deep skillet over
medium-high heat. Add chicken, shrimp and chorizo, and cook, stirring
occasionally until lightly browned, 6 to 8 minutes. Transfer shrimp to a
large plate and set aside, leaving chicken and chorizo in the pan. Add
pimentón, bay leaves, garlic, tomatoes, onion, salt and pepper, and cook,
stirring often until thickened and fragrant, about 10 minutes. Add
saffron broth and remaining 4 1/2 cups chicken broth; bring to a boil.
</Typography>
<Typography paragraph>
Add rice and stir very gently to distribute. Top with artichokes and
peppers, and cook without stirring, until most of the liquid is absorbed,
15 to 18 minutes. Reduce heat to medium-low, add reserved shrimp and
mussels, tucking them down into the rice, and cook again without
stirring, until mussels have opened and rice is just tender, 5 to 7
minutes more. (Discard any mussels that don’t open.)
</Typography>
<Typography>
Set aside off of the heat to let rest for 10 minutes, and then serve.
</Typography>
</CardContent>
</Collapse>
</Card>
);
}
| identifier_body | |
RecipeReviewCard.js | import * as React from 'react';
import { experimentalStyled as styled } from '@material-ui/core/styles';
import Card from '@material-ui/core/Card';
import CardHeader from '@material-ui/core/CardHeader';
import CardMedia from '@material-ui/core/CardMedia';
import CardContent from '@material-ui/core/CardContent';
import CardActions from '@material-ui/core/CardActions';
import Collapse from '@material-ui/core/Collapse';
import Avatar from '@material-ui/core/Avatar';
import IconButton from '@material-ui/core/IconButton';
import Typography from '@material-ui/core/Typography';
import { red } from '@material-ui/core/colors';
import FavoriteIcon from '@material-ui/icons/Favorite';
import ShareIcon from '@material-ui/icons/Share';
import ExpandMoreIcon from '@material-ui/icons/ExpandMore';
import MoreVertIcon from '@material-ui/icons/MoreVert';
const ExpandMore = styled((props) => {
const { expand, ...other } = props;
return <IconButton {...other} />;
})(({ theme, expand }) => ({
transform: !expand ? 'rotate(0deg)' : 'rotate(180deg)',
marginLeft: 'auto',
transition: theme.transitions.create('transform', {
duration: theme.transitions.duration.shortest,
}),
}));
export default function | () {
const [expanded, setExpanded] = React.useState(false);
const handleExpandClick = () => {
setExpanded(!expanded);
};
return (
<Card sx={{ maxWidth: 345 }}>
<CardHeader
avatar={
<Avatar sx={{ bgcolor: red[500] }} aria-label="recipe">
R
</Avatar>
}
action={
<IconButton aria-label="settings">
<MoreVertIcon />
</IconButton>
}
title="Shrimp and Chorizo Paella"
subheader="September 14, 2016"
/>
<CardMedia
sx={{
height: 0,
paddingTop: '56.25%', // 16:9
}}
image="/static/images/cards/paella.jpg"
title="Paella dish"
/>
<CardContent>
<Typography variant="body2" color="text.secondary">
This impressive paella is a perfect party dish and a fun meal to cook
together with your guests. Add 1 cup of frozen peas along with the mussels,
if you like.
</Typography>
</CardContent>
<CardActions disableSpacing>
<IconButton aria-label="add to favorites">
<FavoriteIcon />
</IconButton>
<IconButton aria-label="share">
<ShareIcon />
</IconButton>
<ExpandMore
expand={expanded}
onClick={handleExpandClick}
aria-expanded={expanded}
aria-label="show more"
>
<ExpandMoreIcon />
</ExpandMore>
</CardActions>
<Collapse in={expanded} timeout="auto" unmountOnExit>
<CardContent>
<Typography paragraph>Method:</Typography>
<Typography paragraph>
Heat 1/2 cup of the broth in a pot until simmering, add saffron and set
aside for 10 minutes.
</Typography>
<Typography paragraph>
Heat oil in a (14- to 16-inch) paella pan or a large, deep skillet over
medium-high heat. Add chicken, shrimp and chorizo, and cook, stirring
occasionally until lightly browned, 6 to 8 minutes. Transfer shrimp to a
large plate and set aside, leaving chicken and chorizo in the pan. Add
pimentón, bay leaves, garlic, tomatoes, onion, salt and pepper, and cook,
stirring often until thickened and fragrant, about 10 minutes. Add
saffron broth and remaining 4 1/2 cups chicken broth; bring to a boil.
</Typography>
<Typography paragraph>
Add rice and stir very gently to distribute. Top with artichokes and
peppers, and cook without stirring, until most of the liquid is absorbed,
15 to 18 minutes. Reduce heat to medium-low, add reserved shrimp and
mussels, tucking them down into the rice, and cook again without
stirring, until mussels have opened and rice is just tender, 5 to 7
minutes more. (Discard any mussels that don’t open.)
</Typography>
<Typography>
Set aside off of the heat to let rest for 10 minutes, and then serve.
</Typography>
</CardContent>
</Collapse>
</Card>
);
}
| RecipeReviewCard | identifier_name |
WebRtcProducer.ts | ///<reference path="../../../lib/RTCPeerConnection.d.ts"/>
///<reference path="WebRtcCommons.ts"/>
"use strict";
class WebRtcProducer {
private _id : string;
private _debugMode : boolean = false;
private _successCalled : boolean = false;
private connection: RTCPeerConnection = null;
private channel: RTCDataChannel = null;
private _onPassDataToPeer : IWebRtcConnectionDataCallback = null;
private _onConnectionSucces : () => void = null;
private _onConnectionError : (error: Object) => void = null;
private _config: any = null;
/**
* constructor
*/
constructor(servers: RTCIceServer[], _id?: string, _debugMode?: boolean) {
this._id = _id;
this._debugMode = _debugMode||false;
if (servers != null)
this._config = { "iceServers": servers };
}
/**
* setCallbacks
*/
setCallbacks(onPassDataToPeer: IWebRtcConnectionDataCallback,
onConnectionSucces: () => void, onConnectionError: (error: Object) => void): void {
this._onPassDataToPeer = onPassDataToPeer;
this._onConnectionSucces = onConnectionSucces;
this._onConnectionError = onConnectionError;
}
/**
* isConnected
*/
isConnected(): boolean {
return this.connection != null
&& (this.connection.iceConnectionState === 'completed' //RTCIceConnectionState.completed
|| this.connection.iceConnectionState === 'connected')
&& this.channel != null
&& this.channel.readyState === 'open' ; //RTCDataChannelState.open
}
/**
* configure
*/
configure(data: IWebRtcConnectionData): void {
var self = this;
// step 1
if (data === null) {
if (this._debugMode)
self.log('configure - Step1', data);
this.connection.createOffer(
function(sdp: RTCSessionDescription): void {
if (self._debugMode)
self.log('onOfferCreated', sdp);
self.connection.setLocalDescription(sdp, null);
self._onPassDataToPeer({'RTCSessionDescription': sdp});
},
function (errorInformation: DOMError): void {
console.error('onOfferError', errorInformation);
});
} else
// step 2
if (data['RTCSessionDescription'] != undefined) {
if (this._debugMode)
this.log('configure - Step2', data);
this.connection.setRemoteDescription(data['RTCSessionDescription']);
} else
// step 3
if (data['RTCIceCandidate'] != undefined) {
if (this._debugMode)
this.log('configure - Step3', data);
this.connection.addIceCandidate(data['RTCIceCandidate'],
function(): void {
if (self._debugMode)
self.log('onAddIceCandidateSuccess');
},
function (error): void {
if (self._debugMode)
self.log('onAddIceCandidateError');
});
}
}
/**
* sendText
*/
| (msg: string): void {
if (this._debugMode)
this.log('Sending message: "' +msg +'"');
if (!this.isConnected())
throw new WebRtcConnectionNotInitializedError('');
this.channel.send(msg);
}
/**
* open
*/
open(): void{
if (this._debugMode)
this.log('Creating new; iceServers: ' +JSON.stringify(this._config));
if (typeof webkitRTCPeerConnection === 'function') {
this.connection = new webkitRTCPeerConnection( this._config );
} else if (typeof mozRTCPeerConnection === 'function') {
throw new Error('Not implemented yet.');
//this.connection = new mozRTCPeerConnection( this._config );
} else
throw new Error('unknown implementation of RTCPeerConnection');
this.internalInit();
this._successCalled = false;
}
/**
* close
*/
close(): void{
this._successCalled = false;
if (this.channel != null)
this.channel.close();
if (this.connection != null)
this.connection.close();
}
/**
* internalInit
*/
private internalInit(): void {
this.channel = this.connection.createDataChannel('label', null);
this.channel.onopen = this.onReceiveChannelStateChange;
this.channel.onclose = this.onReceiveChannelStateChange;
this.connection.onicecandidate =
function(event: RTCIceCandidateEvent): void {
if (event.candidate) {
if (this._debugMode)
this.log('onIceCandidate', event.candidate);
this._onPassDataToPeer({'RTCIceCandidate': event.candidate});
}
this.tryCallSuccess();
}.bind(this);
this.connection.oniceconnectionstatechange =
function(event: Event): void {
if (this._debugMode)
this.log('onIceConnectionStateChange: ' +this.connection.iceConnectionState, event);
this.tryCallSuccess();
}.bind(this);
}
/**
* onReceiveChannelStateChange
*/
private onReceiveChannelStateChange = function(event: Event): void {
if (this._debugMode)
this.log('onReceiveChannelStateChange', event);
this.tryCallSuccess();
}.bind(this);
/**
* tryCallSuccess
*/
private tryCallSuccess = function(): void {
if (!this._successCalled && this.isConnected()) {
if (this._debugMode)
this.log('triggering onConnectionSucces callback');
this._successCalled = true;
this._onConnectionSucces();
}
}.bind(this);
/**
* log
*/
private log(msg: string, ...optionalParams: Object[]) {
if (!this._debugMode)
throw new Error('Debug mode is disabled.');
var arr: Object[] = new Array<Object>().concat(this.dbgId() + ' ' + msg).concat(optionalParams);
console.log.apply(console, arr);
document.writeln(this.dbgId() +' ' +msg +' ' +this.connectionState() +'<br>');
}
/**
* connectionState
*/
private connectionState(): string {
return '<b>[connected: '+this.isConnected() +']</b> '
+'connection.iceConnectionState: '+ (this.connection === null ? 'null' : this.connection.iceConnectionState) +'; '
+'connection.iceGatheringState: '+ (this.connection === null ? 'null' : this.connection.iceGatheringState) +'; '
+'connection.signalingState: '+ (this.connection === null ? 'null' : this.connection.signalingState) +'; '
+'channel.readyState: '+ (this.channel === null ? 'null' : this.channel.readyState);
}
/**
* dbgId
*/
private dbgId(): string{
return '[' +(this._id != '' ? this._id +' ' : '') +'producer]';
}
}
| sendMessage | identifier_name |
WebRtcProducer.ts | ///<reference path="../../../lib/RTCPeerConnection.d.ts"/>
///<reference path="WebRtcCommons.ts"/>
"use strict";
class WebRtcProducer {
private _id : string;
private _debugMode : boolean = false;
private _successCalled : boolean = false;
private connection: RTCPeerConnection = null;
private channel: RTCDataChannel = null;
private _onPassDataToPeer : IWebRtcConnectionDataCallback = null;
private _onConnectionSucces : () => void = null;
private _onConnectionError : (error: Object) => void = null;
private _config: any = null;
/**
* constructor
*/
constructor(servers: RTCIceServer[], _id?: string, _debugMode?: boolean) {
this._id = _id;
this._debugMode = _debugMode||false;
if (servers != null)
this._config = { "iceServers": servers };
}
/**
* setCallbacks
*/
setCallbacks(onPassDataToPeer: IWebRtcConnectionDataCallback,
onConnectionSucces: () => void, onConnectionError: (error: Object) => void): void {
this._onPassDataToPeer = onPassDataToPeer;
this._onConnectionSucces = onConnectionSucces;
this._onConnectionError = onConnectionError;
}
/**
* isConnected
*/
isConnected(): boolean {
return this.connection != null
&& (this.connection.iceConnectionState === 'completed' //RTCIceConnectionState.completed
|| this.connection.iceConnectionState === 'connected')
&& this.channel != null
&& this.channel.readyState === 'open' ; //RTCDataChannelState.open
}
/**
* configure
*/
configure(data: IWebRtcConnectionData): void {
var self = this;
// step 1
if (data === null) {
if (this._debugMode)
self.log('configure - Step1', data);
this.connection.createOffer(
function(sdp: RTCSessionDescription): void {
if (self._debugMode)
self.log('onOfferCreated', sdp);
self.connection.setLocalDescription(sdp, null);
self._onPassDataToPeer({'RTCSessionDescription': sdp});
},
function (errorInformation: DOMError): void {
console.error('onOfferError', errorInformation);
});
} else
// step 2
if (data['RTCSessionDescription'] != undefined) {
if (this._debugMode)
this.log('configure - Step2', data);
this.connection.setRemoteDescription(data['RTCSessionDescription']);
} else
// step 3
if (data['RTCIceCandidate'] != undefined) {
if (this._debugMode)
this.log('configure - Step3', data);
this.connection.addIceCandidate(data['RTCIceCandidate'],
function(): void {
if (self._debugMode)
self.log('onAddIceCandidateSuccess');
},
function (error): void {
if (self._debugMode)
self.log('onAddIceCandidateError');
});
}
}
/**
* sendText
*/
sendMessage(msg: string): void {
if (this._debugMode)
this.log('Sending message: "' +msg +'"');
if (!this.isConnected())
throw new WebRtcConnectionNotInitializedError('');
this.channel.send(msg);
}
/**
* open
*/
open(): void{
if (this._debugMode)
this.log('Creating new; iceServers: ' +JSON.stringify(this._config)); | if (typeof webkitRTCPeerConnection === 'function') {
this.connection = new webkitRTCPeerConnection( this._config );
} else if (typeof mozRTCPeerConnection === 'function') {
throw new Error('Not implemented yet.');
//this.connection = new mozRTCPeerConnection( this._config );
} else
throw new Error('unknown implementation of RTCPeerConnection');
this.internalInit();
this._successCalled = false;
}
/**
* close
*/
close(): void{
this._successCalled = false;
if (this.channel != null)
this.channel.close();
if (this.connection != null)
this.connection.close();
}
/**
* internalInit
*/
private internalInit(): void {
this.channel = this.connection.createDataChannel('label', null);
this.channel.onopen = this.onReceiveChannelStateChange;
this.channel.onclose = this.onReceiveChannelStateChange;
this.connection.onicecandidate =
function(event: RTCIceCandidateEvent): void {
if (event.candidate) {
if (this._debugMode)
this.log('onIceCandidate', event.candidate);
this._onPassDataToPeer({'RTCIceCandidate': event.candidate});
}
this.tryCallSuccess();
}.bind(this);
this.connection.oniceconnectionstatechange =
function(event: Event): void {
if (this._debugMode)
this.log('onIceConnectionStateChange: ' +this.connection.iceConnectionState, event);
this.tryCallSuccess();
}.bind(this);
}
/**
* onReceiveChannelStateChange
*/
private onReceiveChannelStateChange = function(event: Event): void {
if (this._debugMode)
this.log('onReceiveChannelStateChange', event);
this.tryCallSuccess();
}.bind(this);
/**
* tryCallSuccess
*/
private tryCallSuccess = function(): void {
if (!this._successCalled && this.isConnected()) {
if (this._debugMode)
this.log('triggering onConnectionSucces callback');
this._successCalled = true;
this._onConnectionSucces();
}
}.bind(this);
/**
* log
*/
private log(msg: string, ...optionalParams: Object[]) {
if (!this._debugMode)
throw new Error('Debug mode is disabled.');
var arr: Object[] = new Array<Object>().concat(this.dbgId() + ' ' + msg).concat(optionalParams);
console.log.apply(console, arr);
document.writeln(this.dbgId() +' ' +msg +' ' +this.connectionState() +'<br>');
}
/**
* connectionState
*/
private connectionState(): string {
return '<b>[connected: '+this.isConnected() +']</b> '
+'connection.iceConnectionState: '+ (this.connection === null ? 'null' : this.connection.iceConnectionState) +'; '
+'connection.iceGatheringState: '+ (this.connection === null ? 'null' : this.connection.iceGatheringState) +'; '
+'connection.signalingState: '+ (this.connection === null ? 'null' : this.connection.signalingState) +'; '
+'channel.readyState: '+ (this.channel === null ? 'null' : this.channel.readyState);
}
/**
* dbgId
*/
private dbgId(): string{
return '[' +(this._id != '' ? this._id +' ' : '') +'producer]';
}
} | random_line_split | |
WebRtcProducer.ts | ///<reference path="../../../lib/RTCPeerConnection.d.ts"/>
///<reference path="WebRtcCommons.ts"/>
"use strict";
class WebRtcProducer {
private _id : string;
private _debugMode : boolean = false;
private _successCalled : boolean = false;
private connection: RTCPeerConnection = null;
private channel: RTCDataChannel = null;
private _onPassDataToPeer : IWebRtcConnectionDataCallback = null;
private _onConnectionSucces : () => void = null;
private _onConnectionError : (error: Object) => void = null;
private _config: any = null;
/**
* constructor
*/
constructor(servers: RTCIceServer[], _id?: string, _debugMode?: boolean) {
this._id = _id;
this._debugMode = _debugMode||false;
if (servers != null)
this._config = { "iceServers": servers };
}
/**
* setCallbacks
*/
setCallbacks(onPassDataToPeer: IWebRtcConnectionDataCallback,
onConnectionSucces: () => void, onConnectionError: (error: Object) => void): void {
this._onPassDataToPeer = onPassDataToPeer;
this._onConnectionSucces = onConnectionSucces;
this._onConnectionError = onConnectionError;
}
/**
* isConnected
*/
isConnected(): boolean {
return this.connection != null
&& (this.connection.iceConnectionState === 'completed' //RTCIceConnectionState.completed
|| this.connection.iceConnectionState === 'connected')
&& this.channel != null
&& this.channel.readyState === 'open' ; //RTCDataChannelState.open
}
/**
* configure
*/
configure(data: IWebRtcConnectionData): void {
var self = this;
// step 1
if (data === null) {
if (this._debugMode)
self.log('configure - Step1', data);
this.connection.createOffer(
function(sdp: RTCSessionDescription): void {
if (self._debugMode)
self.log('onOfferCreated', sdp);
self.connection.setLocalDescription(sdp, null);
self._onPassDataToPeer({'RTCSessionDescription': sdp});
},
function (errorInformation: DOMError): void {
console.error('onOfferError', errorInformation);
});
} else
// step 2
if (data['RTCSessionDescription'] != undefined) {
if (this._debugMode)
this.log('configure - Step2', data);
this.connection.setRemoteDescription(data['RTCSessionDescription']);
} else
// step 3
if (data['RTCIceCandidate'] != undefined) {
if (this._debugMode)
this.log('configure - Step3', data);
this.connection.addIceCandidate(data['RTCIceCandidate'],
function(): void {
if (self._debugMode)
self.log('onAddIceCandidateSuccess');
},
function (error): void {
if (self._debugMode)
self.log('onAddIceCandidateError');
});
}
}
/**
* sendText
*/
sendMessage(msg: string): void {
if (this._debugMode)
this.log('Sending message: "' +msg +'"');
if (!this.isConnected())
throw new WebRtcConnectionNotInitializedError('');
this.channel.send(msg);
}
/**
* open
*/
open(): void{
if (this._debugMode)
this.log('Creating new; iceServers: ' +JSON.stringify(this._config));
if (typeof webkitRTCPeerConnection === 'function') {
this.connection = new webkitRTCPeerConnection( this._config );
} else if (typeof mozRTCPeerConnection === 'function') | else
throw new Error('unknown implementation of RTCPeerConnection');
this.internalInit();
this._successCalled = false;
}
/**
* close
*/
close(): void{
this._successCalled = false;
if (this.channel != null)
this.channel.close();
if (this.connection != null)
this.connection.close();
}
/**
* internalInit
*/
private internalInit(): void {
this.channel = this.connection.createDataChannel('label', null);
this.channel.onopen = this.onReceiveChannelStateChange;
this.channel.onclose = this.onReceiveChannelStateChange;
this.connection.onicecandidate =
function(event: RTCIceCandidateEvent): void {
if (event.candidate) {
if (this._debugMode)
this.log('onIceCandidate', event.candidate);
this._onPassDataToPeer({'RTCIceCandidate': event.candidate});
}
this.tryCallSuccess();
}.bind(this);
this.connection.oniceconnectionstatechange =
function(event: Event): void {
if (this._debugMode)
this.log('onIceConnectionStateChange: ' +this.connection.iceConnectionState, event);
this.tryCallSuccess();
}.bind(this);
}
/**
* onReceiveChannelStateChange
*/
private onReceiveChannelStateChange = function(event: Event): void {
if (this._debugMode)
this.log('onReceiveChannelStateChange', event);
this.tryCallSuccess();
}.bind(this);
/**
* tryCallSuccess
*/
private tryCallSuccess = function(): void {
if (!this._successCalled && this.isConnected()) {
if (this._debugMode)
this.log('triggering onConnectionSucces callback');
this._successCalled = true;
this._onConnectionSucces();
}
}.bind(this);
/**
* log
*/
private log(msg: string, ...optionalParams: Object[]) {
if (!this._debugMode)
throw new Error('Debug mode is disabled.');
var arr: Object[] = new Array<Object>().concat(this.dbgId() + ' ' + msg).concat(optionalParams);
console.log.apply(console, arr);
document.writeln(this.dbgId() +' ' +msg +' ' +this.connectionState() +'<br>');
}
/**
* connectionState
*/
private connectionState(): string {
return '<b>[connected: '+this.isConnected() +']</b> '
+'connection.iceConnectionState: '+ (this.connection === null ? 'null' : this.connection.iceConnectionState) +'; '
+'connection.iceGatheringState: '+ (this.connection === null ? 'null' : this.connection.iceGatheringState) +'; '
+'connection.signalingState: '+ (this.connection === null ? 'null' : this.connection.signalingState) +'; '
+'channel.readyState: '+ (this.channel === null ? 'null' : this.channel.readyState);
}
/**
* dbgId
*/
private dbgId(): string{
return '[' +(this._id != '' ? this._id +' ' : '') +'producer]';
}
}
| {
throw new Error('Not implemented yet.');
//this.connection = new mozRTCPeerConnection( this._config );
} | conditional_block |
WebRtcProducer.ts | ///<reference path="../../../lib/RTCPeerConnection.d.ts"/>
///<reference path="WebRtcCommons.ts"/>
"use strict";
class WebRtcProducer {
private _id : string;
private _debugMode : boolean = false;
private _successCalled : boolean = false;
private connection: RTCPeerConnection = null;
private channel: RTCDataChannel = null;
private _onPassDataToPeer : IWebRtcConnectionDataCallback = null;
private _onConnectionSucces : () => void = null;
private _onConnectionError : (error: Object) => void = null;
private _config: any = null;
/**
* constructor
*/
constructor(servers: RTCIceServer[], _id?: string, _debugMode?: boolean) {
this._id = _id;
this._debugMode = _debugMode||false;
if (servers != null)
this._config = { "iceServers": servers };
}
/**
* setCallbacks
*/
setCallbacks(onPassDataToPeer: IWebRtcConnectionDataCallback,
onConnectionSucces: () => void, onConnectionError: (error: Object) => void): void {
this._onPassDataToPeer = onPassDataToPeer;
this._onConnectionSucces = onConnectionSucces;
this._onConnectionError = onConnectionError;
}
/**
* isConnected
*/
isConnected(): boolean {
return this.connection != null
&& (this.connection.iceConnectionState === 'completed' //RTCIceConnectionState.completed
|| this.connection.iceConnectionState === 'connected')
&& this.channel != null
&& this.channel.readyState === 'open' ; //RTCDataChannelState.open
}
/**
* configure
*/
configure(data: IWebRtcConnectionData): void {
var self = this;
// step 1
if (data === null) {
if (this._debugMode)
self.log('configure - Step1', data);
this.connection.createOffer(
function(sdp: RTCSessionDescription): void {
if (self._debugMode)
self.log('onOfferCreated', sdp);
self.connection.setLocalDescription(sdp, null);
self._onPassDataToPeer({'RTCSessionDescription': sdp});
},
function (errorInformation: DOMError): void {
console.error('onOfferError', errorInformation);
});
} else
// step 2
if (data['RTCSessionDescription'] != undefined) {
if (this._debugMode)
this.log('configure - Step2', data);
this.connection.setRemoteDescription(data['RTCSessionDescription']);
} else
// step 3
if (data['RTCIceCandidate'] != undefined) {
if (this._debugMode)
this.log('configure - Step3', data);
this.connection.addIceCandidate(data['RTCIceCandidate'],
function(): void {
if (self._debugMode)
self.log('onAddIceCandidateSuccess');
},
function (error): void {
if (self._debugMode)
self.log('onAddIceCandidateError');
});
}
}
/**
* sendText
*/
sendMessage(msg: string): void |
/**
* open
*/
open(): void{
if (this._debugMode)
this.log('Creating new; iceServers: ' +JSON.stringify(this._config));
if (typeof webkitRTCPeerConnection === 'function') {
this.connection = new webkitRTCPeerConnection( this._config );
} else if (typeof mozRTCPeerConnection === 'function') {
throw new Error('Not implemented yet.');
//this.connection = new mozRTCPeerConnection( this._config );
} else
throw new Error('unknown implementation of RTCPeerConnection');
this.internalInit();
this._successCalled = false;
}
/**
* close
*/
close(): void{
this._successCalled = false;
if (this.channel != null)
this.channel.close();
if (this.connection != null)
this.connection.close();
}
/**
* internalInit
*/
private internalInit(): void {
this.channel = this.connection.createDataChannel('label', null);
this.channel.onopen = this.onReceiveChannelStateChange;
this.channel.onclose = this.onReceiveChannelStateChange;
this.connection.onicecandidate =
function(event: RTCIceCandidateEvent): void {
if (event.candidate) {
if (this._debugMode)
this.log('onIceCandidate', event.candidate);
this._onPassDataToPeer({'RTCIceCandidate': event.candidate});
}
this.tryCallSuccess();
}.bind(this);
this.connection.oniceconnectionstatechange =
function(event: Event): void {
if (this._debugMode)
this.log('onIceConnectionStateChange: ' +this.connection.iceConnectionState, event);
this.tryCallSuccess();
}.bind(this);
}
/**
* onReceiveChannelStateChange
*/
private onReceiveChannelStateChange = function(event: Event): void {
if (this._debugMode)
this.log('onReceiveChannelStateChange', event);
this.tryCallSuccess();
}.bind(this);
/**
* tryCallSuccess
*/
private tryCallSuccess = function(): void {
if (!this._successCalled && this.isConnected()) {
if (this._debugMode)
this.log('triggering onConnectionSucces callback');
this._successCalled = true;
this._onConnectionSucces();
}
}.bind(this);
/**
* log
*/
private log(msg: string, ...optionalParams: Object[]) {
if (!this._debugMode)
throw new Error('Debug mode is disabled.');
var arr: Object[] = new Array<Object>().concat(this.dbgId() + ' ' + msg).concat(optionalParams);
console.log.apply(console, arr);
document.writeln(this.dbgId() +' ' +msg +' ' +this.connectionState() +'<br>');
}
/**
* connectionState
*/
private connectionState(): string {
return '<b>[connected: '+this.isConnected() +']</b> '
+'connection.iceConnectionState: '+ (this.connection === null ? 'null' : this.connection.iceConnectionState) +'; '
+'connection.iceGatheringState: '+ (this.connection === null ? 'null' : this.connection.iceGatheringState) +'; '
+'connection.signalingState: '+ (this.connection === null ? 'null' : this.connection.signalingState) +'; '
+'channel.readyState: '+ (this.channel === null ? 'null' : this.channel.readyState);
}
/**
* dbgId
*/
private dbgId(): string{
return '[' +(this._id != '' ? this._id +' ' : '') +'producer]';
}
}
| {
if (this._debugMode)
this.log('Sending message: "' +msg +'"');
if (!this.isConnected())
throw new WebRtcConnectionNotInitializedError('');
this.channel.send(msg);
} | identifier_body |
bspline_surface_pure.py | """
Experiment with bezier surface and bspline surface
"""
from OCC.gp import *
from OCC.Geom import *
from OCC.TColGeom import *
from OCC.TColgp import *
from OCC.TColStd import *
from OCC.GeomConvert import *
from OCC.BRepBuilderAPI import *
from OCC.TopoDS import *
from OCC.STEPControl import *
def bezier_surfaces(event=None):
"""
Create bezier surface, then create bspline surface from import
it and search, what is inside created bspline surface.
"""
array = TColgp_Array2OfPnt(1, 3, 1, 3)
array.SetValue(1, 1, gp_Pnt(1, 1, 1))
array.SetValue(1, 2, gp_Pnt(2, 1, 2))
array.SetValue(1, 3, gp_Pnt(3, 1, 1))
array.SetValue(2, 1, gp_Pnt(1, 2, 1))
array.SetValue(2, 2, gp_Pnt(2, 2, 2))
array.SetValue(2, 3, gp_Pnt(3, 2, 0))
array.SetValue(3, 1, gp_Pnt(1, 3, 2))
array.SetValue(3, 2, gp_Pnt(2, 3, 1))
array.SetValue(3, 3, gp_Pnt(3, 3, 0))
BZ1 = Geom_BezierSurface(array)
bezierarray = TColGeom_Array2OfBezierSurface(1, 1, 1, 1)
bezierarray.SetValue(1, 1, BZ1.GetHandle())
BB = GeomConvert_CompBezierSurfacesToBSplineSurface(bezierarray)
if BB.IsDone():
# Poles
poles = BB.Poles().GetObject().Array2()
# print "poles: ", poles, poles.LowerCol(), poles.ColLength(), poles.LowerRow(), poles.RowLength()
for pole_i in range(poles.LowerCol(), poles.ColLength() + 1, 1):
for pole_j in range(poles.LowerRow(), poles.RowLength() + 1, 1):
point = poles.Value(pole_i, pole_j)
print(pole_i, pole_j, ": (", point.X(), point.Y(), point.Z(), ")")
print()
# Knots U and V
uknots = BB.UKnots().GetObject().Array1()
vknots = BB.VKnots().GetObject().Array1()
print("uknots: ", uknots)
for i in range(uknots.Lower(), uknots.Length() + 1, 1):
print(uknots.Value(i))
print("vknots: ", vknots)
for j in range(vknots.Lower(), vknots.Length() + 1, 1):
print(vknots.Value(j))
print()
# Multi U and V
umult = BB.UMultiplicities().GetObject().Array1()
vmult = BB.VMultiplicities().GetObject().Array1() | print(umult.Value(i))
print("vmult: ", vmult)
for j in range(vmult.Lower(), vmult.Length() + 1, 1):
print(vmult.Value(i))
print()
udeg = BB.UDegree()
vdeg = BB.VDegree()
print("udeg, vdeg: ", udeg, vdeg)
BSPLSURF = Geom_BSplineSurface(poles, uknots, vknots, umult, vmult, udeg, vdeg, 0, 0)
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
display.EraseAll()
display.DisplayShape(BSPLSURF.GetHandle(), update=True)
start_display()
def bspline_surface():
"""
Try to create B-spline surface directly
"""
# Set U and V degree to 2
udeg = 2
vdeg = 2
# Non-periodic surface
uperiod = False
vperiod = False
# Create 2D array of poles (control points)
poles = TColgp_Array2OfPnt(1, 3, 1, 3)
poles.SetValue(1, 1, gp_Pnt(1, 1, 1))
poles.SetValue(1, 2, gp_Pnt(2, 1, 2))
poles.SetValue(1, 3, gp_Pnt(3, 1, 1))
poles.SetValue(2, 1, gp_Pnt(1, 2, 1))
poles.SetValue(2, 2, gp_Pnt(2, 2, 2))
poles.SetValue(2, 3, gp_Pnt(3, 2, 0))
poles.SetValue(3, 1, gp_Pnt(1, 3, 2))
poles.SetValue(3, 2, gp_Pnt(2, 3, 1))
poles.SetValue(3, 3, gp_Pnt(3, 3, 0))
# Create 2D array of weights
weights = TColStd_Array2OfReal(1, 3, 1, 3)
weights.SetValue(1, 1, 1.0)
weights.SetValue(1, 2, 1.0)
weights.SetValue(1, 3, 1.0)
weights.SetValue(2, 1, 1.0)
weights.SetValue(2, 2, 1.0)
weights.SetValue(2, 3, 1.0)
weights.SetValue(3, 1, 1.0)
weights.SetValue(3, 2, 1.0)
weights.SetValue(3, 3, 1.0)
# Length of uknots and umult has to be same
# Same rule is for vknots and vmult
uknot_len = umult_len = 2
vknot_len = vmult_len = 2
# Knots for U and V direction
uknots = TColStd_Array1OfReal(1, uknot_len)
vknots = TColStd_Array1OfReal(1, vknot_len)
# Main curves begins and ends at first and last points
uknots.SetValue(1, 0.0)
uknots.SetValue(2, 1.0)
vknots.SetValue(1, 0.0)
vknots.SetValue(2, 1.0)
# Multiplicities for U and V direction
umult = TColStd_Array1OfInteger(1, umult_len)
vmult = TColStd_Array1OfInteger(1, vmult_len)
# First and last multiplicities are set to udeg + 1 (vdeg respectively),
# because we want main curves to start and finish on the first and
# the last points
umult.SetValue(1, udeg + 1)
umult.SetValue(2, udeg + 1)
vmult.SetValue(1, vdeg + 1)
vmult.SetValue(2, vdeg + 1)
# Some other rules, that has to hold:
# poles.ColLength == sum(umult(i)) - udeg - 1 (e.g.: 3 == 6 - 2 - 1)
# Try to create surface (no weight)
#BSPLSURF = Geom_BSplineSurface(poles, uknots, vknots, umult, vmult, udeg, vdeg, uperiod, vperiod)
# Try to create surface (weights to default values)
BSPLSURF = Geom_BSplineSurface(poles, weights, uknots, vknots, umult, vmult, udeg, vdeg, uperiod, vperiod)
# Display surface
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
display.EraseAll()
display.DisplayShape(BSPLSURF.GetHandle(), update=True)
start_display()
if __name__ == '__main__':
#bezier_surfaces()
bspline_surface() | print("umult: ", umult)
for i in range(umult.Lower(), umult.Length() + 1, 1): | random_line_split |
bspline_surface_pure.py | """
Experiment with bezier surface and bspline surface
"""
from OCC.gp import *
from OCC.Geom import *
from OCC.TColGeom import *
from OCC.TColgp import *
from OCC.TColStd import *
from OCC.GeomConvert import *
from OCC.BRepBuilderAPI import *
from OCC.TopoDS import *
from OCC.STEPControl import *
def bezier_surfaces(event=None):
"""
Create bezier surface, then create bspline surface from import
it and search, what is inside created bspline surface.
"""
array = TColgp_Array2OfPnt(1, 3, 1, 3)
array.SetValue(1, 1, gp_Pnt(1, 1, 1))
array.SetValue(1, 2, gp_Pnt(2, 1, 2))
array.SetValue(1, 3, gp_Pnt(3, 1, 1))
array.SetValue(2, 1, gp_Pnt(1, 2, 1))
array.SetValue(2, 2, gp_Pnt(2, 2, 2))
array.SetValue(2, 3, gp_Pnt(3, 2, 0))
array.SetValue(3, 1, gp_Pnt(1, 3, 2))
array.SetValue(3, 2, gp_Pnt(2, 3, 1))
array.SetValue(3, 3, gp_Pnt(3, 3, 0))
BZ1 = Geom_BezierSurface(array)
bezierarray = TColGeom_Array2OfBezierSurface(1, 1, 1, 1)
bezierarray.SetValue(1, 1, BZ1.GetHandle())
BB = GeomConvert_CompBezierSurfacesToBSplineSurface(bezierarray)
if BB.IsDone():
# Poles
poles = BB.Poles().GetObject().Array2()
# print "poles: ", poles, poles.LowerCol(), poles.ColLength(), poles.LowerRow(), poles.RowLength()
for pole_i in range(poles.LowerCol(), poles.ColLength() + 1, 1):
for pole_j in range(poles.LowerRow(), poles.RowLength() + 1, 1):
point = poles.Value(pole_i, pole_j)
print(pole_i, pole_j, ": (", point.X(), point.Y(), point.Z(), ")")
print()
# Knots U and V
uknots = BB.UKnots().GetObject().Array1()
vknots = BB.VKnots().GetObject().Array1()
print("uknots: ", uknots)
for i in range(uknots.Lower(), uknots.Length() + 1, 1):
|
print("vknots: ", vknots)
for j in range(vknots.Lower(), vknots.Length() + 1, 1):
print(vknots.Value(j))
print()
# Multi U and V
umult = BB.UMultiplicities().GetObject().Array1()
vmult = BB.VMultiplicities().GetObject().Array1()
print("umult: ", umult)
for i in range(umult.Lower(), umult.Length() + 1, 1):
print(umult.Value(i))
print("vmult: ", vmult)
for j in range(vmult.Lower(), vmult.Length() + 1, 1):
print(vmult.Value(i))
print()
udeg = BB.UDegree()
vdeg = BB.VDegree()
print("udeg, vdeg: ", udeg, vdeg)
BSPLSURF = Geom_BSplineSurface(poles, uknots, vknots, umult, vmult, udeg, vdeg, 0, 0)
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
display.EraseAll()
display.DisplayShape(BSPLSURF.GetHandle(), update=True)
start_display()
def bspline_surface():
"""
Try to create B-spline surface directly
"""
# Set U and V degree to 2
udeg = 2
vdeg = 2
# Non-periodic surface
uperiod = False
vperiod = False
# Create 2D array of poles (control points)
poles = TColgp_Array2OfPnt(1, 3, 1, 3)
poles.SetValue(1, 1, gp_Pnt(1, 1, 1))
poles.SetValue(1, 2, gp_Pnt(2, 1, 2))
poles.SetValue(1, 3, gp_Pnt(3, 1, 1))
poles.SetValue(2, 1, gp_Pnt(1, 2, 1))
poles.SetValue(2, 2, gp_Pnt(2, 2, 2))
poles.SetValue(2, 3, gp_Pnt(3, 2, 0))
poles.SetValue(3, 1, gp_Pnt(1, 3, 2))
poles.SetValue(3, 2, gp_Pnt(2, 3, 1))
poles.SetValue(3, 3, gp_Pnt(3, 3, 0))
# Create 2D array of weights
weights = TColStd_Array2OfReal(1, 3, 1, 3)
weights.SetValue(1, 1, 1.0)
weights.SetValue(1, 2, 1.0)
weights.SetValue(1, 3, 1.0)
weights.SetValue(2, 1, 1.0)
weights.SetValue(2, 2, 1.0)
weights.SetValue(2, 3, 1.0)
weights.SetValue(3, 1, 1.0)
weights.SetValue(3, 2, 1.0)
weights.SetValue(3, 3, 1.0)
# Length of uknots and umult has to be same
# Same rule is for vknots and vmult
uknot_len = umult_len = 2
vknot_len = vmult_len = 2
# Knots for U and V direction
uknots = TColStd_Array1OfReal(1, uknot_len)
vknots = TColStd_Array1OfReal(1, vknot_len)
# Main curves begins and ends at first and last points
uknots.SetValue(1, 0.0)
uknots.SetValue(2, 1.0)
vknots.SetValue(1, 0.0)
vknots.SetValue(2, 1.0)
# Multiplicities for U and V direction
umult = TColStd_Array1OfInteger(1, umult_len)
vmult = TColStd_Array1OfInteger(1, vmult_len)
# First and last multiplicities are set to udeg + 1 (vdeg respectively),
# because we want main curves to start and finish on the first and
# the last points
umult.SetValue(1, udeg + 1)
umult.SetValue(2, udeg + 1)
vmult.SetValue(1, vdeg + 1)
vmult.SetValue(2, vdeg + 1)
# Some other rules, that has to hold:
# poles.ColLength == sum(umult(i)) - udeg - 1 (e.g.: 3 == 6 - 2 - 1)
# Try to create surface (no weight)
#BSPLSURF = Geom_BSplineSurface(poles, uknots, vknots, umult, vmult, udeg, vdeg, uperiod, vperiod)
# Try to create surface (weights to default values)
BSPLSURF = Geom_BSplineSurface(poles, weights, uknots, vknots, umult, vmult, udeg, vdeg, uperiod, vperiod)
# Display surface
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
display.EraseAll()
display.DisplayShape(BSPLSURF.GetHandle(), update=True)
start_display()
if __name__ == '__main__':
#bezier_surfaces()
bspline_surface() | print(uknots.Value(i)) | conditional_block |
bspline_surface_pure.py | """
Experiment with bezier surface and bspline surface
"""
from OCC.gp import *
from OCC.Geom import *
from OCC.TColGeom import *
from OCC.TColgp import *
from OCC.TColStd import *
from OCC.GeomConvert import *
from OCC.BRepBuilderAPI import *
from OCC.TopoDS import *
from OCC.STEPControl import *
def bezier_surfaces(event=None):
"""
Create bezier surface, then create bspline surface from import
it and search, what is inside created bspline surface.
"""
array = TColgp_Array2OfPnt(1, 3, 1, 3)
array.SetValue(1, 1, gp_Pnt(1, 1, 1))
array.SetValue(1, 2, gp_Pnt(2, 1, 2))
array.SetValue(1, 3, gp_Pnt(3, 1, 1))
array.SetValue(2, 1, gp_Pnt(1, 2, 1))
array.SetValue(2, 2, gp_Pnt(2, 2, 2))
array.SetValue(2, 3, gp_Pnt(3, 2, 0))
array.SetValue(3, 1, gp_Pnt(1, 3, 2))
array.SetValue(3, 2, gp_Pnt(2, 3, 1))
array.SetValue(3, 3, gp_Pnt(3, 3, 0))
BZ1 = Geom_BezierSurface(array)
bezierarray = TColGeom_Array2OfBezierSurface(1, 1, 1, 1)
bezierarray.SetValue(1, 1, BZ1.GetHandle())
BB = GeomConvert_CompBezierSurfacesToBSplineSurface(bezierarray)
if BB.IsDone():
# Poles
poles = BB.Poles().GetObject().Array2()
# print "poles: ", poles, poles.LowerCol(), poles.ColLength(), poles.LowerRow(), poles.RowLength()
for pole_i in range(poles.LowerCol(), poles.ColLength() + 1, 1):
for pole_j in range(poles.LowerRow(), poles.RowLength() + 1, 1):
point = poles.Value(pole_i, pole_j)
print(pole_i, pole_j, ": (", point.X(), point.Y(), point.Z(), ")")
print()
# Knots U and V
uknots = BB.UKnots().GetObject().Array1()
vknots = BB.VKnots().GetObject().Array1()
print("uknots: ", uknots)
for i in range(uknots.Lower(), uknots.Length() + 1, 1):
print(uknots.Value(i))
print("vknots: ", vknots)
for j in range(vknots.Lower(), vknots.Length() + 1, 1):
print(vknots.Value(j))
print()
# Multi U and V
umult = BB.UMultiplicities().GetObject().Array1()
vmult = BB.VMultiplicities().GetObject().Array1()
print("umult: ", umult)
for i in range(umult.Lower(), umult.Length() + 1, 1):
print(umult.Value(i))
print("vmult: ", vmult)
for j in range(vmult.Lower(), vmult.Length() + 1, 1):
print(vmult.Value(i))
print()
udeg = BB.UDegree()
vdeg = BB.VDegree()
print("udeg, vdeg: ", udeg, vdeg)
BSPLSURF = Geom_BSplineSurface(poles, uknots, vknots, umult, vmult, udeg, vdeg, 0, 0)
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
display.EraseAll()
display.DisplayShape(BSPLSURF.GetHandle(), update=True)
start_display()
def bspline_surface():
|
if __name__ == '__main__':
#bezier_surfaces()
bspline_surface() | """
Try to create B-spline surface directly
"""
# Set U and V degree to 2
udeg = 2
vdeg = 2
# Non-periodic surface
uperiod = False
vperiod = False
# Create 2D array of poles (control points)
poles = TColgp_Array2OfPnt(1, 3, 1, 3)
poles.SetValue(1, 1, gp_Pnt(1, 1, 1))
poles.SetValue(1, 2, gp_Pnt(2, 1, 2))
poles.SetValue(1, 3, gp_Pnt(3, 1, 1))
poles.SetValue(2, 1, gp_Pnt(1, 2, 1))
poles.SetValue(2, 2, gp_Pnt(2, 2, 2))
poles.SetValue(2, 3, gp_Pnt(3, 2, 0))
poles.SetValue(3, 1, gp_Pnt(1, 3, 2))
poles.SetValue(3, 2, gp_Pnt(2, 3, 1))
poles.SetValue(3, 3, gp_Pnt(3, 3, 0))
# Create 2D array of weights
weights = TColStd_Array2OfReal(1, 3, 1, 3)
weights.SetValue(1, 1, 1.0)
weights.SetValue(1, 2, 1.0)
weights.SetValue(1, 3, 1.0)
weights.SetValue(2, 1, 1.0)
weights.SetValue(2, 2, 1.0)
weights.SetValue(2, 3, 1.0)
weights.SetValue(3, 1, 1.0)
weights.SetValue(3, 2, 1.0)
weights.SetValue(3, 3, 1.0)
# Length of uknots and umult has to be same
# Same rule is for vknots and vmult
uknot_len = umult_len = 2
vknot_len = vmult_len = 2
# Knots for U and V direction
uknots = TColStd_Array1OfReal(1, uknot_len)
vknots = TColStd_Array1OfReal(1, vknot_len)
# Main curves begins and ends at first and last points
uknots.SetValue(1, 0.0)
uknots.SetValue(2, 1.0)
vknots.SetValue(1, 0.0)
vknots.SetValue(2, 1.0)
# Multiplicities for U and V direction
umult = TColStd_Array1OfInteger(1, umult_len)
vmult = TColStd_Array1OfInteger(1, vmult_len)
# First and last multiplicities are set to udeg + 1 (vdeg respectively),
# because we want main curves to start and finish on the first and
# the last points
umult.SetValue(1, udeg + 1)
umult.SetValue(2, udeg + 1)
vmult.SetValue(1, vdeg + 1)
vmult.SetValue(2, vdeg + 1)
# Some other rules, that has to hold:
# poles.ColLength == sum(umult(i)) - udeg - 1 (e.g.: 3 == 6 - 2 - 1)
# Try to create surface (no weight)
#BSPLSURF = Geom_BSplineSurface(poles, uknots, vknots, umult, vmult, udeg, vdeg, uperiod, vperiod)
# Try to create surface (weights to default values)
BSPLSURF = Geom_BSplineSurface(poles, weights, uknots, vknots, umult, vmult, udeg, vdeg, uperiod, vperiod)
# Display surface
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
display.EraseAll()
display.DisplayShape(BSPLSURF.GetHandle(), update=True)
start_display() | identifier_body |
bspline_surface_pure.py | """
Experiment with bezier surface and bspline surface
"""
from OCC.gp import *
from OCC.Geom import *
from OCC.TColGeom import *
from OCC.TColgp import *
from OCC.TColStd import *
from OCC.GeomConvert import *
from OCC.BRepBuilderAPI import *
from OCC.TopoDS import *
from OCC.STEPControl import *
def | (event=None):
"""
Create bezier surface, then create bspline surface from import
it and search, what is inside created bspline surface.
"""
array = TColgp_Array2OfPnt(1, 3, 1, 3)
array.SetValue(1, 1, gp_Pnt(1, 1, 1))
array.SetValue(1, 2, gp_Pnt(2, 1, 2))
array.SetValue(1, 3, gp_Pnt(3, 1, 1))
array.SetValue(2, 1, gp_Pnt(1, 2, 1))
array.SetValue(2, 2, gp_Pnt(2, 2, 2))
array.SetValue(2, 3, gp_Pnt(3, 2, 0))
array.SetValue(3, 1, gp_Pnt(1, 3, 2))
array.SetValue(3, 2, gp_Pnt(2, 3, 1))
array.SetValue(3, 3, gp_Pnt(3, 3, 0))
BZ1 = Geom_BezierSurface(array)
bezierarray = TColGeom_Array2OfBezierSurface(1, 1, 1, 1)
bezierarray.SetValue(1, 1, BZ1.GetHandle())
BB = GeomConvert_CompBezierSurfacesToBSplineSurface(bezierarray)
if BB.IsDone():
# Poles
poles = BB.Poles().GetObject().Array2()
# print "poles: ", poles, poles.LowerCol(), poles.ColLength(), poles.LowerRow(), poles.RowLength()
for pole_i in range(poles.LowerCol(), poles.ColLength() + 1, 1):
for pole_j in range(poles.LowerRow(), poles.RowLength() + 1, 1):
point = poles.Value(pole_i, pole_j)
print(pole_i, pole_j, ": (", point.X(), point.Y(), point.Z(), ")")
print()
# Knots U and V
uknots = BB.UKnots().GetObject().Array1()
vknots = BB.VKnots().GetObject().Array1()
print("uknots: ", uknots)
for i in range(uknots.Lower(), uknots.Length() + 1, 1):
print(uknots.Value(i))
print("vknots: ", vknots)
for j in range(vknots.Lower(), vknots.Length() + 1, 1):
print(vknots.Value(j))
print()
# Multi U and V
umult = BB.UMultiplicities().GetObject().Array1()
vmult = BB.VMultiplicities().GetObject().Array1()
print("umult: ", umult)
for i in range(umult.Lower(), umult.Length() + 1, 1):
print(umult.Value(i))
print("vmult: ", vmult)
for j in range(vmult.Lower(), vmult.Length() + 1, 1):
print(vmult.Value(i))
print()
udeg = BB.UDegree()
vdeg = BB.VDegree()
print("udeg, vdeg: ", udeg, vdeg)
BSPLSURF = Geom_BSplineSurface(poles, uknots, vknots, umult, vmult, udeg, vdeg, 0, 0)
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
display.EraseAll()
display.DisplayShape(BSPLSURF.GetHandle(), update=True)
start_display()
def bspline_surface():
    """
    Try to create B-spline surface directly
    """
    # Degree 2 in both parametric directions.
    udeg = 2
    vdeg = 2
    # Clamped (non-periodic) surface in U and V.
    uperiod = False
    vperiod = False
    # Control points, row by row (3 x 3 grid), and a matching weight
    # table. All weights are 1.0, so the rational surface degenerates
    # to a plain B-spline surface.
    pole_rows = (
        ((1, 1, 1), (2, 1, 2), (3, 1, 1)),
        ((1, 2, 1), (2, 2, 2), (3, 2, 0)),
        ((1, 3, 2), (2, 3, 1), (3, 3, 0)),
    )
    poles = TColgp_Array2OfPnt(1, 3, 1, 3)
    for row, row_points in enumerate(pole_rows, start=1):
        for col, (x, y, z) in enumerate(row_points, start=1):
            poles.SetValue(row, col, gp_Pnt(x, y, z))
    weights = TColStd_Array2OfReal(1, 3, 1, 3)
    for row in range(1, 4):
        for col in range(1, 4):
            weights.SetValue(row, col, 1.0)
    # Knot vectors: a single span [0, 1] in each direction, so the
    # knot and multiplicity arrays both have length 2. The knot and
    # multiplicity arrays of a direction must have the same length.
    uknots = TColStd_Array1OfReal(1, 2)
    uknots.SetValue(1, 0.0)
    uknots.SetValue(2, 1.0)
    vknots = TColStd_Array1OfReal(1, 2)
    vknots.SetValue(1, 0.0)
    vknots.SetValue(2, 1.0)
    # End multiplicities are degree + 1 so the surface is clamped:
    # it starts and finishes exactly on the boundary control points.
    umult = TColStd_Array1OfInteger(1, 2)
    umult.SetValue(1, udeg + 1)
    umult.SetValue(2, udeg + 1)
    vmult = TColStd_Array1OfInteger(1, 2)
    vmult.SetValue(1, vdeg + 1)
    vmult.SetValue(2, vdeg + 1)
    # Consistency rule that has to hold:
    # poles.ColLength == sum(umult(i)) - udeg - 1 (e.g.: 3 == 6 - 2 - 1)
    # A non-rational variant would omit the weights argument:
    # BSPLSURF = Geom_BSplineSurface(poles, uknots, vknots, umult, vmult, udeg, vdeg, uperiod, vperiod)
    BSPLSURF = Geom_BSplineSurface(poles, weights, uknots, vknots,
                                   umult, vmult, udeg, vdeg,
                                   uperiod, vperiod)
    # Show the resulting surface in the simple OCC viewer.
    from OCC.Display.SimpleGui import init_display
    display, start_display, add_menu, add_function_to_menu = init_display()
    display.EraseAll()
    display.DisplayShape(BSPLSURF.GetHandle(), update=True)
    start_display()
if __name__ == '__main__':
#bezier_surfaces()
bspline_surface() | bezier_surfaces | identifier_name |
all_14.js | var searchData=
[ | ['user',['user',['../class_cool_m_q_t_t.html#a8cd47e45d457f908d4b4390b35aaee83',1,'CoolMQTT']]],
['useractive',['userActive',['../class_cool_board.html#a6395459131d6889a3005f79c7a35e964',1,'CoolBoard']]],
['userdata',['userData',['../class_cool_board.html#ae7358fb6e623cfc81b775f5f1734909b',1,'CoolBoard']]],
['uv',['uv',['../struct_cool_board_sensors_1_1light_active.html#a0e6cfc311425a31f32c32fc3b834ffb8',1,'CoolBoardSensors::lightActive']]]
]; | ['udp',['Udp',['../class_cool_time.html#a4e23216a8121ca79d0fb019f30884b92',1,'CoolTime']]],
['unsubscribe',['unsubscribe',['../class_cool_pub_sub_client.html#a850554280e314d6b5c33c73fd9e809fc',1,'CoolPubSubClient']]],
['update',['update',['../class_cool_board.html#a8612756d3f73198cdde857a66f0fe690',1,'CoolBoard::update()'],['../class_cool_time.html#aae601f795452cfa48d9fb337aed483a8',1,'CoolTime::update()']]],
['updateconfigfiles',['updateConfigFiles',['../class_cool_file_system.html#adfa8e2e80641ae6f0cceabd348a9b841',1,'CoolFileSystem']]], | random_line_split |
TUTableImpl.js | /*
* Name: TUTableImpl.js
* Module:
* Location: Norris/test/unit
* Date: 2015-05-25
* Version: v1.00 | * ================================================================================
* v1.00 2015-06-15 Carlon Chiara Approved
* ================================================================================
* v0.02 2015-06-02 Pavanello Fabio Matteo Verify
* ================================================================================
* v0.01 2015-05-25 Bucco Riccardo Creation
* ================================================================================
*/
var TableImpl = require('../../main/DataModel/NorrisChart/TableImpl.js');
var assert = require("assert");
describe('TableImpl', function(){
describe('TableImpl(id: String)', function(){
it('should memorize the right type of the chart',function(){
var table = new TableImpl('randomID');
assert.equal('table', table.type);
});
it('should memorize the right id of the chart',function(){
var table = new TableImpl('randomID');
assert.equal('randomID', table.uid);
});
it('should memorize some default values for the keys of the settings',function(){
var table = new TableImpl('randomID');
var defaults = {
title: '',
description : 'This is a table.',
maxItems : 10 ,
showTableGrid : true ,
newLinePosition : 'bottom',
allowFilter: false,
allowSort: false,
pageSize: -1
};
assert.deepEqual(defaults,table.settings);
});
})
}); | *
* History:
*
* ================================================================================
* Version Date Programmer Changes | random_line_split |
bootstrap.js | // "node scripts/create-package-app-test.js && node packages/app-test/synchronize.js && node packages/react-boilerplate-app-scripts/scripts/link-react-boilerplates.js && lerna bootstrap",
'use strict';
require('./create-package-app-test.js');
require('../packages/app-test/synchronize.js');
require('../packages/react-boilerplate-app-scripts/scripts/link-react-boilerplates.js');
const fs = require('fs-extra');
const path = require('path');
const execSync = require('child_process').execSync;
try {
//begin----加上packages/app-test
const lernaJson = require('../lerna.json');
const packagesFolderName = 'packages/app-test';
if (lernaJson.packages.indexOf(packagesFolderName) === -1) {
| resolve(__dirname, '../lerna.json'),
JSON.stringify(lernaJson, null, 2)
);
//end----加上packages/app-test
execSync('npm run lerna-bootstrap', { stdio: 'inherit' });
//begin----移除packages/app-test,发布的时候不会发布这个的,只是用来测试
if (lernaJson.packages.indexOf(packagesFolderName) !== -1) {
lernaJson.packages.splice(
lernaJson.packages.indexOf(packagesFolderName),
1
);
}
fs.writeFileSync(
path.resolve(__dirname, '../lerna.json'),
JSON.stringify(lernaJson, null, 2)
);
//end----移除packages/app-test,发布的时候不会发布这个的,只是用来测试
} catch (e) {
console.log(e);
}
| //可能中途ctr+c,导致包名没被删除
lernaJson.packages.push(packagesFolderName);
}
fs.writeFileSync(
path. | conditional_block |
bootstrap.js | // "node scripts/create-package-app-test.js && node packages/app-test/synchronize.js && node packages/react-boilerplate-app-scripts/scripts/link-react-boilerplates.js && lerna bootstrap",
'use strict';
require('./create-package-app-test.js');
require('../packages/app-test/synchronize.js');
require('../packages/react-boilerplate-app-scripts/scripts/link-react-boilerplates.js');
const fs = require('fs-extra');
const path = require('path');
const execSync = require('child_process').execSync;
try {
//begin----加上packages/app-test
const lernaJson = require('../lerna.json');
const packagesFolderName = 'packages/app-test';
if (lernaJson.packages.indexOf(packagesFolderName) === -1) {
//可能中途ctr+c,导致包名没被删除
lernaJson.packages.push(packagesFolderName);
}
fs.writeFileSync(
path.resolve(__dirname, '../lerna.json'),
JSON.stringify(lernaJson, null, 2)
);
//end----加上packages/app-test
execSync('npm run lerna-bootstrap', { stdio: 'inherit' });
//begin----移除packages/app-test,发布的时候不会发布这个的,只是用来测试
if (lernaJson.packages.indexOf(packagesFolderName) !== -1) {
lernaJson.packages.splice(
lernaJson.packages.indexOf(packagesFolderName),
1
);
}
fs.writeFileSync(
path.resolve(__dirname, '../lerna.json'),
JSON.stringify(lernaJson, null, 2)
);
//end----移除packages/app-test,发布的时候不会发布这个的,只是用来测试
} catch (e) {
console.log(e); | } | random_line_split | |
_hasreltype.py | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version. | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ....lib.familyreltype import FamilyRelType
from .. import Rule
#-------------------------------------------------------------------------
#
# HasAttribute
#
#-------------------------------------------------------------------------
class HasRelType(Rule):
    """Rule that matches families whose relationship has a particular type.

    The single rule argument (``self.list[0]``) is the XML string
    representation of a :class:`FamilyRelType` value.
    """
    labels = [ _('Relationship type:') ]
    name = _('Families with the relationship type')
    description = _("Matches families with the relationship type "
                    "of a particular value")
    category = _('General filters')
    def prepare(self, db, user):
        # Parse the filter argument once, before apply() is run against
        # every family in the database.
        if self.list[0]:
            self.rtype = FamilyRelType()
            self.rtype.set_from_xml_str(self.list[0])
        else:
            # No argument supplied: apply() will match every family.
            self.rtype = None
    def apply(self, db, family):
        if self.rtype:
            if self.rtype.is_custom() and self.use_regex:
                # Custom relationship types are compared by regular
                # expression when regex matching is enabled.
                # NOTE(review): self.regex is presumably prepared by the
                # Rule base class -- verify there.
                if self.regex[0].search(str(family.get_relationship())) is None:
                    return False
            elif self.rtype != family.get_relationship():
                return False
        return True
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of | random_line_split |
_hasreltype.py | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ....lib.familyreltype import FamilyRelType
from .. import Rule
#-------------------------------------------------------------------------
#
# HasAttribute
#
#-------------------------------------------------------------------------
class HasRelType(Rule):
"""Rule that checks for a person with a particular personal attribute"""
labels = [ _('Relationship type:') ]
name = _('Families with the relationship type')
description = _("Matches families with the relationship type "
"of a particular value")
category = _('General filters')
def prepare(self, db, user):
if self.list[0]:
self.rtype = FamilyRelType()
self.rtype.set_from_xml_str(self.list[0])
else:
self.rtype = None
def apply(self, db, family):
| if self.rtype:
if self.rtype.is_custom() and self.use_regex:
if self.regex[0].search(str(family.get_relationship())) is None:
return False
elif self.rtype != family.get_relationship():
return False
return True | identifier_body | |
_hasreltype.py | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ....lib.familyreltype import FamilyRelType
from .. import Rule
#-------------------------------------------------------------------------
#
# HasAttribute
#
#-------------------------------------------------------------------------
class HasRelType(Rule):
"""Rule that checks for a person with a particular personal attribute"""
labels = [ _('Relationship type:') ]
name = _('Families with the relationship type')
description = _("Matches families with the relationship type "
"of a particular value")
category = _('General filters')
def prepare(self, db, user):
if self.list[0]:
self.rtype = FamilyRelType()
self.rtype.set_from_xml_str(self.list[0])
else:
self.rtype = None
def | (self, db, family):
if self.rtype:
if self.rtype.is_custom() and self.use_regex:
if self.regex[0].search(str(family.get_relationship())) is None:
return False
elif self.rtype != family.get_relationship():
return False
return True
| apply | identifier_name |
_hasreltype.py | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ....lib.familyreltype import FamilyRelType
from .. import Rule
#-------------------------------------------------------------------------
#
# HasAttribute
#
#-------------------------------------------------------------------------
class HasRelType(Rule):
"""Rule that checks for a person with a particular personal attribute"""
labels = [ _('Relationship type:') ]
name = _('Families with the relationship type')
description = _("Matches families with the relationship type "
"of a particular value")
category = _('General filters')
def prepare(self, db, user):
if self.list[0]:
self.rtype = FamilyRelType()
self.rtype.set_from_xml_str(self.list[0])
else:
self.rtype = None
def apply(self, db, family):
if self.rtype:
if self.rtype.is_custom() and self.use_regex:
if self.regex[0].search(str(family.get_relationship())) is None:
|
elif self.rtype != family.get_relationship():
return False
return True
| return False | conditional_block |
set.rs | use liner::KeyBindings;
use shell::Shell;
use shell::flags::*;
use std::io::{self, Write};
use std::iter;
const HELP: &'static str = r#"NAME
set - Set or unset values of shell options and positional parameters.
SYNOPSIS
set [ --help ] [-e | +e] [-x | +x] [-o [vi | emacs]] [- | --] [STRING]...
DESCRIPTION
Shell options may be set using the '-' character, and unset using the '+' character.
OPTIONS
-e Exit immediately if a command exits with a non-zero status.
-o Specifies that an argument will follow that sets the key map.
The keymap argument may be either `vi` or `emacs`.
-x Specifies that commands will be printed as they are executed.
-- Following arguments will be set as positional arguments in the shell.
If no argument are supplied, arguments will be unset.
- Following arguments will be set as positional arguments in the shell.
If no arguments are suppled, arguments will not be unset.
"#;
enum PositionalArgs {
UnsetIfNone,
RetainIfNone,
}
use self::PositionalArgs::*;
pub(crate) fn set(args: &[&str], shell: &mut Shell) -> i32 {
let stdout = io::stdout();
let stderr = io::stderr();
let mut args_iter = args.iter();
let mut positionals = None;
while let Some(arg) = args_iter.next() {
if arg.starts_with("--") {
if arg.len() == 2 {
positionals = Some(UnsetIfNone);
break;
}
if &arg[2..] == "help" {
let mut stdout = stdout.lock();
let _ = stdout.write(HELP.as_bytes());
} else |
} else if arg.starts_with('-') {
if arg.len() == 1 {
positionals = Some(RetainIfNone);
break;
}
for flag in arg.bytes().skip(1) {
match flag {
b'e' => shell.flags |= ERR_EXIT,
b'o' => match args_iter.next() {
Some(&mode) if mode == "vi" => {
if let Some(context) = shell.context.as_mut() {
context.key_bindings = KeyBindings::Vi;
}
}
Some(&mode) if mode == "emacs" => {
if let Some(context) = shell.context.as_mut() {
context.key_bindings = KeyBindings::Emacs;
}
}
Some(_) => {
let _ = stderr.lock().write_all(b"set: invalid keymap\n");
return 0;
}
None => {
let _ = stderr.lock().write_all(b"set: no keymap given\n");
return 0;
}
},
b'x' => shell.flags |= PRINT_COMMS,
_ => return 0,
}
}
} else if arg.starts_with('+') {
for flag in arg.bytes().skip(1) {
match flag {
b'e' => shell.flags &= 255 ^ ERR_EXIT,
b'x' => shell.flags &= 255 ^ PRINT_COMMS,
_ => return 0,
}
}
}
}
match positionals {
None => (),
Some(kind) => {
let command: String = shell.variables.get_array("args").unwrap()[0].to_owned();
// This used to take a `&[String]` but cloned them all, so although
// this is non-ideal and could probably be better done with `Rc`, it
// hasn't got any slower.
let arguments = iter::once(command).chain(args_iter.map(|i| i.to_string())).collect();
match kind {
UnsetIfNone => shell.variables.set_array("args", arguments),
RetainIfNone => if arguments.len() != 1 {
shell.variables.set_array("args", arguments);
},
}
}
}
0
}
| {
return 0;
} | conditional_block |
set.rs | use liner::KeyBindings;
use shell::Shell;
use shell::flags::*;
use std::io::{self, Write}; |
const HELP: &'static str = r#"NAME
set - Set or unset values of shell options and positional parameters.
SYNOPSIS
set [ --help ] [-e | +e] [-x | +x] [-o [vi | emacs]] [- | --] [STRING]...
DESCRIPTION
Shell options may be set using the '-' character, and unset using the '+' character.
OPTIONS
-e Exit immediately if a command exits with a non-zero status.
-o Specifies that an argument will follow that sets the key map.
The keymap argument may be either `vi` or `emacs`.
-x Specifies that commands will be printed as they are executed.
-- Following arguments will be set as positional arguments in the shell.
If no argument are supplied, arguments will be unset.
- Following arguments will be set as positional arguments in the shell.
If no arguments are suppled, arguments will not be unset.
"#;
enum PositionalArgs {
UnsetIfNone,
RetainIfNone,
}
use self::PositionalArgs::*;
/// Implementation of the shell's `set` builtin.
///
/// Walks the argument list once: `-e`/`-x` set and `+e`/`+x` clear the
/// corresponding shell flags, `-o vi|emacs` switches the line-editor
/// keymap, and a bare `-` or `--` stops flag parsing so the remaining
/// arguments become the shell's positional parameters.
///
/// Always returns 0 -- invalid flags and keymap errors print a message
/// (or silently stop parsing) but do not produce a non-zero status.
pub(crate) fn set(args: &[&str], shell: &mut Shell) -> i32 {
    let stdout = io::stdout();
    let stderr = io::stderr();
    let mut args_iter = args.iter();
    // Set when `-` or `--` is seen; records how to treat the positional
    // arguments that follow (see PositionalArgs).
    let mut positionals = None;
    while let Some(arg) = args_iter.next() {
        if arg.starts_with("--") {
            // A bare `--`: everything after it becomes positional
            // arguments, and they are unset when none follow.
            if arg.len() == 2 {
                positionals = Some(UnsetIfNone);
                break;
            }
            if &arg[2..] == "help" {
                let mut stdout = stdout.lock();
                let _ = stdout.write(HELP.as_bytes());
            } else {
                // Unknown long option: stop without reporting an error.
                return 0;
            }
        } else if arg.starts_with('-') {
            // A bare `-`: like `--`, but existing positional arguments
            // are kept when none follow.
            if arg.len() == 1 {
                positionals = Some(RetainIfNone);
                break;
            }
            // Each byte after the `-` is a separate flag (e.g. `-ex`).
            for flag in arg.bytes().skip(1) {
                match flag {
                    b'e' => shell.flags |= ERR_EXIT,
                    // `-o` consumes the *next* argument as the keymap name.
                    b'o' => match args_iter.next() {
                        Some(&mode) if mode == "vi" => {
                            if let Some(context) = shell.context.as_mut() {
                                context.key_bindings = KeyBindings::Vi;
                            }
                        }
                        Some(&mode) if mode == "emacs" => {
                            if let Some(context) = shell.context.as_mut() {
                                context.key_bindings = KeyBindings::Emacs;
                            }
                        }
                        Some(_) => {
                            let _ = stderr.lock().write_all(b"set: invalid keymap\n");
                            return 0;
                        }
                        None => {
                            let _ = stderr.lock().write_all(b"set: no keymap given\n");
                            return 0;
                        }
                    },
                    b'x' => shell.flags |= PRINT_COMMS,
                    _ => return 0,
                }
            }
        } else if arg.starts_with('+') {
            // `+` flags clear the corresponding bits (mask inverted over u8).
            for flag in arg.bytes().skip(1) {
                match flag {
                    b'e' => shell.flags &= 255 ^ ERR_EXIT,
                    b'x' => shell.flags &= 255 ^ PRINT_COMMS,
                    _ => return 0,
                }
            }
        }
    }
    match positionals {
        None => (),
        Some(kind) => {
            // Element 0 of the "args" array is the shell/script name;
            // it is always preserved as the new first positional.
            let command: String = shell.variables.get_array("args").unwrap()[0].to_owned();
            // This used to take a `&[String]` but cloned them all, so although
            // this is non-ideal and could probably be better done with `Rc`, it
            // hasn't got any slower.
            let arguments = iter::once(command).chain(args_iter.map(|i| i.to_string())).collect();
            match kind {
                UnsetIfNone => shell.variables.set_array("args", arguments),
                // `-`: only replace when new positionals were actually given
                // (len == 1 means only the command name is present).
                RetainIfNone => if arguments.len() != 1 {
                    shell.variables.set_array("args", arguments);
                },
            }
        }
    }
    0
}
set.rs | use liner::KeyBindings;
use shell::Shell;
use shell::flags::*;
use std::io::{self, Write};
use std::iter;
const HELP: &'static str = r#"NAME
set - Set or unset values of shell options and positional parameters.
SYNOPSIS
set [ --help ] [-e | +e] [-x | +x] [-o [vi | emacs]] [- | --] [STRING]...
DESCRIPTION
Shell options may be set using the '-' character, and unset using the '+' character.
OPTIONS
-e Exit immediately if a command exits with a non-zero status.
-o Specifies that an argument will follow that sets the key map.
The keymap argument may be either `vi` or `emacs`.
-x Specifies that commands will be printed as they are executed.
-- Following arguments will be set as positional arguments in the shell.
If no argument are supplied, arguments will be unset.
- Following arguments will be set as positional arguments in the shell.
If no arguments are suppled, arguments will not be unset.
"#;
enum PositionalArgs {
UnsetIfNone,
RetainIfNone,
}
use self::PositionalArgs::*;
pub(crate) fn | (args: &[&str], shell: &mut Shell) -> i32 {
let stdout = io::stdout();
let stderr = io::stderr();
let mut args_iter = args.iter();
let mut positionals = None;
while let Some(arg) = args_iter.next() {
if arg.starts_with("--") {
if arg.len() == 2 {
positionals = Some(UnsetIfNone);
break;
}
if &arg[2..] == "help" {
let mut stdout = stdout.lock();
let _ = stdout.write(HELP.as_bytes());
} else {
return 0;
}
} else if arg.starts_with('-') {
if arg.len() == 1 {
positionals = Some(RetainIfNone);
break;
}
for flag in arg.bytes().skip(1) {
match flag {
b'e' => shell.flags |= ERR_EXIT,
b'o' => match args_iter.next() {
Some(&mode) if mode == "vi" => {
if let Some(context) = shell.context.as_mut() {
context.key_bindings = KeyBindings::Vi;
}
}
Some(&mode) if mode == "emacs" => {
if let Some(context) = shell.context.as_mut() {
context.key_bindings = KeyBindings::Emacs;
}
}
Some(_) => {
let _ = stderr.lock().write_all(b"set: invalid keymap\n");
return 0;
}
None => {
let _ = stderr.lock().write_all(b"set: no keymap given\n");
return 0;
}
},
b'x' => shell.flags |= PRINT_COMMS,
_ => return 0,
}
}
} else if arg.starts_with('+') {
for flag in arg.bytes().skip(1) {
match flag {
b'e' => shell.flags &= 255 ^ ERR_EXIT,
b'x' => shell.flags &= 255 ^ PRINT_COMMS,
_ => return 0,
}
}
}
}
match positionals {
None => (),
Some(kind) => {
let command: String = shell.variables.get_array("args").unwrap()[0].to_owned();
// This used to take a `&[String]` but cloned them all, so although
// this is non-ideal and could probably be better done with `Rc`, it
// hasn't got any slower.
let arguments = iter::once(command).chain(args_iter.map(|i| i.to_string())).collect();
match kind {
UnsetIfNone => shell.variables.set_array("args", arguments),
RetainIfNone => if arguments.len() != 1 {
shell.variables.set_array("args", arguments);
},
}
}
}
0
}
| set | identifier_name |
__init__.py | from __future__ import unicode_literals
__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Oct 29 2014"
__version__ = "3.0.7"
#Useful aliases for commonly used objects and modules.
| from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable | from .core import *
from .serializers.json_coders import pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital | random_line_split |
mod.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Module that contains Protobuf messages used by Exonum.
use anyhow::{ensure, Error};
use exonum_proto::ProtobufConvert;
use std::convert::TryFrom;
use crate::helpers::{Height, Round, ValidatorId};
pub mod schema;
impl ProtobufConvert for Height {
    // Block heights travel as plain `u64` values on the wire.
    type ProtoStruct = u64;
    fn to_pb(&self) -> Self::ProtoStruct {
        self.0
    }
    fn from_pb(pb: Self::ProtoStruct) -> Result<Self, Error> {
        // Every u64 is a valid height, so conversion cannot fail.
        Ok(Self(pb))
    }
}
impl ProtobufConvert for Round {
    // Consensus rounds travel as plain `u32` values on the wire.
    type ProtoStruct = u32;
    fn to_pb(&self) -> Self::ProtoStruct {
        self.0
    }
    fn from_pb(pb: Self::ProtoStruct) -> Result<Self, Error> {
        // Every u32 is a valid round, so conversion cannot fail.
        Ok(Self(pb))
    }
}
impl ProtobufConvert for ValidatorId {
type ProtoStruct = u32;
fn to_pb(&self) -> Self::ProtoStruct |
fn from_pb(pb: Self::ProtoStruct) -> Result<Self, Error> {
ensure!(
u16::try_from(pb).is_ok(),
"{} is out of range for valid ValidatorId",
pb
);
Ok(Self(pb as u16))
}
}
| {
u32::from(self.0)
} | identifier_body |
mod.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Module that contains Protobuf messages used by Exonum.
use anyhow::{ensure, Error};
use exonum_proto::ProtobufConvert;
use std::convert::TryFrom;
use crate::helpers::{Height, Round, ValidatorId};
pub mod schema;
impl ProtobufConvert for Height {
type ProtoStruct = u64;
fn | (&self) -> Self::ProtoStruct {
self.0
}
fn from_pb(pb: Self::ProtoStruct) -> Result<Self, Error> {
Ok(Self(pb))
}
}
impl ProtobufConvert for Round {
type ProtoStruct = u32;
fn to_pb(&self) -> Self::ProtoStruct {
self.0
}
fn from_pb(pb: Self::ProtoStruct) -> Result<Self, Error> {
Ok(Self(pb))
}
}
impl ProtobufConvert for ValidatorId {
    // `ValidatorId` wraps a `u16`, but Protobuf has no 16-bit integer
    // type, so the value travels as `u32` on the wire.
    type ProtoStruct = u32;
    fn to_pb(&self) -> Self::ProtoStruct {
        u32::from(self.0)
    }
    fn from_pb(pb: Self::ProtoStruct) -> Result<Self, Error> {
        // Reject wire values that do not fit into u16 instead of
        // silently truncating them.
        ensure!(
            u16::try_from(pb).is_ok(),
            "{} is out of range for valid ValidatorId",
            pb
        );
        Ok(Self(pb as u16))
    }
}
| to_pb | identifier_name |
mod.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Module that contains Protobuf messages used by Exonum.
use anyhow::{ensure, Error};
use exonum_proto::ProtobufConvert;
use std::convert::TryFrom;
use crate::helpers::{Height, Round, ValidatorId};
pub mod schema;
impl ProtobufConvert for Height {
type ProtoStruct = u64;
fn to_pb(&self) -> Self::ProtoStruct {
self.0
}
fn from_pb(pb: Self::ProtoStruct) -> Result<Self, Error> {
Ok(Self(pb))
}
}
impl ProtobufConvert for Round {
type ProtoStruct = u32;
fn to_pb(&self) -> Self::ProtoStruct {
self.0
}
fn from_pb(pb: Self::ProtoStruct) -> Result<Self, Error> {
Ok(Self(pb))
}
}
impl ProtobufConvert for ValidatorId {
type ProtoStruct = u32;
fn to_pb(&self) -> Self::ProtoStruct {
u32::from(self.0)
}
fn from_pb(pb: Self::ProtoStruct) -> Result<Self, Error> { | u16::try_from(pb).is_ok(),
"{} is out of range for valid ValidatorId",
pb
);
Ok(Self(pb as u16))
}
} | ensure!( | random_line_split |
properties_e.js | var searchData=
[
['text',['text',['../class_politechnikon_1_1game__elements_1_1_text.html#aa18ff5a85e90a5d19b62976a65863932',1,'Politechnikon::game_elements::Text']]], | ['textbuffor',['TextBuffor',['../class_politechnikon_1_1game__elements_1_1_field.html#a35f9c0081c0928be00204ca03ca56f1e',1,'Politechnikon::game_elements::Field']]],
['texture',['Texture',['../class_politechnikon_1_1engine_1_1_initialized_object_texture.html#a01631ef3363a7c274d3b89d5032a4407',1,'Politechnikon::engine::InitializedObjectTexture']]],
['texturepath',['TexturePath',['../class_politechnikon_1_1engine_1_1_initialized_object_texture.html#af22f9450a1f0be442485364e2ad9a77f',1,'Politechnikon::engine::InitializedObjectTexture']]],
['thisgame',['ThisGame',['../struct_politechnikon_1_1game__logic_1_1_score.html#a50c9999f319885c723ff6026c25f44b4',1,'Politechnikon::game_logic::Score']]]
]; | random_line_split | |
dropbox_account.py | #!/usr/bin/env python
# coding: utf8
"""
Dropbox Authentication for web2py
Developed by Massimo Di Pierro (2011)
Same License as Web2py License
"""
# mind here session is dropbox session, not current.session
import os
import re
import urllib
from dropbox import client, rest, session
from gluon import *
from gluon.tools import fetch
from gluon.storage import Storage
import gluon.contrib.simplejson as json
class DropboxAccount(object):
"""
from gluon.contrib.login_methods.dropbox_account import DropboxAccount
auth.settings.actions_disabled=['register','change_password','request_reset_password']
auth.settings.login_form = DropboxAccount(request,
key="...",
secret="...",
access_type="...",
url = "http://localhost:8000/%s/default/user/login" % request.application)
when logged in
client = auth.settings.login_form.client
"""
    def __init__(self,
                 request,
                 key = "",
                 secret = "",
                 access_type="app_folder",
                 login_url = "",
                 on_login_failure=None,
                 ):
        """Store the OAuth credentials and open a Dropbox session.

        request: the current web2py request object (stored, not used here).
        key, secret: the Dropbox application's key and secret.
        access_type: Dropbox access scope; 'app_folder' restricts access
            to the app's own folder (other values per the Dropbox SDK --
            assumed, verify against SDK docs).
        login_url: OAuth callback URL passed to build_authorize_url().
        on_login_failure: URL to redirect to when login yields no uid.
        """
        self.request=request
        self.key=key
        self.secret=secret
        self.access_type=access_type
        self.login_url = login_url
        self.on_login_failure = on_login_failure
        # One OAuth session is shared by login_form() and get_user().
        self.sess = session.DropboxSession(
            self.key,self.secret,self.access_type)
def get_user(self):
request = self.request
token = current.session.dropbox_token
try:
access_token = self.sess.obtain_access_token(token)
except:
access_token = None
if access_token:
user = Storage()
self.client = client.DropboxClient(self.sess)
data = self.client.account_info()
display_name = data.get('display_name','').split(' ',1)
user = dict(email = data.get('email',None),
first_name = display_name[0],
last_name = display_name[-1],
registration_id = data.get('uid',None))
if not user['registration_id'] and self.on_login_failure:
redirect(self.on_login_failure)
return user
return None
    def login_form(self):
        """Start the OAuth flow by sending the user to Dropbox.

        Obtains a request token, remembers it in the web2py session
        (get_user() exchanges it for an access token after the
        callback), and redirects to the Dropbox authorization page.
        """
        token = self.sess.obtain_request_token()
        current.session.dropbox_token = token
        dropbox_url = self.sess.build_authorize_url(token,self.login_url)
        redirect(dropbox_url)
        # NOTE(review): web2py's redirect() presumably raises HTTP, so
        # the IFRAME fallback below looks unreachable -- confirm before
        # relying on it.
        form = IFRAME(_src=dropbox_url,
                      _scrolling="no",
                      _frameborder="no",
                      _style="width:400px;height:240px;")
        return form
def logout_url(self, next = "/"):
current.session.dropbox_token=None
current.session.auth=None
redirect('https://www.dropbox.com/logout') | def put(self,filename,file):
return json.loads(self.client.put_file(filename,file))['bytes']
def get(self,filename,file):
return self.client.get_file(filename)
def dir(self,path):
return json.loads(self.client.metadata(path))
def use_dropbox(auth,filename='private/dropbox.key',**kwargs):
path = os.path.join(current.request.folder,filename)
if os.path.exists(path):
request = current.request
key,secret,access_type = open(path,'r').read().strip().split(':')
host = current.request.env.http_host
login_url = "http://%s/%s/default/user/login" % \
(host,request.application)
auth.settings.actions_disabled = \
['register','change_password','request_reset_password']
auth.settings.login_form = DropboxAccount(
request,key=key,secret=secret,access_type=access_type,
login_url = login_url,**kwargs) | return next | random_line_split |
dropbox_account.py | #!/usr/bin/env python
# coding: utf8
"""
Dropbox Authentication for web2py
Developed by Massimo Di Pierro (2011)
Same License as Web2py License
"""
# mind here session is dropbox session, not current.session
import os
import re
import urllib
from dropbox import client, rest, session
from gluon import *
from gluon.tools import fetch
from gluon.storage import Storage
import gluon.contrib.simplejson as json
class DropboxAccount(object):
"""
from gluon.contrib.login_methods.dropbox_account import DropboxAccount
auth.settings.actions_disabled=['register','change_password','request_reset_password']
auth.settings.login_form = DropboxAccount(request,
key="...",
secret="...",
access_type="...",
url = "http://localhost:8000/%s/default/user/login" % request.application)
when logged in
client = auth.settings.login_form.client
"""
def __init__(self,
request,
key = "",
secret = "",
access_type="app_folder",
login_url = "",
on_login_failure=None,
):
self.request=request
self.key=key
self.secret=secret
self.access_type=access_type
self.login_url = login_url
self.on_login_failure = on_login_failure
self.sess = session.DropboxSession(
self.key,self.secret,self.access_type)
def get_user(self):
request = self.request
token = current.session.dropbox_token
try:
access_token = self.sess.obtain_access_token(token)
except:
access_token = None
if access_token:
user = Storage()
self.client = client.DropboxClient(self.sess)
data = self.client.account_info()
display_name = data.get('display_name','').split(' ',1)
user = dict(email = data.get('email',None),
first_name = display_name[0],
last_name = display_name[-1],
registration_id = data.get('uid',None))
if not user['registration_id'] and self.on_login_failure:
redirect(self.on_login_failure)
return user
return None
def login_form(self):
token = self.sess.obtain_request_token()
current.session.dropbox_token = token
dropbox_url = self.sess.build_authorize_url(token,self.login_url)
redirect(dropbox_url)
form = IFRAME(_src=dropbox_url,
_scrolling="no",
_frameborder="no",
_style="width:400px;height:240px;")
return form
def logout_url(self, next = "/"):
current.session.dropbox_token=None
current.session.auth=None
redirect('https://www.dropbox.com/logout')
return next
def put(self,filename,file):
return json.loads(self.client.put_file(filename,file))['bytes']
def | (self,filename,file):
return self.client.get_file(filename)
def dir(self,path):
return json.loads(self.client.metadata(path))
def use_dropbox(auth,filename='private/dropbox.key',**kwargs):
path = os.path.join(current.request.folder,filename)
if os.path.exists(path):
request = current.request
key,secret,access_type = open(path,'r').read().strip().split(':')
host = current.request.env.http_host
login_url = "http://%s/%s/default/user/login" % \
(host,request.application)
auth.settings.actions_disabled = \
['register','change_password','request_reset_password']
auth.settings.login_form = DropboxAccount(
request,key=key,secret=secret,access_type=access_type,
login_url = login_url,**kwargs)
| get | identifier_name |
dropbox_account.py | #!/usr/bin/env python
# coding: utf8
"""
Dropbox Authentication for web2py
Developed by Massimo Di Pierro (2011)
Same License as Web2py License
"""
# mind here session is dropbox session, not current.session
import os
import re
import urllib
from dropbox import client, rest, session
from gluon import *
from gluon.tools import fetch
from gluon.storage import Storage
import gluon.contrib.simplejson as json
class DropboxAccount(object):
"""
from gluon.contrib.login_methods.dropbox_account import DropboxAccount
auth.settings.actions_disabled=['register','change_password','request_reset_password']
auth.settings.login_form = DropboxAccount(request,
key="...",
secret="...",
access_type="...",
url = "http://localhost:8000/%s/default/user/login" % request.application)
when logged in
client = auth.settings.login_form.client
"""
def __init__(self,
request,
key = "",
secret = "",
access_type="app_folder",
login_url = "",
on_login_failure=None,
):
self.request=request
self.key=key
self.secret=secret
self.access_type=access_type
self.login_url = login_url
self.on_login_failure = on_login_failure
self.sess = session.DropboxSession(
self.key,self.secret,self.access_type)
def get_user(self):
request = self.request
token = current.session.dropbox_token
try:
access_token = self.sess.obtain_access_token(token)
except:
access_token = None
if access_token:
user = Storage()
self.client = client.DropboxClient(self.sess)
data = self.client.account_info()
display_name = data.get('display_name','').split(' ',1)
user = dict(email = data.get('email',None),
first_name = display_name[0],
last_name = display_name[-1],
registration_id = data.get('uid',None))
if not user['registration_id'] and self.on_login_failure:
redirect(self.on_login_failure)
return user
return None
def login_form(self):
token = self.sess.obtain_request_token()
current.session.dropbox_token = token
dropbox_url = self.sess.build_authorize_url(token,self.login_url)
redirect(dropbox_url)
form = IFRAME(_src=dropbox_url,
_scrolling="no",
_frameborder="no",
_style="width:400px;height:240px;")
return form
def logout_url(self, next = "/"):
current.session.dropbox_token=None
current.session.auth=None
redirect('https://www.dropbox.com/logout')
return next
def put(self,filename,file):
return json.loads(self.client.put_file(filename,file))['bytes']
def get(self,filename,file):
return self.client.get_file(filename)
def dir(self,path):
|
def use_dropbox(auth,filename='private/dropbox.key',**kwargs):
path = os.path.join(current.request.folder,filename)
if os.path.exists(path):
request = current.request
key,secret,access_type = open(path,'r').read().strip().split(':')
host = current.request.env.http_host
login_url = "http://%s/%s/default/user/login" % \
(host,request.application)
auth.settings.actions_disabled = \
['register','change_password','request_reset_password']
auth.settings.login_form = DropboxAccount(
request,key=key,secret=secret,access_type=access_type,
login_url = login_url,**kwargs)
| return json.loads(self.client.metadata(path)) | identifier_body |
dropbox_account.py | #!/usr/bin/env python
# coding: utf8
"""
Dropbox Authentication for web2py
Developed by Massimo Di Pierro (2011)
Same License as Web2py License
"""
# mind here session is dropbox session, not current.session
import os
import re
import urllib
from dropbox import client, rest, session
from gluon import *
from gluon.tools import fetch
from gluon.storage import Storage
import gluon.contrib.simplejson as json
class DropboxAccount(object):
"""
from gluon.contrib.login_methods.dropbox_account import DropboxAccount
auth.settings.actions_disabled=['register','change_password','request_reset_password']
auth.settings.login_form = DropboxAccount(request,
key="...",
secret="...",
access_type="...",
url = "http://localhost:8000/%s/default/user/login" % request.application)
when logged in
client = auth.settings.login_form.client
"""
def __init__(self,
request,
key = "",
secret = "",
access_type="app_folder",
login_url = "",
on_login_failure=None,
):
self.request=request
self.key=key
self.secret=secret
self.access_type=access_type
self.login_url = login_url
self.on_login_failure = on_login_failure
self.sess = session.DropboxSession(
self.key,self.secret,self.access_type)
def get_user(self):
request = self.request
token = current.session.dropbox_token
try:
access_token = self.sess.obtain_access_token(token)
except:
access_token = None
if access_token:
user = Storage()
self.client = client.DropboxClient(self.sess)
data = self.client.account_info()
display_name = data.get('display_name','').split(' ',1)
user = dict(email = data.get('email',None),
first_name = display_name[0],
last_name = display_name[-1],
registration_id = data.get('uid',None))
if not user['registration_id'] and self.on_login_failure:
redirect(self.on_login_failure)
return user
return None
def login_form(self):
token = self.sess.obtain_request_token()
current.session.dropbox_token = token
dropbox_url = self.sess.build_authorize_url(token,self.login_url)
redirect(dropbox_url)
form = IFRAME(_src=dropbox_url,
_scrolling="no",
_frameborder="no",
_style="width:400px;height:240px;")
return form
def logout_url(self, next = "/"):
current.session.dropbox_token=None
current.session.auth=None
redirect('https://www.dropbox.com/logout')
return next
def put(self,filename,file):
return json.loads(self.client.put_file(filename,file))['bytes']
def get(self,filename,file):
return self.client.get_file(filename)
def dir(self,path):
return json.loads(self.client.metadata(path))
def use_dropbox(auth,filename='private/dropbox.key',**kwargs):
path = os.path.join(current.request.folder,filename)
if os.path.exists(path):
| request = current.request
key,secret,access_type = open(path,'r').read().strip().split(':')
host = current.request.env.http_host
login_url = "http://%s/%s/default/user/login" % \
(host,request.application)
auth.settings.actions_disabled = \
['register','change_password','request_reset_password']
auth.settings.login_form = DropboxAccount(
request,key=key,secret=secret,access_type=access_type,
login_url = login_url,**kwargs) | conditional_block | |
iana.py | #!/usr/bin/env python
#-----------------------------------------------------------------------------
# Copyright (c) 2008-2012, David P. D. Moss. All rights reserved.
#
# Released under the BSD license. See the LICENSE file for details.
#-----------------------------------------------------------------------------
#
# DISCLAIMER
#
# netaddr is not sponsored nor endorsed by IANA.
#
# Use of data from IANA (Internet Assigned Numbers Authority) is subject to
# copyright and is provided with prior written permission.
#
# IANA data files included with netaddr are not modified in any way but are
# parsed and made available to end users through an API.
#
# See README file and source code for URLs to latest copies of the relevant
# files.
#
#-----------------------------------------------------------------------------
"""
Routines for accessing data published by IANA (Internet Assigned Numbers
Authority).
More details can be found at the following URLs :-
- IANA Home Page - http://www.iana.org/
- IEEE Protocols Information Home Page - http://www.iana.org/protocols/
"""
import os as _os
import os.path as _path
import sys as _sys
import re as _re
from xml.sax import make_parser, handler
from netaddr.core import Publisher, Subscriber, PrettyPrinter, dos2unix
from netaddr.ip import IPAddress, IPNetwork, IPRange, \
cidr_abbrev_to_verbose, iprange_to_cidrs
from netaddr.compat import _dict_items, _callable
#-----------------------------------------------------------------------------
#: Topic based lookup dictionary for IANA information.
IANA_INFO = {
'IPv4' : {},
'IPv6' : {},
'multicast' : {},
}
#-----------------------------------------------------------------------------
class SaxRecordParser(handler.ContentHandler):
def __init__(self, callback=None):
self._level = 0
self._is_active = False
self._record = None
self._tag_level = None
self._tag_payload = None
self._tag_feeding = None
self._callback = callback
def startElement(self, name, attrs):
self._level += 1
if self._is_active is False:
if name == 'record':
self._is_active = True
self._tag_level = self._level
self._record = {}
if 'date' in attrs:
self._record['date'] = attrs['date']
elif self._level == self._tag_level + 1:
if name == 'xref':
if 'type' in attrs and 'data' in attrs:
l = self._record.setdefault(attrs['type'], [])
l.append(attrs['data'])
else:
self._tag_payload = []
self._tag_feeding = True
else:
self._tag_feeding = False
def endElement(self, name):
if self._is_active is True:
if name == 'record' and self._tag_level == self._level:
self._is_active = False
self._tag_level = None
if _callable(self._callback):
self._callback(self._record)
self._record = None
elif self._level == self._tag_level + 1:
if name != 'xref':
self._record[name] = ''.join(self._tag_payload)
self._tag_payload = None
self._tag_feeding = False
self._level -= 1
def characters(self, content):
if self._tag_feeding is True:
self._tag_payload.append(content)
class XMLRecordParser(Publisher):
"""
A configurable Parser that understands how to parse XML based records.
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to XML based record data.
"""
super(XMLRecordParser, self).__init__()
self.xmlparser = make_parser()
self.xmlparser.setContentHandler(SaxRecordParser(self.consume_record))
self.fh = fh
self.__dict__.update(kwargs)
def process_record(self, rec):
"""
This is the callback method invoked for every record. It is usually
over-ridden by base classes to provide specific record-based logic.
Any record can be vetoed (not passed to registered Subscriber objects)
by simply returning None.
"""
return rec
def consume_record(self, rec):
record = self.process_record(rec)
if record is not None:
self.notify(record)
def parse(self):
"""
Parse and normalises records, notifying registered subscribers with
record data as it is encountered.
"""
self.xmlparser.parse(self.fh)
#-----------------------------------------------------------------------------
class IPv4Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv4 address space file.
It can be found online here :-
- http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 address space file.
kwargs - additional parser options.
"""
super(IPv4Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {}
for key in ('prefix', 'designation', 'date', 'whois', 'status'):
record[key] = str(rec.get(key, '')).strip()
# Strip leading zeros from octet.
if '/' in record['prefix']:
(octet, prefix) = record['prefix'].split('/')
record['prefix'] = '%d/%d' % (int(octet), int(prefix))
record['status'] = record['status'].capitalize()
return record
#-----------------------------------------------------------------------------
class IPv6Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv6 address space file.
It can be found online here :-
- http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv6 address space file.
kwargs - additional parser options.
"""
super(IPv6Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {
'prefix': str(rec.get('prefix', '')).strip(),
'allocation': str(rec.get('description', '')).strip(),
'reference': str(rec.get('rfc', [''])[0]).strip(),
}
return record
#-----------------------------------------------------------------------------
class MulticastParser(XMLRecordParser):
"""
A XMLRecordParser that knows how to process the IANA IPv4 multicast address
allocation file.
It can be found online here :-
- http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 multicast address
allocation file.
kwargs - additional parser options.
"""
super(MulticastParser, self).__init__(fh)
def normalise_addr(self, addr):
"""
Removes variations from address entries found in this particular file.
"""
if '-' in addr:
(a1, a2) = addr.split('-')
o1 = a1.strip().split('.')
o2 = a2.strip().split('.')
return '%s-%s' % ('.'.join([str(int(i)) for i in o1]),
'.'.join([str(int(i)) for i in o2]))
else:
o1 = addr.strip().split('.')
return '.'.join([str(int(i)) for i in o1])
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
if 'addr' in rec:
record = {
'address': self.normalise_addr(str(rec['addr'])),
'descr': str(rec.get('description', '')),
}
return record
#-----------------------------------------------------------------------------
class DictUpdater(Subscriber):
"""
Concrete Subscriber that inserts records received from a Publisher into a
dictionary.
"""
def __init__(self, dct, topic, unique_key):
"""
Constructor.
dct - lookup dict or dict like object to insert records into.
topic - high-level category name of data to be processed.
unique_key - key name in data dict that uniquely identifies it.
"""
self.dct = dct
self.topic = topic
self.unique_key = unique_key
def update(self, data):
"""
Callback function used by Publisher to notify this Subscriber about
an update. Stores topic based information into dictionary passed to
constructor.
"""
data_id = data[self.unique_key]
if self.topic == 'IPv4':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'IPv6':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'multicast':
iprange = None
if '-' in data_id:
# See if we can manage a single CIDR.
(first, last) = data_id.split('-')
iprange = IPRange(first, last)
cidrs = iprange.cidrs()
if len(cidrs) == 1:
iprange = cidrs[0]
else:
iprange = IPAddress(data_id)
self.dct[iprange] = data
#-----------------------------------------------------------------------------
def load_info():
"""
Parse and load internal IANA data lookups with the latest information from
data files.
"""
PATH = _path.dirname(__file__)
ipv4 = IPv4Parser(open(_path.join(PATH, 'ipv4-address-space.xml')))
ipv4.attach(DictUpdater(IANA_INFO['IPv4'], 'IPv4', 'prefix'))
ipv4.parse()
ipv6 = IPv6Parser(open(_path.join(PATH, 'ipv6-address-space.xml')))
ipv6.attach(DictUpdater(IANA_INFO['IPv6'], 'IPv6', 'prefix'))
ipv6.parse()
mcast = MulticastParser(open(_path.join(PATH, 'multicast-addresses.xml')))
mcast.attach(DictUpdater(IANA_INFO['multicast'], 'multicast', 'address'))
mcast.parse()
#-----------------------------------------------------------------------------
def pprint_info(fh=None):
"""
Pretty prints IANA information to filehandle.
"""
if fh is None:
fh = _sys.stdout
for category in sorted(IANA_INFO):
fh.write('-' * len(category) + "\n")
fh.write(category + "\n")
fh.write('-' * len(category) + "\n")
ipranges = IANA_INFO[category]
for iprange in sorted(ipranges):
details = ipranges[iprange]
fh.write('%-45r' % (iprange) + details + "\n")
#-----------------------------------------------------------------------------
def query(ip_addr):
|
#-----------------------------------------------------------------------------
def get_latest_files():
"""Download the latest files from IANA"""
if _sys.version_info[0] == 3:
# Python 3.x
from urllib.request import Request, urlopen
else:
# Python 2.x
from urllib2 import Request, urlopen
urls = [
'http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml',
'http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml',
'http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml',
]
for url in urls:
_sys.stdout.write('downloading latest copy of %s\n' % url)
request = Request(url)
response = urlopen(request)
save_path = _path.dirname(__file__)
basename = _os.path.basename(response.geturl().rstrip('/'))
filename = _path.join(save_path, basename)
fh = open(filename, 'wb')
fh.write(response.read())
fh.close()
# Make sure the line endings are consistent across platforms.
dos2unix(filename)
#-----------------------------------------------------------------------------
if __name__ == '__main__':
# Generate indices when module is executed as a script.
get_latest_files()
# On module import, read IANA data files and populate lookups dict.
load_info()
| """
Returns informational data specific to this IP address.
"""
info = {}
def within_bounds(ip, ip_range):
# Boundary checking for multiple IP classes.
if hasattr(ip_range, 'first'):
# IP network or IP range.
return ip in ip_range
elif hasattr(ip_range, 'value'):
# IP address.
return ip == ip_range
raise Exception('Unsupported IP range or address: %r!' % ip_range)
if ip_addr.version == 4:
for cidr, record in _dict_items(IANA_INFO['IPv4']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv4', [])
info['IPv4'].append(record)
if ip_addr.is_multicast():
for iprange, record in _dict_items(IANA_INFO['multicast']):
if within_bounds(ip_addr, iprange):
info.setdefault('Multicast', [])
info['Multicast'].append(record)
elif ip_addr.version == 6:
for cidr, record in _dict_items(IANA_INFO['IPv6']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv6', [])
info['IPv6'].append(record)
return info | identifier_body |
iana.py | #!/usr/bin/env python
#-----------------------------------------------------------------------------
# Copyright (c) 2008-2012, David P. D. Moss. All rights reserved.
#
# Released under the BSD license. See the LICENSE file for details.
#-----------------------------------------------------------------------------
#
# DISCLAIMER
#
# netaddr is not sponsored nor endorsed by IANA.
#
# Use of data from IANA (Internet Assigned Numbers Authority) is subject to
# copyright and is provided with prior written permission.
#
# IANA data files included with netaddr are not modified in any way but are
# parsed and made available to end users through an API.
#
# See README file and source code for URLs to latest copies of the relevant
# files.
#
#-----------------------------------------------------------------------------
"""
Routines for accessing data published by IANA (Internet Assigned Numbers
Authority).
More details can be found at the following URLs :-
- IANA Home Page - http://www.iana.org/
- IEEE Protocols Information Home Page - http://www.iana.org/protocols/
"""
import os as _os
import os.path as _path
import sys as _sys
import re as _re
from xml.sax import make_parser, handler
from netaddr.core import Publisher, Subscriber, PrettyPrinter, dos2unix
from netaddr.ip import IPAddress, IPNetwork, IPRange, \
cidr_abbrev_to_verbose, iprange_to_cidrs
from netaddr.compat import _dict_items, _callable
#-----------------------------------------------------------------------------
#: Topic based lookup dictionary for IANA information.
IANA_INFO = {
'IPv4' : {},
'IPv6' : {},
'multicast' : {},
}
#-----------------------------------------------------------------------------
class SaxRecordParser(handler.ContentHandler):
def __init__(self, callback=None):
self._level = 0
self._is_active = False
self._record = None
self._tag_level = None
self._tag_payload = None
self._tag_feeding = None
self._callback = callback
def startElement(self, name, attrs):
self._level += 1
if self._is_active is False:
if name == 'record':
self._is_active = True
self._tag_level = self._level
self._record = {}
if 'date' in attrs:
self._record['date'] = attrs['date']
elif self._level == self._tag_level + 1:
if name == 'xref':
if 'type' in attrs and 'data' in attrs:
l = self._record.setdefault(attrs['type'], [])
l.append(attrs['data'])
else:
self._tag_payload = []
self._tag_feeding = True
else:
self._tag_feeding = False
def endElement(self, name):
if self._is_active is True:
if name == 'record' and self._tag_level == self._level:
self._is_active = False
self._tag_level = None
if _callable(self._callback):
self._callback(self._record)
self._record = None
elif self._level == self._tag_level + 1:
if name != 'xref':
self._record[name] = ''.join(self._tag_payload)
self._tag_payload = None
self._tag_feeding = False
self._level -= 1
def characters(self, content):
if self._tag_feeding is True:
self._tag_payload.append(content)
class XMLRecordParser(Publisher):
"""
A configurable Parser that understands how to parse XML based records.
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to XML based record data.
"""
super(XMLRecordParser, self).__init__()
self.xmlparser = make_parser()
self.xmlparser.setContentHandler(SaxRecordParser(self.consume_record))
self.fh = fh
self.__dict__.update(kwargs)
def process_record(self, rec):
"""
This is the callback method invoked for every record. It is usually
over-ridden by base classes to provide specific record-based logic.
Any record can be vetoed (not passed to registered Subscriber objects)
by simply returning None.
"""
return rec
def consume_record(self, rec):
record = self.process_record(rec)
if record is not None:
self.notify(record)
def parse(self):
"""
Parse and normalises records, notifying registered subscribers with
record data as it is encountered.
"""
self.xmlparser.parse(self.fh)
#-----------------------------------------------------------------------------
class IPv4Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv4 address space file.
It can be found online here :-
- http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 address space file.
kwargs - additional parser options.
"""
super(IPv4Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {}
for key in ('prefix', 'designation', 'date', 'whois', 'status'):
record[key] = str(rec.get(key, '')).strip()
# Strip leading zeros from octet.
if '/' in record['prefix']:
(octet, prefix) = record['prefix'].split('/')
record['prefix'] = '%d/%d' % (int(octet), int(prefix))
record['status'] = record['status'].capitalize()
return record
#-----------------------------------------------------------------------------
class IPv6Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv6 address space file.
|
- http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv6 address space file.
kwargs - additional parser options.
"""
super(IPv6Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {
'prefix': str(rec.get('prefix', '')).strip(),
'allocation': str(rec.get('description', '')).strip(),
'reference': str(rec.get('rfc', [''])[0]).strip(),
}
return record
#-----------------------------------------------------------------------------
class MulticastParser(XMLRecordParser):
"""
A XMLRecordParser that knows how to process the IANA IPv4 multicast address
allocation file.
It can be found online here :-
- http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 multicast address
allocation file.
kwargs - additional parser options.
"""
super(MulticastParser, self).__init__(fh)
def normalise_addr(self, addr):
"""
Removes variations from address entries found in this particular file.
"""
if '-' in addr:
(a1, a2) = addr.split('-')
o1 = a1.strip().split('.')
o2 = a2.strip().split('.')
return '%s-%s' % ('.'.join([str(int(i)) for i in o1]),
'.'.join([str(int(i)) for i in o2]))
else:
o1 = addr.strip().split('.')
return '.'.join([str(int(i)) for i in o1])
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
if 'addr' in rec:
record = {
'address': self.normalise_addr(str(rec['addr'])),
'descr': str(rec.get('description', '')),
}
return record
#-----------------------------------------------------------------------------
class DictUpdater(Subscriber):
"""
Concrete Subscriber that inserts records received from a Publisher into a
dictionary.
"""
def __init__(self, dct, topic, unique_key):
"""
Constructor.
dct - lookup dict or dict like object to insert records into.
topic - high-level category name of data to be processed.
unique_key - key name in data dict that uniquely identifies it.
"""
self.dct = dct
self.topic = topic
self.unique_key = unique_key
def update(self, data):
"""
Callback function used by Publisher to notify this Subscriber about
an update. Stores topic based information into dictionary passed to
constructor.
"""
data_id = data[self.unique_key]
if self.topic == 'IPv4':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'IPv6':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'multicast':
iprange = None
if '-' in data_id:
# See if we can manage a single CIDR.
(first, last) = data_id.split('-')
iprange = IPRange(first, last)
cidrs = iprange.cidrs()
if len(cidrs) == 1:
iprange = cidrs[0]
else:
iprange = IPAddress(data_id)
self.dct[iprange] = data
#-----------------------------------------------------------------------------
def load_info():
"""
Parse and load internal IANA data lookups with the latest information from
data files.
"""
PATH = _path.dirname(__file__)
ipv4 = IPv4Parser(open(_path.join(PATH, 'ipv4-address-space.xml')))
ipv4.attach(DictUpdater(IANA_INFO['IPv4'], 'IPv4', 'prefix'))
ipv4.parse()
ipv6 = IPv6Parser(open(_path.join(PATH, 'ipv6-address-space.xml')))
ipv6.attach(DictUpdater(IANA_INFO['IPv6'], 'IPv6', 'prefix'))
ipv6.parse()
mcast = MulticastParser(open(_path.join(PATH, 'multicast-addresses.xml')))
mcast.attach(DictUpdater(IANA_INFO['multicast'], 'multicast', 'address'))
mcast.parse()
#-----------------------------------------------------------------------------
def pprint_info(fh=None):
"""
Pretty prints IANA information to filehandle.
"""
if fh is None:
fh = _sys.stdout
for category in sorted(IANA_INFO):
fh.write('-' * len(category) + "\n")
fh.write(category + "\n")
fh.write('-' * len(category) + "\n")
ipranges = IANA_INFO[category]
for iprange in sorted(ipranges):
details = ipranges[iprange]
fh.write('%-45r' % (iprange) + details + "\n")
#-----------------------------------------------------------------------------
def query(ip_addr):
"""
Returns informational data specific to this IP address.
"""
info = {}
def within_bounds(ip, ip_range):
# Boundary checking for multiple IP classes.
if hasattr(ip_range, 'first'):
# IP network or IP range.
return ip in ip_range
elif hasattr(ip_range, 'value'):
# IP address.
return ip == ip_range
raise Exception('Unsupported IP range or address: %r!' % ip_range)
if ip_addr.version == 4:
for cidr, record in _dict_items(IANA_INFO['IPv4']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv4', [])
info['IPv4'].append(record)
if ip_addr.is_multicast():
for iprange, record in _dict_items(IANA_INFO['multicast']):
if within_bounds(ip_addr, iprange):
info.setdefault('Multicast', [])
info['Multicast'].append(record)
elif ip_addr.version == 6:
for cidr, record in _dict_items(IANA_INFO['IPv6']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv6', [])
info['IPv6'].append(record)
return info
#-----------------------------------------------------------------------------
def get_latest_files():
"""Download the latest files from IANA"""
if _sys.version_info[0] == 3:
# Python 3.x
from urllib.request import Request, urlopen
else:
# Python 2.x
from urllib2 import Request, urlopen
urls = [
'http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml',
'http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml',
'http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml',
]
for url in urls:
_sys.stdout.write('downloading latest copy of %s\n' % url)
request = Request(url)
response = urlopen(request)
save_path = _path.dirname(__file__)
basename = _os.path.basename(response.geturl().rstrip('/'))
filename = _path.join(save_path, basename)
fh = open(filename, 'wb')
fh.write(response.read())
fh.close()
# Make sure the line endings are consistent across platforms.
dos2unix(filename)
#-----------------------------------------------------------------------------
if __name__ == '__main__':
# Generate indices when module is executed as a script.
get_latest_files()
# On module import, read IANA data files and populate lookups dict.
load_info() | It can be found online here :- | random_line_split |
iana.py | #!/usr/bin/env python
#-----------------------------------------------------------------------------
# Copyright (c) 2008-2012, David P. D. Moss. All rights reserved.
#
# Released under the BSD license. See the LICENSE file for details.
#-----------------------------------------------------------------------------
#
# DISCLAIMER
#
# netaddr is not sponsored nor endorsed by IANA.
#
# Use of data from IANA (Internet Assigned Numbers Authority) is subject to
# copyright and is provided with prior written permission.
#
# IANA data files included with netaddr are not modified in any way but are
# parsed and made available to end users through an API.
#
# See README file and source code for URLs to latest copies of the relevant
# files.
#
#-----------------------------------------------------------------------------
"""
Routines for accessing data published by IANA (Internet Assigned Numbers
Authority).
More details can be found at the following URLs :-
- IANA Home Page - http://www.iana.org/
- IEEE Protocols Information Home Page - http://www.iana.org/protocols/
"""
import os as _os
import os.path as _path
import sys as _sys
import re as _re
from xml.sax import make_parser, handler
from netaddr.core import Publisher, Subscriber, PrettyPrinter, dos2unix
from netaddr.ip import IPAddress, IPNetwork, IPRange, \
cidr_abbrev_to_verbose, iprange_to_cidrs
from netaddr.compat import _dict_items, _callable
#-----------------------------------------------------------------------------
#: Topic based lookup dictionary for IANA information.
IANA_INFO = {
'IPv4' : {},
'IPv6' : {},
'multicast' : {},
}
#-----------------------------------------------------------------------------
class | (handler.ContentHandler):
def __init__(self, callback=None):
self._level = 0
self._is_active = False
self._record = None
self._tag_level = None
self._tag_payload = None
self._tag_feeding = None
self._callback = callback
def startElement(self, name, attrs):
self._level += 1
if self._is_active is False:
if name == 'record':
self._is_active = True
self._tag_level = self._level
self._record = {}
if 'date' in attrs:
self._record['date'] = attrs['date']
elif self._level == self._tag_level + 1:
if name == 'xref':
if 'type' in attrs and 'data' in attrs:
l = self._record.setdefault(attrs['type'], [])
l.append(attrs['data'])
else:
self._tag_payload = []
self._tag_feeding = True
else:
self._tag_feeding = False
def endElement(self, name):
if self._is_active is True:
if name == 'record' and self._tag_level == self._level:
self._is_active = False
self._tag_level = None
if _callable(self._callback):
self._callback(self._record)
self._record = None
elif self._level == self._tag_level + 1:
if name != 'xref':
self._record[name] = ''.join(self._tag_payload)
self._tag_payload = None
self._tag_feeding = False
self._level -= 1
def characters(self, content):
if self._tag_feeding is True:
self._tag_payload.append(content)
class XMLRecordParser(Publisher):
"""
A configurable Parser that understands how to parse XML based records.
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to XML based record data.
"""
super(XMLRecordParser, self).__init__()
self.xmlparser = make_parser()
self.xmlparser.setContentHandler(SaxRecordParser(self.consume_record))
self.fh = fh
self.__dict__.update(kwargs)
def process_record(self, rec):
"""
This is the callback method invoked for every record. It is usually
over-ridden by base classes to provide specific record-based logic.
Any record can be vetoed (not passed to registered Subscriber objects)
by simply returning None.
"""
return rec
def consume_record(self, rec):
record = self.process_record(rec)
if record is not None:
self.notify(record)
def parse(self):
"""
Parse and normalises records, notifying registered subscribers with
record data as it is encountered.
"""
self.xmlparser.parse(self.fh)
#-----------------------------------------------------------------------------
class IPv4Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv4 address space file.
It can be found online here :-
- http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 address space file.
kwargs - additional parser options.
"""
super(IPv4Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {}
for key in ('prefix', 'designation', 'date', 'whois', 'status'):
record[key] = str(rec.get(key, '')).strip()
# Strip leading zeros from octet.
if '/' in record['prefix']:
(octet, prefix) = record['prefix'].split('/')
record['prefix'] = '%d/%d' % (int(octet), int(prefix))
record['status'] = record['status'].capitalize()
return record
#-----------------------------------------------------------------------------
class IPv6Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv6 address space file.
It can be found online here :-
- http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv6 address space file.
kwargs - additional parser options.
"""
super(IPv6Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {
'prefix': str(rec.get('prefix', '')).strip(),
'allocation': str(rec.get('description', '')).strip(),
'reference': str(rec.get('rfc', [''])[0]).strip(),
}
return record
#-----------------------------------------------------------------------------
class MulticastParser(XMLRecordParser):
"""
A XMLRecordParser that knows how to process the IANA IPv4 multicast address
allocation file.
It can be found online here :-
- http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 multicast address
allocation file.
kwargs - additional parser options.
"""
super(MulticastParser, self).__init__(fh)
def normalise_addr(self, addr):
"""
Removes variations from address entries found in this particular file.
"""
if '-' in addr:
(a1, a2) = addr.split('-')
o1 = a1.strip().split('.')
o2 = a2.strip().split('.')
return '%s-%s' % ('.'.join([str(int(i)) for i in o1]),
'.'.join([str(int(i)) for i in o2]))
else:
o1 = addr.strip().split('.')
return '.'.join([str(int(i)) for i in o1])
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
if 'addr' in rec:
record = {
'address': self.normalise_addr(str(rec['addr'])),
'descr': str(rec.get('description', '')),
}
return record
#-----------------------------------------------------------------------------
class DictUpdater(Subscriber):
"""
Concrete Subscriber that inserts records received from a Publisher into a
dictionary.
"""
def __init__(self, dct, topic, unique_key):
"""
Constructor.
dct - lookup dict or dict like object to insert records into.
topic - high-level category name of data to be processed.
unique_key - key name in data dict that uniquely identifies it.
"""
self.dct = dct
self.topic = topic
self.unique_key = unique_key
def update(self, data):
"""
Callback function used by Publisher to notify this Subscriber about
an update. Stores topic based information into dictionary passed to
constructor.
"""
data_id = data[self.unique_key]
if self.topic == 'IPv4':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'IPv6':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'multicast':
iprange = None
if '-' in data_id:
# See if we can manage a single CIDR.
(first, last) = data_id.split('-')
iprange = IPRange(first, last)
cidrs = iprange.cidrs()
if len(cidrs) == 1:
iprange = cidrs[0]
else:
iprange = IPAddress(data_id)
self.dct[iprange] = data
#-----------------------------------------------------------------------------
def load_info():
"""
Parse and load internal IANA data lookups with the latest information from
data files.
"""
PATH = _path.dirname(__file__)
ipv4 = IPv4Parser(open(_path.join(PATH, 'ipv4-address-space.xml')))
ipv4.attach(DictUpdater(IANA_INFO['IPv4'], 'IPv4', 'prefix'))
ipv4.parse()
ipv6 = IPv6Parser(open(_path.join(PATH, 'ipv6-address-space.xml')))
ipv6.attach(DictUpdater(IANA_INFO['IPv6'], 'IPv6', 'prefix'))
ipv6.parse()
mcast = MulticastParser(open(_path.join(PATH, 'multicast-addresses.xml')))
mcast.attach(DictUpdater(IANA_INFO['multicast'], 'multicast', 'address'))
mcast.parse()
#-----------------------------------------------------------------------------
def pprint_info(fh=None):
"""
Pretty prints IANA information to filehandle.
"""
if fh is None:
fh = _sys.stdout
for category in sorted(IANA_INFO):
fh.write('-' * len(category) + "\n")
fh.write(category + "\n")
fh.write('-' * len(category) + "\n")
ipranges = IANA_INFO[category]
for iprange in sorted(ipranges):
details = ipranges[iprange]
fh.write('%-45r' % (iprange) + details + "\n")
#-----------------------------------------------------------------------------
def query(ip_addr):
"""
Returns informational data specific to this IP address.
"""
info = {}
def within_bounds(ip, ip_range):
# Boundary checking for multiple IP classes.
if hasattr(ip_range, 'first'):
# IP network or IP range.
return ip in ip_range
elif hasattr(ip_range, 'value'):
# IP address.
return ip == ip_range
raise Exception('Unsupported IP range or address: %r!' % ip_range)
if ip_addr.version == 4:
for cidr, record in _dict_items(IANA_INFO['IPv4']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv4', [])
info['IPv4'].append(record)
if ip_addr.is_multicast():
for iprange, record in _dict_items(IANA_INFO['multicast']):
if within_bounds(ip_addr, iprange):
info.setdefault('Multicast', [])
info['Multicast'].append(record)
elif ip_addr.version == 6:
for cidr, record in _dict_items(IANA_INFO['IPv6']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv6', [])
info['IPv6'].append(record)
return info
#-----------------------------------------------------------------------------
def get_latest_files():
"""Download the latest files from IANA"""
if _sys.version_info[0] == 3:
# Python 3.x
from urllib.request import Request, urlopen
else:
# Python 2.x
from urllib2 import Request, urlopen
urls = [
'http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml',
'http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml',
'http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml',
]
for url in urls:
_sys.stdout.write('downloading latest copy of %s\n' % url)
request = Request(url)
response = urlopen(request)
save_path = _path.dirname(__file__)
basename = _os.path.basename(response.geturl().rstrip('/'))
filename = _path.join(save_path, basename)
fh = open(filename, 'wb')
fh.write(response.read())
fh.close()
# Make sure the line endings are consistent across platforms.
dos2unix(filename)
#-----------------------------------------------------------------------------
if __name__ == '__main__':
# Generate indices when module is executed as a script.
get_latest_files()
# On module import, read IANA data files and populate lookups dict.
load_info()
| SaxRecordParser | identifier_name |
iana.py | #!/usr/bin/env python
#-----------------------------------------------------------------------------
# Copyright (c) 2008-2012, David P. D. Moss. All rights reserved.
#
# Released under the BSD license. See the LICENSE file for details.
#-----------------------------------------------------------------------------
#
# DISCLAIMER
#
# netaddr is not sponsored nor endorsed by IANA.
#
# Use of data from IANA (Internet Assigned Numbers Authority) is subject to
# copyright and is provided with prior written permission.
#
# IANA data files included with netaddr are not modified in any way but are
# parsed and made available to end users through an API.
#
# See README file and source code for URLs to latest copies of the relevant
# files.
#
#-----------------------------------------------------------------------------
"""
Routines for accessing data published by IANA (Internet Assigned Numbers
Authority).
More details can be found at the following URLs :-
- IANA Home Page - http://www.iana.org/
- IEEE Protocols Information Home Page - http://www.iana.org/protocols/
"""
import os as _os
import os.path as _path
import sys as _sys
import re as _re
from xml.sax import make_parser, handler
from netaddr.core import Publisher, Subscriber, PrettyPrinter, dos2unix
from netaddr.ip import IPAddress, IPNetwork, IPRange, \
cidr_abbrev_to_verbose, iprange_to_cidrs
from netaddr.compat import _dict_items, _callable
#-----------------------------------------------------------------------------
#: Topic based lookup dictionary for IANA information.
IANA_INFO = {
'IPv4' : {},
'IPv6' : {},
'multicast' : {},
}
#-----------------------------------------------------------------------------
class SaxRecordParser(handler.ContentHandler):
def __init__(self, callback=None):
self._level = 0
self._is_active = False
self._record = None
self._tag_level = None
self._tag_payload = None
self._tag_feeding = None
self._callback = callback
def startElement(self, name, attrs):
self._level += 1
if self._is_active is False:
if name == 'record':
self._is_active = True
self._tag_level = self._level
self._record = {}
if 'date' in attrs:
self._record['date'] = attrs['date']
elif self._level == self._tag_level + 1:
if name == 'xref':
if 'type' in attrs and 'data' in attrs:
l = self._record.setdefault(attrs['type'], [])
l.append(attrs['data'])
else:
self._tag_payload = []
self._tag_feeding = True
else:
self._tag_feeding = False
def endElement(self, name):
if self._is_active is True:
if name == 'record' and self._tag_level == self._level:
self._is_active = False
self._tag_level = None
if _callable(self._callback):
self._callback(self._record)
self._record = None
elif self._level == self._tag_level + 1:
if name != 'xref':
self._record[name] = ''.join(self._tag_payload)
self._tag_payload = None
self._tag_feeding = False
self._level -= 1
def characters(self, content):
if self._tag_feeding is True:
self._tag_payload.append(content)
class XMLRecordParser(Publisher):
"""
A configurable Parser that understands how to parse XML based records.
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to XML based record data.
"""
super(XMLRecordParser, self).__init__()
self.xmlparser = make_parser()
self.xmlparser.setContentHandler(SaxRecordParser(self.consume_record))
self.fh = fh
self.__dict__.update(kwargs)
def process_record(self, rec):
"""
This is the callback method invoked for every record. It is usually
over-ridden by base classes to provide specific record-based logic.
Any record can be vetoed (not passed to registered Subscriber objects)
by simply returning None.
"""
return rec
def consume_record(self, rec):
record = self.process_record(rec)
if record is not None:
self.notify(record)
def parse(self):
"""
Parse and normalises records, notifying registered subscribers with
record data as it is encountered.
"""
self.xmlparser.parse(self.fh)
#-----------------------------------------------------------------------------
class IPv4Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv4 address space file.
It can be found online here :-
- http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 address space file.
kwargs - additional parser options.
"""
super(IPv4Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {}
for key in ('prefix', 'designation', 'date', 'whois', 'status'):
record[key] = str(rec.get(key, '')).strip()
# Strip leading zeros from octet.
if '/' in record['prefix']:
(octet, prefix) = record['prefix'].split('/')
record['prefix'] = '%d/%d' % (int(octet), int(prefix))
record['status'] = record['status'].capitalize()
return record
#-----------------------------------------------------------------------------
class IPv6Parser(XMLRecordParser):
"""
A XMLRecordParser that understands how to parse and retrieve data records
from the IANA IPv6 address space file.
It can be found online here :-
- http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv6 address space file.
kwargs - additional parser options.
"""
super(IPv6Parser, self).__init__(fh)
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
record = {
'prefix': str(rec.get('prefix', '')).strip(),
'allocation': str(rec.get('description', '')).strip(),
'reference': str(rec.get('rfc', [''])[0]).strip(),
}
return record
#-----------------------------------------------------------------------------
class MulticastParser(XMLRecordParser):
"""
A XMLRecordParser that knows how to process the IANA IPv4 multicast address
allocation file.
It can be found online here :-
- http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml
"""
def __init__(self, fh, **kwargs):
"""
Constructor.
fh - a valid, open file handle to an IANA IPv4 multicast address
allocation file.
kwargs - additional parser options.
"""
super(MulticastParser, self).__init__(fh)
def normalise_addr(self, addr):
"""
Removes variations from address entries found in this particular file.
"""
if '-' in addr:
(a1, a2) = addr.split('-')
o1 = a1.strip().split('.')
o2 = a2.strip().split('.')
return '%s-%s' % ('.'.join([str(int(i)) for i in o1]),
'.'.join([str(int(i)) for i in o2]))
else:
o1 = addr.strip().split('.')
return '.'.join([str(int(i)) for i in o1])
def process_record(self, rec):
"""
Callback method invoked for every record.
See base class method for more details.
"""
if 'addr' in rec:
record = {
'address': self.normalise_addr(str(rec['addr'])),
'descr': str(rec.get('description', '')),
}
return record
#-----------------------------------------------------------------------------
class DictUpdater(Subscriber):
"""
Concrete Subscriber that inserts records received from a Publisher into a
dictionary.
"""
def __init__(self, dct, topic, unique_key):
"""
Constructor.
dct - lookup dict or dict like object to insert records into.
topic - high-level category name of data to be processed.
unique_key - key name in data dict that uniquely identifies it.
"""
self.dct = dct
self.topic = topic
self.unique_key = unique_key
def update(self, data):
"""
Callback function used by Publisher to notify this Subscriber about
an update. Stores topic based information into dictionary passed to
constructor.
"""
data_id = data[self.unique_key]
if self.topic == 'IPv4':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'IPv6':
cidr = IPNetwork(cidr_abbrev_to_verbose(data_id))
self.dct[cidr] = data
elif self.topic == 'multicast':
iprange = None
if '-' in data_id:
# See if we can manage a single CIDR.
(first, last) = data_id.split('-')
iprange = IPRange(first, last)
cidrs = iprange.cidrs()
if len(cidrs) == 1:
iprange = cidrs[0]
else:
iprange = IPAddress(data_id)
self.dct[iprange] = data
#-----------------------------------------------------------------------------
def load_info():
"""
Parse and load internal IANA data lookups with the latest information from
data files.
"""
PATH = _path.dirname(__file__)
ipv4 = IPv4Parser(open(_path.join(PATH, 'ipv4-address-space.xml')))
ipv4.attach(DictUpdater(IANA_INFO['IPv4'], 'IPv4', 'prefix'))
ipv4.parse()
ipv6 = IPv6Parser(open(_path.join(PATH, 'ipv6-address-space.xml')))
ipv6.attach(DictUpdater(IANA_INFO['IPv6'], 'IPv6', 'prefix'))
ipv6.parse()
mcast = MulticastParser(open(_path.join(PATH, 'multicast-addresses.xml')))
mcast.attach(DictUpdater(IANA_INFO['multicast'], 'multicast', 'address'))
mcast.parse()
#-----------------------------------------------------------------------------
def pprint_info(fh=None):
"""
Pretty prints IANA information to filehandle.
"""
if fh is None:
fh = _sys.stdout
for category in sorted(IANA_INFO):
fh.write('-' * len(category) + "\n")
fh.write(category + "\n")
fh.write('-' * len(category) + "\n")
ipranges = IANA_INFO[category]
for iprange in sorted(ipranges):
details = ipranges[iprange]
fh.write('%-45r' % (iprange) + details + "\n")
#-----------------------------------------------------------------------------
def query(ip_addr):
"""
Returns informational data specific to this IP address.
"""
info = {}
def within_bounds(ip, ip_range):
# Boundary checking for multiple IP classes.
if hasattr(ip_range, 'first'):
# IP network or IP range.
return ip in ip_range
elif hasattr(ip_range, 'value'):
# IP address.
|
raise Exception('Unsupported IP range or address: %r!' % ip_range)
if ip_addr.version == 4:
for cidr, record in _dict_items(IANA_INFO['IPv4']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv4', [])
info['IPv4'].append(record)
if ip_addr.is_multicast():
for iprange, record in _dict_items(IANA_INFO['multicast']):
if within_bounds(ip_addr, iprange):
info.setdefault('Multicast', [])
info['Multicast'].append(record)
elif ip_addr.version == 6:
for cidr, record in _dict_items(IANA_INFO['IPv6']):
if within_bounds(ip_addr, cidr):
info.setdefault('IPv6', [])
info['IPv6'].append(record)
return info
#-----------------------------------------------------------------------------
def get_latest_files():
"""Download the latest files from IANA"""
if _sys.version_info[0] == 3:
# Python 3.x
from urllib.request import Request, urlopen
else:
# Python 2.x
from urllib2 import Request, urlopen
urls = [
'http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml',
'http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml',
'http://www.iana.org/assignments/multicast-addresses/multicast-addresses.xml',
]
for url in urls:
_sys.stdout.write('downloading latest copy of %s\n' % url)
request = Request(url)
response = urlopen(request)
save_path = _path.dirname(__file__)
basename = _os.path.basename(response.geturl().rstrip('/'))
filename = _path.join(save_path, basename)
fh = open(filename, 'wb')
fh.write(response.read())
fh.close()
# Make sure the line endings are consistent across platforms.
dos2unix(filename)
#-----------------------------------------------------------------------------
if __name__ == '__main__':
# Generate indices when module is executed as a script.
get_latest_files()
# On module import, read IANA data files and populate lookups dict.
load_info()
| return ip == ip_range | conditional_block |
index.d.ts | // Type definitions for simple-oauth2 1.0
// Project: https://github.com/lelylan/simple-oauth2
// Definitions by: [Michael Müller] <https://github.com/mad-mike>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
import Bluebird = require("bluebird");
/** Creates a new simple-oauth2 client with the passed configuration */
export function create(options: ModuleOptions): OAuthClient;
interface ModuleOptions {
client: {
/** Service registered client id. Required. */
id: string,
/** Service registered client secret. Required. */
secret: string,
/** Parameter name used to send the client secret. Default to client_secret. */
secretParamName?: string,
/** Parameter name used to send the client id. Default to client_id. */
idParamName?: string
};
auth: {
/** String used to set the host to request the tokens to. Required. */
tokenHost: string,
/** String path to request an access token. Default to /oauth/token. */
tokenPath?: string,
/** String path to revoken an access token. Default to /oauth/revoke. */
revokePath?: string,
/** String used to set the host to request an "authorization code". Default to the value set on auth.tokenHost. */
authorizeHost?: string,
/** String path to request an authorization code. Default to /oauth/authorize. */
authorizePath?: string
};
/** optional object used to set global options to the internal http library (request-js). */
http?: {};
options?: {
/** Wheather or not the client.id/client.secret params are sent in the request body. Defaults to true. */
useBodyAuth?: boolean,
useBasicAuthorizationHeader?: boolean
};
}
type TokenType = "access_token" | "refresh_token";
interface AccessToken {
token: {};
/** Check if the access token is expired or not */
expired(): boolean;
/** Refresh the access token */
refresh(params: {}, callback: (error: any, result: AccessToken) => void): Bluebird<AccessToken>;
refresh(callback?: (error: any, result: AccessToken) => void): Bluebird<AccessToken>;
/** Revoke access or refresh token */
revoke(tokenType: TokenType, callback?: (error: any) => void): Bluebird<void>;
}
interface Token {
[x: string]: any;
}
type AuthorizationCode = string;
interface AuthorizationTokenConfig {
code: AuthorizationCode;
redirect_uri: string;
} | /** A string that represents the registered password. */
password: string;
/** A string that represents the application privileges */
scope: string;
}
interface ClientCredentialTokenConfig {
/** A string that represents the application privileges */
scope?: string;
}
export interface OAuthClient {
authorizationCode: {
/**
* Redirect the user to the autorization page
* @return {string} the absolute authorization url
*/
authorizeURL(params?: {
/** A string that represents the registered application URI where the user is redirected after authentication */
redirect_uri?: string,
/** A String that represents the application privileges */
scope?: string,
/** A String that represents an option opaque value used by the client to main the state between the request and the callback */
state?: string
}): string,
/** Returns the Access Token object */
getToken(params: AuthorizationTokenConfig, callback?: (error: any, result: Token) => void): Bluebird<Token>;
};
ownerPassword: {
/** Returns the Access Token Object */
getToken(params: PasswordTokenConfig, callback?: (error: any, result: Token) => void): Bluebird<Token>;
};
clientCredentials: {
/** Returns the Access Token Object */
getToken(params: ClientCredentialTokenConfig, callback?: (error: any, result: Token) => void): Bluebird<Token>;
};
accessToken: {
/** Creates an OAuth2.AccessToken instance */
create(tokenToUse: Token): AccessToken;
};
} |
interface PasswordTokenConfig {
/** A string that represents the registered username */
username: string; | random_line_split |
dataStore.js | var _ = require("lodash");
var fs = require("fs-extra");
module.exports = function(options) {
options = _.extend({
file: "data.json",
saveFrequency: 1,
indexing: false
}, options);
var lastSaved = 0;
//If the specified file doesn't exist, we create an empty JSON object
if (!fs.existsSync(options.file)) {
fs.ensureFileSync(options.file);
fs.writeFileSync(options.file, "{}");
}
var _data;
var _indexes = {};
try {
_data = JSON.parse(fs.readFileSync(options.file, {
encoding: 'utf-8'
}));
} catch (err) {
console.error("Cantrip - Not valid JSON file: " + options.file);
process.exit(5);
}
function clone(object) {
return JSON.parse(JSON.stringify(object));
};
function createIndex(pathIdentifier, data) {
if (_indexes[pathIdentifier]) return;
_indexes[pathIdentifier] = [];
for (var i = 0; i < data.length; i++) {
_indexes[pathIdentifier][data[i][options.idAttribute]] = data[i];
}
}
function resetIndex(pathIdentifier) |
/**
* Sync the data currently in memory to the target file
*/
function syncData() {
if (options.saveFrequency === 0) return;
lastSaved++;
if (lastSaved === options.saveFrequency) {
fs.writeFile(options.file, JSON.stringify(_data, null, "\t"), function(err) {
if (err) {
console.log(err);
}
});
lastSaved = 0;
}
}
/**
* Private function for getting the reference to the target node
*/
var _get = function(path) {
path = _.filter(path.split("/"), function(string) {
return string !== "";
});
var node = _data;
//Loop through the data by the given paths
for (var i = 0; i < path.length; i++) {
//Check if we are looking up an item in an array
if (_.isArray(node) && options.indexing) {
//if indexing is on, create an index if it didn't exist before, and assign the reference to the node
var pathIdentifier = "/" + path.slice(0, i).join("/");
if (!_indexes[pathIdentifier]) {
createIndex(pathIdentifier, node);
}
node = _indexes[pathIdentifier][path[i]];
} else {
var temp = node[path[i]];
//If we found the given key, assign the node object to its value
if (temp !== undefined) {
node = node[path[i]];
//If the given key doesn't exist, try the _id
} else {
temp = _.find(node, function(obj) {
return obj[options.idAttribute] === path[i];
});
//If it's not undefined, then assign it as the value
if (temp !== undefined) {
node = temp;
} else {
return null;
}
}
}
}
return node || null;
};
function getParentPath(path) {
return path.split("/").slice(0, -1).join("/");
}
var _parent = function(path) {
var parent = _get(getParentPath(path));
var err = parent === null ? new Error("Requested node doesn't exist.") : null;
return parent;
};
/**
* Return the datastore
*/
return {
/**
* Public getter function that returns a clone of the target node
*/
get: function(path) {
return clone(_get(path));
},
set: function(path, data, patch) {
var target = _get(path);
if (_.isArray(target)) {
//POST
var ref = clone(data);
target.push(ref);
//If indexing is turned on, make sure to insert the new reference
if (options.indexing) {
if (!_indexes[path]) {
createIndex(path, target);
}
_indexes[path][data[options.idAttribute]] = ref;
}
syncData();
return clone(data);
} else if (_.isObject(target)) {
//PATCH
if (patch) {
target = _.merge(target, data, function(a, b) {
if (_.isArray(a)) {
return b;
}
});
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(target);
} else {
//PUT
var parent = _parent(path);
var toPut = _.last(path.split("/"));
if (toPut === "") {
_data = clone(data);
_index = {};
} else {
if (_.isArray(parent)) {
var item = _.find(parent, function(item) {
return item[options.idAttribute] === toPut;
});
//Delete all keys of the array
for (var key in item) {
if (key !== options.idAttribute && key !== "_modifiedDate" && key !== "_createdDate") {
delete item[key];
}
}
item = _.merge(item, clone(data));
} else {
parent[toPut] = clone(data);
}
}
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(target);
}
} else {
var parent = _parent(path);
parent[_.last(path.split("/"))] = data;
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(parent);
}
},
delete: function(path) {
var key = _.last(path.split("/"));
var parent = _parent(path);
if (_.isArray(parent)) {
var obj;
if (options.indexing) {
var indexKey = path.split("/").slice(0, -1).join("/");
var obj = _indexes[indexKey][key];
delete _indexes[getParentPath(path)][key];
resetIndex(path);
} else {
obj = _.find(parent, function(obj) {
return obj[options.idAttribute] === key;
});
}
var index = _.indexOf(parent, obj)
if (index > -1) {
parent.splice(index, 1);
}
} else if (_.isObject(parent)) {
delete parent[key];
if (options.indexing) {
resetIndex(path);
}
}
syncData();
return clone(parent);
},
parent: function(path) {
return clone(_parent(path));
}
};
} | {
for (var key in _indexes) {
if (key.indexOf(pathIdentifier) === 0) {
delete _indexes[key];
}
}
} | identifier_body |
dataStore.js | var _ = require("lodash");
var fs = require("fs-extra");
module.exports = function(options) {
options = _.extend({
file: "data.json",
saveFrequency: 1,
indexing: false
}, options);
var lastSaved = 0;
//If the specified file doesn't exist, we create an empty JSON object
if (!fs.existsSync(options.file)) {
fs.ensureFileSync(options.file);
fs.writeFileSync(options.file, "{}");
}
var _data;
var _indexes = {};
try {
_data = JSON.parse(fs.readFileSync(options.file, {
encoding: 'utf-8'
}));
} catch (err) {
console.error("Cantrip - Not valid JSON file: " + options.file);
process.exit(5);
}
function clone(object) {
return JSON.parse(JSON.stringify(object));
};
function createIndex(pathIdentifier, data) {
if (_indexes[pathIdentifier]) return;
_indexes[pathIdentifier] = [];
for (var i = 0; i < data.length; i++) {
_indexes[pathIdentifier][data[i][options.idAttribute]] = data[i];
}
}
function resetIndex(pathIdentifier) {
for (var key in _indexes) {
if (key.indexOf(pathIdentifier) === 0) {
delete _indexes[key];
}
}
}
/**
* Sync the data currently in memory to the target file
*/
function syncData() {
if (options.saveFrequency === 0) return;
lastSaved++;
if (lastSaved === options.saveFrequency) {
fs.writeFile(options.file, JSON.stringify(_data, null, "\t"), function(err) {
if (err) {
console.log(err);
}
});
lastSaved = 0;
}
}
/**
* Private function for getting the reference to the target node
*/
var _get = function(path) {
path = _.filter(path.split("/"), function(string) {
return string !== "";
});
var node = _data;
//Loop through the data by the given paths
for (var i = 0; i < path.length; i++) {
//Check if we are looking up an item in an array
if (_.isArray(node) && options.indexing) {
//if indexing is on, create an index if it didn't exist before, and assign the reference to the node
var pathIdentifier = "/" + path.slice(0, i).join("/");
if (!_indexes[pathIdentifier]) {
createIndex(pathIdentifier, node);
}
node = _indexes[pathIdentifier][path[i]];
} else {
var temp = node[path[i]];
//If we found the given key, assign the node object to its value
if (temp !== undefined) {
node = node[path[i]];
//If the given key doesn't exist, try the _id
} else {
temp = _.find(node, function(obj) {
return obj[options.idAttribute] === path[i];
});
//If it's not undefined, then assign it as the value
if (temp !== undefined) {
node = temp;
} else {
return null;
}
}
}
}
return node || null;
};
function getParentPath(path) {
return path.split("/").slice(0, -1).join("/");
}
var _parent = function(path) {
var parent = _get(getParentPath(path));
var err = parent === null ? new Error("Requested node doesn't exist.") : null;
return parent;
};
/**
* Return the datastore
*/
return {
/**
* Public getter function that returns a clone of the target node
*/
get: function(path) {
return clone(_get(path));
},
set: function(path, data, patch) {
var target = _get(path);
if (_.isArray(target)) {
//POST
var ref = clone(data);
target.push(ref);
//If indexing is turned on, make sure to insert the new reference
if (options.indexing) {
if (!_indexes[path]) {
createIndex(path, target);
}
_indexes[path][data[options.idAttribute]] = ref;
}
syncData();
return clone(data); | return b;
}
});
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(target);
} else {
//PUT
var parent = _parent(path);
var toPut = _.last(path.split("/"));
if (toPut === "") {
_data = clone(data);
_index = {};
} else {
if (_.isArray(parent)) {
var item = _.find(parent, function(item) {
return item[options.idAttribute] === toPut;
});
//Delete all keys of the array
for (var key in item) {
if (key !== options.idAttribute && key !== "_modifiedDate" && key !== "_createdDate") {
delete item[key];
}
}
item = _.merge(item, clone(data));
} else {
parent[toPut] = clone(data);
}
}
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(target);
}
} else {
var parent = _parent(path);
parent[_.last(path.split("/"))] = data;
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(parent);
}
},
delete: function(path) {
var key = _.last(path.split("/"));
var parent = _parent(path);
if (_.isArray(parent)) {
var obj;
if (options.indexing) {
var indexKey = path.split("/").slice(0, -1).join("/");
var obj = _indexes[indexKey][key];
delete _indexes[getParentPath(path)][key];
resetIndex(path);
} else {
obj = _.find(parent, function(obj) {
return obj[options.idAttribute] === key;
});
}
var index = _.indexOf(parent, obj)
if (index > -1) {
parent.splice(index, 1);
}
} else if (_.isObject(parent)) {
delete parent[key];
if (options.indexing) {
resetIndex(path);
}
}
syncData();
return clone(parent);
},
parent: function(path) {
return clone(_parent(path));
}
};
} | } else if (_.isObject(target)) {
//PATCH
if (patch) {
target = _.merge(target, data, function(a, b) {
if (_.isArray(a)) { | random_line_split |
dataStore.js | var _ = require("lodash");
var fs = require("fs-extra");
module.exports = function(options) {
options = _.extend({
file: "data.json",
saveFrequency: 1,
indexing: false
}, options);
var lastSaved = 0;
//If the specified file doesn't exist, we create an empty JSON object
if (!fs.existsSync(options.file)) {
fs.ensureFileSync(options.file);
fs.writeFileSync(options.file, "{}");
}
var _data;
var _indexes = {};
try {
_data = JSON.parse(fs.readFileSync(options.file, {
encoding: 'utf-8'
}));
} catch (err) {
console.error("Cantrip - Not valid JSON file: " + options.file);
process.exit(5);
}
function clone(object) {
return JSON.parse(JSON.stringify(object));
};
function createIndex(pathIdentifier, data) {
if (_indexes[pathIdentifier]) return;
_indexes[pathIdentifier] = [];
for (var i = 0; i < data.length; i++) {
_indexes[pathIdentifier][data[i][options.idAttribute]] = data[i];
}
}
function resetIndex(pathIdentifier) {
for (var key in _indexes) {
if (key.indexOf(pathIdentifier) === 0) {
delete _indexes[key];
}
}
}
/**
* Sync the data currently in memory to the target file
*/
function | () {
if (options.saveFrequency === 0) return;
lastSaved++;
if (lastSaved === options.saveFrequency) {
fs.writeFile(options.file, JSON.stringify(_data, null, "\t"), function(err) {
if (err) {
console.log(err);
}
});
lastSaved = 0;
}
}
/**
* Private function for getting the reference to the target node
*/
var _get = function(path) {
path = _.filter(path.split("/"), function(string) {
return string !== "";
});
var node = _data;
//Loop through the data by the given paths
for (var i = 0; i < path.length; i++) {
//Check if we are looking up an item in an array
if (_.isArray(node) && options.indexing) {
//if indexing is on, create an index if it didn't exist before, and assign the reference to the node
var pathIdentifier = "/" + path.slice(0, i).join("/");
if (!_indexes[pathIdentifier]) {
createIndex(pathIdentifier, node);
}
node = _indexes[pathIdentifier][path[i]];
} else {
var temp = node[path[i]];
//If we found the given key, assign the node object to its value
if (temp !== undefined) {
node = node[path[i]];
//If the given key doesn't exist, try the _id
} else {
temp = _.find(node, function(obj) {
return obj[options.idAttribute] === path[i];
});
//If it's not undefined, then assign it as the value
if (temp !== undefined) {
node = temp;
} else {
return null;
}
}
}
}
return node || null;
};
function getParentPath(path) {
return path.split("/").slice(0, -1).join("/");
}
var _parent = function(path) {
var parent = _get(getParentPath(path));
var err = parent === null ? new Error("Requested node doesn't exist.") : null;
return parent;
};
/**
* Return the datastore
*/
return {
/**
* Public getter function that returns a clone of the target node
*/
get: function(path) {
return clone(_get(path));
},
set: function(path, data, patch) {
var target = _get(path);
if (_.isArray(target)) {
//POST
var ref = clone(data);
target.push(ref);
//If indexing is turned on, make sure to insert the new reference
if (options.indexing) {
if (!_indexes[path]) {
createIndex(path, target);
}
_indexes[path][data[options.idAttribute]] = ref;
}
syncData();
return clone(data);
} else if (_.isObject(target)) {
//PATCH
if (patch) {
target = _.merge(target, data, function(a, b) {
if (_.isArray(a)) {
return b;
}
});
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(target);
} else {
//PUT
var parent = _parent(path);
var toPut = _.last(path.split("/"));
if (toPut === "") {
_data = clone(data);
_index = {};
} else {
if (_.isArray(parent)) {
var item = _.find(parent, function(item) {
return item[options.idAttribute] === toPut;
});
//Delete all keys of the array
for (var key in item) {
if (key !== options.idAttribute && key !== "_modifiedDate" && key !== "_createdDate") {
delete item[key];
}
}
item = _.merge(item, clone(data));
} else {
parent[toPut] = clone(data);
}
}
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(target);
}
} else {
var parent = _parent(path);
parent[_.last(path.split("/"))] = data;
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(parent);
}
},
delete: function(path) {
var key = _.last(path.split("/"));
var parent = _parent(path);
if (_.isArray(parent)) {
var obj;
if (options.indexing) {
var indexKey = path.split("/").slice(0, -1).join("/");
var obj = _indexes[indexKey][key];
delete _indexes[getParentPath(path)][key];
resetIndex(path);
} else {
obj = _.find(parent, function(obj) {
return obj[options.idAttribute] === key;
});
}
var index = _.indexOf(parent, obj)
if (index > -1) {
parent.splice(index, 1);
}
} else if (_.isObject(parent)) {
delete parent[key];
if (options.indexing) {
resetIndex(path);
}
}
syncData();
return clone(parent);
},
parent: function(path) {
return clone(_parent(path));
}
};
} | syncData | identifier_name |
dataStore.js | var _ = require("lodash");
var fs = require("fs-extra");
module.exports = function(options) {
options = _.extend({
file: "data.json",
saveFrequency: 1,
indexing: false
}, options);
var lastSaved = 0;
//If the specified file doesn't exist, we create an empty JSON object
if (!fs.existsSync(options.file)) {
fs.ensureFileSync(options.file);
fs.writeFileSync(options.file, "{}");
}
var _data;
var _indexes = {};
try {
_data = JSON.parse(fs.readFileSync(options.file, {
encoding: 'utf-8'
}));
} catch (err) {
console.error("Cantrip - Not valid JSON file: " + options.file);
process.exit(5);
}
function clone(object) {
return JSON.parse(JSON.stringify(object));
};
function createIndex(pathIdentifier, data) {
if (_indexes[pathIdentifier]) return;
_indexes[pathIdentifier] = [];
for (var i = 0; i < data.length; i++) {
_indexes[pathIdentifier][data[i][options.idAttribute]] = data[i];
}
}
function resetIndex(pathIdentifier) {
for (var key in _indexes) {
if (key.indexOf(pathIdentifier) === 0) {
delete _indexes[key];
}
}
}
/**
* Sync the data currently in memory to the target file
*/
function syncData() {
if (options.saveFrequency === 0) return;
lastSaved++;
if (lastSaved === options.saveFrequency) {
fs.writeFile(options.file, JSON.stringify(_data, null, "\t"), function(err) {
if (err) {
console.log(err);
}
});
lastSaved = 0;
}
}
/**
* Private function for getting the reference to the target node
*/
var _get = function(path) {
path = _.filter(path.split("/"), function(string) {
return string !== "";
});
var node = _data;
//Loop through the data by the given paths
for (var i = 0; i < path.length; i++) {
//Check if we are looking up an item in an array
if (_.isArray(node) && options.indexing) {
//if indexing is on, create an index if it didn't exist before, and assign the reference to the node
var pathIdentifier = "/" + path.slice(0, i).join("/");
if (!_indexes[pathIdentifier]) {
createIndex(pathIdentifier, node);
}
node = _indexes[pathIdentifier][path[i]];
} else {
var temp = node[path[i]];
//If we found the given key, assign the node object to its value
if (temp !== undefined) | else {
temp = _.find(node, function(obj) {
return obj[options.idAttribute] === path[i];
});
//If it's not undefined, then assign it as the value
if (temp !== undefined) {
node = temp;
} else {
return null;
}
}
}
}
return node || null;
};
function getParentPath(path) {
return path.split("/").slice(0, -1).join("/");
}
var _parent = function(path) {
var parent = _get(getParentPath(path));
var err = parent === null ? new Error("Requested node doesn't exist.") : null;
return parent;
};
/**
* Return the datastore
*/
return {
/**
* Public getter function that returns a clone of the target node
*/
get: function(path) {
return clone(_get(path));
},
set: function(path, data, patch) {
var target = _get(path);
if (_.isArray(target)) {
//POST
var ref = clone(data);
target.push(ref);
//If indexing is turned on, make sure to insert the new reference
if (options.indexing) {
if (!_indexes[path]) {
createIndex(path, target);
}
_indexes[path][data[options.idAttribute]] = ref;
}
syncData();
return clone(data);
} else if (_.isObject(target)) {
//PATCH
if (patch) {
target = _.merge(target, data, function(a, b) {
if (_.isArray(a)) {
return b;
}
});
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(target);
} else {
//PUT
var parent = _parent(path);
var toPut = _.last(path.split("/"));
if (toPut === "") {
_data = clone(data);
_index = {};
} else {
if (_.isArray(parent)) {
var item = _.find(parent, function(item) {
return item[options.idAttribute] === toPut;
});
//Delete all keys of the array
for (var key in item) {
if (key !== options.idAttribute && key !== "_modifiedDate" && key !== "_createdDate") {
delete item[key];
}
}
item = _.merge(item, clone(data));
} else {
parent[toPut] = clone(data);
}
}
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(target);
}
} else {
var parent = _parent(path);
parent[_.last(path.split("/"))] = data;
syncData();
//Reset the matching indexes if it's turned on
if (options.indexing) {
resetIndex(path);
}
return clone(parent);
}
},
delete: function(path) {
var key = _.last(path.split("/"));
var parent = _parent(path);
if (_.isArray(parent)) {
var obj;
if (options.indexing) {
var indexKey = path.split("/").slice(0, -1).join("/");
var obj = _indexes[indexKey][key];
delete _indexes[getParentPath(path)][key];
resetIndex(path);
} else {
obj = _.find(parent, function(obj) {
return obj[options.idAttribute] === key;
});
}
var index = _.indexOf(parent, obj)
if (index > -1) {
parent.splice(index, 1);
}
} else if (_.isObject(parent)) {
delete parent[key];
if (options.indexing) {
resetIndex(path);
}
}
syncData();
return clone(parent);
},
parent: function(path) {
return clone(_parent(path));
}
};
} | {
node = node[path[i]];
//If the given key doesn't exist, try the _id
} | conditional_block |
mod.rs |
prelude!();
use ::vga;
use super::pic::{PIC_1, PIC_2};
mod item;
pub use self::item::*;
mod idt_ptr;
pub use self::idt_ptr::*;
extern "C" {
fn interrupt0();
fn interrupt1();
}
extern {
fn __kernel_timer_tick();
}
const IDT_SIZE: usize = 64;
static mut IDT_TABLE: [IdtItem; IDT_SIZE] = [IdtItem::new(); IDT_SIZE];
#[no_mangle]
pub unsafe extern "C" fn handle_interrupt(num: u8, error_code: u64) {
// thread timer
if num == PIC_1.get_interrupt_idt_num(0) |
vga::print(b"!! Interrupt !!");
println!("!! Interrupt: {:#x}, Error code: {:#x}", num, error_code);
if PIC_1.has_interrupt(num) {
PIC_1.end_of_interrupt();
}
if PIC_2.has_interrupt(num) {
PIC_1.end_of_interrupt();
PIC_2.end_of_interrupt();
}
}
pub unsafe fn init() {
let diff = interrupt1 as usize - interrupt0 as usize;
for i in 0..IDT_SIZE {
IDT_TABLE[i].set_offset(interrupt0 as usize + diff * i);
}
// 0 pic interrupt is used by thread scheduler
// { // FIXME
// let idx = PIC_1.get_interrupt_idt_num(0) as usize;
// IDT_TABLE[idx].type_attr = InterruptType::ValidInterruptGate;
// }
let ptr = IdtPtr::new(&IDT_TABLE);
ptr.load()
}
| {
__kernel_timer_tick();
// it will send EOI itself
return;
} | conditional_block |
mod.rs | prelude!();
use ::vga;
use super::pic::{PIC_1, PIC_2};
mod item; | pub use self::item::*;
mod idt_ptr;
pub use self::idt_ptr::*;
extern "C" {
fn interrupt0();
fn interrupt1();
}
extern {
fn __kernel_timer_tick();
}
const IDT_SIZE: usize = 64;
static mut IDT_TABLE: [IdtItem; IDT_SIZE] = [IdtItem::new(); IDT_SIZE];
#[no_mangle]
pub unsafe extern "C" fn handle_interrupt(num: u8, error_code: u64) {
// thread timer
if num == PIC_1.get_interrupt_idt_num(0) {
__kernel_timer_tick();
// it will send EOI itself
return;
}
vga::print(b"!! Interrupt !!");
println!("!! Interrupt: {:#x}, Error code: {:#x}", num, error_code);
if PIC_1.has_interrupt(num) {
PIC_1.end_of_interrupt();
}
if PIC_2.has_interrupt(num) {
PIC_1.end_of_interrupt();
PIC_2.end_of_interrupt();
}
}
pub unsafe fn init() {
let diff = interrupt1 as usize - interrupt0 as usize;
for i in 0..IDT_SIZE {
IDT_TABLE[i].set_offset(interrupt0 as usize + diff * i);
}
// 0 pic interrupt is used by thread scheduler
// { // FIXME
// let idx = PIC_1.get_interrupt_idt_num(0) as usize;
// IDT_TABLE[idx].type_attr = InterruptType::ValidInterruptGate;
// }
let ptr = IdtPtr::new(&IDT_TABLE);
ptr.load()
} | random_line_split | |
mod.rs |
prelude!();
use ::vga;
use super::pic::{PIC_1, PIC_2};
mod item;
pub use self::item::*;
mod idt_ptr;
pub use self::idt_ptr::*;
extern "C" {
fn interrupt0();
fn interrupt1();
}
extern {
fn __kernel_timer_tick();
}
const IDT_SIZE: usize = 64;
static mut IDT_TABLE: [IdtItem; IDT_SIZE] = [IdtItem::new(); IDT_SIZE];
#[no_mangle]
pub unsafe extern "C" fn handle_interrupt(num: u8, error_code: u64) {
// thread timer
if num == PIC_1.get_interrupt_idt_num(0) {
__kernel_timer_tick();
// it will send EOI itself
return;
}
vga::print(b"!! Interrupt !!");
println!("!! Interrupt: {:#x}, Error code: {:#x}", num, error_code);
if PIC_1.has_interrupt(num) {
PIC_1.end_of_interrupt();
}
if PIC_2.has_interrupt(num) {
PIC_1.end_of_interrupt();
PIC_2.end_of_interrupt();
}
}
pub unsafe fn init() | {
let diff = interrupt1 as usize - interrupt0 as usize;
for i in 0..IDT_SIZE {
IDT_TABLE[i].set_offset(interrupt0 as usize + diff * i);
}
// 0 pic interrupt is used by thread scheduler
// { // FIXME
// let idx = PIC_1.get_interrupt_idt_num(0) as usize;
// IDT_TABLE[idx].type_attr = InterruptType::ValidInterruptGate;
// }
let ptr = IdtPtr::new(&IDT_TABLE);
ptr.load()
} | identifier_body | |
mod.rs |
prelude!();
use ::vga;
use super::pic::{PIC_1, PIC_2};
mod item;
pub use self::item::*;
mod idt_ptr;
pub use self::idt_ptr::*;
extern "C" {
fn interrupt0();
fn interrupt1();
}
extern {
fn __kernel_timer_tick();
}
const IDT_SIZE: usize = 64;
static mut IDT_TABLE: [IdtItem; IDT_SIZE] = [IdtItem::new(); IDT_SIZE];
#[no_mangle]
pub unsafe extern "C" fn handle_interrupt(num: u8, error_code: u64) {
// thread timer
if num == PIC_1.get_interrupt_idt_num(0) {
__kernel_timer_tick();
// it will send EOI itself
return;
}
vga::print(b"!! Interrupt !!");
println!("!! Interrupt: {:#x}, Error code: {:#x}", num, error_code);
if PIC_1.has_interrupt(num) {
PIC_1.end_of_interrupt();
}
if PIC_2.has_interrupt(num) {
PIC_1.end_of_interrupt();
PIC_2.end_of_interrupt();
}
}
pub unsafe fn | () {
let diff = interrupt1 as usize - interrupt0 as usize;
for i in 0..IDT_SIZE {
IDT_TABLE[i].set_offset(interrupt0 as usize + diff * i);
}
// 0 pic interrupt is used by thread scheduler
// { // FIXME
// let idx = PIC_1.get_interrupt_idt_num(0) as usize;
// IDT_TABLE[idx].type_attr = InterruptType::ValidInterruptGate;
// }
let ptr = IdtPtr::new(&IDT_TABLE);
ptr.load()
}
| init | identifier_name |
Dfn.ts | /*
* Copyright (c) 2014 Jose Carlos Lama. www.typedom.org
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the \"Software\"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* | * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* The {{#crossLink "Dfn"}}{{/crossLink}} Defines a definition term
*
* @class Dfn
* @extends Container
* @constructor
**/
class Dfn extends Container<Dfn, HTMLElement>
{
public static DFN: string = 'dfn';
constructor();
constructor(id: string)
constructor(attributes: Object)
constructor(element: HTMLElement)
constructor(idOrAttributesOrElement?: any) {
super(idOrAttributesOrElement, Dfn.DFN);
}
} | random_line_split | |
Dfn.ts | /*
* Copyright (c) 2014 Jose Carlos Lama. www.typedom.org
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the \"Software\"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* The {{#crossLink "Dfn"}}{{/crossLink}} Defines a definition term
*
* @class Dfn
* @extends Container
* @constructor
**/
class | extends Container<Dfn, HTMLElement>
{
public static DFN: string = 'dfn';
constructor();
constructor(id: string)
constructor(attributes: Object)
constructor(element: HTMLElement)
constructor(idOrAttributesOrElement?: any) {
super(idOrAttributesOrElement, Dfn.DFN);
}
} | Dfn | identifier_name |
strip.ts | /**
* Strips given string from source string.
*
* @ignore
* @param source is the source string to be cleaned.
* @param prefix is the string to delete.
* @returns cleaned string.
*/
export default function | (source: string, strings: string | string[]): string {
let result = source;
const stringArray = Array.isArray(strings) ? strings : [strings];
stringArray.forEach((string) => {
const prefixRx = new RegExp(`^${string}[_\\s-]+`);
const middleRx = new RegExp(`${string}[_\\s-]+`);
const suffixRx = new RegExp(`[_\\s-]*${string}$`);
if (result.match(prefixRx)) {
result = result.replace(prefixRx, "");
} else if (result.match(middleRx)) {
result = result.replace(middleRx, "");
} else {
result = result.replace(suffixRx, "");
}
});
return result;
}
| strip | identifier_name |
strip.ts | /**
* Strips given string from source string.
*
* @ignore
* @param source is the source string to be cleaned.
* @param prefix is the string to delete.
* @returns cleaned string.
*/
export default function strip(source: string, strings: string | string[]): string | {
let result = source;
const stringArray = Array.isArray(strings) ? strings : [strings];
stringArray.forEach((string) => {
const prefixRx = new RegExp(`^${string}[_\\s-]+`);
const middleRx = new RegExp(`${string}[_\\s-]+`);
const suffixRx = new RegExp(`[_\\s-]*${string}$`);
if (result.match(prefixRx)) {
result = result.replace(prefixRx, "");
} else if (result.match(middleRx)) {
result = result.replace(middleRx, "");
} else {
result = result.replace(suffixRx, "");
}
});
return result;
} | identifier_body | |
strip.ts | /**
* Strips given string from source string.
*
* @ignore
* @param source is the source string to be cleaned.
* @param prefix is the string to delete.
* @returns cleaned string.
*/
export default function strip(source: string, strings: string | string[]): string {
let result = source;
const stringArray = Array.isArray(strings) ? strings : [strings];
stringArray.forEach((string) => {
const prefixRx = new RegExp(`^${string}[_\\s-]+`);
const middleRx = new RegExp(`${string}[_\\s-]+`); | if (result.match(prefixRx)) {
result = result.replace(prefixRx, "");
} else if (result.match(middleRx)) {
result = result.replace(middleRx, "");
} else {
result = result.replace(suffixRx, "");
}
});
return result;
} | const suffixRx = new RegExp(`[_\\s-]*${string}$`);
| random_line_split |
strip.ts | /**
* Strips given string from source string.
*
* @ignore
* @param source is the source string to be cleaned.
* @param prefix is the string to delete.
* @returns cleaned string.
*/
export default function strip(source: string, strings: string | string[]): string {
let result = source;
const stringArray = Array.isArray(strings) ? strings : [strings];
stringArray.forEach((string) => {
const prefixRx = new RegExp(`^${string}[_\\s-]+`);
const middleRx = new RegExp(`${string}[_\\s-]+`);
const suffixRx = new RegExp(`[_\\s-]*${string}$`);
if (result.match(prefixRx)) | else if (result.match(middleRx)) {
result = result.replace(middleRx, "");
} else {
result = result.replace(suffixRx, "");
}
});
return result;
}
| {
result = result.replace(prefixRx, "");
} | conditional_block |
eval_paths.rs | use std::time::{Instant, Duration};
use rand::Rng;
use crate::progress::Progress;
use crate::sim::TestPacket;
use crate::dijkstra::Dijkstra;
use crate::graph::*;
/*
* Test if all paths allow for routing.
* This test does not allow the state of the routing algorithm to change.
*/
pub struct EvalPaths {
show_progress: bool,
is_done: bool,
packets_send: u32,
packets_lost: u32,
packets_arrived: u32,
route_costs_sum: u32,
route_costs_min_sum: u32,
nodes_connected: usize,
nodes_disconnected: usize,
max_stretch: u32,
run_time: Duration,
dijkstra: Dijkstra
}
impl EvalPaths {
pub fn new() -> Self {
Self {
show_progress: false,
is_done: false,
packets_send: 0,
packets_lost: 0,
packets_arrived: 0,
route_costs_sum: 0,
route_costs_min_sum: 0,
nodes_connected: 0,
nodes_disconnected: 0,
max_stretch: 2,
run_time: Duration::new(0, 0),
dijkstra: Dijkstra::new(),
}
}
pub fn clear_stats(&mut self) {
self.is_done =false;
self.packets_send = 0;
self.packets_lost = 0;
self.packets_arrived = 0;
self.route_costs_sum = 0;
self.route_costs_min_sum = 0;
self.nodes_connected = 0;
self.nodes_disconnected = 0;
self.run_time = Duration::new(0, 0);
}
pub fn clear(&mut self) {
self.dijkstra.clear();
self.clear_stats();
}
pub fn show_progress(&mut self, show_progress: bool) {
self.show_progress = true;
}
fn test_path(&mut self, graph: &Graph, mut route: impl FnMut(&TestPacket) -> Option<u32>,
source: ID, target: ID, costs_min: u32) {
// maximum stretch we record
let mut packet = TestPacket::new(source, source, source, target);
let mut path_costs = 0u32;
self.packets_send += 1;
// max steps to try until we give up
let max_steps = costs_min * self.max_stretch;
for _ in 0..max_steps {
if let Some(next) = route(&packet) {
// Check if link really exists
if let Some(link) = graph.get_link(packet.receiver, next) {
path_costs += link.cost() as u32;
if next == packet.destination {
// packet arrived
self.packets_arrived += 1;
break;
} else {
// forward packet
packet.transmitter = packet.receiver;
packet.receiver = next;
}
} else {
// invalid next hop
self.packets_lost += 1;
break;
}
} else {
// no next hop
self.packets_lost += 1;
break;
}
}
self.route_costs_sum += path_costs;
self.route_costs_min_sum += costs_min;
}
pub fn run_samples(&mut self, graph: &Graph, mut route: impl FnMut(&TestPacket) -> Option<u32>,
samples: usize) {
self.clear();
let node_count = graph.node_count();
if node_count < 2 {
return;
}
let now = Instant::now();
let mut progress = Progress::new();
let mut sample = 0;
if self.show_progress {
progress.start(samples, 0);
}
for _ in 0..samples {
let source = rand::thread_rng().gen_range(0, node_count);
let target = rand::thread_rng().gen_range(0, node_count);
if source == target {
// we do not test those paths
continue;
}
let min = self.dijkstra.find_shortest_distance(graph, source as ID, target as ID);
if !min.is_finite() {
// no path from target to source => ignore
self.nodes_disconnected += 1;
continue;
} else {
self.nodes_connected += 1;
}
self.test_path(&graph, &mut route, source as ID, target as ID, min as u32);
sample += 1;
if self.show_progress {
progress.update(samples, sample);
}
}
if self.show_progress {
progress.update(samples, samples);
}
self.run_time = now.elapsed();
self.is_done = true;
}
pub fn run_all(&mut self, graph: &Graph, mut route: impl FnMut(&TestPacket) -> Option<u32>) {
self.clear();
let node_count = graph.node_count();
if node_count < 2 {
return;
}
let now = Instant::now();
let tests = (node_count as usize).pow(2);
//let mut progress = Progress::new("test: ");
//let mut test = 0;
//progress.start(tests, 0);
for source in 0..node_count {
for target in 0..node_count {
if source == target {
continue;
}
let min = self.dijkstra.find_shortest_distance(graph, source as ID, target as ID);
if !min.is_finite() {
// no path from target to source => ignore
self.nodes_disconnected += 1;
continue;
} else {
self.nodes_connected += 1;
}
self.test_path(&graph, &mut route, source as ID, target as ID, min as u32);
//test += 1;
//progress.update(tests, test);
}
}
self.run_time = now.elapsed();
//clear progress line
//progress.clear_line();
}
pub fn duration(&self) -> Duration {
self.run_time | }
pub fn stretch(&self) -> f32 {
(self.route_costs_sum as f32) / (self.route_costs_min_sum as f32)
}
pub fn arrived(&self) -> f32 {
100.0 * (self.packets_arrived as f32) / (self.packets_send as f32)
}
pub fn connectivity(&self) -> f32 {
100.0 * (self.nodes_connected as f32) / (self.nodes_connected + self.nodes_disconnected) as f32
}
pub fn get_results(&self) -> Vec<(&'static str, f32)> {
vec![
("arrived", self.arrived()),
("connectivity", self.connectivity()),
("stretch", self.stretch())
]
}
} | random_line_split | |
eval_paths.rs | use std::time::{Instant, Duration};
use rand::Rng;
use crate::progress::Progress;
use crate::sim::TestPacket;
use crate::dijkstra::Dijkstra;
use crate::graph::*;
/*
* Test if all paths allow for routing.
* This test does not allow the state of the routing algorithm to change.
*/
pub struct EvalPaths {
show_progress: bool,
is_done: bool,
packets_send: u32,
packets_lost: u32,
packets_arrived: u32,
route_costs_sum: u32,
route_costs_min_sum: u32,
nodes_connected: usize,
nodes_disconnected: usize,
max_stretch: u32,
run_time: Duration,
dijkstra: Dijkstra
}
impl EvalPaths {
pub fn new() -> Self {
Self {
show_progress: false,
is_done: false,
packets_send: 0,
packets_lost: 0,
packets_arrived: 0,
route_costs_sum: 0,
route_costs_min_sum: 0,
nodes_connected: 0,
nodes_disconnected: 0,
max_stretch: 2,
run_time: Duration::new(0, 0),
dijkstra: Dijkstra::new(),
}
}
pub fn clear_stats(&mut self) {
self.is_done =false;
self.packets_send = 0;
self.packets_lost = 0;
self.packets_arrived = 0;
self.route_costs_sum = 0;
self.route_costs_min_sum = 0;
self.nodes_connected = 0;
self.nodes_disconnected = 0;
self.run_time = Duration::new(0, 0);
}
pub fn | (&mut self) {
self.dijkstra.clear();
self.clear_stats();
}
pub fn show_progress(&mut self, show_progress: bool) {
self.show_progress = true;
}
fn test_path(&mut self, graph: &Graph, mut route: impl FnMut(&TestPacket) -> Option<u32>,
source: ID, target: ID, costs_min: u32) {
// maximum stretch we record
let mut packet = TestPacket::new(source, source, source, target);
let mut path_costs = 0u32;
self.packets_send += 1;
// max steps to try until we give up
let max_steps = costs_min * self.max_stretch;
for _ in 0..max_steps {
if let Some(next) = route(&packet) {
// Check if link really exists
if let Some(link) = graph.get_link(packet.receiver, next) {
path_costs += link.cost() as u32;
if next == packet.destination {
// packet arrived
self.packets_arrived += 1;
break;
} else {
// forward packet
packet.transmitter = packet.receiver;
packet.receiver = next;
}
} else {
// invalid next hop
self.packets_lost += 1;
break;
}
} else {
// no next hop
self.packets_lost += 1;
break;
}
}
self.route_costs_sum += path_costs;
self.route_costs_min_sum += costs_min;
}
pub fn run_samples(&mut self, graph: &Graph, mut route: impl FnMut(&TestPacket) -> Option<u32>,
samples: usize) {
self.clear();
let node_count = graph.node_count();
if node_count < 2 {
return;
}
let now = Instant::now();
let mut progress = Progress::new();
let mut sample = 0;
if self.show_progress {
progress.start(samples, 0);
}
for _ in 0..samples {
let source = rand::thread_rng().gen_range(0, node_count);
let target = rand::thread_rng().gen_range(0, node_count);
if source == target {
// we do not test those paths
continue;
}
let min = self.dijkstra.find_shortest_distance(graph, source as ID, target as ID);
if !min.is_finite() {
// no path from target to source => ignore
self.nodes_disconnected += 1;
continue;
} else {
self.nodes_connected += 1;
}
self.test_path(&graph, &mut route, source as ID, target as ID, min as u32);
sample += 1;
if self.show_progress {
progress.update(samples, sample);
}
}
if self.show_progress {
progress.update(samples, samples);
}
self.run_time = now.elapsed();
self.is_done = true;
}
pub fn run_all(&mut self, graph: &Graph, mut route: impl FnMut(&TestPacket) -> Option<u32>) {
self.clear();
let node_count = graph.node_count();
if node_count < 2 {
return;
}
let now = Instant::now();
let tests = (node_count as usize).pow(2);
//let mut progress = Progress::new("test: ");
//let mut test = 0;
//progress.start(tests, 0);
for source in 0..node_count {
for target in 0..node_count {
if source == target {
continue;
}
let min = self.dijkstra.find_shortest_distance(graph, source as ID, target as ID);
if !min.is_finite() {
// no path from target to source => ignore
self.nodes_disconnected += 1;
continue;
} else {
self.nodes_connected += 1;
}
self.test_path(&graph, &mut route, source as ID, target as ID, min as u32);
//test += 1;
//progress.update(tests, test);
}
}
self.run_time = now.elapsed();
//clear progress line
//progress.clear_line();
}
pub fn duration(&self) -> Duration {
self.run_time
}
pub fn stretch(&self) -> f32 {
(self.route_costs_sum as f32) / (self.route_costs_min_sum as f32)
}
pub fn arrived(&self) -> f32 {
100.0 * (self.packets_arrived as f32) / (self.packets_send as f32)
}
pub fn connectivity(&self) -> f32 {
100.0 * (self.nodes_connected as f32) / (self.nodes_connected + self.nodes_disconnected) as f32
}
pub fn get_results(&self) -> Vec<(&'static str, f32)> {
vec![
("arrived", self.arrived()),
("connectivity", self.connectivity()),
("stretch", self.stretch())
]
}
}
| clear | identifier_name |
eval_paths.rs | use std::time::{Instant, Duration};
use rand::Rng;
use crate::progress::Progress;
use crate::sim::TestPacket;
use crate::dijkstra::Dijkstra;
use crate::graph::*;
/*
* Test if all paths allow for routing.
* This test does not allow the state of the routing algorithm to change.
*/
pub struct EvalPaths {
show_progress: bool,
is_done: bool,
packets_send: u32,
packets_lost: u32,
packets_arrived: u32,
route_costs_sum: u32,
route_costs_min_sum: u32,
nodes_connected: usize,
nodes_disconnected: usize,
max_stretch: u32,
run_time: Duration,
dijkstra: Dijkstra
}
impl EvalPaths {
pub fn new() -> Self {
Self {
show_progress: false,
is_done: false,
packets_send: 0,
packets_lost: 0,
packets_arrived: 0,
route_costs_sum: 0,
route_costs_min_sum: 0,
nodes_connected: 0,
nodes_disconnected: 0,
max_stretch: 2,
run_time: Duration::new(0, 0),
dijkstra: Dijkstra::new(),
}
}
pub fn clear_stats(&mut self) {
self.is_done =false;
self.packets_send = 0;
self.packets_lost = 0;
self.packets_arrived = 0;
self.route_costs_sum = 0;
self.route_costs_min_sum = 0;
self.nodes_connected = 0;
self.nodes_disconnected = 0;
self.run_time = Duration::new(0, 0);
}
pub fn clear(&mut self) {
self.dijkstra.clear();
self.clear_stats();
}
pub fn show_progress(&mut self, show_progress: bool) {
self.show_progress = true;
}
fn test_path(&mut self, graph: &Graph, mut route: impl FnMut(&TestPacket) -> Option<u32>,
source: ID, target: ID, costs_min: u32) {
// maximum stretch we record
let mut packet = TestPacket::new(source, source, source, target);
let mut path_costs = 0u32;
self.packets_send += 1;
// max steps to try until we give up
let max_steps = costs_min * self.max_stretch;
for _ in 0..max_steps {
if let Some(next) = route(&packet) {
// Check if link really exists
if let Some(link) = graph.get_link(packet.receiver, next) | else {
// invalid next hop
self.packets_lost += 1;
break;
}
} else {
// no next hop
self.packets_lost += 1;
break;
}
}
self.route_costs_sum += path_costs;
self.route_costs_min_sum += costs_min;
}
pub fn run_samples(&mut self, graph: &Graph, mut route: impl FnMut(&TestPacket) -> Option<u32>,
samples: usize) {
self.clear();
let node_count = graph.node_count();
if node_count < 2 {
return;
}
let now = Instant::now();
let mut progress = Progress::new();
let mut sample = 0;
if self.show_progress {
progress.start(samples, 0);
}
for _ in 0..samples {
let source = rand::thread_rng().gen_range(0, node_count);
let target = rand::thread_rng().gen_range(0, node_count);
if source == target {
// we do not test those paths
continue;
}
let min = self.dijkstra.find_shortest_distance(graph, source as ID, target as ID);
if !min.is_finite() {
// no path from target to source => ignore
self.nodes_disconnected += 1;
continue;
} else {
self.nodes_connected += 1;
}
self.test_path(&graph, &mut route, source as ID, target as ID, min as u32);
sample += 1;
if self.show_progress {
progress.update(samples, sample);
}
}
if self.show_progress {
progress.update(samples, samples);
}
self.run_time = now.elapsed();
self.is_done = true;
}
pub fn run_all(&mut self, graph: &Graph, mut route: impl FnMut(&TestPacket) -> Option<u32>) {
self.clear();
let node_count = graph.node_count();
if node_count < 2 {
return;
}
let now = Instant::now();
let tests = (node_count as usize).pow(2);
//let mut progress = Progress::new("test: ");
//let mut test = 0;
//progress.start(tests, 0);
for source in 0..node_count {
for target in 0..node_count {
if source == target {
continue;
}
let min = self.dijkstra.find_shortest_distance(graph, source as ID, target as ID);
if !min.is_finite() {
// no path from target to source => ignore
self.nodes_disconnected += 1;
continue;
} else {
self.nodes_connected += 1;
}
self.test_path(&graph, &mut route, source as ID, target as ID, min as u32);
//test += 1;
//progress.update(tests, test);
}
}
self.run_time = now.elapsed();
//clear progress line
//progress.clear_line();
}
pub fn duration(&self) -> Duration {
self.run_time
}
pub fn stretch(&self) -> f32 {
(self.route_costs_sum as f32) / (self.route_costs_min_sum as f32)
}
pub fn arrived(&self) -> f32 {
100.0 * (self.packets_arrived as f32) / (self.packets_send as f32)
}
pub fn connectivity(&self) -> f32 {
100.0 * (self.nodes_connected as f32) / (self.nodes_connected + self.nodes_disconnected) as f32
}
pub fn get_results(&self) -> Vec<(&'static str, f32)> {
vec![
("arrived", self.arrived()),
("connectivity", self.connectivity()),
("stretch", self.stretch())
]
}
}
| {
path_costs += link.cost() as u32;
if next == packet.destination {
// packet arrived
self.packets_arrived += 1;
break;
} else {
// forward packet
packet.transmitter = packet.receiver;
packet.receiver = next;
}
} | conditional_block |
lexer.py | # -*- coding: utf-8 -*-
# EDIS - a simple cross-platform IDE for C
#
# This file is part of Edis
# Copyright 2014-2015 - Gabriel Acosta <acostadariogabriel at gmail>
# License: GPLv3 (see http://www.gnu.org/licenses/gpl.html)
from PyQt4.Qsci import QsciLexerCPP
from PyQt4.QtGui import QColor
from src import editor_scheme
from src.core import settings
class Lexer(QsciLexerCPP):
""" Lexer class """
def __init__(self, *args, **kwargs):
super(Lexer, self).__init__(*args, **kwargs)
# Configuración
self.setStylePreprocessor(True)
self.setFoldComments(True)
self.setFoldPreprocessor(True)
self.setFoldCompact(False)
self.load_highlighter()
def load_highlighter(self):
""" Método público: carga el resaltado de sintáxis """
scheme = editor_scheme.get_scheme(
settings.get_setting('editor/scheme'))
self.setDefaultPaper(QColor(scheme['BackgroundEditor']))
self.setPaper(self.defaultPaper(0))
self.setColor(QColor(scheme['Color']))
types = dir(self)
for _type in types:
if _type in scheme:
atr = getattr(self, _type)
self.setColor(QColor(scheme[_type]), atr)
def keywords(self, kset):
super(Lexer, self).keywords(kset)
if kset == 1:
# Palabras reservadas
return ('auto break case const continue default do else enum '
'extern for goto if register return short sizeof static '
'struct switch typedef union unsigned void volatile while '
'char float int long double')
elif kset == 2:
# Funciones definidas en stdio.h y stdlib.h
retu | super(Lexer, self).keywords(kset) | rn ('fprintf fscanf printf scanf sprintf sscanf vfprintf '
'vprintf vsprintf fclose fflush fopen freopen remove '
'rename setbuf tmpfile tmpnam fgetc fgets fputc fputs '
'getc getchar gets putc putchar puts ungetc fread fseek '
'fsetpos ftell rewind clearerr feof ferror perror '
'abort atexit exit getenv system abs div labs ldiv '
'rand srand atof atoi atol strtod strtod strtoll '
'strtoul bsearch qsort calloc realloc malloc free '
'mblen mbtowc wctomb mbstowcs wcstombs')
| conditional_block |
lexer.py | # -*- coding: utf-8 -*-
# EDIS - a simple cross-platform IDE for C
#
# This file is part of Edis
# Copyright 2014-2015 - Gabriel Acosta <acostadariogabriel at gmail>
# License: GPLv3 (see http://www.gnu.org/licenses/gpl.html)
from PyQt4.Qsci import QsciLexerCPP
from PyQt4.QtGui import QColor
from src import editor_scheme
from src.core import settings
class Lexer(QsciLexerCPP):
""" Lexer class """
def __init__(self, *args, **kwargs):
|
def load_highlighter(self):
""" Método público: carga el resaltado de sintáxis """
scheme = editor_scheme.get_scheme(
settings.get_setting('editor/scheme'))
self.setDefaultPaper(QColor(scheme['BackgroundEditor']))
self.setPaper(self.defaultPaper(0))
self.setColor(QColor(scheme['Color']))
types = dir(self)
for _type in types:
if _type in scheme:
atr = getattr(self, _type)
self.setColor(QColor(scheme[_type]), atr)
def keywords(self, kset):
super(Lexer, self).keywords(kset)
if kset == 1:
# Palabras reservadas
return ('auto break case const continue default do else enum '
'extern for goto if register return short sizeof static '
'struct switch typedef union unsigned void volatile while '
'char float int long double')
elif kset == 2:
# Funciones definidas en stdio.h y stdlib.h
return ('fprintf fscanf printf scanf sprintf sscanf vfprintf '
'vprintf vsprintf fclose fflush fopen freopen remove '
'rename setbuf tmpfile tmpnam fgetc fgets fputc fputs '
'getc getchar gets putc putchar puts ungetc fread fseek '
'fsetpos ftell rewind clearerr feof ferror perror '
'abort atexit exit getenv system abs div labs ldiv '
'rand srand atof atoi atol strtod strtod strtoll '
'strtoul bsearch qsort calloc realloc malloc free '
'mblen mbtowc wctomb mbstowcs wcstombs')
super(Lexer, self).keywords(kset) | super(Lexer, self).__init__(*args, **kwargs)
# Configuración
self.setStylePreprocessor(True)
self.setFoldComments(True)
self.setFoldPreprocessor(True)
self.setFoldCompact(False)
self.load_highlighter()
| identifier_body |
lexer.py | # -*- coding: utf-8 -*-
# EDIS - a simple cross-platform IDE for C | # Copyright 2014-2015 - Gabriel Acosta <acostadariogabriel at gmail>
# License: GPLv3 (see http://www.gnu.org/licenses/gpl.html)
from PyQt4.Qsci import QsciLexerCPP
from PyQt4.QtGui import QColor
from src import editor_scheme
from src.core import settings
class Lexer(QsciLexerCPP):
""" Lexer class """
def __init__(self, *args, **kwargs):
super(Lexer, self).__init__(*args, **kwargs)
# Configuración
self.setStylePreprocessor(True)
self.setFoldComments(True)
self.setFoldPreprocessor(True)
self.setFoldCompact(False)
self.load_highlighter()
def load_highlighter(self):
""" Método público: carga el resaltado de sintáxis """
scheme = editor_scheme.get_scheme(
settings.get_setting('editor/scheme'))
self.setDefaultPaper(QColor(scheme['BackgroundEditor']))
self.setPaper(self.defaultPaper(0))
self.setColor(QColor(scheme['Color']))
types = dir(self)
for _type in types:
if _type in scheme:
atr = getattr(self, _type)
self.setColor(QColor(scheme[_type]), atr)
def keywords(self, kset):
super(Lexer, self).keywords(kset)
if kset == 1:
# Palabras reservadas
return ('auto break case const continue default do else enum '
'extern for goto if register return short sizeof static '
'struct switch typedef union unsigned void volatile while '
'char float int long double')
elif kset == 2:
# Funciones definidas en stdio.h y stdlib.h
return ('fprintf fscanf printf scanf sprintf sscanf vfprintf '
'vprintf vsprintf fclose fflush fopen freopen remove '
'rename setbuf tmpfile tmpnam fgetc fgets fputc fputs '
'getc getchar gets putc putchar puts ungetc fread fseek '
'fsetpos ftell rewind clearerr feof ferror perror '
'abort atexit exit getenv system abs div labs ldiv '
'rand srand atof atoi atol strtod strtod strtoll '
'strtoul bsearch qsort calloc realloc malloc free '
'mblen mbtowc wctomb mbstowcs wcstombs')
super(Lexer, self).keywords(kset) | #
# This file is part of Edis | random_line_split |
lexer.py | # -*- coding: utf-8 -*-
# EDIS - a simple cross-platform IDE for C
#
# This file is part of Edis
# Copyright 2014-2015 - Gabriel Acosta <acostadariogabriel at gmail>
# License: GPLv3 (see http://www.gnu.org/licenses/gpl.html)
from PyQt4.Qsci import QsciLexerCPP
from PyQt4.QtGui import QColor
from src import editor_scheme
from src.core import settings
class Lexer(QsciLexerCPP):
""" Lexer class """
def __init__(self, *args, **kwargs):
super(Lexer, self).__init__(*args, **kwargs)
# Configuración
self.setStylePreprocessor(True)
self.setFoldComments(True)
self.setFoldPreprocessor(True)
self.setFoldCompact(False)
self.load_highlighter()
def load_highlighter(self):
""" Método público: carga el resaltado de sintáxis """
scheme = editor_scheme.get_scheme(
settings.get_setting('editor/scheme'))
self.setDefaultPaper(QColor(scheme['BackgroundEditor']))
self.setPaper(self.defaultPaper(0))
self.setColor(QColor(scheme['Color']))
types = dir(self)
for _type in types:
if _type in scheme:
atr = getattr(self, _type)
self.setColor(QColor(scheme[_type]), atr)
def keyw | f, kset):
super(Lexer, self).keywords(kset)
if kset == 1:
# Palabras reservadas
return ('auto break case const continue default do else enum '
'extern for goto if register return short sizeof static '
'struct switch typedef union unsigned void volatile while '
'char float int long double')
elif kset == 2:
# Funciones definidas en stdio.h y stdlib.h
return ('fprintf fscanf printf scanf sprintf sscanf vfprintf '
'vprintf vsprintf fclose fflush fopen freopen remove '
'rename setbuf tmpfile tmpnam fgetc fgets fputc fputs '
'getc getchar gets putc putchar puts ungetc fread fseek '
'fsetpos ftell rewind clearerr feof ferror perror '
'abort atexit exit getenv system abs div labs ldiv '
'rand srand atof atoi atol strtod strtod strtoll '
'strtoul bsearch qsort calloc realloc malloc free '
'mblen mbtowc wctomb mbstowcs wcstombs')
super(Lexer, self).keywords(kset) | ords(sel | identifier_name |
decorators.ts | export type SpotifyQueryDecorated = any;
// Allow to decoration class methods using the following pattern :
// const SpotifyQuery = SpotifyDecorators(client);
// class A {
// @SpotifyQuery(`
// query {
// me {
// tracks { name }
// }
// }
// `)
// public myMethod(tracks): SpotifyQueryDecorated {
// // ...
// }
// }
export function SpotifyDecorators (client): any {
return {
SpotifyQuery(query: string) |
}
} | {
return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) {
var originalMethod = descriptor.value;
descriptor.value = function (variables?: any): Promise<any> {
return new Promise((resolve, reject) => {
client.query(query, null, null, variables).then((executionResult) => {
if (!executionResult.errors) {
resolve(
originalMethod.call(originalMethod, executionResult.data)
);
} else {
reject(executionResult.errors);
}
}, reject).catch(reject);
});
};
return descriptor;
};
} | identifier_body |
decorators.ts | export type SpotifyQueryDecorated = any;
// Allow to decoration class methods using the following pattern :
// const SpotifyQuery = SpotifyDecorators(client);
// class A {
// @SpotifyQuery(`
// query {
// me {
// tracks { name }
// }
// }
// `)
// public myMethod(tracks): SpotifyQueryDecorated {
// // ...
// }
// }
export function SpotifyDecorators (client): any {
return {
SpotifyQuery(query: string) {
return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) {
var originalMethod = descriptor.value;
descriptor.value = function (variables?: any): Promise<any> {
return new Promise((resolve, reject) => {
client.query(query, null, null, variables).then((executionResult) => {
if (!executionResult.errors) {
resolve(
originalMethod.call(originalMethod, executionResult.data)
);
} else |
}, reject).catch(reject);
});
};
return descriptor;
};
}
}
} | {
reject(executionResult.errors);
} | conditional_block |
decorators.ts | export type SpotifyQueryDecorated = any;
// Allow to decoration class methods using the following pattern :
// const SpotifyQuery = SpotifyDecorators(client);
// class A {
// @SpotifyQuery(`
// query {
// me {
// tracks { name }
// }
// }
// `)
// public myMethod(tracks): SpotifyQueryDecorated {
// // ...
// }
// }
export function SpotifyDecorators (client): any {
return {
| (query: string) {
return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) {
var originalMethod = descriptor.value;
descriptor.value = function (variables?: any): Promise<any> {
return new Promise((resolve, reject) => {
client.query(query, null, null, variables).then((executionResult) => {
if (!executionResult.errors) {
resolve(
originalMethod.call(originalMethod, executionResult.data)
);
} else {
reject(executionResult.errors);
}
}, reject).catch(reject);
});
};
return descriptor;
};
}
}
} | SpotifyQuery | identifier_name |
decorators.ts | export type SpotifyQueryDecorated = any;
// Allow to decoration class methods using the following pattern :
// const SpotifyQuery = SpotifyDecorators(client);
// class A {
// @SpotifyQuery(`
// query {
// me {
// tracks { name }
// }
// }
// `)
// public myMethod(tracks): SpotifyQueryDecorated {
// // ...
// } | // }
export function SpotifyDecorators (client): any {
return {
SpotifyQuery(query: string) {
return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) {
var originalMethod = descriptor.value;
descriptor.value = function (variables?: any): Promise<any> {
return new Promise((resolve, reject) => {
client.query(query, null, null, variables).then((executionResult) => {
if (!executionResult.errors) {
resolve(
originalMethod.call(originalMethod, executionResult.data)
);
} else {
reject(executionResult.errors);
}
}, reject).catch(reject);
});
};
return descriptor;
};
}
}
} | random_line_split | |
endian.rs | // Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::mem;
/// A value casted directly from a little-endian byte buffer. On big-endian
/// processors, the bytes of the value need to be swapped upon reading and writing.
#[repr(C)]
pub struct WireValue<T> {
value: T,
}
impl<T> WireValue<T> where T: Endian {
/// Reads the value, swapping bytes on big-endian processors.
#[inline]
pub fn get(&self) -> T { self.value.get() }
/// Writes the value, swapping bytes on big-endian processors.
#[inline]
pub fn set(&mut self, value: T) { self.value.set(value) }
}
/// Something that can appear in a `WireValue`.
pub trait Endian : Sized {
/// Reads the value, swapping bytes on big-endian processors.
fn get(&self) -> Self;
/// Writes the value, swapping bytes on big-endian processors.
fn set(&mut self, value: Self);
}
macro_rules! endian_impl(
($typ:ty) => (
impl Endian for $typ {
#[inline]
fn get(&self) -> $typ { *self }
#[inline]
fn set(&mut self, value: $typ) {*self = value;}
}
);
($typ:ty, $swapper:ident) => (
impl Endian for $typ {
#[inline]
fn get(&self) -> $typ { self.$swapper() }
#[inline]
fn set(&mut self, value: $typ) {
*self = value.$swapper();
}
}
);
);
// No swapping necessary for primitives of size less than one byte.
endian_impl!(());
endian_impl!(bool);
endian_impl!(u8);
endian_impl!(i8);
// Need to swap bytes for primitives larger than a byte.
endian_impl!(u16, to_le);
endian_impl!(i16, to_le);
endian_impl!(u32, to_le);
endian_impl!(i32, to_le);
endian_impl!(u64, to_le);
endian_impl!(i64, to_le);
impl Endian for f32 {
fn | (&self) -> f32 {
unsafe { mem::transmute(mem::transmute::<f32, u32>(*self).to_le()) }
}
fn set(&mut self, value : f32) {
*self = unsafe { mem::transmute(mem::transmute::<f32, u32>(value).to_le()) };
}
}
impl Endian for f64 {
fn get(&self) -> f64 {
unsafe { mem::transmute(mem::transmute::<f64, u64>(*self).to_le()) }
}
fn set(&mut self, value : f64) {
*self = unsafe { mem::transmute(mem::transmute::<f64, u64>(value).to_le()) };
}
}
| get | identifier_name |
endian.rs | // Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::mem;
/// A value casted directly from a little-endian byte buffer. On big-endian
/// processors, the bytes of the value need to be swapped upon reading and writing.
#[repr(C)]
pub struct WireValue<T> {
value: T,
}
impl<T> WireValue<T> where T: Endian {
/// Reads the value, swapping bytes on big-endian processors.
#[inline]
pub fn get(&self) -> T { self.value.get() }
/// Writes the value, swapping bytes on big-endian processors.
#[inline]
pub fn set(&mut self, value: T) { self.value.set(value) }
}
/// Something that can appear in a `WireValue`.
pub trait Endian : Sized {
/// Reads the value, swapping bytes on big-endian processors.
fn get(&self) -> Self;
/// Writes the value, swapping bytes on big-endian processors.
fn set(&mut self, value: Self);
}
macro_rules! endian_impl(
($typ:ty) => (
impl Endian for $typ {
#[inline]
fn get(&self) -> $typ { *self }
#[inline]
fn set(&mut self, value: $typ) {*self = value;}
}
);
($typ:ty, $swapper:ident) => (
impl Endian for $typ {
#[inline]
fn get(&self) -> $typ { self.$swapper() }
#[inline]
fn set(&mut self, value: $typ) {
*self = value.$swapper();
}
}
);
);
// No swapping necessary for primitives of size less than one byte.
endian_impl!(());
endian_impl!(bool);
endian_impl!(u8);
endian_impl!(i8);
// Need to swap bytes for primitives larger than a byte.
endian_impl!(u16, to_le);
endian_impl!(i16, to_le);
endian_impl!(u32, to_le);
endian_impl!(i32, to_le);
endian_impl!(u64, to_le);
endian_impl!(i64, to_le);
impl Endian for f32 {
fn get(&self) -> f32 {
unsafe { mem::transmute(mem::transmute::<f32, u32>(*self).to_le()) }
}
fn set(&mut self, value : f32) {
*self = unsafe { mem::transmute(mem::transmute::<f32, u32>(value).to_le()) };
}
}
impl Endian for f64 {
fn get(&self) -> f64 {
unsafe { mem::transmute(mem::transmute::<f64, u64>(*self).to_le()) }
}
fn set(&mut self, value : f64) {
*self = unsafe { mem::transmute(mem::transmute::<f64, u64>(value).to_le()) };
} | } | random_line_split | |
converttojson.py | #import results # Not using results?
import loc
import json
#asinfo = results.asinfo
locinfo = loc.locinfo
#fields = asinfo["fields"]
#del asinfo["fields"]
#print fields
# for asn, dat in asinfo.items():
# if int(asn) in locinfo:
# dat.extend(locinfo[int(asn)])
# else:
# dat.extend([0, 0])
#
# for asn, dat in asinfo.items():
# print asn, dat[0]
# if asn != dat[0]:
# print "\n\n\n\n\n\n\n"
print locinfo
# for val in locinfo.values():
# print val
# udata = val[0].decode("utf-8"); | f = open("asinfo.json", "w")
json.dump(locinfo, f)
f.close() | # val[0] = udata.encode("ascii", "ignore")
# val[0] = unicode(val[0])
# print val | random_line_split |
1234_1243_2134_2431_4213.py | from __future__ import print_function
from permuta import *
import permstruct
import permstruct.dag
from permstruct import * | from permstruct.dag import taylor_dag
import sys
# -- Example from Kuszmaul paper -- #
# STATUS ================================================ >
task = '1234_1243_2134_2431_4213'
patts = [ Permutation([ int(c) for c in p ]) for p in task.split('_') ]
# patts = [Permutation([5,2,3,4,1]), Permutation([5,3,2,4,1]), Permutation([5,2,4,3,1]), Permutation([3,5,1,4,2]), Permutation([4,2,5,1,3]), Permutation([3,5,1,6,2,4])]
struct(patts, size=6, perm_bound = 8, subpatts_len=4, subpatts_num=3)
# struct(patts, size = 4, verify_bound = 10, ask_verify_higher = True) | random_line_split | |
total_dos_plotter.py | #!/usr/bin/env python |
__author__ = "Yuji Ikeda"
class TotalDOSPlotter(DOSPlotter):
def load_data(self, data_file='total_dos.dat'):
super(TotalDOSPlotter, self).load_data(data_file)
return self
def run(self):
variables = self._variables
primitive = self.create_primitive()
natoms = primitive.get_number_of_atoms()
symbols = primitive.get_chemical_symbols()
print("natoms:", natoms)
print("symbols:", symbols)
self.set_figure_name_prefix("total_dos")
self.set_plot_symbol(False)
self.set_plot_atom(False)
self.load_data(variables["data_file"])
variables.update({
"freq_unit": "THz",
"unit": 1.0,
"natoms": natoms,
"symbols": symbols,
})
self.update_variables(variables)
# self.set_is_horizontal(True)
# self.plot_dos()
self.set_is_horizontal(False)
self.create_figure()
return
from scipy.constants import eV, Planck
THz2meV = Planck / eV * 1e+15 # 4.135667662340164
# meV
variables.update({
"freq_unit": "meV",
"unit": THz2meV,
})
scale = 4.0
variables["f_min"] *= scale
variables["f_max"] *= scale
variables["d_freq"] *= scale
variables["dos_min"] /= scale
variables["dos_max"] /= scale
variables["dos_ticks"] /= scale
self.update_variables(variables)
# self.set_is_horizontal(True)
# self.plot_dos()
self.set_is_horizontal(False)
self.create_figure() | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from .dos_plotter import DOSPlotter
| random_line_split |
total_dos_plotter.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from .dos_plotter import DOSPlotter
__author__ = "Yuji Ikeda"
class TotalDOSPlotter(DOSPlotter):
def | (self, data_file='total_dos.dat'):
super(TotalDOSPlotter, self).load_data(data_file)
return self
def run(self):
variables = self._variables
primitive = self.create_primitive()
natoms = primitive.get_number_of_atoms()
symbols = primitive.get_chemical_symbols()
print("natoms:", natoms)
print("symbols:", symbols)
self.set_figure_name_prefix("total_dos")
self.set_plot_symbol(False)
self.set_plot_atom(False)
self.load_data(variables["data_file"])
variables.update({
"freq_unit": "THz",
"unit": 1.0,
"natoms": natoms,
"symbols": symbols,
})
self.update_variables(variables)
# self.set_is_horizontal(True)
# self.plot_dos()
self.set_is_horizontal(False)
self.create_figure()
return
from scipy.constants import eV, Planck
THz2meV = Planck / eV * 1e+15 # 4.135667662340164
# meV
variables.update({
"freq_unit": "meV",
"unit": THz2meV,
})
scale = 4.0
variables["f_min"] *= scale
variables["f_max"] *= scale
variables["d_freq"] *= scale
variables["dos_min"] /= scale
variables["dos_max"] /= scale
variables["dos_ticks"] /= scale
self.update_variables(variables)
# self.set_is_horizontal(True)
# self.plot_dos()
self.set_is_horizontal(False)
self.create_figure()
| load_data | identifier_name |
total_dos_plotter.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from .dos_plotter import DOSPlotter
__author__ = "Yuji Ikeda"
class TotalDOSPlotter(DOSPlotter):
def load_data(self, data_file='total_dos.dat'):
super(TotalDOSPlotter, self).load_data(data_file)
return self
def run(self):
| variables = self._variables
primitive = self.create_primitive()
natoms = primitive.get_number_of_atoms()
symbols = primitive.get_chemical_symbols()
print("natoms:", natoms)
print("symbols:", symbols)
self.set_figure_name_prefix("total_dos")
self.set_plot_symbol(False)
self.set_plot_atom(False)
self.load_data(variables["data_file"])
variables.update({
"freq_unit": "THz",
"unit": 1.0,
"natoms": natoms,
"symbols": symbols,
})
self.update_variables(variables)
# self.set_is_horizontal(True)
# self.plot_dos()
self.set_is_horizontal(False)
self.create_figure()
return
from scipy.constants import eV, Planck
THz2meV = Planck / eV * 1e+15 # 4.135667662340164
# meV
variables.update({
"freq_unit": "meV",
"unit": THz2meV,
})
scale = 4.0
variables["f_min"] *= scale
variables["f_max"] *= scale
variables["d_freq"] *= scale
variables["dos_min"] /= scale
variables["dos_max"] /= scale
variables["dos_ticks"] /= scale
self.update_variables(variables)
# self.set_is_horizontal(True)
# self.plot_dos()
self.set_is_horizontal(False)
self.create_figure() | identifier_body | |
computed-pane-properties_test.ts | // Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import {assert} from 'chai';
import {getBrowserAndPages, goToResource, waitForFunction} from '../../shared/helper.js';
import {describe, it} from '../../shared/mocha-extensions.js';
import {focusElementsTree, getAllPropertiesFromComputedPane, getContentOfComputedPane, navigateToSidePane, toggleShowAllComputedProperties, waitForComputedPaneChange, waitForElementsComputedSection} from '../helpers/elements-helpers.js';
describe('The Computed pane', async () => {
beforeEach(async function() {
await goToResource('elements/simple-styled-page.html');
await navigateToSidePane('Computed');
await waitForElementsComputedSection();
// Note that navigating to the computed pane moved focus away from the
// elements pane. Restore it.
await focusElementsTree();
});
it('can display the CSS properties of the selected element', async () => {
const {frontend} = getBrowserAndPages();
// Select the H1 element and wait for the computed pane to change.
let content = await getContentOfComputedPane();
await frontend.keyboard.press('ArrowDown');
await waitForComputedPaneChange(content);
await waitForElementsComputedSection();
const h1Properties = await getAllPropertiesFromComputedPane();
assert.strictEqual(h1Properties.length, 10, 'There should be 10 computed properties on the H1 element');
const colorProperty = h1Properties.find(property => property && property.name === 'color');
assert.exists(colorProperty, 'H1 element should have a color computed property');
assert.deepEqual(colorProperty, {
name: 'color',
value: 'rgb(255, 0, 102)',
});
// Select the H2 element by pressing down again.
content = await getContentOfComputedPane();
await frontend.keyboard.press('ArrowDown');
await waitForComputedPaneChange(content);
await waitForElementsComputedSection();
const h2Properties = await getAllPropertiesFromComputedPane(); | assert.exists(backgroundProperty, 'H2 element should have a background-color computed property');
assert.deepEqual(backgroundProperty, {
name: 'background-color',
value: 'rgb(255, 215, 0)',
});
});
it('can display inherited CSS properties of the selected element', async () => {
const {frontend} = getBrowserAndPages();
// Select the H1 element and wait for the computed pane to change.
const content = await getContentOfComputedPane();
await frontend.keyboard.press('ArrowDown');
await waitForComputedPaneChange(content);
await toggleShowAllComputedProperties();
await waitForElementsComputedSection();
const getAlignContentProperty = async () => {
const allH1Properties = await getAllPropertiesFromComputedPane();
const prop = allH1Properties.find(property => property && property.name === 'align-content');
return prop;
};
const alignContentProperty = await waitForFunction(getAlignContentProperty);
assert.exists(alignContentProperty, 'H1 element should display the inherited align-content computed property');
assert.deepEqual(alignContentProperty, {
name: 'align-content',
value: 'normal',
});
});
}); | assert.strictEqual(h2Properties.length, 11, 'There should be 11 computed properties on the H2 element');
const backgroundProperty = h2Properties.find(property => property && property.name === 'background-color'); | random_line_split |
console.rs | // Copyright (c) 2019 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::io::{self, Write};
use std::thread::{self, JoinHandle};
use std::time::Duration;
use console::style;
use humantime::format_duration;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use super::{
BeginTaskEvent, ChecksumErrorEvent, DeleteEvent, EndBuildEvent,
EndTaskEvent, Event, EventHandler, Timestamp,
};
#[derive(Clone)]
struct TaskState {
/// Time the task started.
start: Timestamp,
/// Progress bar associated with this task.
pb: ProgressBar,
/// String of the task being executed.
name: String,
/// Buffer of output for the task.
buf: Vec<u8>,
}
impl TaskState {
pub fn new(start: Timestamp, pb: ProgressBar) -> Self {
TaskState {
start,
pb,
name: String::new(),
buf: Vec::new(),
}
}
}
/// Calculates the number of spaces a number takes up. Useful for padding
/// decimal numbers.
fn num_width(mut max_value: usize) -> usize {
let mut count = 0;
while max_value > 0 {
max_value /= 10;
count += 1;
}
count
}
/// "Inner" that lives as long as a single build. This is created and destroyed
/// for `BeginBuildEvent`s and `EndBuildEvent`s respectively.
struct Inner {
// Vector of in-flight tasks.
tasks: Vec<TaskState>,
// Time at which the build started. This is used to calculate the duration
// of the build when it finishes.
start_time: Timestamp,
// Progress bar thread.
pb_thread: JoinHandle<Result<(), io::Error>>,
// Continuously updates each of the progress bars.
tick_thread: JoinHandle<()>,
// Name of the build.
name: String,
}
impl Inner {
pub fn new(threads: usize, name: String, timestamp: Timestamp) -> Self {
// Give a bogus start time. This will be changed as we receive events.
let mut tasks = Vec::with_capacity(threads);
let mut bars = Vec::with_capacity(threads);
let progress = MultiProgress::new();
for i in 0..threads {
let pb = progress.add(ProgressBar::new_spinner());
pb.set_style(Console::style_idle());
pb.set_prefix(&format!(
"[{:>width$}]",
i + 1,
width = num_width(threads)
));
pb.set_message(&style("Idle").dim().to_string());
// Clone the progress bar handle so we can update them later.
bars.push(pb.clone());
tasks.push(TaskState::new(timestamp, pb));
}
let pb_thread = thread::spawn(move || progress.join_and_clear());
let tick_thread = thread::spawn(move || loop {
thread::sleep(Duration::from_millis(200));
for pb in &bars {
if pb.is_finished() {
return;
}
pb.tick();
}
});
Inner {
tasks,
start_time: timestamp,
pb_thread,
tick_thread,
name,
}
}
pub fn | (mut self) -> Result<(), io::Error> {
for task in self.tasks.iter_mut() {
task.pb
.finish_with_message(&style("Done").dim().to_string());
}
self.tick_thread.join().unwrap();
self.pb_thread.join().unwrap()?;
Ok(())
}
pub fn end_build(
self,
timestamp: Timestamp,
event: EndBuildEvent,
) -> Result<(), io::Error> {
let duration = (timestamp - self.start_time).to_std().unwrap();
let duration = format_duration(duration);
let msg = match event.result {
Ok(()) => format!(
"{} {} in {}",
style("Finished").bold().green(),
style(&self.name).yellow(),
style(duration).cyan(),
),
Err(err) => format!(
"{} {} after {}: {}",
style("Failed").bold().red(),
style(&self.name).yellow(),
style(duration).cyan(),
err
),
};
for task in &self.tasks {
task.pb.set_style(Console::style_idle());
}
self.tasks[0].pb.println(&msg);
self.finish()
}
pub fn begin_task(
&mut self,
timestamp: Timestamp,
event: BeginTaskEvent,
) -> Result<(), io::Error> {
let mut task = &mut self.tasks[event.id];
task.start = timestamp;
let name = event.task.to_string();
task.pb.reset_elapsed();
task.pb.set_style(Console::style_running());
task.pb.set_message(&name);
task.name = name;
Ok(())
}
pub fn end_task(
&mut self,
timestamp: Timestamp,
event: EndTaskEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
let duration = (timestamp - task.start).to_std().unwrap();
let duration = format_duration(duration);
if let Err(err) = event.result {
writeln!(
&mut task.buf,
"{} after {}: {}",
style("Task failed").bold().red(),
style(duration).cyan(),
style(err).red(),
)?;
task.pb.println(format!(
"> {}\n{}",
style(&task.name).bold().red(),
String::from_utf8_lossy(&task.buf),
));
}
task.buf.clear();
task.pb.set_style(Console::style_idle());
Ok(())
}
pub fn delete(
&mut self,
_timestamp: Timestamp,
event: DeleteEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.set_style(Console::style_running());
task.pb.set_message(&format!("Deleted {}", event.resource));
if let Err(err) = event.result {
task.pb.println(format!(
"{} to delete `{}`: {}",
style("Failed").bold().red(),
style(event.resource).yellow(),
err
));
}
Ok(())
}
pub fn checksum_error(
&mut self,
_timestamp: Timestamp,
event: ChecksumErrorEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.println(format!(
"Failed to compute checksum for {} ({})",
event.resource, event.error
));
Ok(())
}
}
/// Logs events to a console.
#[derive(Default)]
pub struct Console {
// Delay creation of the inner state until we receive our first BeginBuild
// event. This lets us handle any number of threads.
inner: Option<Inner>,
}
impl Console {
fn style_idle() -> ProgressStyle {
ProgressStyle::default_spinner().template("{prefix:.bold.dim} 🚶")
}
fn style_running() -> ProgressStyle {
ProgressStyle::default_spinner().template(&format!(
"{{prefix:.bold.dim}} 🏃 {} {{wide_msg}}",
style("{elapsed}").dim()
))
}
pub fn new() -> Self {
// Delay initialization until we receive a BeginBuild event.
Self::default()
}
}
impl EventHandler for Console {
type Error = io::Error;
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
match event {
Event::BeginBuild(event) => {
if self.inner.is_none() {
self.inner =
Some(Inner::new(event.threads, event.name, timestamp));
}
}
Event::EndBuild(event) => {
if let Some(inner) = self.inner.take() {
inner.end_build(timestamp, event)?;
}
}
Event::BeginTask(event) => {
if let Some(inner) = &mut self.inner {
inner.begin_task(timestamp, event)?;
}
}
Event::TaskOutput(event) => {
if let Some(inner) = &mut self.inner {
inner.tasks[event.id].buf.extend(event.chunk);
}
}
Event::EndTask(event) => {
if let Some(inner) = &mut self.inner {
inner.end_task(timestamp, event)?;
}
}
Event::Delete(event) => {
if let Some(inner) = &mut self.inner {
inner.delete(timestamp, event)?;
}
}
Event::ChecksumError(event) => {
if let Some(inner) = &mut self.inner {
inner.checksum_error(timestamp, event)?;
}
}
}
Ok(())
}
fn finish(&mut self) -> Result<(), Self::Error> {
if let Some(inner) = self.inner.take() {
inner.finish()?;
}
Ok(())
}
}
| finish | identifier_name |
console.rs | // Copyright (c) 2019 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::io::{self, Write};
use std::thread::{self, JoinHandle};
use std::time::Duration;
use console::style;
use humantime::format_duration;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use super::{
BeginTaskEvent, ChecksumErrorEvent, DeleteEvent, EndBuildEvent,
EndTaskEvent, Event, EventHandler, Timestamp,
};
#[derive(Clone)]
struct TaskState {
/// Time the task started.
start: Timestamp,
/// Progress bar associated with this task.
pb: ProgressBar,
/// String of the task being executed.
name: String,
/// Buffer of output for the task.
buf: Vec<u8>,
}
impl TaskState {
pub fn new(start: Timestamp, pb: ProgressBar) -> Self {
TaskState {
start,
pb,
name: String::new(),
buf: Vec::new(),
}
}
}
/// Calculates the number of spaces a number takes up. Useful for padding
/// decimal numbers.
fn num_width(mut max_value: usize) -> usize {
let mut count = 0;
while max_value > 0 {
max_value /= 10;
count += 1;
}
count
}
/// "Inner" that lives as long as a single build. This is created and destroyed
/// for `BeginBuildEvent`s and `EndBuildEvent`s respectively.
struct Inner {
// Vector of in-flight tasks.
tasks: Vec<TaskState>,
// Time at which the build started. This is used to calculate the duration
// of the build when it finishes.
start_time: Timestamp,
// Progress bar thread.
pb_thread: JoinHandle<Result<(), io::Error>>,
// Continuously updates each of the progress bars.
tick_thread: JoinHandle<()>,
// Name of the build.
name: String,
}
impl Inner {
pub fn new(threads: usize, name: String, timestamp: Timestamp) -> Self {
// Give a bogus start time. This will be changed as we receive events.
let mut tasks = Vec::with_capacity(threads);
let mut bars = Vec::with_capacity(threads);
let progress = MultiProgress::new();
for i in 0..threads {
let pb = progress.add(ProgressBar::new_spinner());
pb.set_style(Console::style_idle());
pb.set_prefix(&format!(
"[{:>width$}]",
i + 1,
width = num_width(threads)
));
pb.set_message(&style("Idle").dim().to_string());
// Clone the progress bar handle so we can update them later.
bars.push(pb.clone());
tasks.push(TaskState::new(timestamp, pb));
}
let pb_thread = thread::spawn(move || progress.join_and_clear());
let tick_thread = thread::spawn(move || loop {
thread::sleep(Duration::from_millis(200));
for pb in &bars {
if pb.is_finished() {
return;
}
pb.tick();
} | start_time: timestamp,
pb_thread,
tick_thread,
name,
}
}
pub fn finish(mut self) -> Result<(), io::Error> {
for task in self.tasks.iter_mut() {
task.pb
.finish_with_message(&style("Done").dim().to_string());
}
self.tick_thread.join().unwrap();
self.pb_thread.join().unwrap()?;
Ok(())
}
pub fn end_build(
self,
timestamp: Timestamp,
event: EndBuildEvent,
) -> Result<(), io::Error> {
let duration = (timestamp - self.start_time).to_std().unwrap();
let duration = format_duration(duration);
let msg = match event.result {
Ok(()) => format!(
"{} {} in {}",
style("Finished").bold().green(),
style(&self.name).yellow(),
style(duration).cyan(),
),
Err(err) => format!(
"{} {} after {}: {}",
style("Failed").bold().red(),
style(&self.name).yellow(),
style(duration).cyan(),
err
),
};
for task in &self.tasks {
task.pb.set_style(Console::style_idle());
}
self.tasks[0].pb.println(&msg);
self.finish()
}
pub fn begin_task(
&mut self,
timestamp: Timestamp,
event: BeginTaskEvent,
) -> Result<(), io::Error> {
let mut task = &mut self.tasks[event.id];
task.start = timestamp;
let name = event.task.to_string();
task.pb.reset_elapsed();
task.pb.set_style(Console::style_running());
task.pb.set_message(&name);
task.name = name;
Ok(())
}
pub fn end_task(
&mut self,
timestamp: Timestamp,
event: EndTaskEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
let duration = (timestamp - task.start).to_std().unwrap();
let duration = format_duration(duration);
if let Err(err) = event.result {
writeln!(
&mut task.buf,
"{} after {}: {}",
style("Task failed").bold().red(),
style(duration).cyan(),
style(err).red(),
)?;
task.pb.println(format!(
"> {}\n{}",
style(&task.name).bold().red(),
String::from_utf8_lossy(&task.buf),
));
}
task.buf.clear();
task.pb.set_style(Console::style_idle());
Ok(())
}
pub fn delete(
&mut self,
_timestamp: Timestamp,
event: DeleteEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.set_style(Console::style_running());
task.pb.set_message(&format!("Deleted {}", event.resource));
if let Err(err) = event.result {
task.pb.println(format!(
"{} to delete `{}`: {}",
style("Failed").bold().red(),
style(event.resource).yellow(),
err
));
}
Ok(())
}
pub fn checksum_error(
&mut self,
_timestamp: Timestamp,
event: ChecksumErrorEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.println(format!(
"Failed to compute checksum for {} ({})",
event.resource, event.error
));
Ok(())
}
}
/// Logs events to a console.
#[derive(Default)]
pub struct Console {
// Delay creation of the inner state until we receive our first BeginBuild
// event. This lets us handle any number of threads.
inner: Option<Inner>,
}
impl Console {
fn style_idle() -> ProgressStyle {
ProgressStyle::default_spinner().template("{prefix:.bold.dim} 🚶")
}
fn style_running() -> ProgressStyle {
ProgressStyle::default_spinner().template(&format!(
"{{prefix:.bold.dim}} 🏃 {} {{wide_msg}}",
style("{elapsed}").dim()
))
}
pub fn new() -> Self {
// Delay initialization until we receive a BeginBuild event.
Self::default()
}
}
impl EventHandler for Console {
type Error = io::Error;
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
match event {
Event::BeginBuild(event) => {
if self.inner.is_none() {
self.inner =
Some(Inner::new(event.threads, event.name, timestamp));
}
}
Event::EndBuild(event) => {
if let Some(inner) = self.inner.take() {
inner.end_build(timestamp, event)?;
}
}
Event::BeginTask(event) => {
if let Some(inner) = &mut self.inner {
inner.begin_task(timestamp, event)?;
}
}
Event::TaskOutput(event) => {
if let Some(inner) = &mut self.inner {
inner.tasks[event.id].buf.extend(event.chunk);
}
}
Event::EndTask(event) => {
if let Some(inner) = &mut self.inner {
inner.end_task(timestamp, event)?;
}
}
Event::Delete(event) => {
if let Some(inner) = &mut self.inner {
inner.delete(timestamp, event)?;
}
}
Event::ChecksumError(event) => {
if let Some(inner) = &mut self.inner {
inner.checksum_error(timestamp, event)?;
}
}
}
Ok(())
}
fn finish(&mut self) -> Result<(), Self::Error> {
if let Some(inner) = self.inner.take() {
inner.finish()?;
}
Ok(())
}
} | });
Inner {
tasks, | random_line_split |
console.rs | // Copyright (c) 2019 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::io::{self, Write};
use std::thread::{self, JoinHandle};
use std::time::Duration;
use console::style;
use humantime::format_duration;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use super::{
BeginTaskEvent, ChecksumErrorEvent, DeleteEvent, EndBuildEvent,
EndTaskEvent, Event, EventHandler, Timestamp,
};
#[derive(Clone)]
struct TaskState {
/// Time the task started.
start: Timestamp,
/// Progress bar associated with this task.
pb: ProgressBar,
/// String of the task being executed.
name: String,
/// Buffer of output for the task.
buf: Vec<u8>,
}
impl TaskState {
pub fn new(start: Timestamp, pb: ProgressBar) -> Self {
TaskState {
start,
pb,
name: String::new(),
buf: Vec::new(),
}
}
}
/// Calculates the number of spaces a number takes up. Useful for padding
/// decimal numbers.
fn num_width(mut max_value: usize) -> usize {
let mut count = 0;
while max_value > 0 {
max_value /= 10;
count += 1;
}
count
}
/// "Inner" that lives as long as a single build. This is created and destroyed
/// for `BeginBuildEvent`s and `EndBuildEvent`s respectively.
struct Inner {
// Vector of in-flight tasks.
tasks: Vec<TaskState>,
// Time at which the build started. This is used to calculate the duration
// of the build when it finishes.
start_time: Timestamp,
// Progress bar thread.
pb_thread: JoinHandle<Result<(), io::Error>>,
// Continuously updates each of the progress bars.
tick_thread: JoinHandle<()>,
// Name of the build.
name: String,
}
impl Inner {
pub fn new(threads: usize, name: String, timestamp: Timestamp) -> Self {
// Give a bogus start time. This will be changed as we receive events.
let mut tasks = Vec::with_capacity(threads);
let mut bars = Vec::with_capacity(threads);
let progress = MultiProgress::new();
for i in 0..threads {
let pb = progress.add(ProgressBar::new_spinner());
pb.set_style(Console::style_idle());
pb.set_prefix(&format!(
"[{:>width$}]",
i + 1,
width = num_width(threads)
));
pb.set_message(&style("Idle").dim().to_string());
// Clone the progress bar handle so we can update them later.
bars.push(pb.clone());
tasks.push(TaskState::new(timestamp, pb));
}
let pb_thread = thread::spawn(move || progress.join_and_clear());
let tick_thread = thread::spawn(move || loop {
thread::sleep(Duration::from_millis(200));
for pb in &bars {
if pb.is_finished() {
return;
}
pb.tick();
}
});
Inner {
tasks,
start_time: timestamp,
pb_thread,
tick_thread,
name,
}
}
pub fn finish(mut self) -> Result<(), io::Error> {
for task in self.tasks.iter_mut() {
task.pb
.finish_with_message(&style("Done").dim().to_string());
}
self.tick_thread.join().unwrap();
self.pb_thread.join().unwrap()?;
Ok(())
}
pub fn end_build(
self,
timestamp: Timestamp,
event: EndBuildEvent,
) -> Result<(), io::Error> |
pub fn begin_task(
&mut self,
timestamp: Timestamp,
event: BeginTaskEvent,
) -> Result<(), io::Error> {
let mut task = &mut self.tasks[event.id];
task.start = timestamp;
let name = event.task.to_string();
task.pb.reset_elapsed();
task.pb.set_style(Console::style_running());
task.pb.set_message(&name);
task.name = name;
Ok(())
}
pub fn end_task(
&mut self,
timestamp: Timestamp,
event: EndTaskEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
let duration = (timestamp - task.start).to_std().unwrap();
let duration = format_duration(duration);
if let Err(err) = event.result {
writeln!(
&mut task.buf,
"{} after {}: {}",
style("Task failed").bold().red(),
style(duration).cyan(),
style(err).red(),
)?;
task.pb.println(format!(
"> {}\n{}",
style(&task.name).bold().red(),
String::from_utf8_lossy(&task.buf),
));
}
task.buf.clear();
task.pb.set_style(Console::style_idle());
Ok(())
}
pub fn delete(
&mut self,
_timestamp: Timestamp,
event: DeleteEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.set_style(Console::style_running());
task.pb.set_message(&format!("Deleted {}", event.resource));
if let Err(err) = event.result {
task.pb.println(format!(
"{} to delete `{}`: {}",
style("Failed").bold().red(),
style(event.resource).yellow(),
err
));
}
Ok(())
}
pub fn checksum_error(
&mut self,
_timestamp: Timestamp,
event: ChecksumErrorEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.println(format!(
"Failed to compute checksum for {} ({})",
event.resource, event.error
));
Ok(())
}
}
/// Logs events to a console.
#[derive(Default)]
pub struct Console {
// Delay creation of the inner state until we receive our first BeginBuild
// event. This lets us handle any number of threads.
inner: Option<Inner>,
}
impl Console {
fn style_idle() -> ProgressStyle {
ProgressStyle::default_spinner().template("{prefix:.bold.dim} 🚶")
}
fn style_running() -> ProgressStyle {
ProgressStyle::default_spinner().template(&format!(
"{{prefix:.bold.dim}} 🏃 {} {{wide_msg}}",
style("{elapsed}").dim()
))
}
pub fn new() -> Self {
// Delay initialization until we receive a BeginBuild event.
Self::default()
}
}
impl EventHandler for Console {
type Error = io::Error;
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
match event {
Event::BeginBuild(event) => {
if self.inner.is_none() {
self.inner =
Some(Inner::new(event.threads, event.name, timestamp));
}
}
Event::EndBuild(event) => {
if let Some(inner) = self.inner.take() {
inner.end_build(timestamp, event)?;
}
}
Event::BeginTask(event) => {
if let Some(inner) = &mut self.inner {
inner.begin_task(timestamp, event)?;
}
}
Event::TaskOutput(event) => {
if let Some(inner) = &mut self.inner {
inner.tasks[event.id].buf.extend(event.chunk);
}
}
Event::EndTask(event) => {
if let Some(inner) = &mut self.inner {
inner.end_task(timestamp, event)?;
}
}
Event::Delete(event) => {
if let Some(inner) = &mut self.inner {
inner.delete(timestamp, event)?;
}
}
Event::ChecksumError(event) => {
if let Some(inner) = &mut self.inner {
inner.checksum_error(timestamp, event)?;
}
}
}
Ok(())
}
fn finish(&mut self) -> Result<(), Self::Error> {
if let Some(inner) = self.inner.take() {
inner.finish()?;
}
Ok(())
}
}
| {
let duration = (timestamp - self.start_time).to_std().unwrap();
let duration = format_duration(duration);
let msg = match event.result {
Ok(()) => format!(
"{} {} in {}",
style("Finished").bold().green(),
style(&self.name).yellow(),
style(duration).cyan(),
),
Err(err) => format!(
"{} {} after {}: {}",
style("Failed").bold().red(),
style(&self.name).yellow(),
style(duration).cyan(),
err
),
};
for task in &self.tasks {
task.pb.set_style(Console::style_idle());
}
self.tasks[0].pb.println(&msg);
self.finish()
} | identifier_body |
console.rs | // Copyright (c) 2019 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::io::{self, Write};
use std::thread::{self, JoinHandle};
use std::time::Duration;
use console::style;
use humantime::format_duration;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use super::{
BeginTaskEvent, ChecksumErrorEvent, DeleteEvent, EndBuildEvent,
EndTaskEvent, Event, EventHandler, Timestamp,
};
#[derive(Clone)]
struct TaskState {
/// Time the task started.
start: Timestamp,
/// Progress bar associated with this task.
pb: ProgressBar,
/// String of the task being executed.
name: String,
/// Buffer of output for the task.
buf: Vec<u8>,
}
impl TaskState {
pub fn new(start: Timestamp, pb: ProgressBar) -> Self {
TaskState {
start,
pb,
name: String::new(),
buf: Vec::new(),
}
}
}
/// Calculates the number of spaces a number takes up. Useful for padding
/// decimal numbers.
fn num_width(mut max_value: usize) -> usize {
let mut count = 0;
while max_value > 0 {
max_value /= 10;
count += 1;
}
count
}
/// "Inner" that lives as long as a single build. This is created and destroyed
/// for `BeginBuildEvent`s and `EndBuildEvent`s respectively.
struct Inner {
// Vector of in-flight tasks.
tasks: Vec<TaskState>,
// Time at which the build started. This is used to calculate the duration
// of the build when it finishes.
start_time: Timestamp,
// Progress bar thread.
pb_thread: JoinHandle<Result<(), io::Error>>,
// Continuously updates each of the progress bars.
tick_thread: JoinHandle<()>,
// Name of the build.
name: String,
}
impl Inner {
pub fn new(threads: usize, name: String, timestamp: Timestamp) -> Self {
// Give a bogus start time. This will be changed as we receive events.
let mut tasks = Vec::with_capacity(threads);
let mut bars = Vec::with_capacity(threads);
let progress = MultiProgress::new();
for i in 0..threads {
let pb = progress.add(ProgressBar::new_spinner());
pb.set_style(Console::style_idle());
pb.set_prefix(&format!(
"[{:>width$}]",
i + 1,
width = num_width(threads)
));
pb.set_message(&style("Idle").dim().to_string());
// Clone the progress bar handle so we can update them later.
bars.push(pb.clone());
tasks.push(TaskState::new(timestamp, pb));
}
let pb_thread = thread::spawn(move || progress.join_and_clear());
let tick_thread = thread::spawn(move || loop {
thread::sleep(Duration::from_millis(200));
for pb in &bars {
if pb.is_finished() {
return;
}
pb.tick();
}
});
Inner {
tasks,
start_time: timestamp,
pb_thread,
tick_thread,
name,
}
}
pub fn finish(mut self) -> Result<(), io::Error> {
for task in self.tasks.iter_mut() {
task.pb
.finish_with_message(&style("Done").dim().to_string());
}
self.tick_thread.join().unwrap();
self.pb_thread.join().unwrap()?;
Ok(())
}
pub fn end_build(
self,
timestamp: Timestamp,
event: EndBuildEvent,
) -> Result<(), io::Error> {
let duration = (timestamp - self.start_time).to_std().unwrap();
let duration = format_duration(duration);
let msg = match event.result {
Ok(()) => format!(
"{} {} in {}",
style("Finished").bold().green(),
style(&self.name).yellow(),
style(duration).cyan(),
),
Err(err) => format!(
"{} {} after {}: {}",
style("Failed").bold().red(),
style(&self.name).yellow(),
style(duration).cyan(),
err
),
};
for task in &self.tasks {
task.pb.set_style(Console::style_idle());
}
self.tasks[0].pb.println(&msg);
self.finish()
}
pub fn begin_task(
&mut self,
timestamp: Timestamp,
event: BeginTaskEvent,
) -> Result<(), io::Error> {
let mut task = &mut self.tasks[event.id];
task.start = timestamp;
let name = event.task.to_string();
task.pb.reset_elapsed();
task.pb.set_style(Console::style_running());
task.pb.set_message(&name);
task.name = name;
Ok(())
}
pub fn end_task(
&mut self,
timestamp: Timestamp,
event: EndTaskEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
let duration = (timestamp - task.start).to_std().unwrap();
let duration = format_duration(duration);
if let Err(err) = event.result {
writeln!(
&mut task.buf,
"{} after {}: {}",
style("Task failed").bold().red(),
style(duration).cyan(),
style(err).red(),
)?;
task.pb.println(format!(
"> {}\n{}",
style(&task.name).bold().red(),
String::from_utf8_lossy(&task.buf),
));
}
task.buf.clear();
task.pb.set_style(Console::style_idle());
Ok(())
}
pub fn delete(
&mut self,
_timestamp: Timestamp,
event: DeleteEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.set_style(Console::style_running());
task.pb.set_message(&format!("Deleted {}", event.resource));
if let Err(err) = event.result {
task.pb.println(format!(
"{} to delete `{}`: {}",
style("Failed").bold().red(),
style(event.resource).yellow(),
err
));
}
Ok(())
}
pub fn checksum_error(
&mut self,
_timestamp: Timestamp,
event: ChecksumErrorEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.println(format!(
"Failed to compute checksum for {} ({})",
event.resource, event.error
));
Ok(())
}
}
/// Logs events to a console.
#[derive(Default)]
pub struct Console {
// Delay creation of the inner state until we receive our first BeginBuild
// event. This lets us handle any number of threads.
inner: Option<Inner>,
}
impl Console {
fn style_idle() -> ProgressStyle {
ProgressStyle::default_spinner().template("{prefix:.bold.dim} 🚶")
}
fn style_running() -> ProgressStyle {
ProgressStyle::default_spinner().template(&format!(
"{{prefix:.bold.dim}} 🏃 {} {{wide_msg}}",
style("{elapsed}").dim()
))
}
pub fn new() -> Self {
// Delay initialization until we receive a BeginBuild event.
Self::default()
}
}
impl EventHandler for Console {
type Error = io::Error;
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
match event {
Event::BeginBuild(event) => {
if self.inner.is_none() {
self.inner =
Some(Inner::new(event.threads, event.name, timestamp));
}
}
Event::EndBuild(event) => {
if let Some(inner) = self.inner.take() {
inner.end_build(timestamp, event)?;
}
}
Event::BeginTask(event) => {
if let Some(inner) = &mut self.inner {
inner.begin_task(timestamp, event)?;
}
}
Event::TaskOutput(event) => {
if let Some(inner) = &mut self.inner {
| }
Event::EndTask(event) => {
if let Some(inner) = &mut self.inner {
inner.end_task(timestamp, event)?;
}
}
Event::Delete(event) => {
if let Some(inner) = &mut self.inner {
inner.delete(timestamp, event)?;
}
}
Event::ChecksumError(event) => {
if let Some(inner) = &mut self.inner {
inner.checksum_error(timestamp, event)?;
}
}
}
Ok(())
}
fn finish(&mut self) -> Result<(), Self::Error> {
if let Some(inner) = self.inner.take() {
inner.finish()?;
}
Ok(())
}
}
| inner.tasks[event.id].buf.extend(event.chunk);
}
| conditional_block |
EasyController.py | # Copyright 2009 by Tiago Antao <tiagoantao@gmail.com>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
This module allows to control GenePop through an easier interface.
This interface is less efficient than the standard GenePopControler
"""
from Controller import GenePopController
from Bio.PopGen import GenePop
class EasyController:
def __init__(self, fname, genepop_dir = None):
"""Initializes the controller.
genepop_dir is the directory where GenePop is.
The binary should be called Genepop (capital G)
"""
self._fname = fname
self._controller = GenePopController(genepop_dir)
self.__fst_pair_locus = {} #More caches like this needed!
def get_basic_info(self):
f=open(self._fname)
rec = GenePop.read(f)
f.close()
return rec.pop_list, rec.loci_list
def test_hw_pop(self, pop_pos, test_type = "probability"):
if test_type=="deficiency":
hw_res = self._controller.test_pop_hz_deficiency(self._fname)
elif test_type=="excess":
hw_res = self._controller.test_pop_hz_excess(self._fname)
else:
loci_res, hw_res, fisher_full = self._controller.test_pop_hz_prob(self._fname, ".P")
for i in range(pop_pos-1):
hw_res.next()
return hw_res.next()
def test_hw_global(self, test_type = "deficiency", enum_test = True,
dememorization = 10000, batches = 20, iterations = 5000):
if test_type=="deficiency":
pop_res, loc_res, all = self._controller.test_global_hz_deficiency(self._fname,
enum_test, dememorization, batches, iterations)
else:
pop_res, loc_res, all = self._controller.test_global_hz_excess(self._fname,
enum_test, dememorization, batches, iterations)
return list(pop_res), list(loc_res), all
def test_ld_all_pair(self, locus1, locus2,
dememorization = 10000, batches = 20, iterations = 5000):
all_ld = self._controller.test_ld(self._fname, dememorization, batches, iterations)[1]
for ld_case in all_ld:
(l1, l2), result = ld_case
if (l1==locus1 and l2==locus2) or (l1==locus2 and l2==locus1):
return result
def estimate_nm(self):
""" Estimate Nm. Just a simple bridge.
"""
return self._controller.estimate_nm(self._fname)
def get_heterozygosity_info(self, pop_pos, locus_name):
"""Returns the heterozygosity info for a certain locus on a population.
Returns (Expected homozygotes, observed homozygotes,
Expected heterozygotes, observed heterozygotes)
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][1]
def get_genotype_count(self, pop_pos, locus_name):
"""Returns the genotype counts for a certain population and locus
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][0]
def get_fis(self, pop_pos, locus_name):
"""Returns the Fis for a certain population and locus
Below CW means Cockerham and Weir and RH means Robertson and Hill.
Returns a pair:
dictionary [allele] = (repetition count, frequency, Fis CW )
with information for each allele
a triple with total number of alleles, Fis CW, Fis RH
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][2:]
def get_alleles(self, pop_pos, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][2].keys()
def get_alleles_all_pops(self, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
|
def get_allele_frequency(self, pop_pos, locus_name):
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
alleles = locus_info[1]
pop_name, freqs, total = locus_info[2][pop_pos]
allele_freq = {}
for i in range(len(alleles)):
allele_freq[alleles[i]] = freqs[i]
return total, allele_freq
def get_multilocus_f_stats(self):
""" Returns the multilocus F stats
Explain averaging.
Returns Fis(CW), Fst, Fit
"""
return self._controller.calc_fst_all(self._fname)[0]
def get_f_stats(self, locus_name):
""" Returns F stats for a locus
Returns Fis(CW), Fst, Fit, Qintra, Qinter
"""
loci_iter = self._controller.calc_fst_all(self._fname)[1]
for name, fis, fst, fit, qintra, qinter in loci_iter:
if name == locus_name:
return fis, fst, fit, qintra, qinter
def get_avg_fis(self):
return self._controller.calc_diversities_fis_with_identity(self._fname)[1]
def get_avg_fst_pair(self):
return self._controller.calc_fst_pair(self._fname)[1]
def get_avg_fst_pair_locus(self, locus):
if len(self.__fst_pair_locus) == 0:
iter = self._controller.calc_fst_pair(self._fname)[0]
for locus_info in iter:
self.__fst_pair_locus[locus_info[0]] = locus_info[1]
return self.__fst_pair_locus[locus]
def calc_ibd(self, is_diplo = True, stat="a", scale="Log", min_dist=0.00001):
if is_diplo:
return self._controller.calc_ibd_diplo(self._fname, stat, scale, min_dist)
else:
return self._controller.calc_ibd_haplo(self._fname, stat, scale, min_dist)
| if locus_info[0] == locus_name:
return locus_info[1] | conditional_block |
EasyController.py | # Copyright 2009 by Tiago Antao <tiagoantao@gmail.com>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
This module allows to control GenePop through an easier interface.
This interface is less efficient than the standard GenePopControler
"""
from Controller import GenePopController
from Bio.PopGen import GenePop
class EasyController:
def __init__(self, fname, genepop_dir = None):
"""Initializes the controller.
genepop_dir is the directory where GenePop is.
The binary should be called Genepop (capital G)
"""
self._fname = fname
self._controller = GenePopController(genepop_dir)
self.__fst_pair_locus = {} #More caches like this needed!
def get_basic_info(self):
f=open(self._fname)
rec = GenePop.read(f)
f.close()
return rec.pop_list, rec.loci_list
def | (self, pop_pos, test_type = "probability"):
if test_type=="deficiency":
hw_res = self._controller.test_pop_hz_deficiency(self._fname)
elif test_type=="excess":
hw_res = self._controller.test_pop_hz_excess(self._fname)
else:
loci_res, hw_res, fisher_full = self._controller.test_pop_hz_prob(self._fname, ".P")
for i in range(pop_pos-1):
hw_res.next()
return hw_res.next()
def test_hw_global(self, test_type = "deficiency", enum_test = True,
dememorization = 10000, batches = 20, iterations = 5000):
if test_type=="deficiency":
pop_res, loc_res, all = self._controller.test_global_hz_deficiency(self._fname,
enum_test, dememorization, batches, iterations)
else:
pop_res, loc_res, all = self._controller.test_global_hz_excess(self._fname,
enum_test, dememorization, batches, iterations)
return list(pop_res), list(loc_res), all
def test_ld_all_pair(self, locus1, locus2,
dememorization = 10000, batches = 20, iterations = 5000):
all_ld = self._controller.test_ld(self._fname, dememorization, batches, iterations)[1]
for ld_case in all_ld:
(l1, l2), result = ld_case
if (l1==locus1 and l2==locus2) or (l1==locus2 and l2==locus1):
return result
def estimate_nm(self):
""" Estimate Nm. Just a simple bridge.
"""
return self._controller.estimate_nm(self._fname)
def get_heterozygosity_info(self, pop_pos, locus_name):
"""Returns the heterozygosity info for a certain locus on a population.
Returns (Expected homozygotes, observed homozygotes,
Expected heterozygotes, observed heterozygotes)
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][1]
def get_genotype_count(self, pop_pos, locus_name):
"""Returns the genotype counts for a certain population and locus
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][0]
def get_fis(self, pop_pos, locus_name):
"""Returns the Fis for a certain population and locus
Below CW means Cockerham and Weir and RH means Robertson and Hill.
Returns a pair:
dictionary [allele] = (repetition count, frequency, Fis CW )
with information for each allele
a triple with total number of alleles, Fis CW, Fis RH
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][2:]
def get_alleles(self, pop_pos, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][2].keys()
def get_alleles_all_pops(self, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
return locus_info[1]
def get_allele_frequency(self, pop_pos, locus_name):
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
alleles = locus_info[1]
pop_name, freqs, total = locus_info[2][pop_pos]
allele_freq = {}
for i in range(len(alleles)):
allele_freq[alleles[i]] = freqs[i]
return total, allele_freq
def get_multilocus_f_stats(self):
""" Returns the multilocus F stats
Explain averaging.
Returns Fis(CW), Fst, Fit
"""
return self._controller.calc_fst_all(self._fname)[0]
def get_f_stats(self, locus_name):
""" Returns F stats for a locus
Returns Fis(CW), Fst, Fit, Qintra, Qinter
"""
loci_iter = self._controller.calc_fst_all(self._fname)[1]
for name, fis, fst, fit, qintra, qinter in loci_iter:
if name == locus_name:
return fis, fst, fit, qintra, qinter
def get_avg_fis(self):
return self._controller.calc_diversities_fis_with_identity(self._fname)[1]
def get_avg_fst_pair(self):
return self._controller.calc_fst_pair(self._fname)[1]
def get_avg_fst_pair_locus(self, locus):
if len(self.__fst_pair_locus) == 0:
iter = self._controller.calc_fst_pair(self._fname)[0]
for locus_info in iter:
self.__fst_pair_locus[locus_info[0]] = locus_info[1]
return self.__fst_pair_locus[locus]
def calc_ibd(self, is_diplo = True, stat="a", scale="Log", min_dist=0.00001):
if is_diplo:
return self._controller.calc_ibd_diplo(self._fname, stat, scale, min_dist)
else:
return self._controller.calc_ibd_haplo(self._fname, stat, scale, min_dist)
| test_hw_pop | identifier_name |
EasyController.py | # Copyright 2009 by Tiago Antao <tiagoantao@gmail.com>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package. | This module allows to control GenePop through an easier interface.
This interface is less efficient than the standard GenePopControler
"""
from Controller import GenePopController
from Bio.PopGen import GenePop
class EasyController:
def __init__(self, fname, genepop_dir = None):
"""Initializes the controller.
genepop_dir is the directory where GenePop is.
The binary should be called Genepop (capital G)
"""
self._fname = fname
self._controller = GenePopController(genepop_dir)
self.__fst_pair_locus = {} #More caches like this needed!
def get_basic_info(self):
f=open(self._fname)
rec = GenePop.read(f)
f.close()
return rec.pop_list, rec.loci_list
def test_hw_pop(self, pop_pos, test_type = "probability"):
if test_type=="deficiency":
hw_res = self._controller.test_pop_hz_deficiency(self._fname)
elif test_type=="excess":
hw_res = self._controller.test_pop_hz_excess(self._fname)
else:
loci_res, hw_res, fisher_full = self._controller.test_pop_hz_prob(self._fname, ".P")
for i in range(pop_pos-1):
hw_res.next()
return hw_res.next()
def test_hw_global(self, test_type = "deficiency", enum_test = True,
dememorization = 10000, batches = 20, iterations = 5000):
if test_type=="deficiency":
pop_res, loc_res, all = self._controller.test_global_hz_deficiency(self._fname,
enum_test, dememorization, batches, iterations)
else:
pop_res, loc_res, all = self._controller.test_global_hz_excess(self._fname,
enum_test, dememorization, batches, iterations)
return list(pop_res), list(loc_res), all
def test_ld_all_pair(self, locus1, locus2,
dememorization = 10000, batches = 20, iterations = 5000):
all_ld = self._controller.test_ld(self._fname, dememorization, batches, iterations)[1]
for ld_case in all_ld:
(l1, l2), result = ld_case
if (l1==locus1 and l2==locus2) or (l1==locus2 and l2==locus1):
return result
def estimate_nm(self):
""" Estimate Nm. Just a simple bridge.
"""
return self._controller.estimate_nm(self._fname)
def get_heterozygosity_info(self, pop_pos, locus_name):
"""Returns the heterozygosity info for a certain locus on a population.
Returns (Expected homozygotes, observed homozygotes,
Expected heterozygotes, observed heterozygotes)
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][1]
def get_genotype_count(self, pop_pos, locus_name):
"""Returns the genotype counts for a certain population and locus
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][0]
def get_fis(self, pop_pos, locus_name):
"""Returns the Fis for a certain population and locus
Below CW means Cockerham and Weir and RH means Robertson and Hill.
Returns a pair:
dictionary [allele] = (repetition count, frequency, Fis CW )
with information for each allele
a triple with total number of alleles, Fis CW, Fis RH
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][2:]
def get_alleles(self, pop_pos, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][2].keys()
def get_alleles_all_pops(self, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
return locus_info[1]
def get_allele_frequency(self, pop_pos, locus_name):
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
alleles = locus_info[1]
pop_name, freqs, total = locus_info[2][pop_pos]
allele_freq = {}
for i in range(len(alleles)):
allele_freq[alleles[i]] = freqs[i]
return total, allele_freq
def get_multilocus_f_stats(self):
""" Returns the multilocus F stats
Explain averaging.
Returns Fis(CW), Fst, Fit
"""
return self._controller.calc_fst_all(self._fname)[0]
def get_f_stats(self, locus_name):
""" Returns F stats for a locus
Returns Fis(CW), Fst, Fit, Qintra, Qinter
"""
loci_iter = self._controller.calc_fst_all(self._fname)[1]
for name, fis, fst, fit, qintra, qinter in loci_iter:
if name == locus_name:
return fis, fst, fit, qintra, qinter
def get_avg_fis(self):
return self._controller.calc_diversities_fis_with_identity(self._fname)[1]
def get_avg_fst_pair(self):
return self._controller.calc_fst_pair(self._fname)[1]
def get_avg_fst_pair_locus(self, locus):
if len(self.__fst_pair_locus) == 0:
iter = self._controller.calc_fst_pair(self._fname)[0]
for locus_info in iter:
self.__fst_pair_locus[locus_info[0]] = locus_info[1]
return self.__fst_pair_locus[locus]
def calc_ibd(self, is_diplo = True, stat="a", scale="Log", min_dist=0.00001):
if is_diplo:
return self._controller.calc_ibd_diplo(self._fname, stat, scale, min_dist)
else:
return self._controller.calc_ibd_haplo(self._fname, stat, scale, min_dist) |
""" | random_line_split |
EasyController.py | # Copyright 2009 by Tiago Antao <tiagoantao@gmail.com>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
This module allows to control GenePop through an easier interface.
This interface is less efficient than the standard GenePopControler
"""
from Controller import GenePopController
from Bio.PopGen import GenePop
class EasyController:
def __init__(self, fname, genepop_dir = None):
"""Initializes the controller.
genepop_dir is the directory where GenePop is.
The binary should be called Genepop (capital G)
"""
self._fname = fname
self._controller = GenePopController(genepop_dir)
self.__fst_pair_locus = {} #More caches like this needed!
def get_basic_info(self):
f=open(self._fname)
rec = GenePop.read(f)
f.close()
return rec.pop_list, rec.loci_list
def test_hw_pop(self, pop_pos, test_type = "probability"):
if test_type=="deficiency":
hw_res = self._controller.test_pop_hz_deficiency(self._fname)
elif test_type=="excess":
hw_res = self._controller.test_pop_hz_excess(self._fname)
else:
loci_res, hw_res, fisher_full = self._controller.test_pop_hz_prob(self._fname, ".P")
for i in range(pop_pos-1):
hw_res.next()
return hw_res.next()
def test_hw_global(self, test_type = "deficiency", enum_test = True,
dememorization = 10000, batches = 20, iterations = 5000):
if test_type=="deficiency":
pop_res, loc_res, all = self._controller.test_global_hz_deficiency(self._fname,
enum_test, dememorization, batches, iterations)
else:
pop_res, loc_res, all = self._controller.test_global_hz_excess(self._fname,
enum_test, dememorization, batches, iterations)
return list(pop_res), list(loc_res), all
def test_ld_all_pair(self, locus1, locus2,
dememorization = 10000, batches = 20, iterations = 5000):
|
def estimate_nm(self):
""" Estimate Nm. Just a simple bridge.
"""
return self._controller.estimate_nm(self._fname)
def get_heterozygosity_info(self, pop_pos, locus_name):
"""Returns the heterozygosity info for a certain locus on a population.
Returns (Expected homozygotes, observed homozygotes,
Expected heterozygotes, observed heterozygotes)
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][1]
def get_genotype_count(self, pop_pos, locus_name):
"""Returns the genotype counts for a certain population and locus
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][0]
def get_fis(self, pop_pos, locus_name):
"""Returns the Fis for a certain population and locus
Below CW means Cockerham and Weir and RH means Robertson and Hill.
Returns a pair:
dictionary [allele] = (repetition count, frequency, Fis CW )
with information for each allele
a triple with total number of alleles, Fis CW, Fis RH
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][2:]
def get_alleles(self, pop_pos, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][2].keys()
def get_alleles_all_pops(self, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
return locus_info[1]
def get_allele_frequency(self, pop_pos, locus_name):
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
alleles = locus_info[1]
pop_name, freqs, total = locus_info[2][pop_pos]
allele_freq = {}
for i in range(len(alleles)):
allele_freq[alleles[i]] = freqs[i]
return total, allele_freq
def get_multilocus_f_stats(self):
""" Returns the multilocus F stats
Explain averaging.
Returns Fis(CW), Fst, Fit
"""
return self._controller.calc_fst_all(self._fname)[0]
def get_f_stats(self, locus_name):
""" Returns F stats for a locus
Returns Fis(CW), Fst, Fit, Qintra, Qinter
"""
loci_iter = self._controller.calc_fst_all(self._fname)[1]
for name, fis, fst, fit, qintra, qinter in loci_iter:
if name == locus_name:
return fis, fst, fit, qintra, qinter
def get_avg_fis(self):
return self._controller.calc_diversities_fis_with_identity(self._fname)[1]
def get_avg_fst_pair(self):
return self._controller.calc_fst_pair(self._fname)[1]
def get_avg_fst_pair_locus(self, locus):
if len(self.__fst_pair_locus) == 0:
iter = self._controller.calc_fst_pair(self._fname)[0]
for locus_info in iter:
self.__fst_pair_locus[locus_info[0]] = locus_info[1]
return self.__fst_pair_locus[locus]
def calc_ibd(self, is_diplo = True, stat="a", scale="Log", min_dist=0.00001):
if is_diplo:
return self._controller.calc_ibd_diplo(self._fname, stat, scale, min_dist)
else:
return self._controller.calc_ibd_haplo(self._fname, stat, scale, min_dist)
| all_ld = self._controller.test_ld(self._fname, dememorization, batches, iterations)[1]
for ld_case in all_ld:
(l1, l2), result = ld_case
if (l1==locus1 and l2==locus2) or (l1==locus2 and l2==locus1):
return result | identifier_body |
align_of_val.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::mem::align_of_val;
use core::default::Default;
// pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
// unsafe { intrinsics::min_align_of_val(val) }
// }
macro_rules! align_of_val_test {
($T:ty, $size:expr) => ({
let v: $T = Default::default();
let size: usize = align_of_val::<$T>(&v);
assert_eq!(size, $size);
})
}
#[test]
fn align_of_val_test1() {
struct A;
let a: A = A;
let size: usize = align_of_val::<A>(&a);
assert_eq!(size, 1);
}
#[test]
fn align_of_val_test2() |
}
| {
align_of_val_test!( (u8), 1 );
align_of_val_test!( (u8, u16), 2 );
align_of_val_test!( (u8, u16, u32), 4 );
align_of_val_test!( (u8, u16, u32, u64), 8 );
} | identifier_body |
align_of_val.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::mem::align_of_val;
use core::default::Default;
// pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
// unsafe { intrinsics::min_align_of_val(val) }
// }
macro_rules! align_of_val_test {
($T:ty, $size:expr) => ({
let v: $T = Default::default();
let size: usize = align_of_val::<$T>(&v);
assert_eq!(size, $size);
})
}
#[test]
fn align_of_val_test1() {
struct A;
let a: A = A;
let size: usize = align_of_val::<A>(&a);
assert_eq!(size, 1);
}
#[test]
fn | () {
align_of_val_test!( (u8), 1 );
align_of_val_test!( (u8, u16), 2 );
align_of_val_test!( (u8, u16, u32), 4 );
align_of_val_test!( (u8, u16, u32, u64), 8 );
}
}
| align_of_val_test2 | identifier_name |
align_of_val.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::mem::align_of_val;
use core::default::Default;
// pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
// unsafe { intrinsics::min_align_of_val(val) }
// }
macro_rules! align_of_val_test {
($T:ty, $size:expr) => ({
let v: $T = Default::default();
let size: usize = align_of_val::<$T>(&v);
assert_eq!(size, $size);
})
}
#[test]
fn align_of_val_test1() {
struct A;
let a: A = A;
let size: usize = align_of_val::<A>(&a);
assert_eq!(size, 1);
}
#[test]
fn align_of_val_test2() {
align_of_val_test!( (u8), 1 );
align_of_val_test!( (u8, u16), 2 );
align_of_val_test!( (u8, u16, u32), 4 ); | align_of_val_test!( (u8, u16, u32, u64), 8 );
}
} | random_line_split | |
package.ts | import { getFilePath } from './utils';
import { join } from 'path';
import {
ensureDirectory,
readJsonFile,
writeTarball,
writeJsonFile,
exists
} from './fs';
export interface IPackage {
_id: string;
_rev: string;
name: string;
description: string;
'dist-tags': { [tag: string]: string };
versions: {
[version: string]: {
name: string;
version: string;
description: string;
main: string;
scripts: { [script: string]: string };
repository: { type: string; url: string; };
keywords: string[];
author: { name: string; email: string; };
licence: string;
bugs: { url: string; }
homepage: string;
dependencies: { [dep: string]: string; };
os: string[];
gypfile: boolean;
gitHead: string;
_id: string;
_from: string;
_npmVersion: string;
_nodeVersion: string;
_npmUser: { name: string; email: string; };
dist: { shasum: string; tarball: string };
maintainers: { name: string; email: string; }[];
directories: { };
}
};
readme: string;
maintainers: { name: string; email: string; }[];
time: { [version: string]: string; };
homepage: string;
keywords: string[];
repository: { type: string; url: string; };
author: { name: string; email: string; };
licence: string;
bugs: { url: string; };
readmeFilename: string;
users: { [user: string]: boolean };
_attachments: { };
}
export class Package {
data: IPackage;
packageRoot: string;
tarballRoot: string;
pkgJsonPath: string;
constructor(data?: IPackage, name?: string) { | this.tarballRoot = getFilePath(`tarballs/${this.data.name}`);
}
if (name) {
this.packageRoot = getFilePath(`packages/${name}`);
this.tarballRoot = getFilePath(`tarballs/${name}`);
}
this.pkgJsonPath = join(this.packageRoot, 'package.json');
}
init(): Promise<null> {
return this.ensureRootFolders()
.then(() => this.initDataFromPkgJson());
}
private prepareData(data: IPackage): IPackage {
if (!data.time) {
data.time = { modified: new Date().toISOString(), created: new Date().toISOString() };
}
Object.keys(data.versions).forEach(key => {
if (!(key in data.time)) {
data.time[key] = new Date().toISOString();
}
});
return data;
}
getData(): Promise<IPackage> {
if (!this.data) {
return this.initDataFromPkgJson()
.then(() => this.data);
} else {
return Promise.resolve(this.data);
}
}
initDataFromPkgJson(): Promise<null> {
return readJsonFile(this.pkgJsonPath)
.then((jsonData: IPackage) => this.data = jsonData)
.catch(err => console.error(err));
}
initPkgJsonFromData(): Promise<null> {
let data = this.data;
data._attachments = {};
return this.ensureRootFolders()
.then(() => exists(this.pkgJsonPath))
.then(oldVersionExists => {
if (oldVersionExists) {
return readJsonFile(this.pkgJsonPath)
.then((jsonData: IPackage) => {
if (jsonData) {
data.time = jsonData.time;
data = this.prepareData(data);
Object.keys(jsonData.versions).forEach(v => {
if (!data.versions[v]) {
data.versions[v] = jsonData.versions[v];
}
});
}
return writeJsonFile(this.pkgJsonPath, data);
});
} else {
return writeJsonFile(this.pkgJsonPath, this.prepareData(data));
}
});
}
saveTarballFromData(): Promise<null> {
return this.ensureRootFolders()
.then(() => {
let latestVersion = this.data['dist-tags'].latest;
let tarballPath = this.tarballRoot + '/' + this.data.name + '-' + latestVersion + '.tgz';
return writeTarball(this.data.name, this.data._attachments);
});
}
private ensureRootFolders(): Promise<null> {
return ensureDirectory(this.packageRoot)
.then(() => ensureDirectory(this.tarballRoot))
.catch(err => console.error(err));
}
} | if (data) {
this.data = data;
this.packageRoot = getFilePath(`packages/${this.data.name}`); | random_line_split |
package.ts | import { getFilePath } from './utils';
import { join } from 'path';
import {
ensureDirectory,
readJsonFile,
writeTarball,
writeJsonFile,
exists
} from './fs';
export interface IPackage {
_id: string;
_rev: string;
name: string;
description: string;
'dist-tags': { [tag: string]: string };
versions: {
[version: string]: {
name: string;
version: string;
description: string;
main: string;
scripts: { [script: string]: string };
repository: { type: string; url: string; };
keywords: string[];
author: { name: string; email: string; };
licence: string;
bugs: { url: string; }
homepage: string;
dependencies: { [dep: string]: string; };
os: string[];
gypfile: boolean;
gitHead: string;
_id: string;
_from: string;
_npmVersion: string;
_nodeVersion: string;
_npmUser: { name: string; email: string; };
dist: { shasum: string; tarball: string };
maintainers: { name: string; email: string; }[];
directories: { };
}
};
readme: string;
maintainers: { name: string; email: string; }[];
time: { [version: string]: string; };
homepage: string;
keywords: string[];
repository: { type: string; url: string; };
author: { name: string; email: string; };
licence: string;
bugs: { url: string; };
readmeFilename: string;
users: { [user: string]: boolean };
_attachments: { };
}
export class Package {
data: IPackage;
packageRoot: string;
tarballRoot: string;
pkgJsonPath: string;
constructor(data?: IPackage, name?: string) {
if (data) {
this.data = data;
this.packageRoot = getFilePath(`packages/${this.data.name}`);
this.tarballRoot = getFilePath(`tarballs/${this.data.name}`);
}
if (name) |
this.pkgJsonPath = join(this.packageRoot, 'package.json');
}
init(): Promise<null> {
return this.ensureRootFolders()
.then(() => this.initDataFromPkgJson());
}
private prepareData(data: IPackage): IPackage {
if (!data.time) {
data.time = { modified: new Date().toISOString(), created: new Date().toISOString() };
}
Object.keys(data.versions).forEach(key => {
if (!(key in data.time)) {
data.time[key] = new Date().toISOString();
}
});
return data;
}
getData(): Promise<IPackage> {
if (!this.data) {
return this.initDataFromPkgJson()
.then(() => this.data);
} else {
return Promise.resolve(this.data);
}
}
initDataFromPkgJson(): Promise<null> {
return readJsonFile(this.pkgJsonPath)
.then((jsonData: IPackage) => this.data = jsonData)
.catch(err => console.error(err));
}
initPkgJsonFromData(): Promise<null> {
let data = this.data;
data._attachments = {};
return this.ensureRootFolders()
.then(() => exists(this.pkgJsonPath))
.then(oldVersionExists => {
if (oldVersionExists) {
return readJsonFile(this.pkgJsonPath)
.then((jsonData: IPackage) => {
if (jsonData) {
data.time = jsonData.time;
data = this.prepareData(data);
Object.keys(jsonData.versions).forEach(v => {
if (!data.versions[v]) {
data.versions[v] = jsonData.versions[v];
}
});
}
return writeJsonFile(this.pkgJsonPath, data);
});
} else {
return writeJsonFile(this.pkgJsonPath, this.prepareData(data));
}
});
}
saveTarballFromData(): Promise<null> {
return this.ensureRootFolders()
.then(() => {
let latestVersion = this.data['dist-tags'].latest;
let tarballPath = this.tarballRoot + '/' + this.data.name + '-' + latestVersion + '.tgz';
return writeTarball(this.data.name, this.data._attachments);
});
}
private ensureRootFolders(): Promise<null> {
return ensureDirectory(this.packageRoot)
.then(() => ensureDirectory(this.tarballRoot))
.catch(err => console.error(err));
}
}
| {
this.packageRoot = getFilePath(`packages/${name}`);
this.tarballRoot = getFilePath(`tarballs/${name}`);
} | conditional_block |
package.ts | import { getFilePath } from './utils';
import { join } from 'path';
import {
ensureDirectory,
readJsonFile,
writeTarball,
writeJsonFile,
exists
} from './fs';
export interface IPackage {
_id: string;
_rev: string;
name: string;
description: string;
'dist-tags': { [tag: string]: string };
versions: {
[version: string]: {
name: string;
version: string;
description: string;
main: string;
scripts: { [script: string]: string };
repository: { type: string; url: string; };
keywords: string[];
author: { name: string; email: string; };
licence: string;
bugs: { url: string; }
homepage: string;
dependencies: { [dep: string]: string; };
os: string[];
gypfile: boolean;
gitHead: string;
_id: string;
_from: string;
_npmVersion: string;
_nodeVersion: string;
_npmUser: { name: string; email: string; };
dist: { shasum: string; tarball: string };
maintainers: { name: string; email: string; }[];
directories: { };
}
};
readme: string;
maintainers: { name: string; email: string; }[];
time: { [version: string]: string; };
homepage: string;
keywords: string[];
repository: { type: string; url: string; };
author: { name: string; email: string; };
licence: string;
bugs: { url: string; };
readmeFilename: string;
users: { [user: string]: boolean };
_attachments: { };
}
export class Package {
data: IPackage;
packageRoot: string;
tarballRoot: string;
pkgJsonPath: string;
| (data?: IPackage, name?: string) {
if (data) {
this.data = data;
this.packageRoot = getFilePath(`packages/${this.data.name}`);
this.tarballRoot = getFilePath(`tarballs/${this.data.name}`);
}
if (name) {
this.packageRoot = getFilePath(`packages/${name}`);
this.tarballRoot = getFilePath(`tarballs/${name}`);
}
this.pkgJsonPath = join(this.packageRoot, 'package.json');
}
init(): Promise<null> {
return this.ensureRootFolders()
.then(() => this.initDataFromPkgJson());
}
private prepareData(data: IPackage): IPackage {
if (!data.time) {
data.time = { modified: new Date().toISOString(), created: new Date().toISOString() };
}
Object.keys(data.versions).forEach(key => {
if (!(key in data.time)) {
data.time[key] = new Date().toISOString();
}
});
return data;
}
getData(): Promise<IPackage> {
if (!this.data) {
return this.initDataFromPkgJson()
.then(() => this.data);
} else {
return Promise.resolve(this.data);
}
}
initDataFromPkgJson(): Promise<null> {
return readJsonFile(this.pkgJsonPath)
.then((jsonData: IPackage) => this.data = jsonData)
.catch(err => console.error(err));
}
initPkgJsonFromData(): Promise<null> {
let data = this.data;
data._attachments = {};
return this.ensureRootFolders()
.then(() => exists(this.pkgJsonPath))
.then(oldVersionExists => {
if (oldVersionExists) {
return readJsonFile(this.pkgJsonPath)
.then((jsonData: IPackage) => {
if (jsonData) {
data.time = jsonData.time;
data = this.prepareData(data);
Object.keys(jsonData.versions).forEach(v => {
if (!data.versions[v]) {
data.versions[v] = jsonData.versions[v];
}
});
}
return writeJsonFile(this.pkgJsonPath, data);
});
} else {
return writeJsonFile(this.pkgJsonPath, this.prepareData(data));
}
});
}
saveTarballFromData(): Promise<null> {
return this.ensureRootFolders()
.then(() => {
let latestVersion = this.data['dist-tags'].latest;
let tarballPath = this.tarballRoot + '/' + this.data.name + '-' + latestVersion + '.tgz';
return writeTarball(this.data.name, this.data._attachments);
});
}
private ensureRootFolders(): Promise<null> {
return ensureDirectory(this.packageRoot)
.then(() => ensureDirectory(this.tarballRoot))
.catch(err => console.error(err));
}
}
| constructor | identifier_name |
package.ts | import { getFilePath } from './utils';
import { join } from 'path';
import {
ensureDirectory,
readJsonFile,
writeTarball,
writeJsonFile,
exists
} from './fs';
export interface IPackage {
_id: string;
_rev: string;
name: string;
description: string;
'dist-tags': { [tag: string]: string };
versions: {
[version: string]: {
name: string;
version: string;
description: string;
main: string;
scripts: { [script: string]: string };
repository: { type: string; url: string; };
keywords: string[];
author: { name: string; email: string; };
licence: string;
bugs: { url: string; }
homepage: string;
dependencies: { [dep: string]: string; };
os: string[];
gypfile: boolean;
gitHead: string;
_id: string;
_from: string;
_npmVersion: string;
_nodeVersion: string;
_npmUser: { name: string; email: string; };
dist: { shasum: string; tarball: string };
maintainers: { name: string; email: string; }[];
directories: { };
}
};
readme: string;
maintainers: { name: string; email: string; }[];
time: { [version: string]: string; };
homepage: string;
keywords: string[];
repository: { type: string; url: string; };
author: { name: string; email: string; };
licence: string;
bugs: { url: string; };
readmeFilename: string;
users: { [user: string]: boolean };
_attachments: { };
}
export class Package {
data: IPackage;
packageRoot: string;
tarballRoot: string;
pkgJsonPath: string;
constructor(data?: IPackage, name?: string) {
if (data) {
this.data = data;
this.packageRoot = getFilePath(`packages/${this.data.name}`);
this.tarballRoot = getFilePath(`tarballs/${this.data.name}`);
}
if (name) {
this.packageRoot = getFilePath(`packages/${name}`);
this.tarballRoot = getFilePath(`tarballs/${name}`);
}
this.pkgJsonPath = join(this.packageRoot, 'package.json');
}
init(): Promise<null> {
return this.ensureRootFolders()
.then(() => this.initDataFromPkgJson());
}
private prepareData(data: IPackage): IPackage {
if (!data.time) {
data.time = { modified: new Date().toISOString(), created: new Date().toISOString() };
}
Object.keys(data.versions).forEach(key => {
if (!(key in data.time)) {
data.time[key] = new Date().toISOString();
}
});
return data;
}
getData(): Promise<IPackage> |
initDataFromPkgJson(): Promise<null> {
return readJsonFile(this.pkgJsonPath)
.then((jsonData: IPackage) => this.data = jsonData)
.catch(err => console.error(err));
}
initPkgJsonFromData(): Promise<null> {
let data = this.data;
data._attachments = {};
return this.ensureRootFolders()
.then(() => exists(this.pkgJsonPath))
.then(oldVersionExists => {
if (oldVersionExists) {
return readJsonFile(this.pkgJsonPath)
.then((jsonData: IPackage) => {
if (jsonData) {
data.time = jsonData.time;
data = this.prepareData(data);
Object.keys(jsonData.versions).forEach(v => {
if (!data.versions[v]) {
data.versions[v] = jsonData.versions[v];
}
});
}
return writeJsonFile(this.pkgJsonPath, data);
});
} else {
return writeJsonFile(this.pkgJsonPath, this.prepareData(data));
}
});
}
saveTarballFromData(): Promise<null> {
return this.ensureRootFolders()
.then(() => {
let latestVersion = this.data['dist-tags'].latest;
let tarballPath = this.tarballRoot + '/' + this.data.name + '-' + latestVersion + '.tgz';
return writeTarball(this.data.name, this.data._attachments);
});
}
private ensureRootFolders(): Promise<null> {
return ensureDirectory(this.packageRoot)
.then(() => ensureDirectory(this.tarballRoot))
.catch(err => console.error(err));
}
}
| {
if (!this.data) {
return this.initDataFromPkgJson()
.then(() => this.data);
} else {
return Promise.resolve(this.data);
}
} | identifier_body |
mod.rs | //! Variable header in MQTT
use std::io;
use std::string::FromUtf8Error;
use crate::topic_name::{TopicNameDecodeError, TopicNameError};
pub use self::connect_ack_flags::ConnackFlags;
pub use self::connect_flags::ConnectFlags;
pub use self::connect_ret_code::ConnectReturnCode; | pub use self::packet_identifier::PacketIdentifier;
pub use self::protocol_level::ProtocolLevel;
pub use self::protocol_name::ProtocolName;
pub use self::topic_name::TopicNameHeader;
mod connect_ack_flags;
mod connect_flags;
mod connect_ret_code;
mod keep_alive;
mod packet_identifier;
pub mod protocol_level;
mod protocol_name;
mod topic_name;
/// Errors while decoding variable header
#[derive(Debug, thiserror::Error)]
pub enum VariableHeaderError {
#[error(transparent)]
IoError(#[from] io::Error),
#[error("invalid reserved flags")]
InvalidReservedFlag,
#[error(transparent)]
FromUtf8Error(#[from] FromUtf8Error),
#[error(transparent)]
TopicNameError(#[from] TopicNameError),
#[error("invalid protocol version")]
InvalidProtocolVersion,
}
impl From<TopicNameDecodeError> for VariableHeaderError {
fn from(err: TopicNameDecodeError) -> VariableHeaderError {
match err {
TopicNameDecodeError::IoError(e) => Self::IoError(e),
TopicNameDecodeError::InvalidTopicName(e) => Self::TopicNameError(e),
}
}
} | pub use self::keep_alive::KeepAlive; | random_line_split |
mod.rs | //! Variable header in MQTT
use std::io;
use std::string::FromUtf8Error;
use crate::topic_name::{TopicNameDecodeError, TopicNameError};
pub use self::connect_ack_flags::ConnackFlags;
pub use self::connect_flags::ConnectFlags;
pub use self::connect_ret_code::ConnectReturnCode;
pub use self::keep_alive::KeepAlive;
pub use self::packet_identifier::PacketIdentifier;
pub use self::protocol_level::ProtocolLevel;
pub use self::protocol_name::ProtocolName;
pub use self::topic_name::TopicNameHeader;
mod connect_ack_flags;
mod connect_flags;
mod connect_ret_code;
mod keep_alive;
mod packet_identifier;
pub mod protocol_level;
mod protocol_name;
mod topic_name;
/// Errors while decoding variable header
#[derive(Debug, thiserror::Error)]
pub enum VariableHeaderError {
#[error(transparent)]
IoError(#[from] io::Error),
#[error("invalid reserved flags")]
InvalidReservedFlag,
#[error(transparent)]
FromUtf8Error(#[from] FromUtf8Error),
#[error(transparent)]
TopicNameError(#[from] TopicNameError),
#[error("invalid protocol version")]
InvalidProtocolVersion,
}
impl From<TopicNameDecodeError> for VariableHeaderError {
fn | (err: TopicNameDecodeError) -> VariableHeaderError {
match err {
TopicNameDecodeError::IoError(e) => Self::IoError(e),
TopicNameDecodeError::InvalidTopicName(e) => Self::TopicNameError(e),
}
}
}
| from | identifier_name |
mod.rs | //! Variable header in MQTT
use std::io;
use std::string::FromUtf8Error;
use crate::topic_name::{TopicNameDecodeError, TopicNameError};
pub use self::connect_ack_flags::ConnackFlags;
pub use self::connect_flags::ConnectFlags;
pub use self::connect_ret_code::ConnectReturnCode;
pub use self::keep_alive::KeepAlive;
pub use self::packet_identifier::PacketIdentifier;
pub use self::protocol_level::ProtocolLevel;
pub use self::protocol_name::ProtocolName;
pub use self::topic_name::TopicNameHeader;
mod connect_ack_flags;
mod connect_flags;
mod connect_ret_code;
mod keep_alive;
mod packet_identifier;
pub mod protocol_level;
mod protocol_name;
mod topic_name;
/// Errors while decoding variable header
#[derive(Debug, thiserror::Error)]
pub enum VariableHeaderError {
#[error(transparent)]
IoError(#[from] io::Error),
#[error("invalid reserved flags")]
InvalidReservedFlag,
#[error(transparent)]
FromUtf8Error(#[from] FromUtf8Error),
#[error(transparent)]
TopicNameError(#[from] TopicNameError),
#[error("invalid protocol version")]
InvalidProtocolVersion,
}
impl From<TopicNameDecodeError> for VariableHeaderError {
fn from(err: TopicNameDecodeError) -> VariableHeaderError |
}
| {
match err {
TopicNameDecodeError::IoError(e) => Self::IoError(e),
TopicNameDecodeError::InvalidTopicName(e) => Self::TopicNameError(e),
}
} | identifier_body |
shootout-chameneos-redux.rs | // The Computer Language Benchmarks Game
// http://benchmarksgame.alioth.debian.org/
//
// contributed by the Rust Project Developers
// Copyright (c) 2012-2014 The Rust Project Developers
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of "The Computer Language Benchmarks Game" nor
// the name of "The Computer Language Shootout Benchmarks" nor the
// names of its contributors may be used to endorse or promote
// products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
// no-pretty-expanded
use self::Color::{Red, Yellow, Blue};
use std::sync::mpsc::{channel, Sender, Receiver};
use std::fmt;
use std::thread::Thread;
fn print_complements() {
let all = [Blue, Red, Yellow];
for aa in &all {
for bb in &all {
println!("{:?} + {:?} -> {:?}", *aa, *bb, transform(*aa, *bb));
}
}
}
#[derive(Copy)]
enum Color {
Red,
Yellow,
Blue,
}
impl fmt::Debug for Color {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let str = match *self {
Red => "red",
Yellow => "yellow",
Blue => "blue",
};
write!(f, "{}", str)
}
}
#[derive(Copy)]
struct CreatureInfo {
name: uint,
color: Color
}
fn show_color_list(set: Vec<Color>) -> String {
let mut out = String::new();
for col in &set {
out.push(' ');
out.push_str(&format!("{:?}", col));
}
out
}
fn show_digit(nn: uint) -> &'static str {
match nn {
0 => {" zero"}
1 => {" one"}
2 => {" two"}
3 => {" three"}
4 => {" four"}
5 => {" five"}
6 => {" six"}
7 => {" seven"}
8 => {" eight"}
9 => {" nine"}
_ => {panic!("expected digits from 0 to 9...")}
}
}
struct Number(uint);
impl fmt::Debug for Number {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut out = vec![];
let Number(mut num) = *self;
if num == 0 { out.push(show_digit(0)) };
while num != 0 {
let dig = num % 10;
num = num / 10;
let s = show_digit(dig);
out.push(s);
}
for s in out.iter().rev() {
try!(write!(f, "{}", s))
}
Ok(())
}
}
fn transform(aa: Color, bb: Color) -> Color {
match (aa, bb) {
(Red, Red ) => { Red }
(Red, Yellow) => { Blue }
(Red, Blue ) => { Yellow }
(Yellow, Red ) => { Blue }
(Yellow, Yellow) => { Yellow }
(Yellow, Blue ) => { Red }
(Blue, Red ) => { Yellow }
(Blue, Yellow) => { Red }
(Blue, Blue ) => { Blue }
}
}
fn creature(
name: uint,
mut color: Color,
from_rendezvous: Receiver<CreatureInfo>,
to_rendezvous: Sender<CreatureInfo>,
to_rendezvous_log: Sender<String>
) {
let mut creatures_met = 0i32;
let mut evil_clones_met = 0;
let mut rendezvous = from_rendezvous.iter();
loop {
// ask for a pairing
to_rendezvous.send(CreatureInfo {name: name, color: color}).unwrap();
| // track some statistics
creatures_met += 1;
if other_creature.name == name {
evil_clones_met += 1;
}
}
None => break
}
}
// log creatures met and evil clones of self
let report = format!("{}{:?}", creatures_met, Number(evil_clones_met));
to_rendezvous_log.send(report).unwrap();
}
fn rendezvous(nn: uint, set: Vec<Color>) {
// these ports will allow us to hear from the creatures
let (to_rendezvous, from_creatures) = channel::<CreatureInfo>();
// these channels will be passed to the creatures so they can talk to us
let (to_rendezvous_log, from_creatures_log) = channel::<String>();
// these channels will allow us to talk to each creature by 'name'/index
let to_creature: Vec<Sender<CreatureInfo>> =
set.iter().enumerate().map(|(ii, &col)| {
// create each creature as a listener with a port, and
// give us a channel to talk to each
let to_rendezvous = to_rendezvous.clone();
let to_rendezvous_log = to_rendezvous_log.clone();
let (to_creature, from_rendezvous) = channel();
Thread::spawn(move|| {
creature(ii,
col,
from_rendezvous,
to_rendezvous,
to_rendezvous_log);
});
to_creature
}).collect();
let mut creatures_met = 0;
// set up meetings...
for _ in 0..nn {
let fst_creature = from_creatures.recv().unwrap();
let snd_creature = from_creatures.recv().unwrap();
creatures_met += 2;
to_creature[fst_creature.name].send(snd_creature).unwrap();
to_creature[snd_creature.name].send(fst_creature).unwrap();
}
// tell each creature to stop
drop(to_creature);
// print each color in the set
println!("{}", show_color_list(set));
// print each creature's stats
drop(to_rendezvous_log);
for rep in from_creatures_log.iter() {
println!("{}", rep);
}
// print the total number of creatures met
println!("{:?}\n", Number(creatures_met));
}
fn main() {
let nn = if std::os::getenv("RUST_BENCH").is_some() {
200000
} else {
std::os::args()
.get(1)
.and_then(|arg| arg.parse().ok())
.unwrap_or(600u)
};
print_complements();
println!("");
rendezvous(nn, vec!(Blue, Red, Yellow));
rendezvous(nn,
vec!(Blue, Red, Yellow, Red, Yellow, Blue, Red, Yellow, Red, Blue));
} | // log and change, or quit
match rendezvous.next() {
Some(other_creature) => {
color = transform(color, other_creature.color);
| random_line_split |
shootout-chameneos-redux.rs | // The Computer Language Benchmarks Game
// http://benchmarksgame.alioth.debian.org/
//
// contributed by the Rust Project Developers
// Copyright (c) 2012-2014 The Rust Project Developers
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of "The Computer Language Benchmarks Game" nor
// the name of "The Computer Language Shootout Benchmarks" nor the
// names of its contributors may be used to endorse or promote
// products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
// no-pretty-expanded
use self::Color::{Red, Yellow, Blue};
use std::sync::mpsc::{channel, Sender, Receiver};
use std::fmt;
use std::thread::Thread;
fn print_complements() {
let all = [Blue, Red, Yellow];
for aa in &all {
for bb in &all {
println!("{:?} + {:?} -> {:?}", *aa, *bb, transform(*aa, *bb));
}
}
}
#[derive(Copy)]
enum Color {
Red,
Yellow,
Blue,
}
impl fmt::Debug for Color {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let str = match *self {
Red => "red",
Yellow => "yellow",
Blue => "blue",
};
write!(f, "{}", str)
}
}
#[derive(Copy)]
struct CreatureInfo {
name: uint,
color: Color
}
fn show_color_list(set: Vec<Color>) -> String |
fn show_digit(nn: uint) -> &'static str {
match nn {
0 => {" zero"}
1 => {" one"}
2 => {" two"}
3 => {" three"}
4 => {" four"}
5 => {" five"}
6 => {" six"}
7 => {" seven"}
8 => {" eight"}
9 => {" nine"}
_ => {panic!("expected digits from 0 to 9...")}
}
}
struct Number(uint);
impl fmt::Debug for Number {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut out = vec![];
let Number(mut num) = *self;
if num == 0 { out.push(show_digit(0)) };
while num != 0 {
let dig = num % 10;
num = num / 10;
let s = show_digit(dig);
out.push(s);
}
for s in out.iter().rev() {
try!(write!(f, "{}", s))
}
Ok(())
}
}
fn transform(aa: Color, bb: Color) -> Color {
match (aa, bb) {
(Red, Red ) => { Red }
(Red, Yellow) => { Blue }
(Red, Blue ) => { Yellow }
(Yellow, Red ) => { Blue }
(Yellow, Yellow) => { Yellow }
(Yellow, Blue ) => { Red }
(Blue, Red ) => { Yellow }
(Blue, Yellow) => { Red }
(Blue, Blue ) => { Blue }
}
}
fn creature(
name: uint,
mut color: Color,
from_rendezvous: Receiver<CreatureInfo>,
to_rendezvous: Sender<CreatureInfo>,
to_rendezvous_log: Sender<String>
) {
let mut creatures_met = 0i32;
let mut evil_clones_met = 0;
let mut rendezvous = from_rendezvous.iter();
loop {
// ask for a pairing
to_rendezvous.send(CreatureInfo {name: name, color: color}).unwrap();
// log and change, or quit
match rendezvous.next() {
Some(other_creature) => {
color = transform(color, other_creature.color);
// track some statistics
creatures_met += 1;
if other_creature.name == name {
evil_clones_met += 1;
}
}
None => break
}
}
// log creatures met and evil clones of self
let report = format!("{}{:?}", creatures_met, Number(evil_clones_met));
to_rendezvous_log.send(report).unwrap();
}
fn rendezvous(nn: uint, set: Vec<Color>) {
// these ports will allow us to hear from the creatures
let (to_rendezvous, from_creatures) = channel::<CreatureInfo>();
// these channels will be passed to the creatures so they can talk to us
let (to_rendezvous_log, from_creatures_log) = channel::<String>();
// these channels will allow us to talk to each creature by 'name'/index
let to_creature: Vec<Sender<CreatureInfo>> =
set.iter().enumerate().map(|(ii, &col)| {
// create each creature as a listener with a port, and
// give us a channel to talk to each
let to_rendezvous = to_rendezvous.clone();
let to_rendezvous_log = to_rendezvous_log.clone();
let (to_creature, from_rendezvous) = channel();
Thread::spawn(move|| {
creature(ii,
col,
from_rendezvous,
to_rendezvous,
to_rendezvous_log);
});
to_creature
}).collect();
let mut creatures_met = 0;
// set up meetings...
for _ in 0..nn {
let fst_creature = from_creatures.recv().unwrap();
let snd_creature = from_creatures.recv().unwrap();
creatures_met += 2;
to_creature[fst_creature.name].send(snd_creature).unwrap();
to_creature[snd_creature.name].send(fst_creature).unwrap();
}
// tell each creature to stop
drop(to_creature);
// print each color in the set
println!("{}", show_color_list(set));
// print each creature's stats
drop(to_rendezvous_log);
for rep in from_creatures_log.iter() {
println!("{}", rep);
}
// print the total number of creatures met
println!("{:?}\n", Number(creatures_met));
}
fn main() {
let nn = if std::os::getenv("RUST_BENCH").is_some() {
200000
} else {
std::os::args()
.get(1)
.and_then(|arg| arg.parse().ok())
.unwrap_or(600u)
};
print_complements();
println!("");
rendezvous(nn, vec!(Blue, Red, Yellow));
rendezvous(nn,
vec!(Blue, Red, Yellow, Red, Yellow, Blue, Red, Yellow, Red, Blue));
}
| {
let mut out = String::new();
for col in &set {
out.push(' ');
out.push_str(&format!("{:?}", col));
}
out
} | identifier_body |
shootout-chameneos-redux.rs | // The Computer Language Benchmarks Game
// http://benchmarksgame.alioth.debian.org/
//
// contributed by the Rust Project Developers
// Copyright (c) 2012-2014 The Rust Project Developers
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of "The Computer Language Benchmarks Game" nor
// the name of "The Computer Language Shootout Benchmarks" nor the
// names of its contributors may be used to endorse or promote
// products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
// no-pretty-expanded
use self::Color::{Red, Yellow, Blue};
use std::sync::mpsc::{channel, Sender, Receiver};
use std::fmt;
use std::thread::Thread;
fn print_complements() {
let all = [Blue, Red, Yellow];
for aa in &all {
for bb in &all {
println!("{:?} + {:?} -> {:?}", *aa, *bb, transform(*aa, *bb));
}
}
}
#[derive(Copy)]
enum Color {
Red,
Yellow,
Blue,
}
impl fmt::Debug for Color {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let str = match *self {
Red => "red",
Yellow => "yellow",
Blue => "blue",
};
write!(f, "{}", str)
}
}
#[derive(Copy)]
struct CreatureInfo {
name: uint,
color: Color
}
fn show_color_list(set: Vec<Color>) -> String {
let mut out = String::new();
for col in &set {
out.push(' ');
out.push_str(&format!("{:?}", col));
}
out
}
fn show_digit(nn: uint) -> &'static str {
match nn {
0 => {" zero"}
1 => {" one"}
2 => {" two"}
3 => {" three"}
4 => {" four"}
5 => {" five"}
6 => {" six"}
7 => |
8 => {" eight"}
9 => {" nine"}
_ => {panic!("expected digits from 0 to 9...")}
}
}
struct Number(uint);
impl fmt::Debug for Number {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut out = vec![];
let Number(mut num) = *self;
if num == 0 { out.push(show_digit(0)) };
while num != 0 {
let dig = num % 10;
num = num / 10;
let s = show_digit(dig);
out.push(s);
}
for s in out.iter().rev() {
try!(write!(f, "{}", s))
}
Ok(())
}
}
fn transform(aa: Color, bb: Color) -> Color {
match (aa, bb) {
(Red, Red ) => { Red }
(Red, Yellow) => { Blue }
(Red, Blue ) => { Yellow }
(Yellow, Red ) => { Blue }
(Yellow, Yellow) => { Yellow }
(Yellow, Blue ) => { Red }
(Blue, Red ) => { Yellow }
(Blue, Yellow) => { Red }
(Blue, Blue ) => { Blue }
}
}
fn creature(
name: uint,
mut color: Color,
from_rendezvous: Receiver<CreatureInfo>,
to_rendezvous: Sender<CreatureInfo>,
to_rendezvous_log: Sender<String>
) {
let mut creatures_met = 0i32;
let mut evil_clones_met = 0;
let mut rendezvous = from_rendezvous.iter();
loop {
// ask for a pairing
to_rendezvous.send(CreatureInfo {name: name, color: color}).unwrap();
// log and change, or quit
match rendezvous.next() {
Some(other_creature) => {
color = transform(color, other_creature.color);
// track some statistics
creatures_met += 1;
if other_creature.name == name {
evil_clones_met += 1;
}
}
None => break
}
}
// log creatures met and evil clones of self
let report = format!("{}{:?}", creatures_met, Number(evil_clones_met));
to_rendezvous_log.send(report).unwrap();
}
fn rendezvous(nn: uint, set: Vec<Color>) {
// these ports will allow us to hear from the creatures
let (to_rendezvous, from_creatures) = channel::<CreatureInfo>();
// these channels will be passed to the creatures so they can talk to us
let (to_rendezvous_log, from_creatures_log) = channel::<String>();
// these channels will allow us to talk to each creature by 'name'/index
let to_creature: Vec<Sender<CreatureInfo>> =
set.iter().enumerate().map(|(ii, &col)| {
// create each creature as a listener with a port, and
// give us a channel to talk to each
let to_rendezvous = to_rendezvous.clone();
let to_rendezvous_log = to_rendezvous_log.clone();
let (to_creature, from_rendezvous) = channel();
Thread::spawn(move|| {
creature(ii,
col,
from_rendezvous,
to_rendezvous,
to_rendezvous_log);
});
to_creature
}).collect();
let mut creatures_met = 0;
// set up meetings...
for _ in 0..nn {
let fst_creature = from_creatures.recv().unwrap();
let snd_creature = from_creatures.recv().unwrap();
creatures_met += 2;
to_creature[fst_creature.name].send(snd_creature).unwrap();
to_creature[snd_creature.name].send(fst_creature).unwrap();
}
// tell each creature to stop
drop(to_creature);
// print each color in the set
println!("{}", show_color_list(set));
// print each creature's stats
drop(to_rendezvous_log);
for rep in from_creatures_log.iter() {
println!("{}", rep);
}
// print the total number of creatures met
println!("{:?}\n", Number(creatures_met));
}
fn main() {
let nn = if std::os::getenv("RUST_BENCH").is_some() {
200000
} else {
std::os::args()
.get(1)
.and_then(|arg| arg.parse().ok())
.unwrap_or(600u)
};
print_complements();
println!("");
rendezvous(nn, vec!(Blue, Red, Yellow));
rendezvous(nn,
vec!(Blue, Red, Yellow, Red, Yellow, Blue, Red, Yellow, Red, Blue));
}
| {" seven"} | conditional_block |
shootout-chameneos-redux.rs | // The Computer Language Benchmarks Game
// http://benchmarksgame.alioth.debian.org/
//
// contributed by the Rust Project Developers
// Copyright (c) 2012-2014 The Rust Project Developers
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of "The Computer Language Benchmarks Game" nor
// the name of "The Computer Language Shootout Benchmarks" nor the
// names of its contributors may be used to endorse or promote
// products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
// no-pretty-expanded
use self::Color::{Red, Yellow, Blue};
use std::sync::mpsc::{channel, Sender, Receiver};
use std::fmt;
use std::thread::Thread;
fn print_complements() {
let all = [Blue, Red, Yellow];
for aa in &all {
for bb in &all {
println!("{:?} + {:?} -> {:?}", *aa, *bb, transform(*aa, *bb));
}
}
}
#[derive(Copy)]
enum Color {
Red,
Yellow,
Blue,
}
impl fmt::Debug for Color {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let str = match *self {
Red => "red",
Yellow => "yellow",
Blue => "blue",
};
write!(f, "{}", str)
}
}
#[derive(Copy)]
struct CreatureInfo {
name: uint,
color: Color
}
fn show_color_list(set: Vec<Color>) -> String {
let mut out = String::new();
for col in &set {
out.push(' ');
out.push_str(&format!("{:?}", col));
}
out
}
fn show_digit(nn: uint) -> &'static str {
match nn {
0 => {" zero"}
1 => {" one"}
2 => {" two"}
3 => {" three"}
4 => {" four"}
5 => {" five"}
6 => {" six"}
7 => {" seven"}
8 => {" eight"}
9 => {" nine"}
_ => {panic!("expected digits from 0 to 9...")}
}
}
struct Number(uint);
impl fmt::Debug for Number {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut out = vec![];
let Number(mut num) = *self;
if num == 0 { out.push(show_digit(0)) };
while num != 0 {
let dig = num % 10;
num = num / 10;
let s = show_digit(dig);
out.push(s);
}
for s in out.iter().rev() {
try!(write!(f, "{}", s))
}
Ok(())
}
}
fn | (aa: Color, bb: Color) -> Color {
match (aa, bb) {
(Red, Red ) => { Red }
(Red, Yellow) => { Blue }
(Red, Blue ) => { Yellow }
(Yellow, Red ) => { Blue }
(Yellow, Yellow) => { Yellow }
(Yellow, Blue ) => { Red }
(Blue, Red ) => { Yellow }
(Blue, Yellow) => { Red }
(Blue, Blue ) => { Blue }
}
}
fn creature(
name: uint,
mut color: Color,
from_rendezvous: Receiver<CreatureInfo>,
to_rendezvous: Sender<CreatureInfo>,
to_rendezvous_log: Sender<String>
) {
let mut creatures_met = 0i32;
let mut evil_clones_met = 0;
let mut rendezvous = from_rendezvous.iter();
loop {
// ask for a pairing
to_rendezvous.send(CreatureInfo {name: name, color: color}).unwrap();
// log and change, or quit
match rendezvous.next() {
Some(other_creature) => {
color = transform(color, other_creature.color);
// track some statistics
creatures_met += 1;
if other_creature.name == name {
evil_clones_met += 1;
}
}
None => break
}
}
// log creatures met and evil clones of self
let report = format!("{}{:?}", creatures_met, Number(evil_clones_met));
to_rendezvous_log.send(report).unwrap();
}
fn rendezvous(nn: uint, set: Vec<Color>) {
// these ports will allow us to hear from the creatures
let (to_rendezvous, from_creatures) = channel::<CreatureInfo>();
// these channels will be passed to the creatures so they can talk to us
let (to_rendezvous_log, from_creatures_log) = channel::<String>();
// these channels will allow us to talk to each creature by 'name'/index
let to_creature: Vec<Sender<CreatureInfo>> =
set.iter().enumerate().map(|(ii, &col)| {
// create each creature as a listener with a port, and
// give us a channel to talk to each
let to_rendezvous = to_rendezvous.clone();
let to_rendezvous_log = to_rendezvous_log.clone();
let (to_creature, from_rendezvous) = channel();
Thread::spawn(move|| {
creature(ii,
col,
from_rendezvous,
to_rendezvous,
to_rendezvous_log);
});
to_creature
}).collect();
let mut creatures_met = 0;
// set up meetings...
for _ in 0..nn {
let fst_creature = from_creatures.recv().unwrap();
let snd_creature = from_creatures.recv().unwrap();
creatures_met += 2;
to_creature[fst_creature.name].send(snd_creature).unwrap();
to_creature[snd_creature.name].send(fst_creature).unwrap();
}
// tell each creature to stop
drop(to_creature);
// print each color in the set
println!("{}", show_color_list(set));
// print each creature's stats
drop(to_rendezvous_log);
for rep in from_creatures_log.iter() {
println!("{}", rep);
}
// print the total number of creatures met
println!("{:?}\n", Number(creatures_met));
}
fn main() {
let nn = if std::os::getenv("RUST_BENCH").is_some() {
200000
} else {
std::os::args()
.get(1)
.and_then(|arg| arg.parse().ok())
.unwrap_or(600u)
};
print_complements();
println!("");
rendezvous(nn, vec!(Blue, Red, Yellow));
rendezvous(nn,
vec!(Blue, Red, Yellow, Red, Yellow, Blue, Red, Yellow, Red, Blue));
}
| transform | identifier_name |
paragraph.tsx | import { Editor, EditorState, RichUtils } from "draft-js"
import { debounce } from "lodash"
import React, { Component } from "react"
import ReactDOM from "react-dom"
import styled from "styled-components"
import { TextInputUrl } from "../components/text_input_url"
import { TextNav } from "../components/text_nav"
import { decorators } from "../shared/decorators"
import { confirmLink, linkDataFromSelection, removeLink } from "../shared/links"
import {
handleReturn,
insertPastedState,
styleMapFromNodes,
styleNamesFromMap,
} from "../shared/shared"
import { AllowedStyles, StyleMap, StyleNamesParagraph } from "../typings"
import { convertDraftToHtml, convertHtmlToDraft } from "./utils/convert"
import {
allowedStylesParagraph,
blockRenderMap,
keyBindingFn,
} from "./utils/utils"
interface Props {
allowedStyles?: AllowedStyles
allowEmptyLines?: boolean // Users can insert br tags
html?: string
hasLinks: boolean
onChange: (html: string) => void
placeholder?: string
stripLinebreaks: boolean // Return a single p block
isDark?: boolean
isReadOnly?: boolean
}
interface State {
editorPosition: ClientRect | null
editorState: EditorState
html: string
showNav: boolean
showUrlInput: boolean
urlValue: string
}
/**
* Supports HTML with bold and italic styles in <p> blocks.
* Allowed styles can be limited by passing allowedStyles.
* Optionally supports links, and linebreak stripping.
*/
export class Paragraph extends Component<Props, State> {
private editor
private allowedStyles: StyleMap
private debouncedOnChange
static defaultProps = {
allowEmptyLines: false,
hasLinks: false,
stripLinebreaks: false,
}
| (props: Props) {
super(props)
this.allowedStyles = styleMapFromNodes(
props.allowedStyles || allowedStylesParagraph
)
this.state = {
editorPosition: null,
editorState: this.setEditorState(),
html: props.html || "",
showNav: false,
showUrlInput: false,
urlValue: "",
}
this.debouncedOnChange = debounce((html: string) => {
props.onChange(html)
}, 250)
}
setEditorState = () => {
const { hasLinks, html } = this.props
if (html) {
return this.editorStateFromHTML(html)
} else {
return EditorState.createEmpty(decorators(hasLinks))
}
}
editorStateToHTML = (editorState: EditorState) => {
const { allowEmptyLines, stripLinebreaks } = this.props
const currentContent = editorState.getCurrentContent()
return convertDraftToHtml(
currentContent,
this.allowedStyles,
stripLinebreaks,
allowEmptyLines
)
}
editorStateFromHTML = (html: string) => {
const { hasLinks, allowEmptyLines } = this.props
const contentBlocks = convertHtmlToDraft(
html,
hasLinks,
this.allowedStyles,
allowEmptyLines
)
return EditorState.createWithContent(contentBlocks, decorators(hasLinks))
}
onChange = (editorState: EditorState) => {
const html = this.editorStateToHTML(editorState)
this.setState({ editorState, html })
if (html !== this.props.html) {
// Return html if changed
this.debouncedOnChange(html)
}
}
focus = () => {
this.editor.focus()
this.checkSelection()
}
handleReturn = e => {
const { editorState } = this.state
const { stripLinebreaks, allowEmptyLines } = this.props
if (stripLinebreaks) {
// Do nothing if linebreaks are disallowed
return "handled"
} else if (allowEmptyLines) {
return "not-handled"
} else {
// Maybe split-block, but don't create empty paragraphs
return handleReturn(e, editorState)
}
}
handleKeyCommand = (command: string) => {
const { hasLinks } = this.props
switch (command) {
case "link-prompt": {
if (hasLinks) {
// Open link input if links are supported
return this.promptForLink()
}
break
}
case "bold":
case "italic": {
return this.keyCommandInlineStyle(command)
}
}
// let draft defaults or browser handle
return "not-handled"
}
keyCommandInlineStyle = (command: "italic" | "bold") => {
// Handle style changes from key command
const { editorState } = this.state
const styles = styleNamesFromMap(this.allowedStyles)
if (styles.includes(command.toUpperCase())) {
const newState = RichUtils.handleKeyCommand(editorState, command)
// If an updated state is returned, command is handled
if (newState) {
this.onChange(newState)
return "handled"
}
} else {
return "not-handled"
}
}
toggleInlineStyle = (command: StyleNamesParagraph) => {
// Handle style changes from menu click
const { editorState } = this.state
const styles = styleNamesFromMap(this.allowedStyles)
let newEditorState
if (styles.includes(command)) {
newEditorState = RichUtils.toggleInlineStyle(editorState, command)
}
if (newEditorState) {
this.onChange(newEditorState)
}
}
handlePastedText = (text: string, html?: string) => {
const { editorState } = this.state
if (!html) {
// Wrap pasted plain text in html
html = "<p>" + text + "</p>"
}
const stateFromPastedFragment = this.editorStateFromHTML(html)
const stateWithPastedText = insertPastedState(
stateFromPastedFragment,
editorState
)
this.onChange(stateWithPastedText)
return true
}
promptForLink = () => {
// Opens a popup link input populated with selection data if link is selected
const { editorState } = this.state
const linkData = linkDataFromSelection(editorState)
const urlValue = linkData ? linkData.url : ""
const editor = ReactDOM.findDOMNode(this.editor) as Element
const editorPosition: ClientRect = editor.getBoundingClientRect()
this.setState({
editorPosition,
showUrlInput: true,
showNav: false,
urlValue,
})
return "handled"
}
confirmLink = (url: string) => {
const { editorState } = this.state
const newEditorState = confirmLink(url, editorState)
this.setState({
editorPosition: null,
showNav: false,
showUrlInput: false,
urlValue: "",
})
this.onChange(newEditorState)
}
removeLink = () => {
const editorState = removeLink(this.state.editorState)
if (editorState) {
this.setState({
editorPosition: null,
showUrlInput: false,
urlValue: "",
})
this.onChange(editorState)
}
}
checkSelection = () => {
let showNav = false
let editorPosition: ClientRect | null = null
const hasSelection = !window.getSelection().isCollapsed
if (hasSelection) {
showNav = true
const editor = ReactDOM.findDOMNode(this.editor) as Element
editorPosition = editor.getBoundingClientRect()
}
this.setState({ showNav, editorPosition })
}
render() {
const { hasLinks, isDark, isReadOnly, placeholder } = this.props
const {
editorPosition,
editorState,
showNav,
showUrlInput,
urlValue,
} = this.state
const promptForLink = hasLinks ? this.promptForLink : undefined
return (
<ParagraphContainer>
{showNav && (
<TextNav
allowedStyles={this.allowedStyles}
editorPosition={editorPosition}
onClickOff={() => this.setState({ showNav: false })}
promptForLink={promptForLink}
toggleStyle={this.toggleInlineStyle}
/>
)}
{showUrlInput && (
<TextInputUrl
backgroundColor={isDark ? "white" : undefined}
editorPosition={editorPosition}
onClickOff={() => this.setState({ showUrlInput: false })}
onConfirmLink={this.confirmLink}
onRemoveLink={this.removeLink}
urlValue={urlValue}
/>
)}
<div
onClick={this.focus}
onMouseUp={this.checkSelection}
onKeyUp={this.checkSelection}
>
<Editor
blockRenderMap={blockRenderMap as any}
editorState={editorState}
keyBindingFn={keyBindingFn}
handleKeyCommand={this.handleKeyCommand as any}
handlePastedText={this.handlePastedText as any}
handleReturn={this.handleReturn}
onChange={this.onChange}
placeholder={placeholder || "Start typing..."}
readOnly={isReadOnly}
ref={ref => {
this.editor = ref
}}
spellCheck
/>
</div>
</ParagraphContainer>
)
}
}
const ParagraphContainer = styled.div`
position: relative;
`
| constructor | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.